diff --git a/docs/_downloads/066a96b3e7333f9a789eb7e687c45875/plot_histogram_2d.py b/docs/_downloads/066a96b3e7333f9a789eb7e687c45875/plot_histogram_2d.py new file mode 100644 index 00000000..3cc6baeb --- /dev/null +++ b/docs/_downloads/066a96b3e7333f9a789eb7e687c45875/plot_histogram_2d.py @@ -0,0 +1,305 @@ +""" +Histogram 2D +------------ + +This 2D histogram class allows efficient updating of histograms, plotting and +saving as HDF5. + +""" + +#%% +import h5py +import geobipy +from geobipy import StatArray +from geobipy import Histogram +import matplotlib.pyplot as plt +import matplotlib.gridspec as gridspec +from geobipy import RectilinearMesh2D +import numpy as np + + +#%% +# Create some histogram bins in x and y +x = StatArray(np.linspace(-4.0, 4.0, 100), 'Variable 1') +y = StatArray(np.linspace(-4.0, 4.0, 105), 'Variable 2') + +mesh = RectilinearMesh2D(x_edges=x, y_edges=y) +#%% +# Instantiate +H = Histogram(mesh) + +#%% +# Generate some random numbers +a = np.random.randn(1000000) +b = np.random.randn(1000000) + +#%% +# Update the histogram counts +H.update(a, b) + +#%% +plt.figure() +plt.subplot(131) +plt.title("2D Histogram") +_ = H.plot(cmap='gray_r') +plt.subplot(132) +H.pdf.plot(cmap='gray_r') +plt.subplot(133) +H.pmf.plot(cmap='gray_r') + + +plt.figure() +plt.subplot(131) +H.cdf(axis=0).plot() +plt.subplot(132) +H.cdf(axis=1).plot() +plt.subplot(133) +H.cdf().plot() + +#%% +# We can overlay the histogram with its credible intervals +plt.figure() +plt.title("90% credible intervals overlain") +H.pcolor(cmap='gray_r') +H.plotCredibleIntervals(axis=0, percent=95.0) +_ = H.plotCredibleIntervals(axis=1, percent=95.0) + +#%% +# Generate marginal histograms along an axis +h1 = H.marginalize(axis=0) +h2 = H.marginalize(axis=1) + +#%% +# Note that the names of the variables are automatically displayed +plt.figure() +plt.suptitle("Marginals along each axis") +plt.subplot(121) +h1.plot() +plt.subplot(122) +_ = h2.plot() + +#%% +# Create a combination plot with marginal histograms. 
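+# The figure below uses a 5x5 GridSpec: the joint histogram fills the lower-left
+# 4x4 block, the marginal over axis 0 spans the top row, and the marginal over
+# axis 1 is drawn transposed down the right-hand column.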
+# sphinx_gallery_thumbnail_number = 3 +plt.figure() +gs = gridspec.GridSpec(5, 5) +gs.update(wspace=0.3, hspace=0.3) +ax = [plt.subplot(gs[1:, :4])] +H.pcolor(colorbar = False) + +ax.append(plt.subplot(gs[:1, :4])) +h = H.marginalize(axis=0).plot() +plt.xlabel(''); plt.ylabel('') +plt.xticks([]); plt.yticks([]) +ax[-1].spines["left"].set_visible(False) + +ax.append(plt.subplot(gs[1:, 4:])) +h = H.marginalize(axis=1).plot(transpose=True) +plt.ylabel(''); plt.xlabel('') +plt.yticks([]); plt.xticks([]) +ax[-1].spines["bottom"].set_visible(False) + +#%% +# Take the mean or median estimates from the histogram +mean = H.mean() +median = H.median() + +#%% +plt.figure(figsize=(9.5, 5)) +plt.suptitle("Mean, median, and credible interval overlain") +ax = plt.subplot(121) +H.pcolor(cmap='gray_r', colorbar=False) +H.plotCredibleIntervals(axis=0) +H.plotMedian(axis=0, color='g') +H.plotMean(axis=0, color='y') +plt.legend() + +plt.subplot(122, sharex=ax, sharey=ax) +H.pcolor(cmap='gray_r', colorbar=False) +H.plotCredibleIntervals(axis=1) +H.plotMedian(axis=1, color='g') +H.plotMean(axis=1, color='y') +plt.legend() + +#%% +# Get the range between credible intervals +H.credible_range(percent=95.0) + +#%% +# We can map the credible range to an opacity or transparency +H.opacity() +H.transparency() + +# H.animate(0, 'test.mp4') + +import h5py +with h5py.File('h2d.h5', 'w') as f: + H.toHdf(f, 'h2d') + +with h5py.File('h2d.h5', 'r') as f: + H1 = Histogram.fromHdf(f['h2d']) + +plt.close('all') + +x = StatArray(5.0 + np.linspace(-4.0, 4.0, 100), 'Variable 1') +y = StatArray(10.0 + np.linspace(-4.0, 4.0, 105), 'Variable 2') + +mesh = RectilinearMesh2D(x_edges=x, x_relative_to=5.0, y_edges=y, y_relative_to=10.0) +#%% +# Instantiate +H = Histogram(mesh) + +#%% +# Generate some random numbers +a = np.random.randn(1000000) + 5.0 +b = np.random.randn(1000000) + 10.0 + +#%% +# Update the histogram counts +H.update(a, b) + +#%% +plt.figure() +plt.subplot(131) +plt.title("2D Histogram") +_ = H.plot(cmap='gray_r') +plt.subplot(132) +H.pdf.plot(cmap='gray_r') +plt.subplot(133) +H.pmf.plot(cmap='gray_r') + +plt.figure() +plt.subplot(131) +H.cdf(axis=0).plot() +plt.subplot(132) +H.cdf(axis=1).plot() +plt.subplot(133) +H.cdf().plot() + +#%% +# We can overlay the histogram with its credible intervals +plt.figure() +plt.title("90% credible intervals overlain") +H.pcolor(cmap='gray_r') +H.plotCredibleIntervals(axis=0, percent=95.0) +_ = H.plotCredibleIntervals(axis=1, percent=95.0) + +# Generate marginal histograms along an axis +h1 = H.marginalize(axis=0) +h2 = H.marginalize(axis=1) + +#%% +# Note that the names of the variables are automatically displayed +plt.figure() +plt.suptitle("Marginals along each axis") +plt.subplot(121) +h1.plot() +plt.subplot(122) +_ = h2.plot() + +#%% +# Create a combination plot with marginal histograms. 
+# sphinx_gallery_thumbnail_number = 3 +plt.figure() +gs = gridspec.GridSpec(5, 5) +gs.update(wspace=0.3, hspace=0.3) +ax = [plt.subplot(gs[1:, :4])] +H.pcolor(colorbar = False) + +ax.append(plt.subplot(gs[:1, :4])) +h = H.marginalize(axis=0).plot() +plt.xlabel(''); plt.ylabel('') +plt.xticks([]); plt.yticks([]) +ax[-1].spines["left"].set_visible(False) + +ax.append(plt.subplot(gs[1:, 4:])) +h = H.marginalize(axis=1).plot(transpose=True) +plt.ylabel(''); plt.xlabel('') +plt.yticks([]); plt.xticks([]) +ax[-1].spines["bottom"].set_visible(False) + +#%% +# Take the mean or median estimates from the histogram +mean = H.mean() +median = H.median() + +#%% +plt.figure(figsize=(9.5, 5)) +plt.suptitle("Mean, median, and credible interval overlain") +ax = plt.subplot(121) +H.pcolor(cmap='gray_r', colorbar=False) +H.plotCredibleIntervals(axis=0) +H.plotMedian(axis=0, color='g') +H.plotMean(axis=0, color='y') +plt.legend() + +plt.subplot(122, sharex=ax, sharey=ax) +H.pcolor(cmap='gray_r', colorbar=False) +H.plotCredibleIntervals(axis=1) +H.plotMedian(axis=1, color='g') +H.plotMean(axis=1, color='y') +plt.legend() + +#%% +# Get the range between credible intervals +H.credible_range(percent=95.0) + +#%% +# We can map the credible range to an opacity or transparency +H.opacity() +H.transparency() + +# # H.animate(0, 'test.mp4') + +with h5py.File('h2d.h5', 'w') as f: + H.toHdf(f, 'h2d') + +with h5py.File('h2d.h5', 'r') as f: + H1 = Histogram.fromHdf(f['h2d']) + +plt.figure(figsize=(9.5, 5)) +plt.suptitle("Mean, median, and credible interval overlain") +ax = plt.subplot(121) +H1.pcolor(cmap='gray_r', colorbar=False) +H1.plotCredibleIntervals(axis=0) +H1.plotMedian(axis=0, color='g') +H1.plotMean(axis=0, color='y') +plt.legend() + +plt.subplot(122, sharex=ax, sharey=ax) +H1.pcolor(cmap='gray_r', colorbar=False) +H1.plotCredibleIntervals(axis=1) +H1.plotMedian(axis=1, color='g') +H1.plotMean(axis=1, color='y') +plt.legend() + +with h5py.File('h2d.h5', 'w') as f: + H.createHdf(f, 'h2d', add_axis=StatArray(np.arange(3.0), name='Easting', units="m")) + for i in range(3): + H.writeHdf(f, 'h2d', index=i) + +with h5py.File('h2d.h5', 'r') as f: + H1 = Histogram.fromHdf(f['h2d'], index=0) + +plt.figure(figsize=(9.5, 5)) +plt.suptitle("Mean, median, and credible interval overlain") +ax = plt.subplot(121) +H1.pcolor(cmap='gray_r', colorbar=False) +H1.plotCredibleIntervals(axis=0) +H1.plotMedian(axis=0, color='g') +H1.plotMean(axis=0, color='y') +plt.legend() + +plt.subplot(122, sharex=ax, sharey=ax) +H1.pcolor(cmap='gray_r', colorbar=False) +H1.plotCredibleIntervals(axis=1) +H1.plotMedian(axis=1, color='g') +H1.plotMean(axis=1, color='y') +plt.legend() + +with h5py.File('h2d.h5', 'r') as f: + H1 = Histogram.fromHdf(f['h2d']) + +# H1.pyvista_mesh().save('h3d_read.vtk') + +plt.show() diff --git a/docs/_downloads/09f048ba9debdfb740a9a828a370954b/plot_inference_1d_resolve.py b/docs/_downloads/09f048ba9debdfb740a9a828a370954b/plot_inference_1d_resolve.py new file mode 100644 index 00000000..50ae8ebc --- /dev/null +++ b/docs/_downloads/09f048ba9debdfb740a9a828a370954b/plot_inference_1d_resolve.py @@ -0,0 +1,89 @@ +""" +Running GeoBIPy to invert Resolve data +++++++++++++++++++++++++++++++++++++++ +""" + +import os +import sys +import pathlib +from datetime import timedelta +import time +import numpy as np +from geobipy import Inference3D +from geobipy import user_parameters +from geobipy import get_prng + +def checkCommandArguments(): + """Check the users command line arguments. 
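+
+    Returns
+    -------
+    argparse.Namespace
+        The parsed command line arguments.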
""" + import argparse + # warnings.filterwarnings('error') + + Parser = argparse.ArgumentParser(description="GeoBIPy", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + Parser.add_argument('--index', default=0, type=int, help='job array index 0-18') + Parser.add_argument('--data', default=None, help="Data type. Choose from ['skytem_512', 'tempest', 'resolve']") + Parser.add_argument('--model', default=None, help="Model type. Choose from ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']") + + return Parser.parse_args() + +#%% +np.random.seed(0) + +args = checkCommandArguments() +sys.path.append(os.getcwd()) + +models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water'] +data_type = "Resolve" +model_type = models[args.index] + +#%% +# The directory where HDF files will be stored +#%% +file_path = os.path.join(data_type, model_type) +pathlib.Path(file_path).mkdir(parents=True, exist_ok=True) + +for filename in os.listdir(file_path): + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + except Exception as e: + print('Failed to delete %s. Reason: %s' % (file_path, e)) + +output_directory = file_path + +data_filename = data_type + '_' + model_type + +supplementary = "..//..//supplementary//" + +parameter_file = supplementary + "//options_files//{}_options".format(data_type) +inputFile = pathlib.Path(parameter_file) +assert inputFile.exists(), Exception("Cannot find input file {}".format(inputFile)) + +output_directory = pathlib.Path(output_directory) +assert output_directory.exists(), Exception("Make sure the output directory exists {}".format(output_directory)) + +print('Using user input file {}'.format(parameter_file)) +print('Output files will be produced at {}'.format(output_directory)) + +kwargs = user_parameters.read(inputFile) + +kwargs['n_markov_chains'] = 5000 + +kwargs['data_filename'] = supplementary + '//data//' + data_filename + '.csv' +kwargs['system_filename'] = supplementary + "//data//" + kwargs['system_filename'] + +# Everyone needs the system classes read in early. +data = kwargs['data_type']._initialize_sequential_reading(kwargs['data_filename'], kwargs['system_filename']) + +# Start keeping track of time. 
+t0 = time.time() + +seed = 146100583096709124601953385843316024947 +prng = get_prng(seed=seed) + +inference3d = Inference3D(data, prng=prng) +inference3d.create_hdf5(directory=output_directory, **kwargs) + +print("Created hdf5 files in {} h:m:s".format(str(timedelta(seconds=time.time()-t0)))) + +inference3d.infer(index=30, **kwargs) \ No newline at end of file diff --git a/docs/_downloads/0dde35d89bbe5f91c6bdca4eb7376ef5/plot_inference_1d_tempest.zip b/docs/_downloads/0dde35d89bbe5f91c6bdca4eb7376ef5/plot_inference_1d_tempest.zip new file mode 100644 index 00000000..e88716dc Binary files /dev/null and b/docs/_downloads/0dde35d89bbe5f91c6bdca4eb7376ef5/plot_inference_1d_tempest.zip differ diff --git a/docs/_downloads/1380a312ea4fdf4a772aca7bd6fd33ce/plot_model_2d.zip b/docs/_downloads/1380a312ea4fdf4a772aca7bd6fd33ce/plot_model_2d.zip new file mode 100644 index 00000000..519a0227 Binary files /dev/null and b/docs/_downloads/1380a312ea4fdf4a772aca7bd6fd33ce/plot_model_2d.zip differ diff --git a/docs/_downloads/2144e9773886d1394652515ba8d8dc41/plot_histogram_1d.zip b/docs/_downloads/2144e9773886d1394652515ba8d8dc41/plot_histogram_1d.zip new file mode 100644 index 00000000..a42261bc Binary files /dev/null and b/docs/_downloads/2144e9773886d1394652515ba8d8dc41/plot_histogram_1d.zip differ diff --git a/docs/_downloads/2b313676e6ab4e7fef5cb85211b43803/plot_model_3d.py b/docs/_downloads/2b313676e6ab4e7fef5cb85211b43803/plot_model_3d.py new file mode 100644 index 00000000..0d0aadaa --- /dev/null +++ b/docs/_downloads/2b313676e6ab4e7fef5cb85211b43803/plot_model_3d.py @@ -0,0 +1,171 @@ +""" +3D Rectilinear Model +-------------------- +This 3D rectilinear model defines a grid with straight cell boundaries. + +""" + +#%% +from geobipy import StatArray +from geobipy import RectilinearMesh3D +from geobipy import Model +import matplotlib.pyplot as plt +import numpy as np +import h5py + + +""" +3D Rectilinear Mesh +------------------- +This 3D rectilinear mesh defines a grid with straight cell boundaries. 
+ +""" + +#%% +from geobipy import StatArray +from geobipy import RectilinearMesh3D +from geobipy import Model +import matplotlib.pyplot as plt +import numpy as np +import h5py + + +#%% +# Specify some cell centres in x and y +x = StatArray(np.arange(10.0), 'Easting', 'm') +y = StatArray(np.arange(15.0), 'Northing', 'm') +z = StatArray(np.arange(20.0), 'Depth', 'm') + +mesh = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z) + +xx, yy = np.meshgrid(mesh.y.centres, mesh.x.centres) +values = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "Height") +values = np.repeat(values[:, :, None], mesh.z.nCells, 2) + +model = Model(mesh=mesh, values=values) + +model1 = model[:5, :5, :5] +model2 = model[:, :, 5] +model3 = model[:, 5, :] +model4 = model[5, :, :] + +plt.figure() +plt.subplot(231) +model2.pcolor() +plt.subplot(232) +model3.pcolor() +plt.subplot(233) +model4.pcolor() + +#%% +model2 = model[:, 5, 5] +model3 = model[5, :, 5] +model4 = model[5, 5, :] + +plt.subplot(234) +model2.pcolor() +plt.subplot(235) +model3.pcolor() +plt.subplot(236) +model4.pcolor() + +#%% +with h5py.File('model3d.h5', 'w') as f: + model.createHdf(f, 'test') + model.writeHdf(f, 'test') + +with h5py.File('model3d.h5', 'r') as f: + model2 = Model.fromHdf(f['test']) + +model.pyvista_mesh().save('model3d.vtk') + + +xx, yy = np.meshgrid(mesh.y.centres, mesh.x.centres) +z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re") +mesh = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z, z_relative_to=z_re) +model = Model(mesh=mesh, values=values) + +model1 = model[:5, :5, :5] +model2 = model[:, :, 5] +model3 = model[:, 5, :] +model4 = model[5, :, :] + +plt.figure() +plt.subplot(231) +model2.pcolor() +plt.subplot(232) +model3.pcolor() +plt.subplot(233) +model4.pcolor() + +#%% +# We can plot the mesh in 3D! +pv = model.pyvista_plotter() + +#%% +# We can plot the mesh in 3D! +model.pyvista_mesh().save('model3d_re1.vtk') + + +x_re = StatArray(np.sin(np.repeat(mesh.y.centres[:, None], mesh.z.nCells, 1)), "x_re") +mesh = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, z_edges=z, z_relative_to=z_re) +model = Model(mesh=mesh, values=values) + +model1 = model[:5, :5, :5] +model2 = model[:, :, 5] +model3 = model[:, 5, :] +model4 = model[5, :, :] + +plt.figure() +plt.subplot(231) +model2.pcolor() +plt.subplot(232) +model3.pcolor() +plt.subplot(233) +model4.pcolor() + +#%% +# We can plot the mesh in 3D! +pv = model.pyvista_plotter() + +#%% +# We can plot the mesh in 3D! +model.pyvista_mesh().save('model3d_re2.vtk') + + +xx, yy = np.meshgrid(mesh.z.centres, mesh.x.centres) +y_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "y_re") + +mesh = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, y_relative_to=y_re, z_edges=z, z_relative_to=z_re) +model = Model(mesh=mesh, values=values) + +model1 = model[:5, :5, :5] +model2 = model[:, :, 5] +model3 = model[:, 5, :] +model4 = model[5, :, :] + +plt.figure() +plt.subplot(231) +model2.pcolor() +plt.subplot(232) +model3.pcolor() +plt.subplot(233) +model4.pcolor() + +#%% +# We can plot the mesh in 3D! +pv = model.pyvista_plotter() + +#%% +# We can plot the mesh in 3D! 
+model.pyvista_mesh().save('model3d_re3.vtk') + +# with h5py.File('mesh3d.h5', 'w') as f: +# mesh.toHdf(f, 'test') + +# with h5py.File('mesh3d.h5', 'r') as f: +# mesh2 = RectilinearMesh3D.fromHdf(f['test']) + +# mesh2.pyvista_mesh().save('mesh3d_read.vtk') + +plt.show() diff --git a/docs/_downloads/2dd62cc642303de6b0d2af1a789eccaa/plot_skytem_datapoint.py b/docs/_downloads/2dd62cc642303de6b0d2af1a789eccaa/plot_skytem_datapoint.py new file mode 100644 index 00000000..e2412647 --- /dev/null +++ b/docs/_downloads/2dd62cc642303de6b0d2af1a789eccaa/plot_skytem_datapoint.py @@ -0,0 +1,244 @@ +""" +Skytem Datapoint Class +---------------------- +""" + +#%% +# Credits: +# We would like to thank Ross Brodie at Geoscience Australia for his airborne time domain forward modeller +# https://github.com/GeoscienceAustralia/ga-aem +# +# For ground-based time domain data, we are using Dieter Werthmuller's python package Empymod +# https://empymod.github.io/ +# +# Thanks to Dieter for his help getting Empymod ready for incorporation into GeoBIPy + +#%% +from os.path import join +import numpy as np +import h5py +import matplotlib.pyplot as plt +from geobipy import Waveform +from geobipy import SquareLoop, CircularLoop +from geobipy import butterworth +from geobipy import TdemSystem +from geobipy import TdemData +from geobipy import TdemDataPoint +from geobipy import RectilinearMesh1D +from geobipy import Model +from geobipy import StatArray +from geobipy import Distribution + +dataFolder = "..//..//supplementary//data//" + +# Obtaining a datapoint from a dataset +# ++++++++++++++++++++++++++++++++++++ +# More often than not, our observed data is stored in a file on disk. +# We can read in a dataset and pull datapoints from it. +# +# For more information about the time domain data set, see :ref:`Time domain dataset` + +# The data file name +dataFile=dataFolder + 'skytem_saline_clay.csv' +# The EM system file name +systemFile=[dataFolder + 'SkytemHM.stm', dataFolder + 'SkytemLM.stm'] + +#%% +# Initialize and read an EM data set +# Prepare the dataset so that we can read a point at a time. +Dataset = TdemData._initialize_sequential_reading(dataFile, systemFile) +# Get a datapoint from the file. +tdp = Dataset._read_record() + +Dataset._file.close() + +#%% +# Using a time domain datapoint +# +++++++++++++++++++++++++++++ + +#%% +# We can define a 1D layered earth model, and use it to predict some data +par = StatArray(np.r_[500.0, 20.0], "Conductivity", "$\frac{S}{m}$") +mod = Model(RectilinearMesh1D(edges=np.r_[0, 75.0, np.inf]), values=par) + +#%% +# Forward model the data +tdp.forward(mod) + +#%% +plt.figure() +plt.subplot(121) +_ = mod.pcolor() +plt.subplot(122) +_ = tdp.plot() +_ = tdp.plot_predicted() +plt.tight_layout() + +#%% +plt.figure() +tdp.plotDataResidual(yscale='log', xscale='log') +plt.title('new') + +#%% +# Compute the sensitivity matrix for a given model +J = tdp.sensitivity(mod) +plt.figure() +_ = np.abs(J).pcolor(equalize=True, log=10, flipY=True) + +#%% +# Attaching statistical descriptors to the skytem datapoint +# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +from numpy.random import Generator +from numpy.random import PCG64DXSM +generator = PCG64DXSM(seed=0) +prng = Generator(generator) + +# Set values of relative and additive error for both systems. +tdp.relative_error = np.r_[0.05, 0.05] +tdp.additive_error = np.r_[1e-14, 1e-13] +# Define a multivariate normal distribution as the prior on the predicted data. 
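+# The mean of the distribution is the observed data and its (diagonal) covariance
+# comes from the data standard deviations, hence std**2; indexing with tdp.active
+# keeps only the channels that contain usable observations.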
+data_prior = Distribution('MvNormal', tdp.data[tdp.active], tdp.std[tdp.active]**2.0, prng=prng) + +tdp.set_priors(data_prior=data_prior) + + +#%% +# This allows us to evaluate the likelihood of the predicted data +print(tdp.likelihood(log=True)) +# Or the misfit +print(tdp.data_misfit()) + +#%% +# Plot the misfits for a range of half space conductivities +plt.figure() +_ = tdp.plot_halfspace_responses(-6.0, 4.0, 200) +plt.title("Halfspace responses") + +#%% +# We can perform a quick search for the best fitting half space +halfspace = tdp.find_best_halfspace() + +print('Best half space conductivity is {} $S/m$'.format(halfspace.values)) +plt.figure() +_ = tdp.plot() +_ = tdp.plot_predicted() + +#%% +# Compute the misfit between observed and predicted data +print(tdp.data_misfit()) + +#%% +# We can attach priors to the height of the datapoint, +# the relative error multiplier, and the additive error noise floor + +# Define the distributions used as priors. +z_prior = Distribution('Uniform', min=np.float64(tdp.z) - 2.0, max=np.float64(tdp.z) + 2.0, prng=prng) +relativePrior = Distribution('Uniform', min=np.r_[0.01, 0.01], max=np.r_[0.5, 0.5], prng=prng) +additivePrior = Distribution('Uniform', min=np.r_[1e-16, 1e-16], max=np.r_[1e-10, 1e-10], log=True, prng=prng) +tdp.set_priors(relative_error_prior=relativePrior, additive_error_prior=additivePrior, z_prior=z_prior, prng=prng) + +#%% +# In order to perturb our solvable parameters, we need to attach proposal distributions +z_proposal = Distribution('Normal', mean=tdp.z, variance = 0.01, prng=prng) +relativeProposal = Distribution('MvNormal', mean=tdp.relative_error, variance=2.5e-7, prng=prng) +additiveProposal = Distribution('MvLogNormal', mean=tdp.additive_error, variance=2.5e-3, linearSpace=True, prng=prng) +tdp.set_proposals(relativeProposal, additiveProposal, z_proposal=z_proposal, prng=prng) + +#%% +# With priorss set we can auto generate the posteriors +tdp.set_posteriors() + +#%% +# Perturb the datapoint and record the perturbations +# Note we are not using the priors to accept or reject perturbations. +for i in range(10): + tdp.perturb() + tdp.update_posteriors() + + +#%% +# Plot the posterior distributions +plt.figure() +tdp.plot_posteriors(overlay=tdp) + +plt.show() + +#%% +# File Format for a time domain datapoint +# +++++++++++++++++++++++++++++++++++++++ +# Here we describe the file format for a time domain datapoint. +# +# For individual datapoints we are using the AarhusInv data format. +# +# Here we take the description for the AarhusInv TEM data file, modified to reflect what we can +# currently handle in GeoBIPy. +# +# Line 1 :: string +# User-defined label describing the TEM datapoint. +# This line must contain the following, separated by semicolons. +# XUTM= +# YUTM= +# Elevation= +# StationNumber= +# LineNumber= +# Current= +# +# Line 2 :: first integer, sourceType +# 7 = Rectangular loop source parallel to the x - y plane +# Line 2 :: second integer, polarization +# 3 = Vertical magnetic field +# +# Line 3 :: 6 floats, transmitter and receiver offsets relative to X/Y UTM location. +# If sourceType = 7, Position of the center loop sounding. +# +# Line 4 :: Transmitter loop dimensions +# If sourceType = 7, 2 floats. Loop side length in the x and y directions +# +# Line 5 :: Fixed +# 3 3 3 +# +# Line 6 :: first integer, transmitter waveform type. Fixed +# 3 = User defined waveform. +# +# Line 6 :: second integer, number of transmitter waveforms. 
Fixed +# 1 +# +# Line 7 :: transmitter waveform definition +# A user-defined waveform with piecewise linear segments. +# A full transmitter waveform definition consists of a number of linear segments +# This line contains an integer as the first entry, which specifies the number of +# segments, followed by each segment with 4 floats each. The 4 floats per segment +# are the start and end times, and start and end amplitudes of the waveform. e.g. +# 3 -8.333e-03 -8.033e-03 0.0 1.0 -8.033e-03 0.0 1.0 1.0 0.0 5.4e-06 1.0 0.0 +# +# Line 8 :: On time information. Not used but needs specifying. +# 1 1 1 +# +# Line 9 :: On time low-pass filters. Not used but need specifying. +# 0 +# +# Line 10 :: On time high-pass filters. Not used but need specifying. +# 0 +# +# Line 11 :: Front-gate time. Not used but need specifying. +# 0.0 +# +# Line 12 :: first integer, Number of off time filters +# Number of filters +# +# Line 12 :: second integer, Order of the butterworth filter +# 1 or 2 +# +# Line 12 :: cutoff frequencies Hz, one per the number of filters +# e.g. 4.5e5 +# +# Line 13 :: Off time high pass filters. +# See Line 12 +# +# Lines after 13 contain 3 columns that pertain to +# Measurement Time, Data Value, Estimated Standard Deviation +# +# Example data files are contained in +# `the supplementary folder`_ in this repository +# +# .. _the supplementary folder: https://github.com/usgs/geobipy/tree/master/documentation_source/source/examples/supplementary/Data \ No newline at end of file diff --git a/docs/_downloads/315f11f9fe5f088152f7ee4638bd2209/plot_model_1d.zip b/docs/_downloads/315f11f9fe5f088152f7ee4638bd2209/plot_model_1d.zip new file mode 100644 index 00000000..364bddd4 Binary files /dev/null and b/docs/_downloads/315f11f9fe5f088152f7ee4638bd2209/plot_model_1d.zip differ diff --git a/docs/_downloads/3bf606a1e1bcade69bb1094393e978cd/plot_histogram_3d.py b/docs/_downloads/3bf606a1e1bcade69bb1094393e978cd/plot_histogram_3d.py new file mode 100644 index 00000000..ca109493 --- /dev/null +++ b/docs/_downloads/3bf606a1e1bcade69bb1094393e978cd/plot_histogram_3d.py @@ -0,0 +1,157 @@ +""" +Histogram 3D +------------ + +This 3D histogram class allows efficient updating of histograms, plotting and +saving as HDF5. 
+ +""" + +#%% +import geobipy +from geobipy import StatArray +from geobipy import Histogram +import matplotlib.pyplot as plt +from geobipy import RectilinearMesh3D +import numpy as np + + +#%% +# Create some histogram bins in x and y +x = StatArray(np.linspace(-4.0, 4.0, 11), 'Variable 1') +y = StatArray(np.linspace(-4.0, 4.0, 21), 'Variable 2') +z = StatArray(np.linspace(-4.0, 4.0, 31), 'Variable 3') + +mesh = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z) + +#%% +# Instantiate +H = Histogram(mesh=mesh) + +#%% +# Generate some random numbers +a = np.random.randn(100000) +b = np.random.randn(100000) +c = np.random.randn(100000) +# x = np.asarray([a, b, c]) + + +#%% +# Update the histogram counts +H.update(a, b, c) + +#%% +plt.figure() +plt.suptitle("Slice half way along each dimension") +for axis in range(3): + plt.subplot(1, 3, axis+1) + s = [5 if i == axis else np.s_[:] for i in range(3)] + _ = H[tuple(s)].pcolor(cmap='gray_r') + +#%% +# Generate marginal histograms along an axis +plt.figure() +plt.suptitle("Marginals along each axis") +for axis in range(3): + plt.subplot(1, 3, axis+1) + _ = H.marginalize(axis=axis).plot() + + +#%% +# Take the mean estimate from the histogram +plt.figure() +plt.suptitle("Mean along each axis") +for axis in range(3): + plt.subplot(1, 3, axis+1) + _ = H.mean(axis=axis).pcolor() + +#%% +# Take the median estimate from the histogram +plt.figure() +plt.suptitle("Median along each axis") +for axis in range(3): + plt.subplot(1, 3, axis+1) + _ = H.median(axis=axis).pcolor() + +# #%% +# # We can map the credible range to an opacity or transparency +# H.opacity() +# H.transparency() + +H.animate(0, 'test.mp4') + +H.to_vtk('h3d.vtk') + + + + +# Create some histogram bins in x and y +xx, yy = np.meshgrid(mesh.z.centres, mesh.y.centres) +x_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "x_re") + +xx, yy = np.meshgrid(mesh.z.centres, mesh.x.centres) +y_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "y_re") + +xx, yy = np.meshgrid(mesh.y.centres, mesh.x.centres) +z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re") + +mesh = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, y_relative_to=y_re, z_edges=z, z_relative_to=z_re) + +#%% +# Instantiate +H = Histogram(mesh=mesh) + +#%% +# Generate some random numbers +a = np.random.randn(100000) +b = np.random.randn(100000) +c = np.random.randn(100000) +# x = np.asarray([a, b, c]) + +#%% +# Update the histogram counts +H.update(a, b, c) + +#%% +plt.figure() +plt.suptitle("Slice half way along each dimension") +for axis in range(3): + plt.subplot(1, 3, axis+1) + s = [5 if i == axis else np.s_[:] for i in range(3)] + _ = H[tuple(s)].pcolor(cmap='gray_r') + +#%% +# Generate marginal histograms along an axis +plt.figure() +plt.suptitle("Marginals along each axis") +for axis in range(3): + plt.subplot(1, 3, axis+1) + _ = H.marginalize(axis=axis).plot() + + +#%% +# Take the mean estimate from the histogram +plt.figure() +plt.suptitle("Mean along each axis") +for axis in range(3): + plt.subplot(1, 3, axis+1) + _ = H.mean(axis=axis).pcolor() + +#%% +# Take the median estimate from the histogram +plt.figure() +plt.suptitle("Median along each axis") +for axis in range(3): + plt.subplot(1, 3, axis+1) + _ = H.median(axis=axis).pcolor() + +# #%% +# # We can map the credible range to an opacity or transparency +# H.opacity() +# H.transparency() + +H.animate(0, 'test.mp4') + +plt.show() + +# H.to_vtk('h3d.vtk') diff --git a/docs/_downloads/3c764380cfa74d0cf19eca0975ff0c0d/plot_histogram_3d.zip 
b/docs/_downloads/3c764380cfa74d0cf19eca0975ff0c0d/plot_histogram_3d.zip new file mode 100644 index 00000000..7621cedb Binary files /dev/null and b/docs/_downloads/3c764380cfa74d0cf19eca0975ff0c0d/plot_histogram_3d.zip differ diff --git a/docs/_downloads/427deb048b69f9c7dc66abcfe303e6d9/plot_inference_1d_resolve.zip b/docs/_downloads/427deb048b69f9c7dc66abcfe303e6d9/plot_inference_1d_resolve.zip new file mode 100644 index 00000000..04cc0857 Binary files /dev/null and b/docs/_downloads/427deb048b69f9c7dc66abcfe303e6d9/plot_inference_1d_resolve.zip differ diff --git a/docs/_downloads/4299735e0ab2a252322239b0f8491c1a/plot_DataArray.zip b/docs/_downloads/4299735e0ab2a252322239b0f8491c1a/plot_DataArray.zip new file mode 100644 index 00000000..3d533c24 Binary files /dev/null and b/docs/_downloads/4299735e0ab2a252322239b0f8491c1a/plot_DataArray.zip differ diff --git a/docs/_downloads/439c06574d36795305525eb492c7861b/plot_tempest_datapoint.py b/docs/_downloads/439c06574d36795305525eb492c7861b/plot_tempest_datapoint.py new file mode 100644 index 00000000..95b559c4 --- /dev/null +++ b/docs/_downloads/439c06574d36795305525eb492c7861b/plot_tempest_datapoint.py @@ -0,0 +1,184 @@ +""" +Tempest Datapoint Class +----------------------- +""" + +#%% +# Credits: +# We would like to thank Ross Brodie at Geoscience Australia for his airborne time domain forward modeller +# https://github.com/GeoscienceAustralia/ga-aem +# +# For ground-based time domain data, we are using Dieter Werthmuller's python package Empymod +# https://empymod.github.io/ +# +# Thanks to Dieter for his help getting Empymod ready for incorporation into GeoBIPy + +#%% +from os.path import join +import numpy as np +import h5py +import matplotlib.pyplot as plt +from geobipy import TempestData +# from geobipy import TemDataPoint +from geobipy import RectilinearMesh1D +from geobipy import Model +from geobipy import StatArray +from geobipy import Distribution +from geobipy import get_prng + +dataFolder = "..//..//supplementary//data//" +# dataFolder = "source//examples//supplementary//Data" + +# Obtaining a tempest datapoint from a dataset +# ++++++++++++++++++++++++++++++++++++++++++++ +# More often than not, our observed data is stored in a file on disk. +# We can read in a dataset and pull datapoints from it. +# +# For more information about the time domain data set, see :ref:`Time domain dataset` + +# The data file name +dataFile = dataFolder + 'tempest_saline_clay.csv' +# The EM system file name +systemFile = dataFolder + 'Tempest.stm' + +# Prepare the dataset so that we can read a point at a time. +Dataset = TempestData._initialize_sequential_reading(dataFile, systemFile) +# Get a datapoint from the file. 
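+# Passing an index pulls a specific record; index 0 is the first sounding in the file.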
+tdp = Dataset._read_record(0) + +plt.figure() +tdp.plot() + +prng = get_prng(seed=146100583096709124601953385843316024947) + +#%% +# Using a tempest domain datapoint +# ++++++++++++++++++++++++++++++++ + +#%% +# We can define a 1D layered earth model, and use it to predict some data +par = StatArray(np.r_[0.01, 0.1, 1.], "Conductivity", "$\frac{S}{m}$") +mod = Model(mesh=RectilinearMesh1D(edges=np.r_[0.0, 50.0, 75.0, np.inf]), values=par) + +par = StatArray(np.logspace(-3, 3, 30), "Conductivity", "$\frac{S}{m}$") +e = np.linspace(0, 350, 31); e[-1] = np.inf +mod = Model(mesh=RectilinearMesh1D(edges=e), values=par) + +#%% +# Forward model the data +tdp.forward(mod) + +print('primary', tdp.primary_field) +print('sx', tdp.secondary_field[:15]) +print('sz', tdp.secondary_field[15:]) + +# #%% +# plt.figure() +# plt.subplot(121) +# _ = mod.pcolor(transpose=True) +# plt.subplot(122) +# _ = tdp.plot() +# _ = tdp.plot_predicted() +# plt.tight_layout() +# plt.suptitle('Model and response') + +# #%% +# # plt.figure() +# # tdp.plotDataResidual(xscale='log') +# # plt.title('data residual') + +# #%% +# # Compute the sensitivity matrix for a given model +J = tdp.sensitivity(mod) +# plt.figure() +# _ = np.abs(J).pcolor(equalize=True, log=10, flipY=True) + +print('J', J) +# print('J shape', J.shape) +# print('sx 0', J[:16, 0]) + +tdp.fm_dlogc(mod) + +print('new primary', tdp.primary_field) +print('sx', tdp.secondary_field[:15]) +print('sz', tdp.secondary_field[15:]) + +print('new J', tdp.sensitivity_matrix) + +#%% +# Attaching statistical descriptors to the tempest datapoint +# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +from numpy.random import Generator +from numpy.random import PCG64DXSM +generator = PCG64DXSM(seed=0) +prng = Generator(generator) + +# Set relative errors for the primary fields, and secondary fields. +tdp.relative_error = np.r_[0.001, 0.001] + +# Set the additive errors for +tdp.additive_error = np.hstack([[0.011474, 0.012810, 0.008507, 0.005154, 0.004742, 0.004477, 0.004168, 0.003539, 0.003352, 0.003213, 0.003161, 0.003122, 0.002587, 0.002038, 0.002201], + [0.007383, 0.005693, 0.005178, 0.003659, 0.003426, 0.003046, 0.003095, 0.003247, 0.002775, 0.002627, 0.002460, 0.002178, 0.001754, 0.001405, 0.001283]]) +# Define a multivariate log normal distribution as the prior on the predicted data. +tdp.predictedData.prior = Distribution('MvLogNormal', tdp.data[tdp.active], tdp.std[tdp.active]**2.0, prng=prng) + +#%% +# This allows us to evaluate the likelihood of the predicted data +print(tdp.likelihood(log=True)) +# Or the misfit +print(tdp.data_misfit()) + +#%% +# Plot the misfits for a range of half space conductivities +plt.figure() +plt.subplot(1, 2, 1) +_ = tdp.plot_halfspace_responses(-6.0, 4.0, 200) +plt.title("Halfspace responses") + +#%% +# We can perform a quick search for the best fitting half space +halfspace = tdp.find_best_halfspace() +print('Best half space conductivity is {} $S/m$'.format(halfspace.values)) +plt.subplot(1, 2, 2) +_ = tdp.plot() +_ = tdp.plot_predicted() + +plt.figure() +tdp.plot_secondary_field() +tdp.plot_predicted_secondary_field() + +# #%% +# # We can attach priors to the height of the datapoint, +# # the relative error multiplier, and the additive error noise floor + +# Define the distributions used as priors. 
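+# Note: unlike the skytem example, which placed a prior on the datapoint height,
+# this example places priors on the relative error and on the receiver geometry
+# (x, z and pitch); the additive error is handled through its proposal below.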
+relative_prior = Distribution('Uniform', min=np.r_[0.01, 0.01], max=np.r_[0.5, 0.5], prng=prng) +receiver_x_prior = Distribution('Uniform', min=np.float64(tdp.receiver.x) - 1.0, max=np.float64(tdp.receiver.x) + 1.0, prng=prng) +receiver_z_prior = Distribution('Uniform', min=np.float64(tdp.receiver.z) - 1.0, max=np.float64(tdp.receiver.z) + 1.0, prng=prng) +receiver_pitch_prior = Distribution('Uniform', min=tdp.receiver.pitch - 5.0, max=tdp.receiver.pitch + 5.0, prng=prng) +tdp.set_priors(relative_error_prior=relative_prior, receiver_x_prior=receiver_x_prior, receiver_z_prior=receiver_z_prior, receiver_pitch_prior=receiver_pitch_prior, prng=prng) + +#%% +# In order to perturb our solvable parameters, we need to attach proposal distributions +relative_proposal = Distribution('MvNormal', mean=tdp.relative_error, variance=2.5e-4, prng=prng) +receiver_x_proposal = Distribution('Normal', mean=tdp.receiver.x, variance = 0.01, prng=prng) +receiver_z_proposal = Distribution('Normal', mean=tdp.receiver.z, variance = 0.01, prng=prng) +receiver_pitch_proposal = Distribution('Normal', mean=tdp.receiver.pitch, variance = 0.01, prng=prng) +tdp.set_proposals(relative_error_proposal=relative_proposal, + receiver_x_proposal=receiver_x_proposal, + receiver_z_proposal=receiver_z_proposal, + receiver_pitch_proposal=receiver_pitch_proposal, + solve_additive_error=True, additive_error_proposal_variance=1e-4, prng=prng) + +#%% +# With priors set we can auto generate the posteriors +tdp.set_posteriors() + +#%% +# Perturb the datapoint and record the perturbations +# Note we are not using the priors to accept or reject perturbations. +for i in range(10): + tdp.perturb() + tdp.update_posteriors() + +plt.show() \ No newline at end of file diff --git a/docs/_downloads/48c7722b252e7ddf33d95322115dcaf5/plot_rectilinear_mesh_2d.py b/docs/_downloads/48c7722b252e7ddf33d95322115dcaf5/plot_rectilinear_mesh_2d.py new file mode 100644 index 00000000..d580939b --- /dev/null +++ b/docs/_downloads/48c7722b252e7ddf33d95322115dcaf5/plot_rectilinear_mesh_2d.py @@ -0,0 +1,210 @@ +""" +2D Rectilinear Mesh +------------------- +This 2D rectilinear mesh defines a grid with straight cell boundaries. + +It can be instantiated in two ways. + +The first is by providing the cell centres or +cell edges in two dimensions. + +The second embeds the 2D mesh in 3D by providing the cell centres or edges in three dimensions. +The first two dimensions specify the mesh coordinates in the horiztontal cartesian plane +while the third discretizes in depth. This allows us to characterize a mesh whose horizontal coordinates +do not follow a line that is parallel to either the "x" or "y" axis. + +""" + +#%% +import h5py +from geobipy import StatArray +from geobipy import RectilinearMesh1D, RectilinearMesh2D, RectilinearMesh3D +import matplotlib.pyplot as plt +import numpy as np + + +#%% +# Specify some cell centres in x and y +x = StatArray(np.arange(10.0), 'Easting', 'm') +y = StatArray(np.arange(20.0), 'Depth', 'm') +rm = RectilinearMesh2D(x_centres=x, y_centres=y) + +#%% +# We can plot the grid lines of the mesh. +p=0; +plt.figure(p) +_ = rm.plot_grid(flipY=True, linewidth=0.5) + +# Intersecting multisegment lines with a mesh +arr = np.zeros(rm.shape) +i = rm.line_indices([0.0, 3.0, 6.0, 9], [2.0, 6.0, 0.0, 10]) +arr[i[:, 0], i[:, 1]] = 1 +p += 1; plt.figure(p) +rm.pcolor(values = arr) + +#%% +# We can pcolor the mesh by providing cell values. 
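+# The cell values are defined on the cell centres, so the array passed to
+# pcolor must have the same shape as the mesh (rm.shape).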
+xx, yy = np.meshgrid(rm.y.centres, rm.x.centres) +arr = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "Values") + +rm2, values2 = rm.resample(0.5, 0.5, arr, method='linear') + + +p += 1; plt.figure(p) +_ = rm.pcolor(arr, grid=True, flipY=True, linewidth=0.5) + + + +#%% +# Mask the x axis cells by a distance +rm_masked, x_indices, z_indices, arr2 = rm.mask_cells(x_distance=0.4, values=arr) +p += 1; plt.figure(p) +_ = rm_masked.pcolor(StatArray(arr2), grid=True, flipY=True) + +#%% +# Mask the z axis cells by a distance +rm_masked, x_indices, z_indices, arr2 = rm.mask_cells(y_distance=0.2, values=arr) +p += 1; plt.figure(p) +_ = rm_masked.pcolor(StatArray(arr2), grid=True, flipY=True) + +#%% +# Mask axes by a distance +rm_masked, x_indices, z_indices, arr2 = rm.mask_cells(x_distance=0.4, y_distance=0.2, values=arr) +p += 1; plt.figure(p) +_ = rm_masked.pcolor(StatArray(arr2), grid=True, flipY=True) + +x = StatArray(np.arange(10.0), 'Easting', 'm') +y = StatArray(np.cumsum(np.arange(15.0)), 'Depth', 'm') +rm = RectilinearMesh2D(x_centres=x, y_centres=y) + +#%% +# We can perform some interval statistics on the cell values of the mesh +# Generate some values +a = np.repeat(np.arange(1.0, np.float64(rm.x.nCells+1))[:, np.newaxis], rm.y.nCells, 1) + +#%% +# Compute the mean over an interval for the mesh. +rm.intervalStatistic(a, intervals=[6.8, 12.4], axis=0, statistic='mean') + +#%% +# Compute the mean over multiple intervals for the mesh. +rm.intervalStatistic(a, intervals=[6.8, 12.4, 20.0, 40.0], axis=0, statistic='mean') + +#%% +# We can specify either axis +rm.intervalStatistic(a, intervals=[2.8, 4.2], axis=1, statistic='mean') + +#%% +rm.intervalStatistic(a, intervals=[2.8, 4.2, 5.1, 8.4], axis=1, statistic='mean') + +#%% +# Slice the 2D mesh to retrieve either a 2D mesh or 1D mesh +rm2 = rm[:5, :5] +rm3 = rm[:5, 5] +rm4 = rm[5, :5] + +p += 1; plt.figure(p) +plt.subplot(131) +rm2.plot_grid() +plt.subplot(132) +rm3.plot_grid() +plt.subplot(133) +rm4.plot_grid(transpose=True) + +#%% +# Resample a grid +values = StatArray(np.random.randn(*rm.shape)) +rm2, values2 = rm.resample(0.5, 0.5, values) + +p += 1; plt.figure(p) +plt.subplot(121) +rm.pcolor(values) +plt.subplot(122) +rm2.pcolor(values2) + +#%% +# Axes in log space +# +++++++++++++++++ +x = StatArray(np.logspace(-1, 4, 10), 'x') +y = StatArray(np.logspace(0, 3, 10), 'y') +rm = RectilinearMesh2D(x_edges=x, x_log=10, y_edges=y, y_log=10) + +# We can plot the grid lines of the mesh. 
+p += 1; plt.figure(p) +_ = rm.plot_grid(linewidth=0.5) + +#%% +with h5py.File('rm2d.h5', 'w') as f: + rm.toHdf(f, 'test') + +with h5py.File('rm2d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['test']) + +arr = np.random.randn(*rm.shape) +p += 1; plt.figure(p) +plt.subplot(211) +rm.pcolor(arr) +plt.subplot(212) +rm2.pcolor(arr) + +#%% +# relative_to +# ++++++++++ +x = StatArray(np.arange(10.0), 'Northing', 'm') +y = StatArray(np.arange(20.0), 'Depth', 'm') + +rm = RectilinearMesh2D(x_centres=x, y_centres=y) + +p += 1; plt.figure(p) +plt.subplot(121) +_ = rm.plot_grid(linewidth=0.5, flipY=True) +rm = RectilinearMesh2D(x_centres=x, x_relative_to=0.2*np.random.randn(y.size), y_centres=y, y_relative_to=0.2*np.random.randn(x.size)) +plt.subplot(122) +_ = rm.plot_grid(linewidth=0.5, flipY=True) + +# relative_to single +with h5py.File('rm2d.h5', 'w') as f: + rm.toHdf(f, 'test') + +with h5py.File('rm2d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['test']) + +arr = np.random.randn(*rm.shape) +p += 1; plt.figure(p) +plt.subplot(211) +rm.pcolor(arr, flipY=True) +plt.subplot(212) +rm2.pcolor(arr, flipY=True) + +# relative_to expanded +with h5py.File('rm2d.h5', 'w') as f: + rm.createHdf(f, 'test', add_axis=RectilinearMesh1D(centres=StatArray(np.arange(3.0), name='Easting', units="m"), relative_to = 0.2*np.random.randn(x.size, y.size))) + for i in range(3): + rm.x.relative_to += 0.5 + rm.y.relative_to += 0.5 + rm.writeHdf(f, 'test', index=i) + +with h5py.File('rm2d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['test'], index=0) + +with h5py.File('rm2d.h5', 'r') as f: + rm3 = RectilinearMesh3D.fromHdf(f['test']) + +p += 1; plt.figure(p) +plt.subplot(311) +rm.pcolor(arr, flipY=True) +plt.subplot(312) +rm2.pcolor(arr, flipY=True) + +p += 1; plt.figure(p) +arr = np.random.randn(*rm3.shape) +plt.subplot(311) +mesh = rm3[0, :, :] +mesh.pcolor(arr[0, :, :], flipY=True) +plt.subplot(312) +mesh = rm3[:, 0, :] +mesh.pcolor(arr[:, 0, :], flipY=True) +plt.subplot(313) +rm3[:, :, 0].pcolor(arr[:, :, 0]) + +plt.show() diff --git a/docs/_downloads/48f0be9987c0cd931b23ce187c305d56/plot_inference_2d_resolve.py b/docs/_downloads/48f0be9987c0cd931b23ce187c305d56/plot_inference_2d_resolve.py new file mode 100644 index 00000000..b0f4d8a7 --- /dev/null +++ b/docs/_downloads/48f0be9987c0cd931b23ce187c305d56/plot_inference_2d_resolve.py @@ -0,0 +1,150 @@ +""" +2D Posterior analysis of Resolve inference +------------------------------------------ + +All plotting in GeoBIPy can be carried out using the 3D inference class + +""" + +import matplotlib.pyplot as plt +import numpy as np +from geobipy import Model +from geobipy import Inference2D + +def plot_2d_summary(folder, data_type, model_type): + #%% + # Inference for a line of inferences + # ++++++++++++++++++++++++++++++++++ + # + # We can instantiate the inference handler by providing a path to the directory containing + # HDF5 files generated by GeoBIPy. + # + # The InfereceXD classes are low memory. They only read information from the HDF5 files + # as and when it is needed. + # + # The first time you use these classes to create plots, expect longer initial processing times. + # I precompute expensive properties and store them in the HDF5 files for later use. 
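+    # A seeded random number generator (the same PCG64DXSM bit generator used in
+    # the datapoint examples) is passed to the handler below so that any random
+    # draws it makes are repeatable between runs.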
+ + from numpy.random import Generator + from numpy.random import PCG64DXSM + generator = PCG64DXSM(seed=0) + prng = Generator(generator) + + #%% + results_2d = Inference2D.fromHdf('{}/{}/{}/0.0.h5'.format(folder, data_type, model_type), prng=prng) + + kwargs = { + "log" : 10, + "cmap" : 'jet' + } + + fig = plt.figure(figsize=(16, 8)) + plt.suptitle("{} {}".format(data_type, model_type)) + gs0 = fig.add_gridspec(6, 2, hspace=1.0) + + true_model = Model.create_synthetic_model(model_type) + true_model.mesh.y_edges = true_model.mesh.y_edges / 10.0 + + kwargs['vmin'] = np.log10(np.min(true_model.values)) + kwargs['vmax'] = np.log10(np.max(true_model.values)) + + ax = fig.add_subplot(gs0[0, 0]) + true_model.pcolor(flipY=True, ax=ax, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False); + results_2d.plot_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False); + + plt.ylim([-160, 60]) + + ax1 = fig.add_subplot(gs0[0, 1], sharex=ax, sharey=ax) + results_2d.plot_mean_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # By adding the useVariance keyword, we can make regions of lower confidence more transparent + ax1 = fig.add_subplot(gs0[1, 1], sharex=ax, sharey=ax) + results_2d.plot_mode_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # # # # # We can also choose to keep parameters above the DOI opaque. + # # # # results_2d.compute_doi() + # # # # plt.subplot(313) + # # # # results_2d.plot_mean_model(use_variance=True, mask_below_doi=True, **kwargs); + # # # # results_2d.plot_data_elevation(linewidth=0.3); + # # # # results_2d.plot_elevation(linewidth=0.3); + + ax1 = fig.add_subplot(gs0[2, 1], sharex=ax, sharey=ax) + results_2d.plot_best_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + ax1.set_title('Best model') + + del kwargs['vmin'] + del kwargs['vmax'] + + ax1 = fig.add_subplot(gs0[3, 1], sharex=ax, sharey=ax); ax1.set_title('5%') + results_2d.plot_percentile(ax=ax1, percent=0.05, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[4, 1], sharex=ax, sharey=ax); ax1.set_title('50%') + results_2d.plot_percentile(ax=ax1, percent=0.5, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[5, 1], sharex=ax, sharey=ax); ax1.set_title('95%') + results_2d.plot_percentile(ax=ax1, percent=0.95, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + #%% + # We can plot the parameter values that produced the highest posterior + ax1 = fig.add_subplot(gs0[2, 0], sharex=ax) + results_2d.plot_k_layers(ax=ax1, wrap_ylabel=True) + + ax1 = fig.add_subplot(gs0[1, 0], sharex=ax) + + ll, bb, ww, hh = ax1.get_position().bounds + ax1.set_position([ll, bb, ww*0.8, hh]) + + results_2d.plot_channel_saturation(ax=ax1, wrap_ylabel=True) + results_2d.plot_burned_in(ax=ax1, underlay=True) + + #%% + # Now we can start plotting some more interesting posterior properties. + # How about the confidence? 
+ ax1 = fig.add_subplot(gs0[3, 0], sharex=ax, sharey=ax) + results_2d.plot_confidence(ax=ax1); + results_2d.plot_data_elevation(ax=ax1, linewidth=0.3); + results_2d.plot_elevation(ax=ax1, linewidth=0.3); + + #%% + # We can take the interface depth posterior for each data point, + # and display an interface probability cross section + # This posterior can be washed out, so the clim_scaling keyword lets me saturate + # the top and bottom 0.5% of the colour range + ax1 = fig.add_subplot(gs0[4, 0], sharex=ax, sharey=ax) + ax1.set_title('P(Interface)') + results_2d.plot_interfaces(cmap='Greys', clim_scaling=0.5, ax=ax1); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[5, 0], sharex=ax, sharey=ax) + results_2d.plot_entropy(cmap='Greys', clim_scaling=0.5, ax=ax1); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + + plt.show() + # plt.savefig('{}_{}.png'.format(data_type, model_type), dpi=300) + +if __name__ == '__main__': + models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water'] + + for model in models: + try: + plot_2d_summary("../../../Parallel_Inference/", "resolve", model) + except Exception as e: + print(model) + print(e) + pass diff --git a/docs/_downloads/517d2f5356b7a3ac11e723033c4b0fb3/plot_inference_2d_resolve.zip b/docs/_downloads/517d2f5356b7a3ac11e723033c4b0fb3/plot_inference_2d_resolve.zip new file mode 100644 index 00000000..25473e78 Binary files /dev/null and b/docs/_downloads/517d2f5356b7a3ac11e723033c4b0fb3/plot_inference_2d_resolve.zip differ diff --git a/docs/_downloads/537a99c8a809708fd1370175a0022351/plot_inference_2d_tempest.zip b/docs/_downloads/537a99c8a809708fd1370175a0022351/plot_inference_2d_tempest.zip new file mode 100644 index 00000000..0e3b4b9a Binary files /dev/null and b/docs/_downloads/537a99c8a809708fd1370175a0022351/plot_inference_2d_tempest.zip differ diff --git a/docs/_downloads/53dc65a717762d4693b891c0caabbde0/plot_StatArray.py b/docs/_downloads/53dc65a717762d4693b891c0caabbde0/plot_StatArray.py new file mode 100644 index 00000000..c6db6e60 --- /dev/null +++ b/docs/_downloads/53dc65a717762d4693b891c0caabbde0/plot_StatArray.py @@ -0,0 +1,636 @@ +""" +StatArray Class +---------------- + +Extends the numpy ndarray class to add extra attributes such as names, and +units, and allows us to attach statistical descriptors of the array. +The direct extension to numpy maintains speed and functionality of numpy arrays. 
+ +""" +#%% +import numpy as np +import matplotlib.pyplot as plt +import h5py +from geobipy import DataArray, StatArray, Histogram, Distribution, RectilinearMesh1D + + +# plt.style.use('seaborn-pastel') + +#%% +# Instantiating a new StatArray class +# +++++++++++++++++++++++++++++++++++ +# + +# Integer +test = StatArray(1, name='1') +assert isinstance(test, StatArray) and test.size == 1 and test.item() == 0.0, TypeError("da 0") +print(test.summary) +test = StatArray(10, name='10') +assert isinstance(test, StatArray) and test.size == 10 and np.all(test == 0.0), TypeError("da 1") +print(test.summary) +# tuple/Shape +test = StatArray((2, 10), name='(2, 10)') +assert isinstance(test, StatArray) and np.all(test.shape == (2, 10)) and np.all(test == 0.0), TypeError("da 2") +print(test.summary) + +test = StatArray([2, 10], name='(2, 10)') +assert isinstance(test, StatArray) and np.all(test == [2, 10]), TypeError("da 2") +print(test.summary) + +# float +test = StatArray(45.454, name='45.454') +assert isinstance(test, StatArray) and test.size == 1 and test.item() == 45.454, TypeError("da 3") +print(test.summary) +test = StatArray(np.float64(45.454), name='45.454') +assert isinstance(test, StatArray) and test.size == 1 and test.item() == 45.454, TypeError("da 4") +print(test.summary) + +# array +test = StatArray(np.random.randn(1), name="test", units="$\frac{g}{cc}$") +assert isinstance(test, StatArray) and test.size == 1, TypeError("da 5") +print(test.summary) + +test = StatArray(np.arange(10.0), name="test", units="$\frac{g}{cc}$") +assert isinstance(test, StatArray) and test.size == 10, TypeError("da 6") +print(test.summary) + + +test = DataArray(np.arange(10.0), name="test", units="$\frac{g}{cc}$") +test = StatArray(test) +assert isinstance(test, StatArray) and test.size == 10, TypeError("da 6") +print(test.summary) + + + + +# The StatArray can take any numpy function that returns an array as an input. +# The name and units of the variable can be assigned to the StatArray. + +#%% +# Attaching Prior and Proposal Distributions to a StatArray +# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +# +# The StatArray class has been built so that we may easily +# attach not only names and units, but statistical distributions too. +# We won't go into too much detail about the different distribution +# +# Two types of distributions can be attached to the StatArray. +# +# * Prior Distribution +# The prior represents how the user believes the variable should +# behave from a statistical standpoint. +# The values of the variable can be evaluated against the attached prior, +# to determine how likely they are to have occured https://en.wikipedia.org/wiki/Prior_probability +# +# * Proposal Distribution +# The proposal describes a probability distribution from which to +# sample when we wish to perturb the variable +# https://en.wikipedia.org/wiki/Metropolis%E2%80%93Hastings_algorithm + +# Obtain an instantiation of a random number generator. +# This is optional, but is an important consideration for parallel programming. 
+from numpy.random import Generator +from numpy.random import PCG64DXSM +generator = PCG64DXSM(seed=0) +prng = Generator(generator) + +Density = StatArray(10.0, name="test", units="$\frac{g}{cc}$") + +Density.prior = Distribution('Uniform', -2.0, 2.0, prng=prng) + +#%% +# We can also attach a proposal distribution +Density.proposal = Distribution('Normal', 0.0, 1.0, prng=prng) +print(Density.summary) +print("Class type of the prior: ",type(Density.prior)) +print("Class type of the proposal: ",type(Density.proposal)) + + +#%% +# The values in the variable can be evaluated against the prior. +# In this case, we have 3 elements in the variable, and a univariate Normal for the prior. +# Therefore each element is evaluated to get 3 probabilities, one for each element. +print(Density.probability(log=False)) + +#%% +# The univariate proposal distribution can generate random samples from itself. +print(Density.propose()) + +#%% +# From a sampling stand point we can either sample using only the proposal +# Or we can only generate samples that simultaneously satisfy the prior. +print(Density.propose(relative=True)) + +#%% +# We can perturb the variable by drawing from the attached proposal distribution. + +Density.perturb() +print(Density.summary) + +#%% +# Attaching a Histogram to capture the posterior distribution +# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +# The StatArray can perturb itself, evaluate its current probability given its priors +# and a histogram can be attached to capture its posterior distribution. +# As an example, lets create a Histogram class with bins generated from the prior. +bins = Density.prior.bins() +#%% +# Attach the histogram +Density.posterior = Histogram(mesh = RectilinearMesh1D(edges=bins)) + +#%% +# In an iterative sense, we can propose and evaluate new values, and update the posterior +for i in range(1000): + Density.perturb() + p = Density.probability(log=False) + + if p > 0.0: # This is a simple example! + Density.update_posterior() + +#%% +plt.figure() +Density.summaryPlot() + +#%% +# Attach a multivariate normal distribution as the prior and proposal +# +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +# +# Attach the multivariate prior + +mean = np.random.randn(Density.size) +variance = np.ones(Density.size) +Density.prior = Distribution('MvNormal', mean, variance, prng=prng) + + +#%% +# Since the prior is multivariate, the appropriate equations are used to +# evaluate the probability for all elements in the StatArray. +# This produces a single probability. + +print(Density.probability(log=False)) + +#%% +# Attach the multivariate proposal + +mean = np.random.randn(Density.size) +variance = np.ones(Density.size) +Density.proposal = Distribution('MvNormal', mean, variance, prng=prng) + + +#%% +# Perturb the variables using the multivariate proposal. 
+ +Density.perturb() +Density.summary + +with h5py.File('statarray.h5', 'w') as f: + Density.createHdf(f, 'statarray', withPosterior=True, add_axis=3) + Density.writeHdf(f, 'statarray', withPosterior=True, index=0) + +with h5py.File('statarray.h5', 'r') as f: + tmp = StatArray.fromHdf(f, 'statarray', index=0, skip_posterior=False) + +with h5py.File('statarray.h5', 'r') as f: + tmp = StatArray.fromHdf(f, 'statarray', skip_posterior=False) + + +#%% +# Basic manipulation +# ++++++++++++++++++ +# +# The StatArray contains other functions to perform basic array manipulations +# +# These routines essentially wrap around numpy functions, +# but the result will have the same name and units, +# and if any prior or proposal are set, those will be carried through too. +# +# 1D example +# __________ + +x = StatArray(-np.cumsum(np.arange(10.0))) +print(x) + +#%% + + +print(x.insert(i=[0, 9], values=[999.0, 999.0])) + + +#%% + + +print(x.prepend(999.0)) + + +#%% + + +print(x.prepend([998.0, 999.0])) + + +#%% + + +print(x.append([998.0, 999.0])) + + +#%% + + +print(x.resize(14)) + + +#%% + + +print(x.delete([5,8])) + + +#%% + + +print(x.edges()) + + +#%% + + +print(x.internalEdges()) + + +#%% + + +print(x.firstNonZero()) + + +#%% + + +print(x.lastNonZero()) + + +#%% + + +print(x.abs()) + + +#%% +# 2D example +# __________ + +x = StatArray(np.asarray([[0, -2, 3],[3, 0, -1],[1, 2, 0]])) +print(x) + + +#%% + + +print(x.insert(i=0, values=4)) + + +#%% + + +print(x.insert(i=[2, 3], values=5, axis=1)) + + +#%% + + +print(x.insert(i=2, values=[10, 11, 12], axis=1)) + + +#%% + + +print(x.prepend(999)) + + +#%% + + +print(x.prepend([999, 998, 997], axis=1)) + + +#%% + + +print(x.append([[999, 998, 997]])) + + +#%% + + +print(x.resize([5,5])) + + +#%% + + +print(x.delete(5)) + + +#%% + + +print(x.delete(2, axis=0)) + + +#%% + + +print(x.firstNonZero(axis=0)) + + +#%% + + +print(x.lastNonZero(axis=0)) + + +#%% + + +print(x.firstNonZero(axis=1)) + + +#%% + + +print(x.lastNonZero(axis=1)) + + +#%% + + +print(x.abs()) + + +#%% +# Plotting +# ++++++++ +# +# We can easily plot the StatArray with its built in plotting functions. +# All plotting functions can take matplotlib keywords + +# The simplest is to just plot the array + +Density = StatArray(np.random.randn(100),name="Density",units="$\frac{g}{cc}$") +Time = StatArray(np.linspace(0, 100, Density.size), name='Time', units='s') +Depth = StatArray(np.random.exponential(size=Density.size), name='Depth', units='m') + + +#%% + + +plt.figure() +_ = Density.plot(linewidth=0.5, marker='x', markersize=1.0) + +#%% +# We can quickly plot a bar graph. + +plt.figure() +_ = Density.bar() + + +#%% +# We can scatter the contents of the StatArray if it is 1D + +plt.figure() +_ = Density.scatter(alpha=0.7) + + +#%% +# Histogram Equalization +# ______________________ +# +# A neat trick with colourmaps is histogram equalization. +# This approach forces all colours in the images to have an equal weight. +# This distorts the colour bar, but can really highlight the lower and higher +# ends of whatever you are plotting. Just add the equalize keyword! + +plt.figure() +_ = Density.scatter(alpha=0.7, equalize=True) + + +#%% +# Take the log base(x) of the data +# +# We can also take the data to a log, log10, log2, or a custom number! + +plt.figure() +_ = Density.scatter(alpha=0.7,edgecolor='k',log='e') # could also use log='e', log=2, log=x) where x is the base you require + +#%% +# X and Y axes +# +# We can specify the x axis of the scatter plot. 
+ + +plt.figure() +_ = Density.scatter(x=Time, alpha=0.7, edgecolor='k') + + +#%% +# Notice that I never specified the y axis, so the y axis defaulted to the values in the StatArray. +# In this case, any operations applied to the colours, are also applied to the y axis, e.g. log=10. +# When I take the values of Density to log base 10, because I do not specify the y plotting locations, those locations are similarly affected. +# +# I can however force the y co-ordinates by specifying it as input. +# In the second subplot I explicitly plot distance on the y axis. +# In the first subplot, the y axis is the same as the colourbar. + + +plt.figure() +ax1 = plt.subplot(211) +Density.scatter(x=Time, alpha=0.7, edgecolor='k', log=10) +plt.subplot(212, sharex=ax1) +_ = Density.scatter(x=Time, y=Depth, alpha=0.7, edgecolor='k', log=10) + + +#%% +# Point sizes +# +# Since the plotting functions take matplotlib keywords, I can also specify the size of each points. + +#%% + + +s = np.ceil(100*(np.abs(np.random.randn(Density.size)))) +plt.figure() +plt.tight_layout() +ax1 = plt.subplot(211) +Density.scatter(x=Time, y=Depth, s=s, alpha=0.7,edgecolor='k', legend_size=2) +plt.subplot(212, sharex=ax1) +#Density.scatter(x=Time, y=Depth, s=s, alpha=0.7,edgecolor='k', sizeLegend=[1.0, 100, 200, 300]) +v = np.abs(Density)+1.0 +_ = Density.scatter(x=Time, y=Depth, s=s, alpha=0.7,edgecolor='k', legend_size=[1.0, 100, 200, 300], log=10) + + + + +#%% +# Of course we can still take the log, or equalize the colour histogram + +plt.figure() +_ = Density.scatter(x=Time, y=Depth, s=s, alpha=0.7,edgecolor='k',equalize=True,log=10) + + +#%% +# Typically pcolor only works with 2D arrays. The StatArray has a pcolor method that will pcolor a 1D array + +plt.figure() +plt.subplot(221) +Density.pcolor() +plt.subplot(222) +Density.pcolor(y=Time) +plt.subplot(223) +Density.pcolor(y=Time, flip=True) +plt.subplot(224) +_ = Density.pcolor(y=Time, log=10, equalize=True) + + +#%% +# We can add grid lines, and add opacity to each element in the pcolor image +# +# This is useful if the colour values need to be scaled by another variable e.g. variance. + + +plt.figure() +plt.subplot(121) +Density.pcolor(grid=True, cmap='jet') +plt.subplot(122) +a = np.linspace(1.0, 0.0, Density.size) +_ = Density.pcolor(grid=True, alpha=a, cmap='jet') + + +#%% +# We can plot a histogram of the StatArray + +plt.figure() +_ = Density.hist(100) + + +#%% +# We can write the StatArray to a HDF5 file. HDF5 files are binary files that can include compression. They allow quick and easy access to parts of the file, and can also be written to and read from in parallel! + +with h5py.File('1Dtest.h5','w') as f: + Density.toHdf(f,'test') + + +#%% +# We can then read the StatArray from the file +# Here x is a new variable, that is read in from the hdf5 file we just wrote. + +x = StatArray.fromHdf('1Dtest.h5', 'test') +print('x has the same values as Density? ',np.all(x == Density)) +x[2] = 5.0 # Change one of the values in x +print('x has its own memory allocated (not a reference/pointer)? 
', id(x) != id(Density)) + + +#%% +# We can also define a 2D array + +Density = StatArray(np.random.randn(50,100),"Density","$\frac{g}{cc}$") +Density.summary + + +#%% +# The StatArray Class's functions work whether it is 1D or 2D +# +# We can still do a histogram + +plt.figure() +_ = Density.hist() + + +#%% +# And we can use pcolor to plot the 2D array + +plt.figure() +_ = Density.pcolor() + + +#%% +# The StatArray comes with extra plotting options +# +# Here we specify the x and y axes for the 2D array using two other 1D StatArrays + +plt.figure() +x = StatArray(np.arange(101),name='x Axis',units = 'mm') +y = StatArray(np.arange(51),name='y Axis',units = 'elephants') +_ = Density.pcolor(x=x, y=y) + + +#%% +# We can plot using a log10 scale, in this case, we have values that are less +# than or equal to 0.0. Plotting with the log option will by default mask any +# of those values, and will let you know that it has done so! + +plt.figure() +_ = Density.pcolor(x=x,y=y,log=2) + + +#%% +# A neat trick with colourmaps is histogram equalization. +# This approach forces all colours in the image to have an equal amount. +# This distorts the colours, but can really highlight the lower and higher +# ends of whatever you are plotting + +plt.figure() +_ = Density.pcolor(x=x, y=y, equalize=True) + + +#%% +# We can equalize the log10 plot too :) + +plt.figure() +_ = Density.pcolor(x=x,y=y,equalize=True, log=10) + + +#%% +# We can add opacity to each pixel in the image + +a = StatArray(np.random.random(Density.shape), 'Opacity from 0.0 to 1.0') + + +#%% + + +plt.figure() +ax1 = plt.subplot(131) +ax = Density.pcolor(x=x, y=y, flipY=True, linewidth=0.1, colorbar=False) +plt.subplot(132, sharex=ax1, sharey=ax1) +ax = Density.pcolor(x=x, y=y, alpha=a, flipY=True, linewidth=0.1, colorbar=False) +plt.subplot(133, sharex=ax1, sharey=ax1) +_ = a.pcolor(x=x, y=y, flipY=True) + + +#%% +# If the array potentially has a lot of white space around the edges, we can trim the image + +Density[:10, :] = 0.0 +Density[-10:, :] = 0.0 +Density[:, :10] = 0.0 +Density[:, -10:] = 0.0 +plt.figure() +plt.subplot(121) +Density.pcolor() +plt.subplot(122) +_ = Density.pcolor(trim=0.0) + + +#%% +# Create a stacked area plot of a 2D StatArray + +A = StatArray(np.abs(np.random.randn(13,100)), name='Variable', units="units") +x = StatArray(np.arange(100),name='x Axis',units = 'mm') +plt.figure() +ax1 = plt.subplot(211) +A.stackedAreaPlot(x=x, axis=1) +plt.subplot(212, sharex=ax1) +_ = A.stackedAreaPlot(x=x, i=np.s_[[1,3,4],:], axis=1, labels=['a','b','c']) + +plt.show() diff --git a/docs/_downloads/54ce350904e65ae0e6717397118ed43f/plot_inference_2d_tempest.py b/docs/_downloads/54ce350904e65ae0e6717397118ed43f/plot_inference_2d_tempest.py new file mode 100644 index 00000000..3bff5d1c --- /dev/null +++ b/docs/_downloads/54ce350904e65ae0e6717397118ed43f/plot_inference_2d_tempest.py @@ -0,0 +1,152 @@ +""" +2D Posterior analysis of Tempest inference +------------------------------------------ + +All plotting in GeoBIPy can be carried out using the 3D inference class + +""" + +import argparse +import matplotlib.pyplot as plt +import numpy as np +from geobipy import Model +from geobipy import Inference2D + +def plot_2d_summary(folder, data_type, model_type): + #%% + # Inference for a line of inferences + # ++++++++++++++++++++++++++++++++++ + # + # We can instantiate the inference handler by providing a path to the directory containing + # HDF5 files generated by GeoBIPy. + # + # The InfereceXD classes are low memory. 
They only read information from the HDF5 files + # as and when it is needed. + # + # The first time you use these classes to create plots, expect longer initial processing times. + # I precompute expensive properties and store them in the HDF5 files for later use. + + from numpy.random import Generator + from numpy.random import PCG64DXSM + generator = PCG64DXSM(seed=0) + prng = Generator(generator) + + #%% + results_2d = Inference2D.fromHdf('{}/{}/{}/0.0.h5'.format(folder, data_type, model_type), prng=prng) + + kwargs = { + "log" : 10, + "cmap" : 'jet' + } + + fig = plt.figure(figsize=(16, 8)) + plt.suptitle("{} {}".format(data_type, model_type)) + gs0 = fig.add_gridspec(6, 2, hspace=1.0) + + true_model = Model.create_synthetic_model(model_type) + + kwargs['vmin'] = np.log10(np.min(true_model.values)) + kwargs['vmax'] = np.log10(np.max(true_model.values)) + + ax = fig.add_subplot(gs0[0, 0]) + true_model.pcolor(flipY=True, ax=ax, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False); + results_2d.plot_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False); + + plt.ylim([-550, 60]) + + ax1 = fig.add_subplot(gs0[0, 1], sharex=ax, sharey=ax) + results_2d.plot_mean_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # By adding the useVariance keyword, we can make regions of lower confidence more transparent + ax1 = fig.add_subplot(gs0[1, 1], sharex=ax, sharey=ax) + results_2d.plot_mode_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # # # # # We can also choose to keep parameters above the DOI opaque. 
+ # # # # results_2d.compute_doi() + # # # # plt.subplot(313) + # # # # results_2d.plot_mean_model(use_variance=True, mask_below_doi=True, **kwargs); + # # # # results_2d.plot_data_elevation(linewidth=0.3); + # # # # results_2d.plot_elevation(linewidth=0.3); + + ax1 = fig.add_subplot(gs0[2, 1], sharex=ax, sharey=ax) + results_2d.plot_best_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + ax1.set_title('Best model') + + del kwargs['vmin'] + del kwargs['vmax'] + + ax1 = fig.add_subplot(gs0[3, 1], sharex=ax, sharey=ax); ax1.set_title('5%') + results_2d.plot_percentile(ax=ax1, percent=0.05, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[4, 1], sharex=ax, sharey=ax); ax1.set_title('50%') + results_2d.plot_percentile(ax=ax1, percent=0.5, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[5, 1], sharex=ax, sharey=ax); ax1.set_title('95%') + results_2d.plot_percentile(ax=ax1, percent=0.95, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + #%% + # We can plot the parameter values that produced the highest posterior + ax1 = fig.add_subplot(gs0[2, 0], sharex=ax) + results_2d.plot_k_layers(ax=ax1, wrap_ylabel=True) + + ax1 = fig.add_subplot(gs0[1, 0], sharex=ax) + + ll, bb, ww, hh = ax1.get_position().bounds + ax1.set_position([ll, bb, ww*0.8, hh]) + + results_2d.plot_channel_saturation(ax=ax1, wrap_ylabel=True) + results_2d.plot_burned_in(ax=ax1, underlay=True) + + #%% + # Now we can start plotting some more interesting posterior properties. + # How about the confidence? 
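+    # Each of the remaining panels is overlain with the data elevation and
+    # ground elevation lines for spatial reference.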
+ ax1 = fig.add_subplot(gs0[3, 0], sharex=ax, sharey=ax) + results_2d.plot_confidence(ax=ax1); + results_2d.plot_data_elevation(ax=ax1, linewidth=0.3); + results_2d.plot_elevation(ax=ax1, linewidth=0.3); + + #%% + # We can take the interface depth posterior for each data point, + # and display an interface probability cross section + # This posterior can be washed out, so the clim_scaling keyword lets me saturate + # the top and bottom 0.5% of the colour range + ax1 = fig.add_subplot(gs0[4, 0], sharex=ax, sharey=ax) + ax1.set_title('P(Interface)') + results_2d.plot_interfaces(cmap='Greys', clim_scaling=0.5, ax=ax1); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[5, 0], sharex=ax, sharey=ax) + results_2d.plot_entropy(cmap='Greys', clim_scaling=0.5, ax=ax1); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # plt.show() + plt.savefig('{}_{}.png'.format(data_type, model_type), dpi=300) + + +if __name__ == '__main__': + models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water'] + + # import warnings + # warnings.filterwarnings('error') + for model in models: + try: + plot_2d_summary('../../../Parallel_Inference/', "tempest", model) + except Exception as e: + print(model) + print(e) + pass diff --git a/docs/_downloads/54daa4e7c10c4176fd98102617f46076/plot_pointcloud3d.zip b/docs/_downloads/54daa4e7c10c4176fd98102617f46076/plot_pointcloud3d.zip new file mode 100644 index 00000000..4d29820c Binary files /dev/null and b/docs/_downloads/54daa4e7c10c4176fd98102617f46076/plot_pointcloud3d.zip differ diff --git a/docs/_downloads/553c44c8f0a927032d89154ad011db0e/plot_rectilinear_mesh_1d.py b/docs/_downloads/553c44c8f0a927032d89154ad011db0e/plot_rectilinear_mesh_1d.py new file mode 100644 index 00000000..43972362 --- /dev/null +++ b/docs/_downloads/553c44c8f0a927032d89154ad011db0e/plot_rectilinear_mesh_1d.py @@ -0,0 +1,289 @@ +""" +1D Rectilinear Mesh +------------------- +""" +#%% +from copy import deepcopy +from geobipy import DataArray, StatArray +from geobipy import RectilinearMesh1D, RectilinearMesh2D, RectilinearMesh2D_stitched +import matplotlib.gridspec as gridspec +import matplotlib.pyplot as plt +import numpy as np +import h5py + +#%% +# The basics +# ++++++++++ +# Instantiate a new 1D rectilinear mesh by specifying cell centres, edges, or widths. +x = StatArray(np.cumsum(np.arange(0.0, 10.0)), 'Depth', 'm') + +#%% +# Cell edges +rm = RectilinearMesh1D(edges=x, centres=None, widths=None) + +#%% +# We can plot the grid of the mesh +# Or Pcolor the mesh showing. An array of cell values is used as the colour. 
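+# A quick textual check of the mesh before plotting it:
+print(rm.summary)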
+arr = StatArray(np.random.randn(*rm.shape), "Name", "Units") +p=0; plt.figure(p) +plt.subplot(121) +_ = rm.plot_grid(transpose=True, flip=True) +plt.subplot(122) +_ = rm.pcolor(arr, grid=True, transpose=True, flip=True) + +# Mask the mesh cells by a distance +rm_masked, indices, arr2 = rm.mask_cells(2.0, values=arr) +p+=1; plt.figure(p) +_ = rm_masked.pcolor(StatArray(arr2), grid=True, transpose=True, flip=True) + +# Writing and reading to/from HDF5 +# ++++++++++++++++++++++++++++++++ +with h5py.File('rm1d.h5', 'w') as f: + rm.toHdf(f, 'rm1d') + +with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d']) + +p+=1; plt.figure(p) +plt.subplot(121) +_ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) +plt.subplot(122) +_ = rm1.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) + +with h5py.File('rm1d.h5', 'w') as f: + rm.createHdf(f, 'rm1d', add_axis=10) + for i in range(10): + rm.writeHdf(f, 'rm1d', index=i) + +with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d'], index=0) +with h5py.File('rm1d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['rm1d']) + +p+=1; plt.figure(p) +plt.subplot(131) +_ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) +plt.subplot(132) +_ = rm1.pcolor(arr, grid=True, transpose=True, flip=True) +plt.subplot(133) +_ = rm2.pcolor(np.repeat(arr[None, :], 10, 0), grid=True, flipY=True) + + +#%% +# Log-space rectilinear mesh +# ++++++++++++++++++++++++++ +# Instantiate a new 1D rectilinear mesh by specifying cell centres or edges. +# Here we use edges +x = StatArray(np.logspace(-3, 3, 10), 'Depth', 'm') + +#%% +rm = RectilinearMesh1D(edges=x, log=10) + +# We can plot the grid of the mesh +# Or Pcolor the mesh showing. An array of cell values is used as the colour. +p+=1; plt.figure(p) +plt.subplot(121) +_ = rm.plot_grid(transpose=True, flip=True) +plt.subplot(122) +arr = StatArray(np.random.randn(rm.nCells), "Name", "Units") +_ = rm.pcolor(arr, grid=True, transpose=True, flip=True) + +# Writing and reading to/from HDF5 +# ++++++++++++++++++++++++++++++++ +with h5py.File('rm1d.h5', 'w') as f: + rm.toHdf(f, 'rm1d') + +with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d']) + +p+=1; plt.figure(p) +plt.subplot(121) +_ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) +plt.subplot(122) +_ = rm1.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) + +with h5py.File('rm1d.h5', 'w') as f: + rm.createHdf(f, 'rm1d', add_axis=10) + for i in range(10): + rm.writeHdf(f, 'rm1d', index=i) + +with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d'], index=0) +with h5py.File('rm1d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['rm1d']) + +p+=1; plt.figure(p) +plt.subplot(131) +_ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) +plt.subplot(132) +_ = rm1.pcolor(arr, grid=True, transpose=True, flip=True) +plt.subplot(133) +_ = rm2.pcolor(np.repeat(arr[None, :], 10, 0), grid=True, flipY=True) + +#%% +# relative_to +# ++++++++++ +# Instantiate a new 1D rectilinear mesh by specifying cell centres or edges. +# Here we use edges +x = StatArray(np.arange(11.0), 'Deviation', 'm') + +#%% +rm = RectilinearMesh1D(edges=x, relative_to=5.0) + +#%% +# We can plot the grid of the mesh +# Or Pcolor the mesh showing. An array of cell values is used as the colour. 
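+# The reference value is stored on the mesh; it can be inspected here, and it
+# is modified later when shifted copies of the mesh are written to HDF5.
+print(rm.relative_to)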
+p+=1; plt.figure(p) +plt.subplot(121) +_ = rm.plot_grid(transpose=True, flip=True) +plt.subplot(122) +arr = StatArray(np.random.randn(rm.nCells), "Name", "Units") +_ = rm.pcolor(arr, grid=True, transpose=True, flip=True) + +# Writing and reading to/from HDF5 +# ++++++++++++++++++++++++++++++++ +with h5py.File('rm1d.h5', 'w') as f: + rm.createHdf(f, 'rm1d') + rm.writeHdf(f, 'rm1d') + +with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d']) + +p+=1; plt.figure(p) +plt.subplot(121) +_ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) +plt.subplot(122) +_ = rm1.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) + +with h5py.File('rm1d.h5', 'w') as f: + rm.createHdf(f, 'rm1d', add_axis=3) + for i in range(3): + rm.relative_to += 0.5 + rm.writeHdf(f, 'rm1d', index=i) + +with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d'], index=0) +with h5py.File('rm1d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['rm1d']) + +p+=1; plt.figure(p) +plt.subplot(131) +_ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) +plt.subplot(132) +_ = rm1.pcolor(arr, grid=True, transpose=True, flip=True) +plt.subplot(133) +_ = rm2.pcolor(np.repeat(arr[None, :], 3, 0), grid=True, flipY=True) + + +# Making a mesh perturbable +# +++++++++++++++++++++++++ +n_cells = 2 +widths = DataArray(np.full(n_cells, fill_value=10.0), 'test') +rm = RectilinearMesh1D(widths=widths, relative_to=0.0) + +#%% +# Randomness and Model Perturbations +# ++++++++++++++++++++++++++++++++++ +# We can set the priors on the 1D model by assigning minimum and maximum layer +# depths and a maximum number of layers. These are used to create priors on +# the number of cells in the model, a new depth interface, new parameter values +# and the vertical gradient of those parameters. +# The halfSpaceValue is used as a reference value for the parameter prior. +from numpy.random import Generator +from numpy.random import PCG64DXSM +generator = PCG64DXSM(seed=0) +prng = Generator(generator) + +# Set the priors +rm.set_priors(min_edge = 1.0, + max_edge = 150.0, + max_cells = 30, + prng = prng) + +#%% +# We can evaluate the prior of the model using depths only +print('Log probability of the Mesh given its priors: ', rm.probability) + +#%% +# To propose new meshes, we specify the probabilities of creating, removing, perturbing, and not changing +# an edge interface +# Here we force the creation of a layer. 
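+# (The four probabilities below weight the create, remove, perturb and
+# no-change events described above; with equal values of 0.25 each event is
+# equally likely.)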
+rm.set_proposals(probabilities = [0.25, 0.25, 0.25, 0.25], prng=prng) +rm.set_posteriors() + +rm0 = deepcopy(rm) + +#%% +# We can then perturb the layers of the model +for i in range(1000): + rm = rm.perturb() + rm.update_posteriors() + +#%% +p+=1; fig = plt.figure(p) +ax = rm._init_posterior_plots(fig) + +rm.plot_posteriors(axes=ax) + +with h5py.File('rm1d.h5', 'w') as f: + rm.createHdf(f, 'rm1d', withPosterior = True) + rm.writeHdf(f, 'rm1d', withPosterior = True) + +with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d']) + +p+=1; plt.figure(p) +plt.subplot(121) +_ = rm.pcolor(StatArray(rm.shape), grid=True, transpose=True, flip=True) +plt.subplot(122) +_ = rm1.pcolor(StatArray(rm1.shape), grid=True, transpose=True, flip=True) + +p+=1; fig = plt.figure(p) +ax = rm1._init_posterior_plots(fig) +rm1.plot_posteriors(axes=ax) + +#%% +# Expanded +with h5py.File('rm1d.h5', 'w') as f: + tmp = rm.pad(rm.max_cells) + tmp.createHdf(f, 'rm1d', withPosterior=True, add_axis=DataArray(np.arange(3.0), name='Easting', units="m")) + + print(list(f['rm1d'].keys())) + + rm.relative_to = 5.0 + print(rm.summary) + rm.writeHdf(f, 'rm1d', withPosterior = True, index=0) + + rm = deepcopy(rm0) + for i in range(1000): + rm = rm.perturb(); rm.update_posteriors() + rm.relative_to = 10.0 + rm.writeHdf(f, 'rm1d', withPosterior = True, index=1) + + rm = deepcopy(rm0) + for i in range(1000): + rm = rm.perturb(); rm.update_posteriors() + rm.relative_to = 25.0 + rm.writeHdf(f, 'rm1d', withPosterior = True, index=2) + +with h5py.File('rm1d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['rm1d']) + +p+=1; plt.figure(p) +plt.subplot(121) +arr = np.random.randn(3, rm.max_cells) * 10 +_ = rm0.pcolor(arr[0, :rm0.nCells.item()], grid=True, transpose=True, flip=True) +plt.subplot(122) +_ = rm2.pcolor(arr, grid=True, flipY=True, equalize=True) + +from geobipy import RectilinearMesh2D +with h5py.File('rm1d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['rm1d'], index=0) + +plt.figure() +plt.subplot(121) +rm2.plot_grid(transpose=True, flip=True) +plt.subplot(122) +rm2.edges.posterior.pcolor(transpose=True, flip=True) + +plt.show() \ No newline at end of file diff --git a/docs/_downloads/5b5d77f6a7b150a24e5d195babd5a68d/plot_DataArray.py b/docs/_downloads/5b5d77f6a7b150a24e5d195babd5a68d/plot_DataArray.py new file mode 100644 index 00000000..1d25796a --- /dev/null +++ b/docs/_downloads/5b5d77f6a7b150a24e5d195babd5a68d/plot_DataArray.py @@ -0,0 +1,48 @@ +""" +DataArray Class +---------------- + +Extends the numpy ndarray class to add extra attributes such as names, and +units, and allows us to attach statistical descriptors of the array. +The direct extension to numpy maintains speed and functionality of numpy arrays. 
+ +""" +import numpy as np +from geobipy import DataArray, StatArray + +# Integer +test = DataArray(1, name='1') +assert isinstance(test, DataArray) and test.size == 1 and test.item() == 0.0, TypeError("da 0") +print(test.summary) +test = DataArray(10, name='10') +assert isinstance(test, DataArray) and test.size == 10 and np.all(test == 0.0), TypeError("da 1") +print(test.summary) +# tuple/Shape +test = DataArray((2, 10), name='(2, 10)') +assert isinstance(test, DataArray) and np.all(test.shape == (2, 10)) and np.all(test == 0.0), TypeError("da 2") +print(test.summary) + +test = DataArray([2, 10], name='(2, 10)') +assert isinstance(test, DataArray) and np.all(test == [2, 10]), TypeError("da 2") +print(test.summary) + +# float +test = DataArray(45.454, name='45.454') +assert isinstance(test, DataArray) and test.size == 1 and test.item() == 45.454, TypeError("da 3") +print(test.summary) +test = DataArray(np.float64(45.454), name='45.454') +assert isinstance(test, DataArray) and test.size == 1 and test.item() == 45.454, TypeError("da 4") +print(test.summary) + +# array +test = DataArray(np.random.randn(1), name="test", units="$\frac{g}{cc}$") +assert isinstance(test, DataArray) and test.size == 1, TypeError("da 5") +print(test.summary) + +test = DataArray(np.arange(10.0), name="test", units="$\frac{g}{cc}$") +assert isinstance(test, DataArray) and test.size == 10, TypeError("da 6") +print(test.summary) + +test = DataArray(test) +assert isinstance(test, DataArray) and test.size == 10, TypeError("da 6") +print(test.summary) \ No newline at end of file diff --git a/docs/_downloads/5c85bfce6697082c125824937647a640/plot_histogram_2d.zip b/docs/_downloads/5c85bfce6697082c125824937647a640/plot_histogram_2d.zip new file mode 100644 index 00000000..a8a546f4 Binary files /dev/null and b/docs/_downloads/5c85bfce6697082c125824937647a640/plot_histogram_2d.zip differ diff --git a/docs/_downloads/5cf20ebc694f8382ff502459c2eae019/plot_inference_1d_skytem.zip b/docs/_downloads/5cf20ebc694f8382ff502459c2eae019/plot_inference_1d_skytem.zip new file mode 100644 index 00000000..ab9d9832 Binary files /dev/null and b/docs/_downloads/5cf20ebc694f8382ff502459c2eae019/plot_inference_1d_skytem.zip differ diff --git a/docs/_downloads/6c96ba056672a7e415f479abafdaf81d/plot_pointcloud3d.py b/docs/_downloads/6c96ba056672a7e415f479abafdaf81d/plot_pointcloud3d.py new file mode 100644 index 00000000..d395cc7f --- /dev/null +++ b/docs/_downloads/6c96ba056672a7e415f479abafdaf81d/plot_pointcloud3d.py @@ -0,0 +1,137 @@ +""" +3D Point Cloud class +-------------------- +""" + +#%% + +from geobipy import Point +from os.path import join +import numpy as np +import matplotlib.pyplot as plt +import h5py + +nPoints = 200 + +#%% +# Create a quick test example using random points +# $z=x(1-x)cos(4\pi x)sin(4\pi y^{2})^{2}$ +x = -np.abs((2.0 * np.random.rand(nPoints)) - 1.0) +y = -np.abs((2.0 * np.random.rand(nPoints)) - 1.0) +z = x * (1.0 - x) * np.cos(np.pi * x) * np.sin(np.pi * y) + +PC3D = Point(x=x, y=y, z=z) + +#%% +# Append pointclouds together +x = np.abs((2.0 * np.random.rand(nPoints)) - 1.0) +y = np.abs((2.0 * np.random.rand(nPoints)) - 1.0) +z = x * (1.0 - x) * np.cos(np.pi * x) * np.sin(np.pi * y) + +Other_PC = Point(x=x, y=y, z=z) +PC3D.append(Other_PC) + +#%% +# Write a summary of the contents of the point cloud + +print(PC3D.summary) + +#%% +# Get a single location from the point as a 3x1 vector + +Point = PC3D[50] +# Print the point to the screen + +#%% +# Plot the locations with Height as colour + +plt.figure() 
+PC3D.scatter2D(edgecolor='k') + +#%% +# Plotting routines take matplotlib arguments for customization +# +# For example, plotting the size of the points according to the absolute value of height +plt.figure() +ax = PC3D.scatter2D(s=100*np.abs(PC3D.z), edgecolor='k') + +#%% +# Interpolate the points to a 2D rectilinear mesh +mesh, dum = PC3D.interpolate(0.01, 0.01, values=PC3D.z, method='sibson', mask=0.03) + +# We can save that mesh to VTK +PC3D.to_vtk('pc3d.vtk') +mesh.to_vtk('interpolated_pc3d.vtk') + +#%% +# Grid the points using a triangulated CloughTocher, or minimum curvature interpolation + +plt.figure() +plt.subplot(331) +PC3D.map(dx=0.01, dy=0.01, method='ct') +plt.subplot(332) +PC3D.map(dx=0.01, dy=0.01, method='mc') +plt.subplot(333) +PC3D.map(dx=0.01, dy=0.01, method='sibson') + +plt.subplot(334) +PC3D.map(dx=0.01, dy=0.01, method='ct', mask=0.03) +plt.subplot(335) +PC3D.map(dx=0.01, dy=0.01, method='mc', mask=0.3) +plt.subplot(336) +PC3D.map(dx=0.01, dy=0.01, method='sibson', mask=0.03) +#%% +# For lots of points, these surfaces can look noisy. Using a block filter will help +PCsub = PC3D.block_median(0.05, 0.05) +plt.subplot(337) +PCsub.map(dx=0.01, dy=0.01, method='ct', mask=0.03) +plt.subplot(338) +PCsub.map(dx=0.01, dy=0.01, method='mc', mask=0.03) +plt.subplot(339) +PCsub.map(dx=0.01, dy=0.01, method='sibson', mask=0.03) + + +#%% +# We can perform spatial searches on the 3D point cloud + +PC3D.set_kdtree(ndim=2) +p = PC3D.nearest((0.0,0.0), k=200, p=2, radius=0.3) + +#%% +# .nearest returns the distances and indices into the point cloud of the nearest points. +# We can then obtain those points as another point cloud + +# pNear = PC3D[p[1]] +# plt.figure() +# ax1 = plt.subplot(1,2,1) +# pNear.scatter2D() +# plt.plot(0.0, 0.0, 'x') +# plt.subplot(1,2,2, sharex=ax1, sharey=ax1) +# ax, sc, cb = PC3D.scatter2D(edgecolor='k') +# searchRadius = plt.Circle((0.0, 0.0), 0.3, color='b', fill=False) +# ax.add_artist(searchRadius) +# plt.plot(0.0, 0.0, 'x') + +#%% +# Read in the xyz co-ordinates in columns 2,3,4 from a file. Skip 1 header line. 
+ +dataFolder = "..//..//supplementary//Data//" + +PC3D.read_csv(filename=dataFolder + 'Resolve1.txt') + + +#%% +plt.figure() +f = PC3D.scatter2D(s=10) + +with h5py.File('test.h5', 'w') as f: + PC3D.createHdf(f, 'test') + PC3D.writeHdf(f, 'test') + +with h5py.File('test.h5', 'r') as f: + PC3D1 = Point.fromHdf(f['test']) + +with h5py.File('test.h5', 'r') as f: + point = Point.fromHdf(f['test'], index=0) + +plt.show() diff --git a/docs/_downloads/6d14d763a9a22883051386bb3eea91aa/plot_rectilinear_mesh_2d.zip b/docs/_downloads/6d14d763a9a22883051386bb3eea91aa/plot_rectilinear_mesh_2d.zip new file mode 100644 index 00000000..9ddd4d9b Binary files /dev/null and b/docs/_downloads/6d14d763a9a22883051386bb3eea91aa/plot_rectilinear_mesh_2d.zip differ diff --git a/docs/_downloads/6ea40b011244de987af10f1723709b1e/plot_StatArray.zip b/docs/_downloads/6ea40b011244de987af10f1723709b1e/plot_StatArray.zip new file mode 100644 index 00000000..3daf158c Binary files /dev/null and b/docs/_downloads/6ea40b011244de987af10f1723709b1e/plot_StatArray.zip differ diff --git a/docs/_downloads/762d4f393eb8c7294ca843748ae360e3/plot_skytem_dataset.py b/docs/_downloads/762d4f393eb8c7294ca843748ae360e3/plot_skytem_dataset.py new file mode 100644 index 00000000..0426d5e4 --- /dev/null +++ b/docs/_downloads/762d4f393eb8c7294ca843748ae360e3/plot_skytem_dataset.py @@ -0,0 +1,159 @@ +""" +Skytem dataset +-------------- +""" +#%% +from geobipy import plotting as cP +from os.path import join +import matplotlib.pyplot as plt +import numpy as np +from geobipy import StatArray +from geobipy import TdemData +import h5py + +#%% +# Reading in the Data +# +++++++++++++++++++ + +#%% +dataFolder = "..//..//supplementary//data//" +# The data file name +dataFiles=dataFolder + 'skytem_saline_clay.csv' +# dataFiles = dataFolder + 'Skytem.csv' +# The EM system file name +systemFiles=[dataFolder + 'SkytemHM.stm', dataFolder + 'SkytemLM.stm'] + +from pathlib import Path +for f in systemFiles[:1]: + txt = Path(f).read_text() + print(txt) + +#%% +# Read in the data from file +TD = TdemData.read_csv(dataFiles, systemFiles) + +#%% +# Plot the locations of the data points +plt.figure(1, figsize=(8,6)) +_ = TD.scatter2D() + +#%% +# Plot all the data along the specified line +plt.figure(2, figsize=(8,6)) +_ = TD.plotLine(0.0, log=10) + +#%% +# Or, plot specific channels in the data +plt.figure(3, figsize=(8,6)) +_ = TD.plot_data(system=0, channels=[1, 3, 5], log=10) + +#%% +plt.figure(4) +plt.subplot(211) +_ = TD.pcolor(system=0, xscale='log', log=10) +plt.subplot(212) +_ = TD.pcolor(system=1, xscale='log', log=10) + +#%% +plt.figure(5) +ax = TD.scatter2D(c=TD.secondary_field[:, TD.channel_index(system=0, channel=6)], log=10) +plt.axis('equal') + + +# with h5py.File('tdem.h5', 'w') as f: +# TD.createHdf(f, 'tdem') +# TD.writeHdf(f, 'tdem') + +# with h5py.File('tdem.h5', 'r') as f: +# TD3 = TdemData.fromHdf(f['tdem']) + +# with h5py.File('tdem.h5', 'r') as f: +# tdp = TdemData.fromHdf(f['tdem'], index=0) + + +# #%% +# # Obtain a line from the data set +# # +++++++++++++++++++++++++++++++ +# line = TD.line(0.0) + +# #%% +# plt.figure(6) +# _ = line.scatter2D(c=line.secondary_field[:, line.channel_index(system=0, channel=6)], log=10) + +# #%% +# plt.figure(7) +# _ = line.plot(xAxis='index', log=10) + +# Prepare the dataset so that we can read a point at a time. +Dataset = TdemData._initialize_sequential_reading(dataFiles, systemFiles) +# Get a datapoint from the file. 
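+# _read_record returns a single data point per call, so a large survey never
+# has to be held in memory at once; repeated calls step through the file,
+# e.g. (sketch only):
+# for _ in range(3):
+#     dp = Dataset._read_record()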
+DataPoint = Dataset._read_record() + +plt.show() + +#%% +# File Format for time domain data +# ++++++++++++++++++++++++++++++++ +# Here we describe the file format for time domain data. +# +# The data columns are read in according to the column names in the first line +# +# In this description, the column name or its alternatives are given followed by what the name represents +# Optional columns are also described. +# +# Required columns +# ________________ +# line +# Line number for the data point +# fid +# Unique identification number of the data point +# x or northing or n +# Northing co-ordinate of the data point, (m) +# y or easting or e +# Easting co-ordinate of the data point, (m) +# z or alt +# Altitude of the transmitter coil above ground level (m) +# elevation +# Elevation of the ground at the data point (m) +# txrx_dx +# Distance in x between transmitter and reciever (m) +# txrx_dy +# Distance in y between transmitter and reciever (m) +# txrx_dz +# Distance in z between transmitter and reciever (m) +# Tx_Pitch +# Pitch of the transmitter loop +# Tx_Roll +# Roll of the transmitter loop +# Tx_Yaw +# Yaw of the transmitter loop +# Rx_Pitch +# Pitch of the receiver loop +# Rx_Roll +# Roll of the receiver loop +# Rx_Yaw +# Yaw of the receiver loop +# Off_time[0] Off_time[1] ... Off_time[last] - with the number and square brackets +# The measurements for each time gate specified in the accompanying system file under Receiver Window Times +# The total number of off_time columns should equal the sum of the receiver windows in all system files. +# Optional columns +# ________________ +# Off_time_Error[0] Off_time_Error[1] ... Off_time_Error[last] +# Estimates of standard deviation for each off time measurement +# Example Header +# ______________ +# Line fid easting northing elevation height txrx_dx txrx_dy txrx_dz TxPitch TxRoll TxYaw RxPitch RxRoll RxYaw Off[0] Off[1] + +#%% +# File Format for a time domain system +# ++++++++++++++++++++++++++++++++++++ +# Please see Page 13 of Ross Brodie's `instructions`_ +# +# .. _instructions: https://github.com/GeoscienceAustralia/ga-aem/blob/master/docs/GA%20AEM%20Programs%20User%20Manual.pdf +# +# We use GA-AEM for our airborne time domain forward modeller. +# +# Example system files are contained in +# `the supplementary folder`_ in this repository +# +# .. _the supplementary folder: https://github.com/usgs/geobipy/tree/master/documentation_source/source/examples/supplementary/Data diff --git a/docs/_downloads/76ee0e4ad4daaf5556108538842b1772/hdf5.py b/docs/_downloads/76ee0e4ad4daaf5556108538842b1772/hdf5.py new file mode 100644 index 00000000..df6a170a --- /dev/null +++ b/docs/_downloads/76ee0e4ad4daaf5556108538842b1772/hdf5.py @@ -0,0 +1,100 @@ +""" +Using HDF5 within GeoBIPy +------------------------- + +Inference for large scale datasets in GeoBIPy is handled using MPI and distributed memory systems. +A common bottleneck with large parallel algorithms is the input output of information to disk. +We use HDF5 to read and write data in order to leverage the parallel capabililties of the HDF5 API. + +Each object within GeoBIPy has a create_hdf, write_hdf, and read_hdf routine. + +""" +import numpy as np +import h5py +from geobipy import StatArray + +#%% +# StatArray + +# Instantiate a StatArray +x = StatArray(np.arange(10.0), name = 'an Array', units = 'some units') + +# Write the StatArray to a HDF file. +with h5py.File("x.h5", 'w') as f: + x.toHdf(f, "x") + +# Read the StatArray back in. 
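+# (First, a quick look at what toHdf wrote, using plain h5py to walk the file.)
+with h5py.File("x.h5", 'r') as f:
+    f.visit(print)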
+with h5py.File("x.h5", 'r') as f: + y = StatArray.fromHdf(f, 'x') + +print('x', x) +print('y', y) + +#%% +# There are actually steps within the "toHdf" function. +# First, space is created within the HDF file and second, the data is written to that space +# These functions are split because during the execution of a parallel enabled program, +# all the space within the HDF file needs to be allocated before we can write to the file +# using multiple cores. + +# Write the StatArray to a HDF file. +with h5py.File("x.h5", 'w') as f: + x.createHdf(f, "x") + x.writeHdf(f, "x") + +# Read the StatArray back in. +with h5py.File("x.h5", 'r') as f: + y = StatArray.fromHdf(f, 'x') + +print('x', x) +print('y', y) + +#%% +# The create and write HDF methods also allow extra space to be allocated so that +# the extra memory can be written later, perhaps by multiple cores. +# Here we specify space for 2 arrays, the memory is stored contiguously as a numpy array. +# We then write to only the first index. + +# Write the StatArray to a HDF file. +with h5py.File("x.h5", 'w') as f: + x.createHdf(f, "x", nRepeats=2) + x.writeHdf(f, "x", index=0) + +# Read the StatArray back in. +with h5py.File("x.h5", 'r') as f: + y = StatArray.fromHdf(f, 'x', index=0) + +print('x', x) +print('y', y) + + +#%% +# The duplication can also be a shape. + +# Write the StatArray to a HDF file. +with h5py.File("x.h5", 'w') as f: + x.createHdf(f, "x", nRepeats=(2, 2)) + x.writeHdf(f, "x", index=(0, 0)) + +# Read the StatArray back in. +with h5py.File("x.h5", 'r') as f: + y = StatArray.fromHdf(f, 'x', index=(0, 0)) + +print('x', x) +print('y', y) + +#%% +# Similarly, we can duplicate a 2D array with an extra 2D duplication + +x = StatArray(np.random.randn(2, 2), name = 'an Array', units = 'some units') +# Write the StatArray to a HDF file. +with h5py.File("x.h5", 'w') as f: + x.createHdf(f, "x", nRepeats=(2, 2)) + x.writeHdf(f, "x", index=(0, 0)) + +# Read the StatArray back in. +with h5py.File("x.h5", 'r') as f: + y = StatArray.fromHdf(f, 'x', index=(0, 0)) + +print('x', x) +print('y', y) \ No newline at end of file diff --git a/docs/_downloads/7f0310ad46a1dedad4717ad1e7d657a4/plot_tempest_datapoint.zip b/docs/_downloads/7f0310ad46a1dedad4717ad1e7d657a4/plot_tempest_datapoint.zip new file mode 100644 index 00000000..af7f94da Binary files /dev/null and b/docs/_downloads/7f0310ad46a1dedad4717ad1e7d657a4/plot_tempest_datapoint.zip differ diff --git a/docs/_downloads/7f3bb43ebb9eba21697953fbfc04f6d2/plot_rectilinear_mesh_3d.py b/docs/_downloads/7f3bb43ebb9eba21697953fbfc04f6d2/plot_rectilinear_mesh_3d.py new file mode 100644 index 00000000..d07cbb5c --- /dev/null +++ b/docs/_downloads/7f3bb43ebb9eba21697953fbfc04f6d2/plot_rectilinear_mesh_3d.py @@ -0,0 +1,152 @@ +""" +3D Rectilinear Mesh +------------------- +This 3D rectilinear mesh defines a grid with straight cell boundaries. 
+ +""" + +#%% +from geobipy import StatArray +from geobipy import RectilinearMesh3D +import matplotlib.pyplot as plt +import numpy as np +import h5py + + +#%% +# Specify some cell centres in x and y +x = StatArray(np.arange(10.0), 'Easting', 'm') +y = StatArray(np.arange(15.0), 'Northing', 'm') +z = StatArray(np.arange(20.0), 'Depth', 'm') + +rm = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z) + +rm1 = rm[:5, :5, :5] +rm2 = rm[:, :, 5] +rm3 = rm[:, 5, :] +rm4 = rm[5, :, :] + +plt.figure() +plt.subplot(231) +rm2.plot_grid() +plt.subplot(232) +rm3.plot_grid() +plt.subplot(233) +rm4.plot_grid() + +#%% +rm2 = rm[:, 5, 5] +rm3 = rm[5, :, 5] +rm4 = rm[5, 5, :] + +plt.subplot(234) +rm2.plot_grid() +plt.subplot(235) +rm3.plot_grid() +plt.subplot(236) +rm4.plot_grid() + +#%% +with h5py.File('rm3d.h5', 'w') as f: + rm.createHdf(f, 'test') + rm.writeHdf(f, 'test') + +with h5py.File('rm3d.h5', 'r') as f: + rm2 = RectilinearMesh3D.fromHdf(f['test']) + +rm.pyvista_mesh().save('rm3d.vtk') + + +xx, yy = np.meshgrid(rm.y.centres, rm.x.centres) +z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re") +rm = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z, z_relative_to=z_re) + +rm1 = rm[:5, :5, :5] +rm2 = rm[:, :, 5] +rm3 = rm[:, 5, :] +rm4 = rm[5, :, :] + +plt.figure() +plt.subplot(231) +rm2.plot_grid() +plt.subplot(232) +rm3.plot_grid() +plt.subplot(233) +rm4.plot_grid() + +#%% +# We can plot the mesh in 3D! +pv = rm.pyvista_plotter() + +#%% +# We can plot the mesh in 3D! +mesh = rm.pyvista_mesh().save('rm3d_re1.vtk') + +x_re = StatArray(np.sin(np.repeat(rm.y.centres[:, None], rm.z.nCells, 1)), "x_re") + +xx, yy = np.meshgrid(rm.y.centres, rm.x.centres) +z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re") +rm = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, z_edges=z, z_relative_to=z_re) + +rm1 = rm[:5, :5, :5] +rm2 = rm[:, :, 5] +rm3 = rm[:, 5, :] +rm4 = rm[5, :, :] + +plt.figure() +plt.subplot(231) +rm2.plot_grid() +plt.subplot(232) +rm3.plot_grid() +plt.subplot(233) +rm4.plot_grid() + +#%% +# We can plot the mesh in 3D! +pv = rm.pyvista_plotter() + +#%% +# We can plot the mesh in 3D! +mesh = rm.pyvista_mesh().save('rm3d_re2.vtk') + + +xx, yy = np.meshgrid(rm.z.centres, rm.y.centres) +x_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "x_re") + +xx, yy = np.meshgrid(rm.z.centres, rm.x.centres) +y_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "y_re") + +xx, yy = np.meshgrid(rm.y.centres, rm.x.centres) +z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re") +rm = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, y_relative_to=y_re, z_edges=z, z_relative_to=z_re) + +rm1 = rm[:5, :5, :5] +rm2 = rm[:, :, 5] +rm3 = rm[:, 5, :] +rm4 = rm[5, :, :] + +plt.figure() +plt.subplot(231) +rm2.plot_grid() +plt.subplot(232) +rm3.plot_grid() +plt.subplot(233) +rm4.plot_grid() + +#%% +# We can plot the mesh in 3D! +pv = rm.pyvista_plotter() + +#%% +# We can plot the mesh in 3D! 
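+# pyvista_mesh() returns a PyVista dataset, so it can be rendered interactively
+# or, as below, written straight to a VTK file.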
+mesh = rm.pyvista_mesh().save('rm3d_re3.vtk') + +with h5py.File('rm3d.h5', 'w') as f: + rm.toHdf(f, 'test') + +with h5py.File('rm3d.h5', 'r') as f: + rm2 = RectilinearMesh3D.fromHdf(f['test']) + +rm2.pyvista_mesh().save('rm3d_read.vtk') + +plt.show() diff --git a/docs/_downloads/82b91681ed5787c4d84b792aeddadc44/plot_skytem_datapoint.zip b/docs/_downloads/82b91681ed5787c4d84b792aeddadc44/plot_skytem_datapoint.zip new file mode 100644 index 00000000..da975f9b Binary files /dev/null and b/docs/_downloads/82b91681ed5787c4d84b792aeddadc44/plot_skytem_datapoint.zip differ diff --git a/docs/_downloads/8a8596f0390404145eca225a8f310218/plot_frequency_dataset.py b/docs/_downloads/8a8596f0390404145eca225a8f310218/plot_frequency_dataset.py new file mode 100644 index 00000000..74a873f8 --- /dev/null +++ b/docs/_downloads/8a8596f0390404145eca225a8f310218/plot_frequency_dataset.py @@ -0,0 +1,266 @@ +""" +Frequency domain dataset +------------------------ +""" +#%% +import matplotlib.pyplot as plt +from geobipy import CircularLoop +from geobipy import FdemSystem +from geobipy import FdemData +import h5py +import numpy as np + + +#%% +# Defining data using a frequency domain system +# +++++++++++++++++++++++++++++++++++++++++++++ + +#%% +# We can start by defining the frequencies, transmitter loops, and receiver loops +# For each frequency we need to define a pair of loops +frequencies = np.asarray([395.0, 822.0, 3263.0, 8199.0, 38760.0, 128755.0]) + +#%% +# Transmitter positions are defined relative to the observation locations in the data +# This is usually a constant offset for all data points. +transmitters = CircularLoop(orientation=['z','z','x','z','z','z'], + moment=np.r_[1, 1, -1, 1, 1, 1], + x = np.r_[0,0,0,0,0,0], + y = np.r_[0,0,0,0,0,0], + z = np.r_[0,0,0,0,0,0], + pitch = np.r_[0,0,0,0,0,0], + roll = np.r_[0,0,0,0,0,0], + yaw = np.r_[0,0,0,0,0,0], + radius = np.r_[1,1,1,1,1,1]) + +#%% +# Receiver positions are defined relative to the transmitter +receivers = CircularLoop(orientation=['z','z','x','z','z','z'], + moment=np.r_[1, 1, -1, 1, 1, 1], + x = np.r_[7.91, 7.91, 9.03, 7.91, 7.91, 7.89], + y = np.r_[0,0,0,0,0,0], + z = np.r_[0,0,0,0,0,0], + pitch = np.r_[0,0,0,0,0,0], + roll = np.r_[0,0,0,0,0,0], + yaw = np.r_[0,0,0,0,0,0], + radius = np.r_[1,1,1,1,1,1]) + +# Instantiate the system for the data +system = FdemSystem(frequencies=frequencies, transmitter=transmitters, receiver=receivers) + +# Create some data with random co-ordinates +x = np.random.randn(100) +y = np.random.randn(100) +z = np.random.randn(100) + +data = FdemData(x=x, y=-y, z=z, system = system) + +#%% +# Reading in the Data +# +++++++++++++++++++ +# Of course measured field data is stored on disk. So instead we can read data from file. + +#%% +dataFolder = "..//..//supplementary//data//" +# The data file name +dataFile = dataFolder + 'Resolve2.txt' +# The EM system file name +systemFile = dataFolder + 'FdemSystem2.stm' + +#%% +# Read in a data set from file. 
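+# read_csv needs both the data file and its matching system file; the system
+# file supplies the frequencies and loop geometry that the data columns refer to.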
+FD1 = FdemData.read_csv(dataFile, systemFile) + +#%% +# Take a look at the channel names +for name in FD1.channel_names: + print(name) + +# #%% +# # Get data points by slicing +# FDa = FD1[10:] +# FD1 = FD1[:10] + +# #%% +# # Append data sets together +# FD1.append(FDa) + + +# #%% +# # Plot the locations of the data points +# plt.figure(figsize=(8,6)) +# _ = FD1.scatter2D(); + +# #%% +# # Plot all the data along the specified line +# plt.figure(figsize=(8,6)) +# _ = FD1.plotLine(30010.0, log=10); + +# #%% +# # Or, plot specific channels in the data +# plt.figure(figsize=(8,6)) +# _ = FD1.plot(channels=[0,11,8], log=10, linewidth=0.5); + +#%% +# Read in a second data set +FD2 = FdemData.read_csv(dataFilename=dataFolder + 'Resolve1.txt', system=dataFolder + 'FdemSystem1.stm') + +#%% +# We can create maps of the elevations in two separate figures +plt.figure(figsize=(8,6)) +_ = FD1.map(dx=50.0, dy=50.0, mask = 200.0) +plt.axis('equal'); + +#%% + +plt.figure(figsize=(8,6)) +_ = FD2.map(dx=50.0, dy=50.0, mask = 200.0) +plt.axis('equal'); + +#%% +# Or, we can plot both data sets in one figure to see their positions relative +# to each other. +# +# In this case, I use a 2D scatter plot of the data point co-ordinates, and pass +# one of the channels as the colour. + +plt.figure(figsize=(8,6)) +_ = FD1.scatter2D(s=1.0, c=FD1.data[:, 0]) +_ = FD2.scatter2D(s=1.0, c=FD2.data[:, 0], cmap='jet'); + +#%% +# Or, interpolate the values to create a gridded "map". mapChannel will +# interpolate the specified channel number. + +plt.figure(figsize=(8,6)) +_ = FD1.mapData(channel=3, system=0, dx=200, dy=200, mask=250) +plt.axis('equal'); + +#%% +# Export the data to VTK +FD1.to_vtk('FD_one.vtk') +# FD2.to_vtk('FD_two.vtk') + +#%% +# Obtain a line from the data set +# +++++++++++++++++++++++++++++++ + +#%% +# Take a look at the line numbers in the dataset +print(np.unique(FD1.lineNumber)) + +#%% +L = FD1.line(30010.0) + +#%% +# A summary will now show the properties of the line. + +print(L.summary) + +#%% +# And we can scatter2D the points in the line. + +plt.figure(figsize=(8,6)) +_ = L.scatter2D(); + +#%% +# We can specify the axis along which to plot. +# xAxis can be index, x, y, z, r2d, r3d +plt.figure(figsize=(8,6)) +_ = FD1.plot_data(channels=np.r_[0, 11, 8], log=10, linewidth=0.5); + +with h5py.File('fdem.h5', 'w') as f: + FD1.createHdf(f, 'fdem') + FD1.writeHdf(f, 'fdem') + +with h5py.File('fdem.h5', 'r') as f: + FD3 = FdemData.fromHdf(f['fdem']) + +with h5py.File('fdem.h5', 'r') as f: + fdp = FdemData.fromHdf(f['fdem'], index=0) + + +# #%% +# # Obtain a single datapoint from the data set +# # +++++++++++++++++++++++++++++++++++++++++++ +# # +# # Checkout :ref:`Frequency domain datapoint` for an example +# # about how to use a datapoint once it is instantiated. +# dp = FD1.datapoint(0) + +# # Prepare the dataset so that we can read a point at a time. +# Dataset = FdemData._initialize_sequential_reading(dataFile, systemFile) +# # Get a datapoint from the file. +# DataPoint = Dataset._read_record() + +plt.show() + +#%% +# File Format for frequency domain data +# +++++++++++++++++++++++++++++++++++++ +# Here we describe the file format for frequency domain data. +# +# The data columns are read in according to the column names in the first line. +# +# In this description, the column name or its alternatives are given followed by what the name represents. +# Optional columns are also described. 
+# +# Required columns +# ________________ +# line +# Line number for the data point +# fid +# Unique identification number of the data point +# x or northing or n +# Northing co-ordinate of the data point, (m) +# y or easting or e +# Easting co-ordinate of the data point, (m) +# z or alt +# Altitude of the transmitter coil above ground level (m) +# elevation +# Elevation of the ground at the data point (m) +# I_ Q_ ... I_ Q_ - with the number and square brackets +# The measurements for each frequency specified in the accompanying system file. +# I is the real inphase measurement in (ppm) +# Q is the imaginary quadrature measurement in (ppm) +# Optional columns +# ________________ +# InphaseErr[0] QuadratureErr[0] ... InphaseErr[nFrequencies] QuadratureErr[nFrequencies] +# Estimates of standard deviation for each inphase and quadrature measurement. +# These must appear after the data colums. +# +# Example Header +# ______________ +# Line fid easting northing elevation height I_380 Q_380 ... ... I_129550 Q_129550 + +#%% +# File Format for a frequency domain system +# +++++++++++++++++++++++++++++++++++++++++ +# .. role:: raw-html(raw) +# :format: html +# +# The system file is structured using columns with the first line containing header information +# +# Each subsequent row contains the information for each measurement frequency +# +# freq +# Frequency of the channel +# tor +# Orientation of the transmitter loop 'x', or 'z' +# tmom +# Transmitter moment +# tx, ty, tx +# Offset of the transmitter with respect to the observation locations +# ror +# Orientation of the receiver loop 'x', or 'z' +# rmom +# Receiver moment +# rx, ry, rz +# Offset of the receiver with respect to the transmitter location +# +# Example system files are contained in +# `the supplementary folder`_ in this repository +# +# .. _the supplementary folder: https://github.com/usgs/geobipy/tree/master/documentation_source/source/examples/supplementary/Data +# +# See the Resolve.stm files. \ No newline at end of file diff --git a/docs/_downloads/950360871c2acdf8ad1b1627e54009da/plot_resolve_datapoint.py b/docs/_downloads/950360871c2acdf8ad1b1627e54009da/plot_resolve_datapoint.py new file mode 100644 index 00000000..1db92427 --- /dev/null +++ b/docs/_downloads/950360871c2acdf8ad1b1627e54009da/plot_resolve_datapoint.py @@ -0,0 +1,227 @@ +""" +Frequency domain datapoint +-------------------------- +""" +#%% +from os.path import join +import numpy as np +import h5py +import matplotlib.pyplot as plt +from geobipy import CircularLoop +from geobipy import FdemSystem +from geobipy import FdemData +from geobipy import FdemDataPoint +from geobipy import RectilinearMesh1D +from geobipy import Model +from geobipy import StatArray +from geobipy import Distribution + +# Instantiating a frequency domain data point +# +++++++++++++++++++++++++++++++++++++++++++ +# +# To instantiate a frequency domain datapoint we need to define some +# characteristics of the acquisition system. +# +# We need to define the frequencies in Hz of the transmitter, +# and the geometery of the loops used for each frequency. + +frequencies = np.asarray([380.0, 1776.0, 3345.0, 8171.0, 41020.0, 129550.0]) + +# Transmitter positions are defined relative to the observation locations in the data +# This is usually a constant offset for all data points. 
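+# One entry per frequency: each array below holds six values, matching the
+# six frequencies defined above.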
+transmitters = CircularLoop(orientation=['z','z','x','z','z','z'], + moment=np.r_[1, 1, -1, 1, 1, 1], + x = np.r_[0,0,0,0,0,0], + y = np.r_[0,0,0,0,0,0], + z = np.r_[0,0,0,0,0,0], + pitch = np.r_[0,0,0,0,0,0], + roll = np.r_[0,0,0,0,0,0], + yaw = np.r_[0,0,0,0,0,0], + radius = np.r_[1,1,1,1,1,1]) + +# Receiver positions are defined relative to the transmitter +receivers = CircularLoop(orientation=['z','z','x','z','z','z'], + moment=np.r_[1, 1, -1, 1, 1, 1], + x = np.r_[7.91, 7.91, 9.03, 7.91, 7.91, 7.89], + y = np.r_[0,0,0,0,0,0], + z = np.r_[0,0,0,0,0,0], + pitch = np.r_[0,0,0,0,0,0], + roll = np.r_[0,0,0,0,0,0], + yaw = np.r_[0,0,0,0,0,0], + radius = np.r_[1,1,1,1,1,1]) + +# Now we can instantiate the system. +fds = FdemSystem(frequencies, transmitters, receivers) + +# And use the system to instantiate a datapoint +# +# Note the extra arguments that can be used to create the data point. +# data is for any observed data one might have, while std are the estimated standard +# deviations of those observed data. +# +# Define some in-phase then quadrature data for each frequency. +data = np.r_[145.3, 435.8, 260.6, 875.1, 1502.7, 1516.9, + 217.9, 412.5, 178.7, 516.5, 405.7, 255.7] + +fdp = FdemDataPoint(x=0.0, y=0.0, z=30.0, elevation=0.0, + data=data, std=None, predictedData=None, + system=fds, lineNumber=0.0, fiducial=0.0) + +# plt.figure() +# _ = fdp.plot() + +# Obtaining a datapoint from a dataset +# ++++++++++++++++++++++++++++++++++++ +# +# More often than not, our observed data is stored in a file on disk. +# We can read in a dataset and pull datapoints from it. +# +# For more information about the frequency domain data set see :ref:`Frequency domain dataset` + +# Set some paths and file names +dataFolder = "..//..//supplementary//Data//" +# The data file name +dataFile = dataFolder + 'Resolve2.txt' +# The EM system file name +systemFile = dataFolder + 'FdemSystem2.stm' + +#%% +# Initialize and read an EM data set +# Prepare the dataset so that we can read a point at a time. +Dataset = FdemData._initialize_sequential_reading(dataFile, systemFile) +# Get a datapoint from the file. +fdp = Dataset._read_record() +#%% + +# # Initialize and read an EM data set +# D = FdemData.read_csv(dataFile,systemFile) + +# # Get a data point from the dataset +# fdp = D.datapoint(0) +# plt.figure() +# _ = fdp.plot() + +# Using a resolve datapoint +# +++++++++++++++++++++++++ + +# We can define a 1D layered earth model, and use it to predict some data +nCells = 19 +par = StatArray(np.linspace(0.01, 0.1, nCells), "Conductivity", "$\frac{S}{m}$") +depth = StatArray(np.arange(nCells+1) * 10.0, "Depth", 'm') +depth[-1] = np.inf +mod = Model(mesh=RectilinearMesh1D(edges=depth), values=par) + +# Forward model the data +fdp.forward(mod) + +plt.figure() +plt.subplot(121) +_ = mod.pcolor(transpose=True) +plt.subplot(122) +_ = fdp.plot_predicted() +plt.tight_layout() + +# Compute the sensitivity matrix for a given model +J = fdp.sensitivity(mod) + +plt.figure() +_ = np.abs(J).pcolor(equalize=True, log=10, flipY=True) + +# Attaching statistical descriptors to the resolve datapoint +# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +from numpy.random import Generator +from numpy.random import PCG64DXSM +generator = PCG64DXSM(seed=0) +prng = Generator(generator) + +# Set values of relative and additive error for both systems. +fdp.relative_error = 0.05 +fdp.additive_error = 10.0 +# Define a multivariate log normal distribution as the prior on the predicted data. 
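+# Only the active channels are used: the observed data supply the mean and the
+# squared standard deviations supply the variance of the distribution.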
+fdp.predictedData.prior = Distribution('MvLogNormal', fdp.data[fdp.active], fdp.std[fdp.active]**2.0, prng=prng) + +# This allows us to evaluate the likelihood of the predicted data +print(fdp.likelihood(log=True)) +# Or the misfit +print(fdp.data_misfit()) + +# Plot the misfits for a range of half space conductivities +plt.figure() +_ = fdp.plot_halfspace_responses(-6.0, 4.0, 200) + +plt.title("Halfspace responses"); + +# We can perform a quick search for the best fitting half space +halfspace = fdp.find_best_halfspace() +print('Best half space conductivity is {} $S/m$'.format(halfspace.values)) +plt.figure() +_ = fdp.plot() +_ = fdp.plot_predicted() + +# Compute the misfit between observed and predicted data +print(fdp.data_misfit()) + +# We can attach priors to the height of the datapoint, +# the relative error multiplier, and the additive error noise floor + + +# Define the distributions used as priors. +zPrior = Distribution('Uniform', min=fdp.z - 2.0, max=fdp.z + 2.0, prng=prng) +relativePrior = Distribution('Uniform', min=0.01, max=0.5, prng=prng) +additivePrior = Distribution('Uniform', min=5, max=15, prng=prng) +fdp.set_priors(z_prior=zPrior, relative_error_prior=relativePrior, additive_error_prior=additivePrior, prng=prng) + + +# In order to perturb our solvable parameters, we need to attach proposal distributions +z_proposal = Distribution('Normal', mean=fdp.z, variance = 0.01, prng=prng) +relativeProposal = Distribution('MvNormal', mean=fdp.relative_error, variance=2.5e-7, prng=prng) +additiveProposal = Distribution('MvLogNormal', mean=fdp.additive_error, variance=1e-4, prng=prng) +fdp.set_proposals(relativeProposal, additiveProposal, z_proposal=z_proposal) + +# With priors set we can auto generate the posteriors +fdp.set_posteriors() + +nCells = 19 +par = StatArray(np.linspace(0.01, 0.1, nCells), "Conductivity", "$\frac{S}{m}$") +depth = StatArray(np.arange(nCells+1) * 10.0, "Depth", 'm') +depth[-1] = np.inf +mod = Model(mesh=RectilinearMesh1D(edges=depth), values=par) +fdp.forward(mod) + +# Perturb the datapoint and record the perturbations +for i in range(10): + fdp.perturb() + fdp.update_posteriors() + + +# Plot the posterior distributions +fig = plt.figure() +fdp.plot_posteriors(overlay=fdp) + +import h5py +with h5py.File('fdp.h5', 'w') as f: + fdp.createHdf(f, 'fdp', withPosterior=True) + fdp.writeHdf(f, 'fdp', withPosterior=True) + +with h5py.File('fdp.h5', 'r') as f: + fdp1 = FdemDataPoint.fromHdf(f['fdp']) + +plt.figure() +fdp1.plot_posteriors(overlay=fdp1) + +import h5py +with h5py.File('fdp.h5', 'w') as f: + fdp.createHdf(f, 'fdp', withPosterior=True, add_axis=np.arange(10.0)) + + for i in range(10): + fdp.writeHdf(f, 'fdp', withPosterior=True, index=i) + +from geobipy import FdemData +with h5py.File('fdp.h5', 'r') as f: + fdp1 = FdemDataPoint.fromHdf(f['fdp'], index=0) + fdp2 = FdemData.fromHdf(f['fdp']) + +fdp1.plot_posteriors(overlay=fdp1) + +plt.show() +# %% \ No newline at end of file diff --git a/docs/_downloads/9570534d71f4f6a00b8f20360000c3ff/plot_histogram_1d.py b/docs/_downloads/9570534d71f4f6a00b8f20360000c3ff/plot_histogram_1d.py new file mode 100644 index 00000000..10a7fc18 --- /dev/null +++ b/docs/_downloads/9570534d71f4f6a00b8f20360000c3ff/plot_histogram_1d.py @@ -0,0 +1,156 @@ +""" +Histogram 1D +------------ + +This histogram class allows efficient updating of histograms, plotting and +saving as HDF5 +""" + +#%% +from geobipy.src.classes.mesh.RectilinearMesh1D import RectilinearMesh1D +import h5py +from geobipy import StatArray +from geobipy import 
Histogram +import numpy as np +import matplotlib.pyplot as plt + +#%% +# Histogram with regular bins +# +++++++++++++++++++++++++++ + +# Create regularly spaced bins +mesh = RectilinearMesh1D(edges=StatArray(np.linspace(-3.0, 3.0, 101), 'bins', 'm')) + +#%% +# Set the histogram using the bins, and update +H = Histogram(mesh=mesh) + +#%% +# We can update the histogram with some new values +H.update(np.random.randn(1000), trim=True) + +# Plot the histogram +plt.figure() +plt.subplot(221) +_ = H.plot() +plt.subplot(222) +_ = H.pdf.bar() +plt.subplot(223) +H.pmf.bar() +plt.subplot(224) +H.cdf().bar() + +#%% +# Get the median, and 95% confidence values +print(H.credible_intervals(percent=95.0)) + +plt.figure() +H.plot() +H.plotCredibleIntervals() +H.plotMean() +H.plotMedian() + +#%% +# Histogram with irregular bins +# +++++++++++++++++++++++++++++ + +# Create irregularly spaced bins +x = np.cumsum(np.arange(10, dtype=np.float64)) +irregularBins = np.hstack([-x[::-1], x[1:]]) + +#%% +# Create a named StatArray +edges = StatArray(irregularBins, 'irregular bins') +mesh = RectilinearMesh1D(edges = edges) + +#%% +# Instantiate the histogram with bin edges +H = Histogram(mesh=mesh) + +# Update the histogram +H.update((np.random.randn(10000)*20.0) - 10.0) + +#%% +# Plot the histogram +plt.figure() +plt.subplot(211) +_ = H.plot() +plt.subplot(212) +_ = H.plot(normalize=True) + +plt.figure() +H.plot() +H.plotCredibleIntervals() +H.plotMean() +H.plotMedian() + +#%% +# We can plot the histogram as a pcolor plot +plt.figure() +_ = H.pcolor(grid=True, transpose=True) + +#%% +# Histogram with linear space entries that are logged internally +# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +# Create some bins spaced logarithmically +mesh = RectilinearMesh1D(edges = StatArray(np.logspace(-5, 3), 'positive bins'), log=10) + +#%% +# Instantiate the Histogram with log=10 +H = Histogram(mesh) + +#%% +# The update takes in the numbers in linear space and takes their log=10 +H.update(10.0**(np.random.randn(1000)*2.0), trim=True) + +#%% +plt.figure() +plt.subplot(211) +_ = H.plot() + +import h5py +with h5py.File('h1d.h5', 'w') as f: + H.toHdf(f, 'h1d') + +with h5py.File('h1d.h5', 'r') as f: + H1 = Histogram.fromHdf(f['h1d']) + +plt.subplot(212) +_ = H1.plot() + + +#%% +mesh = RectilinearMesh1D(edges=StatArray(np.linspace(-3.0, 3.0, 101), 'bins', 'm')) +#%% +# Set the histogram using the bins, and update +H = Histogram(mesh=mesh) + +#%% +# We can update the histogram with some new values +H.update(np.random.randn(1000), trim=True) + +import h5py +with h5py.File('h1d.h5', 'w') as f: + H.createHdf(f, 'h1d', add_axis=StatArray(np.arange(3.0), "Name", "Units")) + H.writeHdf(f, 'h1d', index=0) + H.update(np.random.randn(1000), trim=True) + H.writeHdf(f, 'h1d', index=1) + H.update(np.random.randn(1000), trim=True) + H.writeHdf(f, 'h1d', index=2) + +with h5py.File('h1d.h5', 'r') as f: + H1 = Histogram.fromHdf(f['h1d']) + H2 = Histogram.fromHdf(f['h1d'], index=0) + H3 = Histogram.fromHdf(f['h1d'], index=1) + H4 = Histogram.fromHdf(f['h1d'], index=2) + + +print(H4.summary) + +# plt.figure() +# plt.subplot(211) +# _ = H1.plot() +# plt.subplot(212) +# _ = H4.plot() + +plt.show() \ No newline at end of file diff --git a/docs/_downloads/a209de4dae1a6f3f3daeab70fe6f557a/plot_inference_1d_skytem.py b/docs/_downloads/a209de4dae1a6f3f3daeab70fe6f557a/plot_inference_1d_skytem.py new file mode 100644 index 00000000..14a3f440 --- /dev/null +++ b/docs/_downloads/a209de4dae1a6f3f3daeab70fe6f557a/plot_inference_1d_skytem.py 
@@ -0,0 +1,89 @@ +""" +Running GeoBIPy to invert Skytem data +++++++++++++++++++++++++++++++++++++++ +""" + +import os +import sys +import pathlib +from datetime import timedelta +import time +import numpy as np +from geobipy import Inference3D +from geobipy import user_parameters +from geobipy import get_prng + +def checkCommandArguments(): + """Check the users command line arguments. """ + import argparse + # warnings.filterwarnings('error') + + Parser = argparse.ArgumentParser(description="GeoBIPy", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + Parser.add_argument('--index', default=0, type=int, help='job array index 0-18') + Parser.add_argument('--data', default=None, help="Data type. Choose from ['skytem', 'tempest', 'resolve']") + Parser.add_argument('--model', default=None, help="Model type. Choose from ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']") + + return Parser.parse_args() + +#%% +np.random.seed(0) + +args = checkCommandArguments() +sys.path.append(os.getcwd()) + +models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water'] + +data_type = "Skytem" +model_type = models[args.index] + +#%% +# The directory where HDF files will be stored +#%% +file_path = os.path.join(data_type, model_type) +pathlib.Path(file_path).mkdir(parents=True, exist_ok=True) + +for filename in os.listdir(file_path): + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + except Exception as e: + print('Failed to delete %s. Reason: %s' % (file_path, e)) + +output_directory = file_path + +data_filename = data_type + '_' + model_type + +supplementary = "..//..//supplementary//" +parameter_file = supplementary + "//options_files//{}_options".format(data_type) +inputFile = pathlib.Path(parameter_file) +assert inputFile.exists(), Exception("Cannot find input file {}".format(inputFile)) + +output_directory = pathlib.Path(output_directory) +assert output_directory.exists(), Exception("Make sure the output directory exists {}".format(output_directory)) + +print('Using user input file {}'.format(parameter_file)) +print('Output files will be produced at {}'.format(output_directory)) + +kwargs = user_parameters.read(inputFile) + +kwargs['n_markov_chains'] = 5000 + +kwargs['data_filename'] = supplementary + '//data//' + data_filename + '.csv' +kwargs['system_filename'] = [supplementary + "//data//" + x for x in kwargs['system_filename']] + +# Everyone needs the system classes read in early. +data = kwargs['data_type']._initialize_sequential_reading(kwargs['data_filename'], kwargs['system_filename']) + +# Start keeping track of time. 
+t0 = time.time() + +seed = 146100583096709124601953385843316024947 +prng = get_prng(seed=seed) + +inference3d = Inference3D(data, prng=prng) +inference3d.create_hdf5(directory=output_directory, **kwargs) + +print("Created hdf5 files in {} h:m:s".format(str(timedelta(seconds=time.time()-t0)))) + +inference3d.infer(index=2, **kwargs) \ No newline at end of file diff --git a/docs/_downloads/a3ece577667aef55ca48b74fee4c6ed6/plot_rectilinear_mesh_1d.zip b/docs/_downloads/a3ece577667aef55ca48b74fee4c6ed6/plot_rectilinear_mesh_1d.zip new file mode 100644 index 00000000..c18baf0c Binary files /dev/null and b/docs/_downloads/a3ece577667aef55ca48b74fee4c6ed6/plot_rectilinear_mesh_1d.zip differ diff --git a/docs/_downloads/a54a8abe32f99b7dd05f1ec6da162902/plot_distributions.zip b/docs/_downloads/a54a8abe32f99b7dd05f1ec6da162902/plot_distributions.zip new file mode 100644 index 00000000..49edb2d3 Binary files /dev/null and b/docs/_downloads/a54a8abe32f99b7dd05f1ec6da162902/plot_distributions.zip differ diff --git a/docs/_downloads/b30e33468a3aa2492e5eaedce425508f/plot_skytem_dataset.zip b/docs/_downloads/b30e33468a3aa2492e5eaedce425508f/plot_skytem_dataset.zip new file mode 100644 index 00000000..c550a111 Binary files /dev/null and b/docs/_downloads/b30e33468a3aa2492e5eaedce425508f/plot_skytem_dataset.zip differ diff --git a/docs/_downloads/b65392b576618a906ad35b58f026489b/plot_model_2d.py b/docs/_downloads/b65392b576618a906ad35b58f026489b/plot_model_2d.py new file mode 100644 index 00000000..bf428207 --- /dev/null +++ b/docs/_downloads/b65392b576618a906ad35b58f026489b/plot_model_2d.py @@ -0,0 +1,56 @@ +""" +2D Rectilinear Model +-------------------- +This 2D rectilinear model defines a grid with straight cell boundaries. + +""" + +#%% +from geobipy import StatArray +from geobipy import RectilinearMesh2D +from geobipy import Model +import h5py +import matplotlib.pyplot as plt +import numpy as np + + +#%% +# Specify some cell centres in x and y +x = StatArray(np.arange(11.0), 'Easting', 'm') +y = StatArray(np.arange(11.0), 'Northing', 'm') +mesh = RectilinearMesh2D(x_edges=x, y_edges=y) + +xx, yy = np.meshgrid(mesh.x.centres, mesh.y.centres) +values = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "Values") + +mod = Model(mesh=mesh, values = values) + +plt.figure() +mod.pcolor() + +mod2 = mod.resample(0.5, 0.5) +mod3 = mod.resample(1.5, 1.5) +plt.figure() +plt.subplot(121) +mod2.pcolor() +plt.axis('equal') +plt.subplot(122) +mod3.pcolor() +plt.axis('equal') + + +# #%% +# # We can plot the mesh in 3D! 
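+# # (Optional sketch: the commented calls below assume the pyvista package is
+# # installed and that ``rm`` refers to the mesh being rendered, e.g. ``rm = mod.mesh``.)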
+# pv = rm.pyvista_plotter() +# pv.show() + +# rm.to_vtk('Model3D.vtk') + +with h5py.File('Model2D.h5', 'w') as f: + mod.toHdf(f, 'model') + +with h5py.File('Model2D.h5', 'r') as f: + mod2 = Model.fromHdf(f['model']) + + +plt.show() \ No newline at end of file diff --git a/docs/_downloads/bc82bea3a5dd7bdba60b65220891d9e5/examples_python.zip b/docs/_downloads/bc82bea3a5dd7bdba60b65220891d9e5/examples_python.zip new file mode 100644 index 00000000..25f49d11 Binary files /dev/null and b/docs/_downloads/bc82bea3a5dd7bdba60b65220891d9e5/examples_python.zip differ diff --git a/docs/_downloads/c95b0f836ba0fba698e8506132e0a081/plot_inference_2d_skytem.zip b/docs/_downloads/c95b0f836ba0fba698e8506132e0a081/plot_inference_2d_skytem.zip new file mode 100644 index 00000000..19f71018 Binary files /dev/null and b/docs/_downloads/c95b0f836ba0fba698e8506132e0a081/plot_inference_2d_skytem.zip differ diff --git a/docs/_downloads/cad36827134302329c83d6bd810a1239/plot_tempest_dataset.py b/docs/_downloads/cad36827134302329c83d6bd810a1239/plot_tempest_dataset.py new file mode 100644 index 00000000..8fda1079 --- /dev/null +++ b/docs/_downloads/cad36827134302329c83d6bd810a1239/plot_tempest_dataset.py @@ -0,0 +1,88 @@ +""" +Tempest dataset +-------------------- +""" +#%% +import h5py +from geobipy import plotting as cP +from os.path import join +import matplotlib.pyplot as plt +import numpy as np +from geobipy import TempestData + +#%% +# Reading in the Data +# +++++++++++++++++++ + +#%% +dataFolder = "..//..//supplementary//data//" + +# # The data file name +# dataFiles = dataFolder + 'Tempest.nc' +# # The EM system file name +# systemFiles = dataFolder + 'Tempest.stm' + +# #%% +# # Read in the data from file +# TD = TempestData.read_netcdf(dataFiles, systemFiles) + +# TD.write_csv(dataFolder + 'Tempest.csv') +TD = TempestData.read_csv(dataFolder + 'tempest_saline_clay.csv', system_filename=dataFolder + 'Tempest.stm') + + +#%% +# Plot the locations of the data points +plt.figure(figsize=(8,6)) +_ = TD.scatter2D() +plt.title("Scatter plot") + +#%% +# Plot all the data along the specified line +plt.figure(figsize=(8,6)) +_ = TD.plotLine(0.0) +plt.title('Line {}'.format(225401.0)) + +#%% +# Or, plot specific channels in the data +plt.figure(figsize=(8,6)) +_ = TD.plot_data(system=0, channels=[0, 6, 18]) +plt.title("3 channels of data") + +#%% +plt.figure() +_ = TD.pcolor(system=0) +plt.title('Data as an array') + +#%% +plt.figure() +ax = TD.scatter2D(c=TD.data[:, TD.channel_index(system=0, channel=10)], equalize=True) +plt.axis('equal') +plt.title(f"scatter plot of channel {TD.channel_index(system=0, channel=10)}") + +with h5py.File('tdem.h5', 'w') as f: + TD.createHdf(f, 'tdem') + TD.writeHdf(f, 'tdem') + +with h5py.File('tdem.h5', 'r') as f: + TD3 = TempestData.fromHdf(f['tdem']) + +with h5py.File('tdem.h5', 'r') as f: + tdp = TempestData.fromHdf(f['tdem'], index=0) + + +# #%% +# # Obtain a line from the data set +# # +++++++++++++++++++++++++++++++ +# line = TD.line(0.0) + +# #%% +# plt.figure() +# _ = line.scatter2D() +# plt.title('Channel') + +# #%% +# plt.figure() +# _ = line.plot_data(xAxis='index', log=10) +# plt.title("All data along line") + +plt.show() diff --git a/docs/_downloads/d11ca9540d0abc72d005f3af4dbab908/plot_tempest_dataset.zip b/docs/_downloads/d11ca9540d0abc72d005f3af4dbab908/plot_tempest_dataset.zip new file mode 100644 index 00000000..bf1c2f3c Binary files /dev/null and b/docs/_downloads/d11ca9540d0abc72d005f3af4dbab908/plot_tempest_dataset.zip differ diff --git 
a/docs/_downloads/d879a27c2deb3b0230b4bdd74daa15f2/plot_resolve_datapoint.zip b/docs/_downloads/d879a27c2deb3b0230b4bdd74daa15f2/plot_resolve_datapoint.zip new file mode 100644 index 00000000..23d9a20e Binary files /dev/null and b/docs/_downloads/d879a27c2deb3b0230b4bdd74daa15f2/plot_resolve_datapoint.zip differ diff --git a/docs/_downloads/dab51a848a47b0a54695528258f289e6/plot_model_3d.zip b/docs/_downloads/dab51a848a47b0a54695528258f289e6/plot_model_3d.zip new file mode 100644 index 00000000..5d8cfc2a Binary files /dev/null and b/docs/_downloads/dab51a848a47b0a54695528258f289e6/plot_model_3d.zip differ diff --git a/docs/_downloads/dc3e882d376317616b04bc390b62a137/plot_rectilinear_mesh_3d.zip b/docs/_downloads/dc3e882d376317616b04bc390b62a137/plot_rectilinear_mesh_3d.zip new file mode 100644 index 00000000..5c442223 Binary files /dev/null and b/docs/_downloads/dc3e882d376317616b04bc390b62a137/plot_rectilinear_mesh_3d.zip differ diff --git a/docs/_downloads/dd5f000fe10de32b5e5e1f6a210e1afb/plot_frequency_dataset.zip b/docs/_downloads/dd5f000fe10de32b5e5e1f6a210e1afb/plot_frequency_dataset.zip new file mode 100644 index 00000000..f637bab5 Binary files /dev/null and b/docs/_downloads/dd5f000fe10de32b5e5e1f6a210e1afb/plot_frequency_dataset.zip differ diff --git a/docs/_downloads/dee3b069d7e8a3391dafe3d4781cd626/hdf5.zip b/docs/_downloads/dee3b069d7e8a3391dafe3d4781cd626/hdf5.zip new file mode 100644 index 00000000..692f053c Binary files /dev/null and b/docs/_downloads/dee3b069d7e8a3391dafe3d4781cd626/hdf5.zip differ diff --git a/docs/_downloads/e7cc18385c0a8722277c7651cb3e9fdb/plot_inference_1d_tempest.py b/docs/_downloads/e7cc18385c0a8722277c7651cb3e9fdb/plot_inference_1d_tempest.py new file mode 100644 index 00000000..87823af0 --- /dev/null +++ b/docs/_downloads/e7cc18385c0a8722277c7651cb3e9fdb/plot_inference_1d_tempest.py @@ -0,0 +1,90 @@ +""" +Running GeoBIPy to invert Tempest data +++++++++++++++++++++++++++++++++++++++ +""" + +import os +import sys +import pathlib +from datetime import timedelta +import time +import numpy as np +from geobipy import Inference3D +from geobipy import user_parameters +from geobipy import get_prng + +def checkCommandArguments(): + """Check the users command line arguments. """ + import argparse + # warnings.filterwarnings('error') + + Parser = argparse.ArgumentParser(description="GeoBIPy", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + Parser.add_argument('--index', default=0, type=int, help='job array index 0-18') + Parser.add_argument('--data', default=None, help="Data type. Choose from ['skytem_512', 'tempest', 'resolve']") + Parser.add_argument('--model', default=None, help="Model type. Choose from ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']") + + return Parser.parse_args() + +#%% +np.random.seed(0) + +args = checkCommandArguments() +sys.path.append(os.getcwd()) + +models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water'] + +data_type = "Tempest" +model_type = models[args.index] + +#%% +# The directory where HDF files will be stored +#%% +file_path = os.path.join(data_type, model_type) +pathlib.Path(file_path).mkdir(parents=True, exist_ok=True) + +for filename in os.listdir(file_path): + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + except Exception as e: + print('Failed to delete %s. 
Reason: %s' % (file_path, e)) + +output_directory = file_path + +data_filename = data_type + '_' + model_type + +supplementary = "..//..//supplementary//" + +parameter_file = supplementary + "//options_files//{}_options".format(data_type) +inputFile = pathlib.Path(parameter_file) +assert inputFile.exists(), Exception("Cannot find input file {}".format(inputFile)) + +output_directory = pathlib.Path(output_directory) +assert output_directory.exists(), Exception("Make sure the output directory exists {}".format(output_directory)) + +print('Using user input file {}'.format(parameter_file)) +print('Output files will be produced at {}'.format(output_directory)) + +kwargs = user_parameters.read(inputFile) + +kwargs['n_markov_chains'] = 5000 + +kwargs['data_filename'] = supplementary + '//data//' + data_filename + '.csv' +kwargs['system_filename'] = supplementary + "//data//" + kwargs['system_filename'] + +# Everyone needs the system classes read in early. +data = kwargs['data_type']._initialize_sequential_reading(kwargs['data_filename'], kwargs['system_filename']) + +# Start keeping track of time. +t0 = time.time() + +seed = 146100583096709124601953385843316024947 +prng = get_prng(seed=seed) + +inference3d = Inference3D(data, prng=prng) +inference3d.create_hdf5(directory=output_directory, **kwargs) + +print("Created hdf5 files in {} h:m:s".format(str(timedelta(seconds=time.time()-t0)))) + +inference3d.infer(index=2, **kwargs) diff --git a/docs/_downloads/ec453d00c4b1863c6db7fb665ba67ecb/plot_distributions.py b/docs/_downloads/ec453d00c4b1863c6db7fb665ba67ecb/plot_distributions.py new file mode 100644 index 00000000..9fc321db --- /dev/null +++ b/docs/_downloads/ec453d00c4b1863c6db7fb665ba67ecb/plot_distributions.py @@ -0,0 +1,48 @@ +""" +Distribution Class +++++++++++++++++++ + +Handles the initialization of different statistical distribution +""" + +#%% +from geobipy import Distribution +from geobipy import plotting as cP +import matplotlib.pyplot as plt +import numpy as np + +from numpy.random import Generator +from numpy.random import PCG64DXSM +generator = PCG64DXSM(seed=0) +prng = Generator(generator) + +#%% +# Univariate Normal Distribution +# ++++++++++++++++++++++++++++++ +D = Distribution('Normal', 0.0, 1.0, prng=prng) + +# Get the bins of the Distribution from +- 4 standard deviations of the mean +bins = D.bins() + +# Grab random samples from the distribution +D.rng(10) + +# We can then get the Probability Density Function for those bins +pdf = D.probability(bins, log=False) + +# And we can plot that PDF +# sphinx_gallery_thumbnail_number = 1 +plt.figure() +plt.plot(bins, pdf) + +#%% +# Multivariate Normal Distribution +# ++++++++++++++++++++++++++++++++ +D = Distribution('MvNormal',[0.0,1.0,2.0],[1.0,1.0,1.0], prng=prng) +D.rng() + + +#%% +# Uniform Distribution +D = Distribution('Uniform', 0.0, 1.0, prng=prng) +D.bins() diff --git a/docs/_downloads/f48e5c05cfcaf1411cc06bb7a0ed1eb0/plot_model_1d.py b/docs/_downloads/f48e5c05cfcaf1411cc06bb7a0ed1eb0/plot_model_1d.py new file mode 100644 index 00000000..0076ce37 --- /dev/null +++ b/docs/_downloads/f48e5c05cfcaf1411cc06bb7a0ed1eb0/plot_model_1d.py @@ -0,0 +1,160 @@ +""" +1D Model with an infinite halfspace +----------------------------------- +""" + +# %% +from copy import deepcopy +from geobipy import StatArray +from geobipy import RectilinearMesh1D +from geobipy import Model +from geobipy import Distribution +import matplotlib.pyplot as plt +import numpy as np + +# %% +# Instantiate the 1D Model with a Half Space +# 
++++++++++++++++++++++++++++++++++++++++++ + +# Make a test model with 10 layers, and increasing parameter values +nLayers = 2 +par = StatArray(np.linspace(0.001, 0.02, nLayers), "Conductivity", "$\\frac{S}{m}$") +thk = StatArray(np.full(nLayers, fill_value=10.0)) +thk[-1] = np.inf +mesh = RectilinearMesh1D(widths = thk) + +mod = Model(mesh = mesh, values=par) + +plt.figure() +mod.plot_grid(transpose=True, flip=True) + +#%% +# Randomness and Model Perturbations +# ++++++++++++++++++++++++++++++++++ +# We can set the priors on the 1D model by assigning minimum and maximum layer +# depths and a maximum number of layers. These are used to create priors on +# the number of cells in the model, a new depth interface, new parameter values +# and the vertical gradient of those parameters. +# The halfSpaceValue is used as a reference value for the parameter prior. +from numpy.random import Generator +from numpy.random import PCG64DXSM +generator = PCG64DXSM(seed=0) +prng = Generator(generator) + +# Set the priors +mod.set_priors(value_mean=0.01, + min_edge=1.0, + max_edge=150.0, + max_cells=30, + solve_value=True, + solve_gradient=True, + prng=prng) + +#%% +# We can evaluate the prior of the model using depths only +print('Log probability of the Model given its priors: ', mod.probability(False, False)) +# Or with priors on its parameters, and parameter gradient with depth. +print('Log probability of the Model given its priors: ', mod.probability(True, True)) + +#%% +# To propose new models, we specify the probabilities of creating, removing, perturbing, and not changing +# a layer interface +pProposal = Distribution('LogNormal', 0.01, np.log(2.0)**2.0, linearSpace=True, prng=prng) +mod.set_proposals(probabilities=[0.25, 0.25, 0.5, 0.25], proposal=pProposal, prng=prng) + +#%% +# We can then perturb the layers of the model +remapped, perturbed = mod.perturb() + +#%% +fig = plt.figure(figsize=(8, 6)) +ax = plt.subplot(121) +mod.pcolor(transpose=True, flip=True, log=10) # , grid=True) +ax = plt.subplot(122) +perturbed.pcolor(transpose=True, flip=True, log=10) # , grid=True) + +#%% +# We can evaluate the prior of the model using depths only +print('Log probability of the Model given its priors: ',perturbed.probability(False, False)) +# Or with priors on its parameters, and parameter gradient with depth. +print('Log probability of the Model given its priors: ',perturbed.probability(True, True)) + + +# %% +# Perturbing a model multiple times +# +++++++++++++++++++++++++++++++++ +# In the stochasitic inference process, we perturb the model structure, +# and parameter values, multiple times. +# Each time the model is perturbed, we can record its state +# in a posterior distribution. +# +# For a 1D model, the parameter posterior is a 2D hitmap with depth in one dimension +# and the parameter value in the other. +# We also attach a 1D histogram for the number of layers, +# and a 1D histogram for the locations of interfaces. +# +# Since we have already set the priors on the Model, we can set the posteriors +# based on bins from from the priors. + +mod.set_posteriors() + +mod0 = deepcopy(mod) + +#%% +# Now we randomly perturb the model, and update its posteriors. +mod.update_posteriors() +for i in range(1001): + remapped, perturbed = mod.perturb() + + # And update the model posteriors + perturbed.update_posteriors() + + mod = perturbed + +#%% +# We can now plot the posteriors of the model. 
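+#
+# (The commented lines further below show how the individual posterior objects,
+# e.g. ``mod.nCells.posterior`` and ``mod.values.posterior``, could be plotted
+# one at a time.)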
+# +# Remember in this case, we are simply perturbing the model structure and parameter values +# The proposal for the parameter values is fixed and centred around a single value. +# fig = plt.figure(figsize=(8, 6)) + +# plt.subplot(131) +# mod.nCells.posterior.plot() +# ax = plt.subplot(132) +# mod.values.posterior.pcolor(cmap='gray_r', colorbar=False, flipY=True, logX=10) +# plt.subplot(133, sharey=ax) +# mod.mesh.edges.posterior.plot(transpose=True, flipY=True) + +# plt.figure() +# mod.plot_posteriors(**{"cmap": 'gray_r', +# "xscale": 'log', +# "noColorbar": True, +# "flipY": True, +# 'credible_interval_kwargs':{'axis': 1, +# 'reciprocate': True, +# 'xscale': 'log'}}) +# mod.par.posterior.plotCredibleIntervals(xscale='log', axis=1) + + +fig = plt.figure(figsize=(8, 6)) +# gs = fig.add_gridspec(nrows=1, ncols=1) +mod.plot_posteriors(axes=fig, + edges_kwargs = { + "transpose":True, + "flipY":True + }, + parameter_kwargs = { + "cmap": 'gray_r', + "xscale": 'log', + "colorbar": False, + "flipY": True, + 'credible_interval_kwargs':{ + 'reciprocate':True, + # 'axis': 1, + 'xscale': 'log' + } + }, + best = mod) + + +plt.show() diff --git a/docs/_downloads/fb625db3c50d423b1b7881136ffdeec8/examples_jupyter.zip b/docs/_downloads/fb625db3c50d423b1b7881136ffdeec8/examples_jupyter.zip new file mode 100644 index 00000000..e9a56369 Binary files /dev/null and b/docs/_downloads/fb625db3c50d423b1b7881136ffdeec8/examples_jupyter.zip differ diff --git a/docs/_downloads/fd0d450029ed4426f6b6d51ec196e9de/plot_inference_2d_skytem.py b/docs/_downloads/fd0d450029ed4426f6b6d51ec196e9de/plot_inference_2d_skytem.py new file mode 100644 index 00000000..92da3c30 --- /dev/null +++ b/docs/_downloads/fd0d450029ed4426f6b6d51ec196e9de/plot_inference_2d_skytem.py @@ -0,0 +1,152 @@ +""" +2D Posterior analysis of Skytem inference +----------------------------------------- + +All plotting in GeoBIPy can be carried out using the 3D inference class + +""" + +import argparse +import matplotlib.pyplot as plt +import numpy as np +from geobipy import Model +from geobipy import Inference2D + +def plot_2d_summary(folder, data_type, model_type): + #%% + # Inference for a line of inferences + # ++++++++++++++++++++++++++++++++++ + # + # We can instantiate the inference handler by providing a path to the directory containing + # HDF5 files generated by GeoBIPy. + # + # The InfereceXD classes are low memory. They only read information from the HDF5 files + # as and when it is needed. + # + # The first time you use these classes to create plots, expect longer initial processing times. + # I precompute expensive properties and store them in the HDF5 files for later use. 
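+    #
+    # A seeded random number generator is created below and passed to the
+    # Inference2D handler so that repeated runs of this plotting script are
+    # reproducible.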
+ + from numpy.random import Generator + from numpy.random import PCG64DXSM + generator = PCG64DXSM(seed=0) + prng = Generator(generator) + + #%% + results_2d = Inference2D.fromHdf('{}/{}/{}/0.0.h5'.format(folder, data_type, model_type), prng=prng) + + kwargs = { + "log" : 10, + "cmap" : 'jet' + } + + fig = plt.figure(figsize=(16, 8)) + plt.suptitle("{} {}".format(data_type, model_type)) + gs0 = fig.add_gridspec(6, 2, hspace=1.0) + + true_model = Model.create_synthetic_model(model_type) + + kwargs['vmin'] = np.log10(np.min(true_model.values)) + kwargs['vmax'] = np.log10(np.max(true_model.values)) + + ax = fig.add_subplot(gs0[0, 0]) + true_model.pcolor(flipY=True, ax=ax, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False); + results_2d.plot_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False); + + plt.ylim([-550, 60]) + + print(results_2d.mean_parameters().summary) + + ax1 = fig.add_subplot(gs0[0, 1], sharex=ax, sharey=ax) + results_2d.plot_mean_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # By adding the useVariance keyword, we can make regions of lower confidence more transparent + ax1 = fig.add_subplot(gs0[1, 1], sharex=ax, sharey=ax) + results_2d.plot_mode_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # # # # # We can also choose to keep parameters above the DOI opaque. + # # # # results_2d.compute_doi() + # # # # plt.subplot(313) + # # # # results_2d.plot_mean_model(use_variance=True, mask_below_doi=True, **kwargs); + # # # # results_2d.plot_data_elevation(linewidth=0.3); + # # # # results_2d.plot_elevation(linewidth=0.3); + + ax1 = fig.add_subplot(gs0[2, 1], sharex=ax, sharey=ax) + results_2d.plot_best_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + ax1.set_title('Best model') + + del kwargs['vmin'] + del kwargs['vmax'] + + ax1 = fig.add_subplot(gs0[3, 1], sharex=ax, sharey=ax); ax1.set_title('5%') + results_2d.plot_percentile(ax=ax1, percent=0.05, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[4, 1], sharex=ax, sharey=ax); ax1.set_title('50%') + results_2d.plot_percentile(ax=ax1, percent=0.5, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[5, 1], sharex=ax, sharey=ax); ax1.set_title('95%') + results_2d.plot_percentile(ax=ax1, percent=0.95, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + #%% + # We can plot the parameter values that produced the highest posterior + ax1 = fig.add_subplot(gs0[2, 0], sharex=ax) + results_2d.plot_k_layers(ax=ax1, wrap_ylabel=True) + + ax1 = fig.add_subplot(gs0[1, 0], sharex=ax) + + ll, bb, ww, hh = ax1.get_position().bounds + ax1.set_position([ll, bb, ww*0.8, hh]) + + results_2d.plot_channel_saturation(ax=ax1, wrap_ylabel=True) + results_2d.plot_burned_in(ax=ax1, underlay=True) + + #%% + # Now we can start plotting some more interesting posterior properties. + # How about the confidence? 
+ ax1 = fig.add_subplot(gs0[3, 0], sharex=ax, sharey=ax) + results_2d.plot_confidence(ax=ax1); + results_2d.plot_data_elevation(ax=ax1, linewidth=0.3); + results_2d.plot_elevation(ax=ax1, linewidth=0.3); + + #%% + # We can take the interface depth posterior for each data point, + # and display an interface probability cross section + # This posterior can be washed out, so the clim_scaling keyword lets me saturate + # the top and bottom 0.5% of the colour range + ax1 = fig.add_subplot(gs0[4, 0], sharex=ax, sharey=ax) + ax1.set_title('P(Interface)') + results_2d.plot_interfaces(cmap='Greys', clim_scaling=0.5, ax=ax1); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[5, 0], sharex=ax, sharey=ax) + results_2d.plot_entropy(cmap='Greys', clim_scaling=0.5, ax=ax1); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # plt.show() + plt.savefig('{}_{}.png'.format(data_type, model_type), dpi=300) + + +if __name__ == '__main__': + types = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water'] + + for model in types: + # try: + plot_2d_summary('../../../Parallel_Inference/', "skytem", model) + # except Exception as e: + # print(model) + # print(e) + # pass \ No newline at end of file diff --git a/docs/_images/sphx_glr_hdf5_thumb.png b/docs/_images/sphx_glr_hdf5_thumb.png new file mode 100644 index 00000000..19cbde5a Binary files /dev/null and b/docs/_images/sphx_glr_hdf5_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_DataArray_thumb.png b/docs/_images/sphx_glr_plot_DataArray_thumb.png new file mode 100644 index 00000000..19cbde5a Binary files /dev/null and b/docs/_images/sphx_glr_plot_DataArray_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_001.png b/docs/_images/sphx_glr_plot_StatArray_001.png new file mode 100644 index 00000000..264a4569 Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_001.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_002.png b/docs/_images/sphx_glr_plot_StatArray_002.png new file mode 100644 index 00000000..13f17625 Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_002.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_003.png b/docs/_images/sphx_glr_plot_StatArray_003.png new file mode 100644 index 00000000..0205310d Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_003.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_004.png b/docs/_images/sphx_glr_plot_StatArray_004.png new file mode 100644 index 00000000..5090a06d Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_004.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_005.png b/docs/_images/sphx_glr_plot_StatArray_005.png new file mode 100644 index 00000000..ab88115f Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_005.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_006.png b/docs/_images/sphx_glr_plot_StatArray_006.png new file mode 100644 index 00000000..ba14661c Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_006.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_007.png b/docs/_images/sphx_glr_plot_StatArray_007.png new file mode 100644 index 00000000..d64f9d8e Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_007.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_008.png 
b/docs/_images/sphx_glr_plot_StatArray_008.png new file mode 100644 index 00000000..f0556281 Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_008.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_009.png b/docs/_images/sphx_glr_plot_StatArray_009.png new file mode 100644 index 00000000..6c90f0ae Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_009.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_010.png b/docs/_images/sphx_glr_plot_StatArray_010.png new file mode 100644 index 00000000..105486c6 Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_010.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_011.png b/docs/_images/sphx_glr_plot_StatArray_011.png new file mode 100644 index 00000000..61b6f5e5 Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_011.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_012.png b/docs/_images/sphx_glr_plot_StatArray_012.png new file mode 100644 index 00000000..e6894344 Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_012.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_013.png b/docs/_images/sphx_glr_plot_StatArray_013.png new file mode 100644 index 00000000..57177ed8 Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_013.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_014.png b/docs/_images/sphx_glr_plot_StatArray_014.png new file mode 100644 index 00000000..a202f3bb Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_014.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_015.png b/docs/_images/sphx_glr_plot_StatArray_015.png new file mode 100644 index 00000000..64396bab Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_015.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_016.png b/docs/_images/sphx_glr_plot_StatArray_016.png new file mode 100644 index 00000000..40073be2 Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_016.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_017.png b/docs/_images/sphx_glr_plot_StatArray_017.png new file mode 100644 index 00000000..f9ea1eda Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_017.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_018.png b/docs/_images/sphx_glr_plot_StatArray_018.png new file mode 100644 index 00000000..35d4930a Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_018.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_019.png b/docs/_images/sphx_glr_plot_StatArray_019.png new file mode 100644 index 00000000..d220780e Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_019.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_020.png b/docs/_images/sphx_glr_plot_StatArray_020.png new file mode 100644 index 00000000..519cbf4d Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_020.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_021.png b/docs/_images/sphx_glr_plot_StatArray_021.png new file mode 100644 index 00000000..764b33ad Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_021.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_022.png b/docs/_images/sphx_glr_plot_StatArray_022.png new file mode 100644 index 00000000..992df639 Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_022.png differ diff --git a/docs/_images/sphx_glr_plot_StatArray_thumb.png b/docs/_images/sphx_glr_plot_StatArray_thumb.png new file mode 100644 
index 00000000..33c2e772 Binary files /dev/null and b/docs/_images/sphx_glr_plot_StatArray_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_distributions_001.png b/docs/_images/sphx_glr_plot_distributions_001.png new file mode 100644 index 00000000..5cf8f597 Binary files /dev/null and b/docs/_images/sphx_glr_plot_distributions_001.png differ diff --git a/docs/_images/sphx_glr_plot_distributions_thumb.png b/docs/_images/sphx_glr_plot_distributions_thumb.png new file mode 100644 index 00000000..b1dde1a1 Binary files /dev/null and b/docs/_images/sphx_glr_plot_distributions_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_frequency_dataset_001.png b/docs/_images/sphx_glr_plot_frequency_dataset_001.png new file mode 100644 index 00000000..94767912 Binary files /dev/null and b/docs/_images/sphx_glr_plot_frequency_dataset_001.png differ diff --git a/docs/_images/sphx_glr_plot_frequency_dataset_002.png b/docs/_images/sphx_glr_plot_frequency_dataset_002.png new file mode 100644 index 00000000..c9283886 Binary files /dev/null and b/docs/_images/sphx_glr_plot_frequency_dataset_002.png differ diff --git a/docs/_images/sphx_glr_plot_frequency_dataset_003.png b/docs/_images/sphx_glr_plot_frequency_dataset_003.png new file mode 100644 index 00000000..5bf62160 Binary files /dev/null and b/docs/_images/sphx_glr_plot_frequency_dataset_003.png differ diff --git a/docs/_images/sphx_glr_plot_frequency_dataset_004.png b/docs/_images/sphx_glr_plot_frequency_dataset_004.png new file mode 100644 index 00000000..0622ab09 Binary files /dev/null and b/docs/_images/sphx_glr_plot_frequency_dataset_004.png differ diff --git a/docs/_images/sphx_glr_plot_frequency_dataset_005.png b/docs/_images/sphx_glr_plot_frequency_dataset_005.png new file mode 100644 index 00000000..1d872a77 Binary files /dev/null and b/docs/_images/sphx_glr_plot_frequency_dataset_005.png differ diff --git a/docs/_images/sphx_glr_plot_frequency_dataset_006.png b/docs/_images/sphx_glr_plot_frequency_dataset_006.png new file mode 100644 index 00000000..b3686b5c Binary files /dev/null and b/docs/_images/sphx_glr_plot_frequency_dataset_006.png differ diff --git a/docs/_images/sphx_glr_plot_frequency_dataset_thumb.png b/docs/_images/sphx_glr_plot_frequency_dataset_thumb.png new file mode 100644 index 00000000..2fd24c4f Binary files /dev/null and b/docs/_images/sphx_glr_plot_frequency_dataset_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_1d_001.png b/docs/_images/sphx_glr_plot_histogram_1d_001.png new file mode 100644 index 00000000..c2e379b4 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_1d_001.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_1d_002.png b/docs/_images/sphx_glr_plot_histogram_1d_002.png new file mode 100644 index 00000000..3cceb1c9 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_1d_002.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_1d_003.png b/docs/_images/sphx_glr_plot_histogram_1d_003.png new file mode 100644 index 00000000..8c60a76d Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_1d_003.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_1d_004.png b/docs/_images/sphx_glr_plot_histogram_1d_004.png new file mode 100644 index 00000000..f09453da Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_1d_004.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_1d_005.png b/docs/_images/sphx_glr_plot_histogram_1d_005.png new file mode 100644 index 00000000..1e983ef6 Binary files /dev/null and 
b/docs/_images/sphx_glr_plot_histogram_1d_005.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_1d_006.png b/docs/_images/sphx_glr_plot_histogram_1d_006.png new file mode 100644 index 00000000..e8e995c5 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_1d_006.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_1d_thumb.png b/docs/_images/sphx_glr_plot_histogram_1d_thumb.png new file mode 100644 index 00000000..5bdbf2e3 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_1d_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_001.png b/docs/_images/sphx_glr_plot_histogram_2d_001.png new file mode 100644 index 00000000..7f2dcf54 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_001.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_002.png b/docs/_images/sphx_glr_plot_histogram_2d_002.png new file mode 100644 index 00000000..06ea9808 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_002.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_003.png b/docs/_images/sphx_glr_plot_histogram_2d_003.png new file mode 100644 index 00000000..99e34016 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_003.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_004.png b/docs/_images/sphx_glr_plot_histogram_2d_004.png new file mode 100644 index 00000000..4aae9c6b Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_004.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_005.png b/docs/_images/sphx_glr_plot_histogram_2d_005.png new file mode 100644 index 00000000..b451be24 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_005.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_006.png b/docs/_images/sphx_glr_plot_histogram_2d_006.png new file mode 100644 index 00000000..bf586e51 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_006.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_007.png b/docs/_images/sphx_glr_plot_histogram_2d_007.png new file mode 100644 index 00000000..6aed59ef Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_007.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_008.png b/docs/_images/sphx_glr_plot_histogram_2d_008.png new file mode 100644 index 00000000..4ca1e24c Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_008.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_009.png b/docs/_images/sphx_glr_plot_histogram_2d_009.png new file mode 100644 index 00000000..504b3b24 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_009.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_010.png b/docs/_images/sphx_glr_plot_histogram_2d_010.png new file mode 100644 index 00000000..327a3f0b Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_010.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_011.png b/docs/_images/sphx_glr_plot_histogram_2d_011.png new file mode 100644 index 00000000..d5ad3797 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_011.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_012.png b/docs/_images/sphx_glr_plot_histogram_2d_012.png new file mode 100644 index 00000000..fa040acb Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_012.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_013.png b/docs/_images/sphx_glr_plot_histogram_2d_013.png new 
file mode 100644 index 00000000..fa040acb Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_013.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_014.png b/docs/_images/sphx_glr_plot_histogram_2d_014.png new file mode 100644 index 00000000..fa040acb Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_014.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_2d_thumb.png b/docs/_images/sphx_glr_plot_histogram_2d_thumb.png new file mode 100644 index 00000000..500df819 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_2d_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_3d_001.png b/docs/_images/sphx_glr_plot_histogram_3d_001.png new file mode 100644 index 00000000..7d47c8d3 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_3d_001.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_3d_002.png b/docs/_images/sphx_glr_plot_histogram_3d_002.png new file mode 100644 index 00000000..6eadcc77 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_3d_002.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_3d_003.png b/docs/_images/sphx_glr_plot_histogram_3d_003.png new file mode 100644 index 00000000..6c2ac2b5 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_3d_003.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_3d_004.png b/docs/_images/sphx_glr_plot_histogram_3d_004.png new file mode 100644 index 00000000..ffd41e6d Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_3d_004.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_3d_005.png b/docs/_images/sphx_glr_plot_histogram_3d_005.png new file mode 100644 index 00000000..08a39e11 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_3d_005.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_3d_006.png b/docs/_images/sphx_glr_plot_histogram_3d_006.png new file mode 100644 index 00000000..51c0f640 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_3d_006.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_3d_007.png b/docs/_images/sphx_glr_plot_histogram_3d_007.png new file mode 100644 index 00000000..3de566f5 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_3d_007.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_3d_008.png b/docs/_images/sphx_glr_plot_histogram_3d_008.png new file mode 100644 index 00000000..bb7a6c42 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_3d_008.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_3d_009.png b/docs/_images/sphx_glr_plot_histogram_3d_009.png new file mode 100644 index 00000000..a2523f4d Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_3d_009.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_3d_010.png b/docs/_images/sphx_glr_plot_histogram_3d_010.png new file mode 100644 index 00000000..ea293941 Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_3d_010.png differ diff --git a/docs/_images/sphx_glr_plot_histogram_3d_thumb.png b/docs/_images/sphx_glr_plot_histogram_3d_thumb.png new file mode 100644 index 00000000..635424de Binary files /dev/null and b/docs/_images/sphx_glr_plot_histogram_3d_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_inference_1d_resolve_001.png b/docs/_images/sphx_glr_plot_inference_1d_resolve_001.png new file mode 100644 index 00000000..3f8d3b25 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_1d_resolve_001.png differ diff --git 
a/docs/_images/sphx_glr_plot_inference_1d_resolve_thumb.png b/docs/_images/sphx_glr_plot_inference_1d_resolve_thumb.png new file mode 100644 index 00000000..9f8fc952 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_1d_resolve_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_inference_1d_skytem_001.png b/docs/_images/sphx_glr_plot_inference_1d_skytem_001.png new file mode 100644 index 00000000..718e5605 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_1d_skytem_001.png differ diff --git a/docs/_images/sphx_glr_plot_inference_1d_skytem_thumb.png b/docs/_images/sphx_glr_plot_inference_1d_skytem_thumb.png new file mode 100644 index 00000000..146974ff Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_1d_skytem_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_inference_1d_tempest_001.png b/docs/_images/sphx_glr_plot_inference_1d_tempest_001.png new file mode 100644 index 00000000..21ee188c Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_1d_tempest_001.png differ diff --git a/docs/_images/sphx_glr_plot_inference_1d_tempest_thumb.png b/docs/_images/sphx_glr_plot_inference_1d_tempest_thumb.png new file mode 100644 index 00000000..32496c24 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_1d_tempest_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_resolve_001.png b/docs/_images/sphx_glr_plot_inference_2d_resolve_001.png new file mode 100644 index 00000000..0b238235 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_resolve_001.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_resolve_002.png b/docs/_images/sphx_glr_plot_inference_2d_resolve_002.png new file mode 100644 index 00000000..38ca1d1f Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_resolve_002.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_resolve_003.png b/docs/_images/sphx_glr_plot_inference_2d_resolve_003.png new file mode 100644 index 00000000..9809153c Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_resolve_003.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_resolve_004.png b/docs/_images/sphx_glr_plot_inference_2d_resolve_004.png new file mode 100644 index 00000000..9f7ac3ea Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_resolve_004.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_resolve_005.png b/docs/_images/sphx_glr_plot_inference_2d_resolve_005.png new file mode 100644 index 00000000..afd0e661 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_resolve_005.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_resolve_006.png b/docs/_images/sphx_glr_plot_inference_2d_resolve_006.png new file mode 100644 index 00000000..8c28573f Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_resolve_006.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_resolve_thumb.png b/docs/_images/sphx_glr_plot_inference_2d_resolve_thumb.png new file mode 100644 index 00000000..e67506a9 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_resolve_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_skytem_001.png b/docs/_images/sphx_glr_plot_inference_2d_skytem_001.png new file mode 100644 index 00000000..d4b6dc9d Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_skytem_001.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_skytem_002.png 
b/docs/_images/sphx_glr_plot_inference_2d_skytem_002.png new file mode 100644 index 00000000..1b680ca0 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_skytem_002.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_skytem_003.png b/docs/_images/sphx_glr_plot_inference_2d_skytem_003.png new file mode 100644 index 00000000..3f57f7d1 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_skytem_003.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_skytem_004.png b/docs/_images/sphx_glr_plot_inference_2d_skytem_004.png new file mode 100644 index 00000000..14de5084 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_skytem_004.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_skytem_005.png b/docs/_images/sphx_glr_plot_inference_2d_skytem_005.png new file mode 100644 index 00000000..7ec0db98 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_skytem_005.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_skytem_006.png b/docs/_images/sphx_glr_plot_inference_2d_skytem_006.png new file mode 100644 index 00000000..cecbdcb9 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_skytem_006.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_skytem_thumb.png b/docs/_images/sphx_glr_plot_inference_2d_skytem_thumb.png new file mode 100644 index 00000000..f6d9e855 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_skytem_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_tempest_001.png b/docs/_images/sphx_glr_plot_inference_2d_tempest_001.png new file mode 100644 index 00000000..3211c912 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_tempest_001.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_tempest_002.png b/docs/_images/sphx_glr_plot_inference_2d_tempest_002.png new file mode 100644 index 00000000..fabded70 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_tempest_002.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_tempest_003.png b/docs/_images/sphx_glr_plot_inference_2d_tempest_003.png new file mode 100644 index 00000000..dceacd93 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_tempest_003.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_tempest_004.png b/docs/_images/sphx_glr_plot_inference_2d_tempest_004.png new file mode 100644 index 00000000..2181648a Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_tempest_004.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_tempest_005.png b/docs/_images/sphx_glr_plot_inference_2d_tempest_005.png new file mode 100644 index 00000000..f874ba71 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_tempest_005.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_tempest_006.png b/docs/_images/sphx_glr_plot_inference_2d_tempest_006.png new file mode 100644 index 00000000..d0622e2e Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_tempest_006.png differ diff --git a/docs/_images/sphx_glr_plot_inference_2d_tempest_thumb.png b/docs/_images/sphx_glr_plot_inference_2d_tempest_thumb.png new file mode 100644 index 00000000..a7a1e335 Binary files /dev/null and b/docs/_images/sphx_glr_plot_inference_2d_tempest_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_model_1d_001.png b/docs/_images/sphx_glr_plot_model_1d_001.png new file mode 100644 index 00000000..b2c8537e Binary files /dev/null and 
b/docs/_images/sphx_glr_plot_model_1d_001.png differ diff --git a/docs/_images/sphx_glr_plot_model_1d_002.png b/docs/_images/sphx_glr_plot_model_1d_002.png new file mode 100644 index 00000000..007c77a3 Binary files /dev/null and b/docs/_images/sphx_glr_plot_model_1d_002.png differ diff --git a/docs/_images/sphx_glr_plot_model_1d_003.png b/docs/_images/sphx_glr_plot_model_1d_003.png new file mode 100644 index 00000000..65cadb89 Binary files /dev/null and b/docs/_images/sphx_glr_plot_model_1d_003.png differ diff --git a/docs/_images/sphx_glr_plot_model_1d_thumb.png b/docs/_images/sphx_glr_plot_model_1d_thumb.png new file mode 100644 index 00000000..53f89a4f Binary files /dev/null and b/docs/_images/sphx_glr_plot_model_1d_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_model_2d_001.png b/docs/_images/sphx_glr_plot_model_2d_001.png new file mode 100644 index 00000000..5621ccac Binary files /dev/null and b/docs/_images/sphx_glr_plot_model_2d_001.png differ diff --git a/docs/_images/sphx_glr_plot_model_2d_002.png b/docs/_images/sphx_glr_plot_model_2d_002.png new file mode 100644 index 00000000..d7ddbdc3 Binary files /dev/null and b/docs/_images/sphx_glr_plot_model_2d_002.png differ diff --git a/docs/_images/sphx_glr_plot_model_2d_thumb.png b/docs/_images/sphx_glr_plot_model_2d_thumb.png new file mode 100644 index 00000000..d30fea50 Binary files /dev/null and b/docs/_images/sphx_glr_plot_model_2d_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_model_3d_001.png b/docs/_images/sphx_glr_plot_model_3d_001.png new file mode 100644 index 00000000..3a96aebb Binary files /dev/null and b/docs/_images/sphx_glr_plot_model_3d_001.png differ diff --git a/docs/_images/sphx_glr_plot_model_3d_002.png b/docs/_images/sphx_glr_plot_model_3d_002.png new file mode 100644 index 00000000..940d7621 Binary files /dev/null and b/docs/_images/sphx_glr_plot_model_3d_002.png differ diff --git a/docs/_images/sphx_glr_plot_model_3d_003.png b/docs/_images/sphx_glr_plot_model_3d_003.png new file mode 100644 index 00000000..7ef8ebc2 Binary files /dev/null and b/docs/_images/sphx_glr_plot_model_3d_003.png differ diff --git a/docs/_images/sphx_glr_plot_model_3d_004.png b/docs/_images/sphx_glr_plot_model_3d_004.png new file mode 100644 index 00000000..cf34fce7 Binary files /dev/null and b/docs/_images/sphx_glr_plot_model_3d_004.png differ diff --git a/docs/_images/sphx_glr_plot_model_3d_005.png b/docs/_images/sphx_glr_plot_model_3d_005.png new file mode 100644 index 00000000..789466ef Binary files /dev/null and b/docs/_images/sphx_glr_plot_model_3d_005.png differ diff --git a/docs/_images/sphx_glr_plot_model_3d_thumb.png b/docs/_images/sphx_glr_plot_model_3d_thumb.png new file mode 100644 index 00000000..cb9e395f Binary files /dev/null and b/docs/_images/sphx_glr_plot_model_3d_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_pointcloud3d_001.png b/docs/_images/sphx_glr_plot_pointcloud3d_001.png new file mode 100644 index 00000000..7d4fcceb Binary files /dev/null and b/docs/_images/sphx_glr_plot_pointcloud3d_001.png differ diff --git a/docs/_images/sphx_glr_plot_pointcloud3d_002.png b/docs/_images/sphx_glr_plot_pointcloud3d_002.png new file mode 100644 index 00000000..1098da7d Binary files /dev/null and b/docs/_images/sphx_glr_plot_pointcloud3d_002.png differ diff --git a/docs/_images/sphx_glr_plot_pointcloud3d_003.png b/docs/_images/sphx_glr_plot_pointcloud3d_003.png new file mode 100644 index 00000000..ac352201 Binary files /dev/null and b/docs/_images/sphx_glr_plot_pointcloud3d_003.png differ diff --git 
a/docs/_images/sphx_glr_plot_pointcloud3d_004.png b/docs/_images/sphx_glr_plot_pointcloud3d_004.png new file mode 100644 index 00000000..d2a72ca5 Binary files /dev/null and b/docs/_images/sphx_glr_plot_pointcloud3d_004.png differ diff --git a/docs/_images/sphx_glr_plot_pointcloud3d_005.png b/docs/_images/sphx_glr_plot_pointcloud3d_005.png new file mode 100644 index 00000000..d40f273a Binary files /dev/null and b/docs/_images/sphx_glr_plot_pointcloud3d_005.png differ diff --git a/docs/_images/sphx_glr_plot_pointcloud3d_thumb.png b/docs/_images/sphx_glr_plot_pointcloud3d_thumb.png new file mode 100644 index 00000000..343c7661 Binary files /dev/null and b/docs/_images/sphx_glr_plot_pointcloud3d_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_001.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_001.png new file mode 100644 index 00000000..cc6bf715 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_001.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_002.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_002.png new file mode 100644 index 00000000..f1d74a4d Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_002.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_003.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_003.png new file mode 100644 index 00000000..7e736aaf Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_003.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_004.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_004.png new file mode 100644 index 00000000..a21d8d67 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_004.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_005.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_005.png new file mode 100644 index 00000000..ce3bb3db Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_005.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_006.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_006.png new file mode 100644 index 00000000..0d9a69d1 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_006.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_007.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_007.png new file mode 100644 index 00000000..5f032944 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_007.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_008.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_008.png new file mode 100644 index 00000000..ede5713e Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_008.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_009.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_009.png new file mode 100644 index 00000000..67ba5bc6 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_009.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_010.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_010.png new file mode 100644 index 00000000..b330968b Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_010.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_011.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_011.png new file mode 100644 index 00000000..db3446af Binary files /dev/null and 
b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_011.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_012.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_012.png new file mode 100644 index 00000000..8eace092 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_012.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_013.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_013.png new file mode 100644 index 00000000..db3446af Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_013.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_014.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_014.png new file mode 100644 index 00000000..aed4146e Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_014.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_015.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_015.png new file mode 100644 index 00000000..b31e3eb6 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_015.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_thumb.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_thumb.png new file mode 100644 index 00000000..fc5313a3 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_1d_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_001.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_001.png new file mode 100644 index 00000000..549f63c0 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_001.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_002.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_002.png new file mode 100644 index 00000000..5a1ed171 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_002.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_003.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_003.png new file mode 100644 index 00000000..d27239b8 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_003.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_004.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_004.png new file mode 100644 index 00000000..4142da6e Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_004.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_005.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_005.png new file mode 100644 index 00000000..bc7a4165 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_005.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_006.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_006.png new file mode 100644 index 00000000..f393ad5d Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_006.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_007.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_007.png new file mode 100644 index 00000000..ff6625b3 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_007.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_008.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_008.png new file mode 100644 index 00000000..0fc86686 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_008.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_009.png 
b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_009.png new file mode 100644 index 00000000..e806251c Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_009.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_010.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_010.png new file mode 100644 index 00000000..003b0706 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_010.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_011.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_011.png new file mode 100644 index 00000000..65f723cf Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_011.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_012.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_012.png new file mode 100644 index 00000000..4909fa87 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_012.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_013.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_013.png new file mode 100644 index 00000000..bb7221fa Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_013.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_014.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_014.png new file mode 100644 index 00000000..b556da9f Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_014.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_thumb.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_thumb.png new file mode 100644 index 00000000..ce256ff2 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_2d_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_001.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_001.png new file mode 100644 index 00000000..53fd1949 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_001.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_002.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_002.png new file mode 100644 index 00000000..abe0841a Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_002.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_003.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_003.png new file mode 100644 index 00000000..05a5e571 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_003.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_004.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_004.png new file mode 100644 index 00000000..a41105da Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_004.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_005.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_005.png new file mode 100644 index 00000000..b0271327 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_005.png differ diff --git a/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_thumb.png b/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_thumb.png new file mode 100644 index 00000000..c269e056 Binary files /dev/null and b/docs/_images/sphx_glr_plot_rectilinear_mesh_3d_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_resolve_datapoint_001.png b/docs/_images/sphx_glr_plot_resolve_datapoint_001.png new file mode 100644 index 00000000..c878e19c Binary files /dev/null and 
b/docs/_images/sphx_glr_plot_resolve_datapoint_001.png differ diff --git a/docs/_images/sphx_glr_plot_resolve_datapoint_002.png b/docs/_images/sphx_glr_plot_resolve_datapoint_002.png new file mode 100644 index 00000000..201c2327 Binary files /dev/null and b/docs/_images/sphx_glr_plot_resolve_datapoint_002.png differ diff --git a/docs/_images/sphx_glr_plot_resolve_datapoint_003.png b/docs/_images/sphx_glr_plot_resolve_datapoint_003.png new file mode 100644 index 00000000..4a0d3957 Binary files /dev/null and b/docs/_images/sphx_glr_plot_resolve_datapoint_003.png differ diff --git a/docs/_images/sphx_glr_plot_resolve_datapoint_004.png b/docs/_images/sphx_glr_plot_resolve_datapoint_004.png new file mode 100644 index 00000000..14f69eb4 Binary files /dev/null and b/docs/_images/sphx_glr_plot_resolve_datapoint_004.png differ diff --git a/docs/_images/sphx_glr_plot_resolve_datapoint_005.png b/docs/_images/sphx_glr_plot_resolve_datapoint_005.png new file mode 100644 index 00000000..2c99d5cb Binary files /dev/null and b/docs/_images/sphx_glr_plot_resolve_datapoint_005.png differ diff --git a/docs/_images/sphx_glr_plot_resolve_datapoint_006.png b/docs/_images/sphx_glr_plot_resolve_datapoint_006.png new file mode 100644 index 00000000..1de23429 Binary files /dev/null and b/docs/_images/sphx_glr_plot_resolve_datapoint_006.png differ diff --git a/docs/_images/sphx_glr_plot_resolve_datapoint_thumb.png b/docs/_images/sphx_glr_plot_resolve_datapoint_thumb.png new file mode 100644 index 00000000..61225d4b Binary files /dev/null and b/docs/_images/sphx_glr_plot_resolve_datapoint_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_skytem_datapoint_001.png b/docs/_images/sphx_glr_plot_skytem_datapoint_001.png new file mode 100644 index 00000000..24ec5d27 Binary files /dev/null and b/docs/_images/sphx_glr_plot_skytem_datapoint_001.png differ diff --git a/docs/_images/sphx_glr_plot_skytem_datapoint_002.png b/docs/_images/sphx_glr_plot_skytem_datapoint_002.png new file mode 100644 index 00000000..42fe7294 Binary files /dev/null and b/docs/_images/sphx_glr_plot_skytem_datapoint_002.png differ diff --git a/docs/_images/sphx_glr_plot_skytem_datapoint_003.png b/docs/_images/sphx_glr_plot_skytem_datapoint_003.png new file mode 100644 index 00000000..b7ae4056 Binary files /dev/null and b/docs/_images/sphx_glr_plot_skytem_datapoint_003.png differ diff --git a/docs/_images/sphx_glr_plot_skytem_datapoint_004.png b/docs/_images/sphx_glr_plot_skytem_datapoint_004.png new file mode 100644 index 00000000..dd92df7e Binary files /dev/null and b/docs/_images/sphx_glr_plot_skytem_datapoint_004.png differ diff --git a/docs/_images/sphx_glr_plot_skytem_datapoint_005.png b/docs/_images/sphx_glr_plot_skytem_datapoint_005.png new file mode 100644 index 00000000..f84abebe Binary files /dev/null and b/docs/_images/sphx_glr_plot_skytem_datapoint_005.png differ diff --git a/docs/_images/sphx_glr_plot_skytem_datapoint_006.png b/docs/_images/sphx_glr_plot_skytem_datapoint_006.png new file mode 100644 index 00000000..3c054719 Binary files /dev/null and b/docs/_images/sphx_glr_plot_skytem_datapoint_006.png differ diff --git a/docs/_images/sphx_glr_plot_skytem_datapoint_thumb.png b/docs/_images/sphx_glr_plot_skytem_datapoint_thumb.png new file mode 100644 index 00000000..ac589e5f Binary files /dev/null and b/docs/_images/sphx_glr_plot_skytem_datapoint_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_skytem_dataset_001.png b/docs/_images/sphx_glr_plot_skytem_dataset_001.png new file mode 100644 index 00000000..d058b1c2 Binary files 
/dev/null and b/docs/_images/sphx_glr_plot_skytem_dataset_001.png differ diff --git a/docs/_images/sphx_glr_plot_skytem_dataset_002.png b/docs/_images/sphx_glr_plot_skytem_dataset_002.png new file mode 100644 index 00000000..48e905f1 Binary files /dev/null and b/docs/_images/sphx_glr_plot_skytem_dataset_002.png differ diff --git a/docs/_images/sphx_glr_plot_skytem_dataset_003.png b/docs/_images/sphx_glr_plot_skytem_dataset_003.png new file mode 100644 index 00000000..5f1f288a Binary files /dev/null and b/docs/_images/sphx_glr_plot_skytem_dataset_003.png differ diff --git a/docs/_images/sphx_glr_plot_skytem_dataset_004.png b/docs/_images/sphx_glr_plot_skytem_dataset_004.png new file mode 100644 index 00000000..32017216 Binary files /dev/null and b/docs/_images/sphx_glr_plot_skytem_dataset_004.png differ diff --git a/docs/_images/sphx_glr_plot_skytem_dataset_005.png b/docs/_images/sphx_glr_plot_skytem_dataset_005.png new file mode 100644 index 00000000..131b6a13 Binary files /dev/null and b/docs/_images/sphx_glr_plot_skytem_dataset_005.png differ diff --git a/docs/_images/sphx_glr_plot_skytem_dataset_thumb.png b/docs/_images/sphx_glr_plot_skytem_dataset_thumb.png new file mode 100644 index 00000000..7d9a745a Binary files /dev/null and b/docs/_images/sphx_glr_plot_skytem_dataset_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_tempest_datapoint_001.png b/docs/_images/sphx_glr_plot_tempest_datapoint_001.png new file mode 100644 index 00000000..b771d3c8 Binary files /dev/null and b/docs/_images/sphx_glr_plot_tempest_datapoint_001.png differ diff --git a/docs/_images/sphx_glr_plot_tempest_datapoint_002.png b/docs/_images/sphx_glr_plot_tempest_datapoint_002.png new file mode 100644 index 00000000..bb19b258 Binary files /dev/null and b/docs/_images/sphx_glr_plot_tempest_datapoint_002.png differ diff --git a/docs/_images/sphx_glr_plot_tempest_datapoint_003.png b/docs/_images/sphx_glr_plot_tempest_datapoint_003.png new file mode 100644 index 00000000..58067269 Binary files /dev/null and b/docs/_images/sphx_glr_plot_tempest_datapoint_003.png differ diff --git a/docs/_images/sphx_glr_plot_tempest_datapoint_004.png b/docs/_images/sphx_glr_plot_tempest_datapoint_004.png new file mode 100644 index 00000000..d7a0ede8 Binary files /dev/null and b/docs/_images/sphx_glr_plot_tempest_datapoint_004.png differ diff --git a/docs/_images/sphx_glr_plot_tempest_datapoint_thumb.png b/docs/_images/sphx_glr_plot_tempest_datapoint_thumb.png new file mode 100644 index 00000000..e32c5d15 Binary files /dev/null and b/docs/_images/sphx_glr_plot_tempest_datapoint_thumb.png differ diff --git a/docs/_images/sphx_glr_plot_tempest_dataset_001.png b/docs/_images/sphx_glr_plot_tempest_dataset_001.png new file mode 100644 index 00000000..559c79cb Binary files /dev/null and b/docs/_images/sphx_glr_plot_tempest_dataset_001.png differ diff --git a/docs/_images/sphx_glr_plot_tempest_dataset_002.png b/docs/_images/sphx_glr_plot_tempest_dataset_002.png new file mode 100644 index 00000000..4f017a73 Binary files /dev/null and b/docs/_images/sphx_glr_plot_tempest_dataset_002.png differ diff --git a/docs/_images/sphx_glr_plot_tempest_dataset_003.png b/docs/_images/sphx_glr_plot_tempest_dataset_003.png new file mode 100644 index 00000000..599dce4f Binary files /dev/null and b/docs/_images/sphx_glr_plot_tempest_dataset_003.png differ diff --git a/docs/_images/sphx_glr_plot_tempest_dataset_004.png b/docs/_images/sphx_glr_plot_tempest_dataset_004.png new file mode 100644 index 00000000..c8066987 Binary files /dev/null and 
b/docs/_images/sphx_glr_plot_tempest_dataset_004.png differ diff --git a/docs/_images/sphx_glr_plot_tempest_dataset_005.png b/docs/_images/sphx_glr_plot_tempest_dataset_005.png new file mode 100644 index 00000000..4c071d14 Binary files /dev/null and b/docs/_images/sphx_glr_plot_tempest_dataset_005.png differ diff --git a/docs/_images/sphx_glr_plot_tempest_dataset_thumb.png b/docs/_images/sphx_glr_plot_tempest_dataset_thumb.png new file mode 100644 index 00000000..3a1abcd1 Binary files /dev/null and b/docs/_images/sphx_glr_plot_tempest_dataset_thumb.png differ diff --git a/docs/_sources/content/api/api.rst.txt b/docs/_sources/content/api/api.rst.txt new file mode 100644 index 00000000..af707cb2 --- /dev/null +++ b/docs/_sources/content/api/api.rst.txt @@ -0,0 +1,11 @@ +*** +API +*** + +The source code is split into two types of files. Those that contain a set of python functions, and those that contain classes. + +.. toctree:: + :maxdepth: 1 + + base/base + classes/classes diff --git a/docs/_sources/content/api/base/HDF.rst.txt b/docs/_sources/content/api/base/HDF.rst.txt new file mode 100644 index 00000000..622d0e4e --- /dev/null +++ b/docs/_sources/content/api/base/HDF.rst.txt @@ -0,0 +1,7 @@ +Heirarchical Data Format (HDF) +============================== + +.. automodule:: geobipy.src.base.HDF.hdfRead + :members: +.. automodule:: geobipy.src.base.HDF.hdfWrite + :members: diff --git a/docs/_sources/content/api/base/MPI.rst.txt b/docs/_sources/content/api/base/MPI.rst.txt new file mode 100644 index 00000000..7e1f75dd --- /dev/null +++ b/docs/_sources/content/api/base/MPI.rst.txt @@ -0,0 +1,5 @@ +MPI wrapper functions +===================== + +.. automodule:: geobipy.src.base.MPI + :members: diff --git a/docs/_sources/content/api/base/base.rst.txt b/docs/_sources/content/api/base/base.rst.txt new file mode 100644 index 00000000..1feb68bc --- /dev/null +++ b/docs/_sources/content/api/base/base.rst.txt @@ -0,0 +1,14 @@ +******************************** +Core routines needed for GeoBIPy +******************************** + +.. toctree:: + :maxdepth: 1 + + utilities + plotting + fileIO + interpolation + HDF + MPI + diff --git a/docs/_sources/content/api/base/fileIO.rst.txt b/docs/_sources/content/api/base/fileIO.rst.txt new file mode 100644 index 00000000..5cbf82e2 --- /dev/null +++ b/docs/_sources/content/api/base/fileIO.rst.txt @@ -0,0 +1,5 @@ +fileIO +====== + +.. automodule:: geobipy.src.base.fileIO + :members: diff --git a/docs/_sources/content/api/base/interpolation.rst.txt b/docs/_sources/content/api/base/interpolation.rst.txt new file mode 100644 index 00000000..be572b21 --- /dev/null +++ b/docs/_sources/content/api/base/interpolation.rst.txt @@ -0,0 +1,5 @@ +Interpolation +============= + +.. automodule:: geobipy.src.base.interpolation + :members: diff --git a/docs/_sources/content/api/base/plotting.rst.txt b/docs/_sources/content/api/base/plotting.rst.txt new file mode 100644 index 00000000..817f9cbc --- /dev/null +++ b/docs/_sources/content/api/base/plotting.rst.txt @@ -0,0 +1,5 @@ +plotting +======== + +.. automodule:: geobipy.src.base.plotting + :members: diff --git a/docs/_sources/content/api/base/utilities.rst.txt b/docs/_sources/content/api/base/utilities.rst.txt new file mode 100644 index 00000000..98345120 --- /dev/null +++ b/docs/_sources/content/api/base/utilities.rst.txt @@ -0,0 +1,5 @@ +utilities +========= + +.. 
automodule:: geobipy.src.base.utilities + :members: diff --git a/docs/_sources/content/api/classes/classes.rst.txt b/docs/_sources/content/api/classes/classes.rst.txt new file mode 100644 index 00000000..3c29c371 --- /dev/null +++ b/docs/_sources/content/api/classes/classes.rst.txt @@ -0,0 +1,15 @@ +*********************** +Classes used in GeoBIPy +*********************** + + +.. toctree:: + :maxdepth: 1 + + core/core + data/data + mesh/mesh + model/model + pointcloud/pointcloud + statistics/statistics + system/system diff --git a/docs/_sources/content/api/classes/core/StatArray.rst.txt b/docs/_sources/content/api/classes/core/StatArray.rst.txt new file mode 100644 index 00000000..8ef3b391 --- /dev/null +++ b/docs/_sources/content/api/classes/core/StatArray.rst.txt @@ -0,0 +1,10 @@ +StatArray +--------- + +.. inheritance-diagram:: geobipy.src.classes.core.StatArray.StatArray + :parts: 1 + +.. automodule:: geobipy.src.classes.core.StatArray + :members: + :undoc-members: + diff --git a/docs/_sources/content/api/classes/core/core.rst.txt b/docs/_sources/content/api/classes/core/core.rst.txt new file mode 100644 index 00000000..18fffe5a --- /dev/null +++ b/docs/_sources/content/api/classes/core/core.rst.txt @@ -0,0 +1,9 @@ +Core classes +============ + +.. toctree:: + :maxdepth: 1 + + myObject + StatArray + diff --git a/docs/_sources/content/api/classes/core/myObject.rst.txt b/docs/_sources/content/api/classes/core/myObject.rst.txt new file mode 100644 index 00000000..dce4f231 --- /dev/null +++ b/docs/_sources/content/api/classes/core/myObject.rst.txt @@ -0,0 +1,5 @@ +Core object class +----------------- + +.. automodule:: geobipy.src.classes.core.myObject + :members: diff --git a/docs/_sources/content/api/classes/data/data.rst.txt b/docs/_sources/content/api/classes/data/data.rst.txt new file mode 100644 index 00000000..994b8e24 --- /dev/null +++ b/docs/_sources/content/api/classes/data/data.rst.txt @@ -0,0 +1,8 @@ +Data classes +============ + +.. toctree:: + :maxdepth: 1 + + dataset/dataset + datapoint/datapointrst diff --git a/docs/_sources/content/api/classes/data/datapoint/EmDataPoint.rst.txt b/docs/_sources/content/api/classes/data/datapoint/EmDataPoint.rst.txt new file mode 100644 index 00000000..5c02be96 --- /dev/null +++ b/docs/_sources/content/api/classes/data/datapoint/EmDataPoint.rst.txt @@ -0,0 +1,8 @@ +EmDataPoint +^^^^^^^^^^^ + +.. inheritance-diagram:: geobipy.src.classes.data.datapoint.EmDataPoint.EmDataPoint + :parts: 1 + +.. automodule:: geobipy.src.classes.data.datapoint.EmDataPoint + :members: diff --git a/docs/_sources/content/api/classes/data/datapoint/FdemDataPoint.rst.txt b/docs/_sources/content/api/classes/data/datapoint/FdemDataPoint.rst.txt new file mode 100644 index 00000000..b521cdd2 --- /dev/null +++ b/docs/_sources/content/api/classes/data/datapoint/FdemDataPoint.rst.txt @@ -0,0 +1,8 @@ +FdemDataPoint +^^^^^^^^^^^^^ + +.. inheritance-diagram:: geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint + :parts: 1 + +.. automodule:: geobipy.src.classes.data.datapoint.FdemDataPoint + :members: diff --git a/docs/_sources/content/api/classes/data/datapoint/TdemDataPoint.rst.txt b/docs/_sources/content/api/classes/data/datapoint/TdemDataPoint.rst.txt new file mode 100644 index 00000000..45ca6a07 --- /dev/null +++ b/docs/_sources/content/api/classes/data/datapoint/TdemDataPoint.rst.txt @@ -0,0 +1,8 @@ +TdemDataPoint +^^^^^^^^^^^^^ + +.. inheritance-diagram:: geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint + :parts: 1 + +.. 
automodule:: geobipy.src.classes.data.datapoint.TdemDataPoint + :members: diff --git a/docs/_sources/content/api/classes/data/datapoint/Tempest_dataPoint.rst.txt b/docs/_sources/content/api/classes/data/datapoint/Tempest_dataPoint.rst.txt new file mode 100644 index 00000000..e8a6288e --- /dev/null +++ b/docs/_sources/content/api/classes/data/datapoint/Tempest_dataPoint.rst.txt @@ -0,0 +1,8 @@ +Tempest_datapoint +^^^^^^^^^^^^^^^^^ + +.. inheritance-diagram:: geobipy.src.classes.data.datapoint.Tempest_datapoint + :parts: 1 + +.. automodule:: geobipy.src.classes.data.datapoint.Tempest_datapoint + :members: diff --git a/docs/_sources/content/api/classes/data/datapoint/datapoint.rst.txt b/docs/_sources/content/api/classes/data/datapoint/datapoint.rst.txt new file mode 100644 index 00000000..e1e4f6f1 --- /dev/null +++ b/docs/_sources/content/api/classes/data/datapoint/datapoint.rst.txt @@ -0,0 +1,8 @@ +DataPoint +^^^^^^^^^ + +.. inheritance-diagram:: geobipy.src.classes.data.datapoint.DataPoint.DataPoint + :parts: 1 + +.. automodule:: geobipy.src.classes.data.datapoint.DataPoint + :members: diff --git a/docs/_sources/content/api/classes/data/datapoint/datapointrst.rst.txt b/docs/_sources/content/api/classes/data/datapoint/datapointrst.rst.txt new file mode 100644 index 00000000..b5a34a1d --- /dev/null +++ b/docs/_sources/content/api/classes/data/datapoint/datapointrst.rst.txt @@ -0,0 +1,10 @@ +Datapoint classes +----------------- + +.. toctree:: + :maxdepth: 1 + + datapoint + EmDataPoint + FdemDataPoint + TdemDataPoint diff --git a/docs/_sources/content/api/classes/data/dataset/Data.rst.txt b/docs/_sources/content/api/classes/data/dataset/Data.rst.txt new file mode 100644 index 00000000..192a83cd --- /dev/null +++ b/docs/_sources/content/api/classes/data/dataset/Data.rst.txt @@ -0,0 +1,8 @@ +Data +^^^^ + +.. inheritance-diagram:: geobipy.src.classes.data.dataset.Data.Data + :parts: 1 + +.. automodule:: geobipy.src.classes.data.dataset.Data + :members: diff --git a/docs/_sources/content/api/classes/data/dataset/FdemData.rst.txt b/docs/_sources/content/api/classes/data/dataset/FdemData.rst.txt new file mode 100644 index 00000000..7256f21f --- /dev/null +++ b/docs/_sources/content/api/classes/data/dataset/FdemData.rst.txt @@ -0,0 +1,7 @@ +FdemData +^^^^^^^^ +.. inheritance-diagram:: geobipy.src.classes.data.dataset.FdemData + :parts: 1 + +.. automodule:: geobipy.src.classes.data.dataset.FdemData + :members: diff --git a/docs/_sources/content/api/classes/data/dataset/TdemData.rst.txt b/docs/_sources/content/api/classes/data/dataset/TdemData.rst.txt new file mode 100644 index 00000000..5786ddc2 --- /dev/null +++ b/docs/_sources/content/api/classes/data/dataset/TdemData.rst.txt @@ -0,0 +1,8 @@ +TdemData +^^^^^^^^ + +.. inheritance-diagram:: geobipy.src.classes.data.dataset.TdemData + :parts: 1 + +.. automodule:: geobipy.src.classes.data.dataset.TdemData + :members: diff --git a/docs/_sources/content/api/classes/data/dataset/TempestData.rst.txt b/docs/_sources/content/api/classes/data/dataset/TempestData.rst.txt new file mode 100644 index 00000000..8a08b82f --- /dev/null +++ b/docs/_sources/content/api/classes/data/dataset/TempestData.rst.txt @@ -0,0 +1,8 @@ +TempestData +^^^^^^^^^^^ + +.. inheritance-diagram:: geobipy.src.classes.data.dataset.TempestData + :parts: 1 + +.. 
automodule:: geobipy.src.classes.data.dataset.TempestData + :members: diff --git a/docs/_sources/content/api/classes/data/dataset/dataset.rst.txt b/docs/_sources/content/api/classes/data/dataset/dataset.rst.txt new file mode 100644 index 00000000..c829cb99 --- /dev/null +++ b/docs/_sources/content/api/classes/data/dataset/dataset.rst.txt @@ -0,0 +1,10 @@ +Dataset classes +--------------- + +.. toctree:: + :maxdepth: 1 + + Data + FdemData + TdemData + TempestData diff --git a/docs/_sources/content/api/classes/mesh/RectilinearMesh1D.rst.txt b/docs/_sources/content/api/classes/mesh/RectilinearMesh1D.rst.txt new file mode 100644 index 00000000..9104f467 --- /dev/null +++ b/docs/_sources/content/api/classes/mesh/RectilinearMesh1D.rst.txt @@ -0,0 +1,8 @@ +RectilinearMesh1D +================= + +.. inheritance-diagram:: geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D + :parts: 1 + +.. automodule:: geobipy.src.classes.mesh.RectilinearMesh1D + :members: diff --git a/docs/_sources/content/api/classes/mesh/RectilinearMesh2D.rst.txt b/docs/_sources/content/api/classes/mesh/RectilinearMesh2D.rst.txt new file mode 100644 index 00000000..a2f2fa64 --- /dev/null +++ b/docs/_sources/content/api/classes/mesh/RectilinearMesh2D.rst.txt @@ -0,0 +1,8 @@ +RectilinearMesh2D +================= + +.. inheritance-diagram:: geobipy.src.classes.mesh.RectilinearMesh2D + :parts: 1 + +.. automodule:: geobipy.src.classes.mesh.RectilinearMesh2D + :members: diff --git a/docs/_sources/content/api/classes/mesh/RectilinearMesh2D_stitched.rst.txt b/docs/_sources/content/api/classes/mesh/RectilinearMesh2D_stitched.rst.txt new file mode 100644 index 00000000..af033881 --- /dev/null +++ b/docs/_sources/content/api/classes/mesh/RectilinearMesh2D_stitched.rst.txt @@ -0,0 +1,8 @@ +RectilinearMesh2D_stitched +========================== + +.. inheritance-diagram:: geobipy.src.classes.mesh.RectilinearMesh2D_stitched + :parts: 1 + +.. automodule:: geobipy.src.classes.mesh.RectilinearMesh2D_stitched + :members: diff --git a/docs/_sources/content/api/classes/mesh/RectilinearMesh3D.rst.txt b/docs/_sources/content/api/classes/mesh/RectilinearMesh3D.rst.txt new file mode 100644 index 00000000..940cca60 --- /dev/null +++ b/docs/_sources/content/api/classes/mesh/RectilinearMesh3D.rst.txt @@ -0,0 +1,8 @@ +RectilinearMesh3D +================= + +.. inheritance-diagram:: geobipy.src.classes.mesh.RectilinearMesh3D + :parts: 1 + +.. automodule:: geobipy.src.classes.mesh.RectilinearMesh3D + :members: diff --git a/docs/_sources/content/api/classes/mesh/mesh.rst.txt b/docs/_sources/content/api/classes/mesh/mesh.rst.txt new file mode 100644 index 00000000..95210c6f --- /dev/null +++ b/docs/_sources/content/api/classes/mesh/mesh.rst.txt @@ -0,0 +1,10 @@ +Mesh classes +================================= + +.. toctree:: + :maxdepth: 1 + + RectilinearMesh1D + RectilinearMesh2D + RectilinearMesh2D_stitched + RectilinearMesh3D diff --git a/docs/_sources/content/api/classes/model/Model_.rst.txt b/docs/_sources/content/api/classes/model/Model_.rst.txt new file mode 100644 index 00000000..0f0f9341 --- /dev/null +++ b/docs/_sources/content/api/classes/model/Model_.rst.txt @@ -0,0 +1,8 @@ +Model +========================= + +.. inheritance-diagram:: geobipy.src.classes.model.Model + :parts: 1 + +.. 
automodule:: geobipy.src.classes.model.Model + :members: diff --git a/docs/_sources/content/api/classes/model/model.rst.txt b/docs/_sources/content/api/classes/model/model.rst.txt new file mode 100644 index 00000000..87acf8e0 --- /dev/null +++ b/docs/_sources/content/api/classes/model/model.rst.txt @@ -0,0 +1,7 @@ +Model classes +================================= + +.. toctree:: + :maxdepth: 1 + + Model_ diff --git a/docs/_sources/content/api/classes/pointcloud/Point.rst.txt b/docs/_sources/content/api/classes/pointcloud/Point.rst.txt new file mode 100644 index 00000000..4b67dff8 --- /dev/null +++ b/docs/_sources/content/api/classes/pointcloud/Point.rst.txt @@ -0,0 +1,7 @@ +Point +============ +.. inheritance-diagram:: geobipy.src.classes.pointcloud.Point + :parts: 1 + +.. automodule:: geobipy.src.classes.pointcloud.Point + :members: diff --git a/docs/_sources/content/api/classes/pointcloud/pointcloud.rst.txt b/docs/_sources/content/api/classes/pointcloud/pointcloud.rst.txt new file mode 100644 index 00000000..b7c05794 --- /dev/null +++ b/docs/_sources/content/api/classes/pointcloud/pointcloud.rst.txt @@ -0,0 +1,7 @@ +Pointcloud classes +================================= + +.. toctree:: + :maxdepth: 1 + + Point \ No newline at end of file diff --git a/docs/_sources/content/api/classes/statistics/Distribution.rst.txt b/docs/_sources/content/api/classes/statistics/Distribution.rst.txt new file mode 100644 index 00000000..4dc7d624 --- /dev/null +++ b/docs/_sources/content/api/classes/statistics/Distribution.rst.txt @@ -0,0 +1,8 @@ +Distribution Wrapper +================================= + +.. .. inheritance-diagram:: geobipy.src.classes.statistics.Distribution +.. :parts: 1 + +.. automodule:: geobipy.src.classes.statistics.Distribution + :members: diff --git a/docs/_sources/content/api/classes/statistics/GammaDistribution.rst.txt b/docs/_sources/content/api/classes/statistics/GammaDistribution.rst.txt new file mode 100644 index 00000000..432c3e32 --- /dev/null +++ b/docs/_sources/content/api/classes/statistics/GammaDistribution.rst.txt @@ -0,0 +1,9 @@ +Gamma Distribution +================================= + +.. inheritance-diagram:: geobipy.src.classes.statistics.GammaDistribution + :parts: 1 + + +.. automodule:: geobipy.src.classes.statistics.GammaDistribution + :members: diff --git a/docs/_sources/content/api/classes/statistics/Histogram.rst.txt b/docs/_sources/content/api/classes/statistics/Histogram.rst.txt new file mode 100644 index 00000000..3b894888 --- /dev/null +++ b/docs/_sources/content/api/classes/statistics/Histogram.rst.txt @@ -0,0 +1,9 @@ +Histogram +========= + +.. inheritance-diagram:: geobipy.src.classes.statistics.Histogram + :parts: 1 + + +.. automodule:: geobipy.src.classes.statistics.Histogram + :members: diff --git a/docs/_sources/content/api/classes/statistics/MvNormalDistribution.rst.txt b/docs/_sources/content/api/classes/statistics/MvNormalDistribution.rst.txt new file mode 100644 index 00000000..6210866f --- /dev/null +++ b/docs/_sources/content/api/classes/statistics/MvNormalDistribution.rst.txt @@ -0,0 +1,8 @@ +MvNormal +======== +.. inheritance-diagram:: geobipy.src.classes.statistics.MvNormalDistribution + :parts: 1 + + +.. 
automodule:: geobipy.src.classes.statistics.MvNormalDistribution + :members: diff --git a/docs/_sources/content/api/classes/statistics/NormalDistribution.rst.txt b/docs/_sources/content/api/classes/statistics/NormalDistribution.rst.txt new file mode 100644 index 00000000..ef806bb9 --- /dev/null +++ b/docs/_sources/content/api/classes/statistics/NormalDistribution.rst.txt @@ -0,0 +1,8 @@ +Normal distribution +================================= + +.. inheritance-diagram:: geobipy.src.classes.statistics.NormalDistribution + :parts: 1 + +.. automodule:: geobipy.src.classes.statistics.NormalDistribution + :members: diff --git a/docs/_sources/content/api/classes/statistics/OrderStatistics.rst.txt b/docs/_sources/content/api/classes/statistics/OrderStatistics.rst.txt new file mode 100644 index 00000000..4f0293ee --- /dev/null +++ b/docs/_sources/content/api/classes/statistics/OrderStatistics.rst.txt @@ -0,0 +1,9 @@ +Order Statistics +================================= + +.. inheritance-diagram:: geobipy.src.classes.statistics.OrderStatistics + :parts: 1 + + +.. automodule:: geobipy.src.classes.statistics.OrderStatistics + :members: diff --git a/docs/_sources/content/api/classes/statistics/UniformDistribution.rst.txt b/docs/_sources/content/api/classes/statistics/UniformDistribution.rst.txt new file mode 100644 index 00000000..95e11d53 --- /dev/null +++ b/docs/_sources/content/api/classes/statistics/UniformDistribution.rst.txt @@ -0,0 +1,8 @@ +Uniform distribution +================================= + +.. inheritance-diagram:: geobipy.src.classes.statistics.UniformDistribution + :parts: 1 + +.. automodule:: geobipy.src.classes.statistics.UniformDistribution + :members: diff --git a/docs/_sources/content/api/classes/statistics/baseDistribution.rst.txt b/docs/_sources/content/api/classes/statistics/baseDistribution.rst.txt new file mode 100644 index 00000000..1977a063 --- /dev/null +++ b/docs/_sources/content/api/classes/statistics/baseDistribution.rst.txt @@ -0,0 +1,8 @@ +baseDistribution +================================= + +.. inheritance-diagram:: geobipy.src.classes.statistics.baseDistribution + :parts: 1 + +.. automodule:: geobipy.src.classes.statistics.baseDistribution + :members: diff --git a/docs/_sources/content/api/classes/statistics/statistics.rst.txt b/docs/_sources/content/api/classes/statistics/statistics.rst.txt new file mode 100644 index 00000000..0966ed51 --- /dev/null +++ b/docs/_sources/content/api/classes/statistics/statistics.rst.txt @@ -0,0 +1,14 @@ +Statistics classes +================== + +.. toctree:: + :maxdepth: 1 + + Distribution + Histogram + baseDistribution + NormalDistribution + MvNormalDistribution + UniformDistribution + GammaDistribution + OrderStatistics diff --git a/docs/_sources/content/api/classes/system/CircularLoop.rst.txt b/docs/_sources/content/api/classes/system/CircularLoop.rst.txt new file mode 100644 index 00000000..3a1ef55c --- /dev/null +++ b/docs/_sources/content/api/classes/system/CircularLoop.rst.txt @@ -0,0 +1,8 @@ +Circular Loop +============= + +.. inheritance-diagram:: geobipy.src.classes.system.CircularLoop + :parts: 1 + +.. automodule:: geobipy.src.classes.system.CircularLoop + :members: diff --git a/docs/_sources/content/api/classes/system/EmLoop.rst.txt b/docs/_sources/content/api/classes/system/EmLoop.rst.txt new file mode 100644 index 00000000..b6d69075 --- /dev/null +++ b/docs/_sources/content/api/classes/system/EmLoop.rst.txt @@ -0,0 +1,8 @@ +EmLoop +====== + +.. inheritance-diagram:: geobipy.src.classes.system.EmLoop + :parts: 1 + +.. 
automodule:: geobipy.src.classes.system.EmLoop + :members: diff --git a/docs/_sources/content/api/classes/system/FdemSystem.rst.txt b/docs/_sources/content/api/classes/system/FdemSystem.rst.txt new file mode 100644 index 00000000..8de0ceeb --- /dev/null +++ b/docs/_sources/content/api/classes/system/FdemSystem.rst.txt @@ -0,0 +1,8 @@ +Frequency domain system +======================= + +.. inheritance-diagram:: geobipy.src.classes.system.FdemSystem + :parts: 1 + +.. automodule:: geobipy.src.classes.system.FdemSystem + :members: diff --git a/docs/_sources/content/api/classes/system/TdemSystem.rst.txt b/docs/_sources/content/api/classes/system/TdemSystem.rst.txt new file mode 100644 index 00000000..a2b39a53 --- /dev/null +++ b/docs/_sources/content/api/classes/system/TdemSystem.rst.txt @@ -0,0 +1,8 @@ +Time domain system +======================= + +.. inheritance-diagram:: geobipy.src.classes.system.TdemSystem + :parts: 1 + +.. automodule:: geobipy.src.classes.system.TdemSystem + :members: diff --git a/docs/_sources/content/api/classes/system/system.rst.txt b/docs/_sources/content/api/classes/system/system.rst.txt new file mode 100644 index 00000000..3921db3d --- /dev/null +++ b/docs/_sources/content/api/classes/system/system.rst.txt @@ -0,0 +1,10 @@ +System classes +============== + +.. toctree:: + :maxdepth: 1 + + EmLoop + CircularLoop + FdemSystem + TdemSystem diff --git a/docs/_sources/content/getting_started/getting_started.rst.txt b/docs/_sources/content/getting_started/getting_started.rst.txt new file mode 100644 index 00000000..edd40256 --- /dev/null +++ b/docs/_sources/content/getting_started/getting_started.rst.txt @@ -0,0 +1,8 @@ +*************** +Getting Started +*************** + +.. toctree:: + :maxdepth: 1 + + installation \ No newline at end of file diff --git a/docs/_sources/content/getting_started/installation.rst.txt b/docs/_sources/content/getting_started/installation.rst.txt new file mode 100644 index 00000000..6a580968 --- /dev/null +++ b/docs/_sources/content/getting_started/installation.rst.txt @@ -0,0 +1,194 @@ +****************** +Installing GeoBIPy +****************** + +First things first, install a Python 3.5+ distribution. This is the minimum version that we have tested with. +You will also need to install Numpy and a Fortran compiler. + +This package has a few requirements depending on what you wish to do with it. + +If you require a serial version of the code, see `Installing a serial version of GeoBIPy`_. + +If you require an parallel implementation, you will need to install an MPI library, and Python's mpi4py module. See `Installing MPI and mpi4py`_. + +If you require parallel file reading and writing, you will also need to install an MPI enabled HDF5 library, as well as Python's h5py wrapper to that library. It is important to read the notes below on installing h5py on top of a parallel HDF library. The traditional "pip install h5py" will not work correctly. See `Installing parallel HDF5 and h5py`_ to do this correctly. + +If you need to install the parallel IO version of the code, we would recommend that you start with a clean install of Python. This makes it easier to determine whether you have installed and linked the correct version of the parallel HDF5 library. + + +There are two versions when installing GeoBIPy, a serial version, and a parallel version. Since GeoBIPy uses a Fortran backend for forward modelling frequency domain data, you will need to have a Fortran compiler installed. 
Make sure that the compiler can handle derived data types since I make use of object oriented programming in Fortran. + +Installing a serial version of GeoBIPy +====================================== +This is the easiest installation and provides access to a serial implementation of the code. + +Simply clone the git repository, navigate to the package folder that contains the setup.py file, and type "pip install ." + +You should then be able to import modules from geobipy. For this type of installation mpi will not need to be installed, and the serial version of h5py will suffice i.e. the standard "pip install h5py" is fine. h5py will automatically be installed during the install of GeoBIPy since it is a dependency. + +**Side note:** Let's say you ran a production run on a parallel machine with MPI and parallel HDF capabilities. You generated all the results, copied them back to your local machine, and wish to make plots and images. You will only need to install the serial version of the code on your local machine to do this. + +Installing a parallel version of GeoBIPy +======================================== +Installing the parallel version of the code is a little trickier due to the dependencies necessary between the OpenMPI and/or HDF libraries, and how Python's mpi4py and h5py wrap around those. + + +Installing MPI and mpi4py +------------------------- +To run this code in parallel you will need both an MPI library and the python wrapper, mpi4py. You must install MPI first before mpi4py. + +MPI +^^^ + +If you are installing GeoBIPy on a parallel machine, I would think that you have access to prebuilt MPI libraries. +If you are on a local laptop, you will need to install one. + +mpi4py +^^^^^^ + +At this point, if you have an mpi4py module already installed, please remove it (you can check with "pip list"). +If you started with a clean installation you should not have to worry about this. +To test whether a new install of mpi4py will see the mpi library you have, just type "which mpicc". +The path that you see should point to the implementation that you want mpi4py to link to. +Make sure you are about to install mpi4py to the correct python installation. +If you type 'which python' it should return the path to the correct python distribution. +If you are using environments, make sure you have activated the correct one. + +Next, use "env MPICC= python -m pip install mpi4py". This last option is very important, without it, pip might install its own MPI library called MPICH2. +I would try to avoid this because if you need to install the HDF5 library you will need know which directories to link to (see `Installing parallel HDF5 and h5py`_). + +At the end of the day, h5py needs to communicate with both the correct HDF5 library and mpi4py, and both of those need to communicate with the same MPI library. + +Installing parallel HDF5 and h5py +--------------------------------- +If a parallel HDF5 library is not available, you will need to install one. First make sure you follow `Installing MPI and mpi4py`_ so that an MPI library is available to you. You must install a HDF5 library first before h5py. + +HDF5 +^^^^ +When you install HDF5, make sure that the correct MPI library can be seen by typing "which mpicc". When you configure the HDF5 library, be sure to use the --enable-parallel option. + +h5py +^^^^ +Once the HDF5 library is installed you will need to install a parallel enabled `h5py package`_ + +.. 
_`h5py package`: https://github.com/h5py/h5py + +Make sure you are about to install h5py to the correct python installation. If you type 'which python' it should return the path to the correct python installation. + +First check the following + +- HDF5_DIR = Get the path to the HDF5 installation. +- Check that 'which mpicc' returns the correct version of an mpi enabled compiler. This needs to point to the same MPI library that mpi4py was installed on top of. + +- Do the following, replacing items in < > with your mpicc compiler and you HDF5 install directory. + +This will install h5py and compile the source. + +.. code:: bash + + CC= HDF5_MPI="ON" HDF5_DIR= pip install --no-binary=h5py h5py + +.. _Installing_time_domain_forward_modeller: + +Installing the time domain forward modeller +=========================================== +Ross Brodie at Geoscience Australia has written a great forward modeller, gatdaem1D, in C++ with a python interface. +You can obtain that code here at the `GA repository`_ + +.. _`GA repository`: https://github.com/GeoscienceAustralia/ga-aem + +Go ahead and "git clone" that repository. + +These instructions only describe how to install Ross' forward modeller, but it is part of a larger code base for inversion. +If you wish to install his entire package, please follow his instructions. + +Prerequisites +------------- + +To compile his forward modeller, you will need a c++ compiler, and `FFTW`_ + +.. _`FFTW`: http://www.fftw.org/ + +On a Mac, installing these two items is easy if you use a package manager such as `homebrew`_ + +.. _`homebrew`: https://brew.sh/ + +If you use brew, simply do the following + +.. code:: bash + + brew install gcc + brew install fftw + +If you do not have brew, or use a package manager, you can install fftw from source instead. + +Download fftw-3.3.7.tar.gz from the `FFTW downloads`_ . + +.. _`FFTW downloads`: http://www.fftw.org/download.html + +Untar the folder and install fftw using the following. + +.. code:: bash + + tar -zxvf fftw-3.3.7.tar.gz + cd fftw-3.3.7 + mkdir build + cd build + ../configure --prefix=path-to-install-to/fftw-3.3.7 --enable-threads + make + make install + +where, path-to-install-to is the location where you want fftw to be installed. + + +Compile the gatdaem1d shared library +------------------------------------ +Next, within the gatdaem1d folder, navigate to the makefiles folder and modify the top part of the file "gatdaem1d_python.make" to the following + +.. code:: bash + + SHELL = /bin/sh + .SUFFIXES: + .SUFFIXES: .cpp .o + cxx = g++ + cxxflags = -std=c++11 -O3 -Wall -fPIC + FFTW_DIR = path-to-fftw + + ldflags += -shared + bindir = ../python/gatdaem1d + + srcdir = ../src + objdir = ./obj + includes = -I$(srcdir) -I$(FFTW_DIR)/include + libs = -L$(FFTW_DIR)/lib -lfftw3 + library = $(bindir)/gatdaem1d.so + +You can find out where brew installed fftw by typing + +.. code:: bash + + brew info fftw + +Which may return something like "/usr/local/Cellar/fftw/3.3.5" + +In this case, path-to-fftw is "/usr/local/Cellar/fftw/3.3.5" + +If you installed fftw from source, then path-to-fftw is that install path. + +Next, type the following to compile the gatdaem1d c++ code. + +.. code:: bash + + make -f gatdaem1d_python.make + +Installing the Python Bindings +------------------------------ + +Finally, to install the python wrapper to gatdaem1d, navigate to the python folder of the gatdaem1d repository. +Type, + +.. code:: bash + + pip install . 
+ +You should now have access to the time domain forward modeller within geobipy. diff --git a/docs/_sources/examples/Data/plot_frequency_dataset.rst.txt b/docs/_sources/examples/Data/plot_frequency_dataset.rst.txt new file mode 100644 index 00000000..68f9f6d7 --- /dev/null +++ b/docs/_sources/examples/Data/plot_frequency_dataset.rst.txt @@ -0,0 +1,758 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Data/plot_frequency_dataset.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Data_plot_frequency_dataset.py: + + +Frequency domain dataset +------------------------ + +.. GENERATED FROM PYTHON SOURCE LINES 6-14 + +.. code-block:: Python + + import matplotlib.pyplot as plt + from geobipy import CircularLoop + from geobipy import FdemSystem + from geobipy import FdemData + import h5py + import numpy as np + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 15-17 + +Defining data using a frequency domain system ++++++++++++++++++++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 19-21 + +We can start by defining the frequencies, transmitter loops, and receiver loops +For each frequency we need to define a pair of loops + +.. GENERATED FROM PYTHON SOURCE LINES 21-23 + +.. code-block:: Python + + frequencies = np.asarray([395.0, 822.0, 3263.0, 8199.0, 38760.0, 128755.0]) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 24-26 + +Transmitter positions are defined relative to the observation locations in the data +This is usually a constant offset for all data points. + +.. GENERATED FROM PYTHON SOURCE LINES 26-36 + +.. code-block:: Python + + transmitters = CircularLoop(orientation=['z','z','x','z','z','z'], + moment=np.r_[1, 1, -1, 1, 1, 1], + x = np.r_[0,0,0,0,0,0], + y = np.r_[0,0,0,0,0,0], + z = np.r_[0,0,0,0,0,0], + pitch = np.r_[0,0,0,0,0,0], + roll = np.r_[0,0,0,0,0,0], + yaw = np.r_[0,0,0,0,0,0], + radius = np.r_[1,1,1,1,1,1]) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 37-38 + +Receiver positions are defined relative to the transmitter + +.. GENERATED FROM PYTHON SOURCE LINES 38-58 + +.. code-block:: Python + + receivers = CircularLoop(orientation=['z','z','x','z','z','z'], + moment=np.r_[1, 1, -1, 1, 1, 1], + x = np.r_[7.91, 7.91, 9.03, 7.91, 7.91, 7.89], + y = np.r_[0,0,0,0,0,0], + z = np.r_[0,0,0,0,0,0], + pitch = np.r_[0,0,0,0,0,0], + roll = np.r_[0,0,0,0,0,0], + yaw = np.r_[0,0,0,0,0,0], + radius = np.r_[1,1,1,1,1,1]) + + # Instantiate the system for the data + system = FdemSystem(frequencies=frequencies, transmitter=transmitters, receiver=receivers) + + # Create some data with random co-ordinates + x = np.random.randn(100) + y = np.random.randn(100) + z = np.random.randn(100) + + data = FdemData(x=x, y=-y, z=z, system = system) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 59-62 + +Reading in the Data ++++++++++++++++++++ +Of course measured field data is stored on disk. So instead we can read data from file. + +.. GENERATED FROM PYTHON SOURCE LINES 64-70 + +.. code-block:: Python + + dataFolder = "..//..//supplementary//data//" + # The data file name + dataFile = dataFolder + 'Resolve2.txt' + # The EM system file name + systemFile = dataFolder + 'FdemSystem2.stm' + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 71-72 + +Read in a data set from file. 
+ +.. GENERATED FROM PYTHON SOURCE LINES 72-74 + +.. code-block:: Python + + FD1 = FdemData.read_csv(dataFile, systemFile) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 75-76 + +Take a look at the channel names + +.. GENERATED FROM PYTHON SOURCE LINES 76-104 + +.. code-block:: Python + + for name in FD1.channel_names: + print(name) + + # #%% + # # Get data points by slicing + # FDa = FD1[10:] + # FD1 = FD1[:10] + + # #%% + # # Append data sets together + # FD1.append(FDa) + + + # #%% + # # Plot the locations of the data points + # plt.figure(figsize=(8,6)) + # _ = FD1.scatter2D(); + + # #%% + # # Plot all the data along the specified line + # plt.figure(figsize=(8,6)) + # _ = FD1.plotLine(30010.0, log=10); + + # #%% + # # Or, plot specific channels in the data + # plt.figure(figsize=(8,6)) + # _ = FD1.plot(channels=[0,11,8], log=10, linewidth=0.5); + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + In_Phase 380.0 + In_Phase 1776.0 + In_Phase 3345.0 + In_Phase 8171.0 + In_Phase 41020.0 + In_Phase 129550.0 + Quadrature 380.0 + Quadrature 1776.0 + Quadrature 3345.0 + Quadrature 8171.0 + Quadrature 41020.0 + Quadrature 129550.0 + + + + +.. GENERATED FROM PYTHON SOURCE LINES 105-106 + +Read in a second data set + +.. GENERATED FROM PYTHON SOURCE LINES 106-108 + +.. code-block:: Python + + FD2 = FdemData.read_csv(dataFilename=dataFolder + 'Resolve1.txt', system=dataFolder + 'FdemSystem1.stm') + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + Warning: Your data contains values that are <= 0.0 + + + + +.. GENERATED FROM PYTHON SOURCE LINES 109-110 + +We can create maps of the elevations in two separate figures + +.. GENERATED FROM PYTHON SOURCE LINES 110-114 + +.. code-block:: Python + + plt.figure(figsize=(8,6)) + _ = FD1.map(dx=50.0, dy=50.0, mask = 200.0) + plt.axis('equal'); + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_frequency_dataset_001.png + :alt: plot frequency dataset + :srcset: /examples/Data/images/sphx_glr_plot_frequency_dataset_001.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + surface [WARNING]: 66659 unusable points were supplied; these will be ignored. + surface [WARNING]: You should have pre-processed the data with block-mean, -median, or -mode. + surface [WARNING]: Check that previous processing steps write results with enough decimals. + surface [WARNING]: Possibly some data were half-way between nodes and subject to IEEE 754 rounding. + + (np.float64(584494.28), np.float64(590194.28), np.float64(4639054.24), np.float64(4661854.24)) + + + +.. GENERATED FROM PYTHON SOURCE LINES 115-120 + +.. code-block:: Python + + + plt.figure(figsize=(8,6)) + _ = FD2.map(dx=50.0, dy=50.0, mask = 200.0) + plt.axis('equal'); + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_frequency_dataset_002.png + :alt: plot frequency dataset + :srcset: /examples/Data/images/sphx_glr_plot_frequency_dataset_002.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + surface [WARNING]: 123487 unusable points were supplied; these will be ignored. + surface [WARNING]: You should have pre-processed the data with block-mean, -median, or -mode. + surface [WARNING]: Check that previous processing steps write results with enough decimals. + surface [WARNING]: Possibly some data were half-way between nodes and subject to IEEE 754 rounding. 
+ + (np.float64(662822.398), np.float64(668372.398), np.float64(4560028.655), np.float64(4600678.655)) + + + +.. GENERATED FROM PYTHON SOURCE LINES 121-126 + +Or, we can plot both data sets in one figure to see their positions relative +to each other. + +In this case, I use a 2D scatter plot of the data point co-ordinates, and pass +one of the channels as the colour. + +.. GENERATED FROM PYTHON SOURCE LINES 126-131 + +.. code-block:: Python + + + plt.figure(figsize=(8,6)) + _ = FD1.scatter2D(s=1.0, c=FD1.data[:, 0]) + _ = FD2.scatter2D(s=1.0, c=FD2.data[:, 0], cmap='jet'); + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_frequency_dataset_003.png + :alt: plot frequency dataset + :srcset: /examples/Data/images/sphx_glr_plot_frequency_dataset_003.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 132-134 + +Or, interpolate the values to create a gridded "map". mapChannel will +interpolate the specified channel number. + +.. GENERATED FROM PYTHON SOURCE LINES 134-139 + +.. code-block:: Python + + + plt.figure(figsize=(8,6)) + _ = FD1.mapData(channel=3, system=0, dx=200, dy=200, mask=250) + plt.axis('equal'); + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_frequency_dataset_004.png + :alt: In_Phase 8171.0 + :srcset: /examples/Data/images/sphx_glr_plot_frequency_dataset_004.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + surface [WARNING]: 70336 unusable points were supplied; these will be ignored. + surface [WARNING]: You should have pre-processed the data with block-mean, -median, or -mode. + surface [WARNING]: Check that previous processing steps write results with enough decimals. + surface [WARNING]: Possibly some data were half-way between nodes and subject to IEEE 754 rounding. + + (np.float64(584419.28), np.float64(590219.28), np.float64(4638979.24), np.float64(4661979.24)) + + + +.. GENERATED FROM PYTHON SOURCE LINES 140-141 + +Export the data to VTK + +.. GENERATED FROM PYTHON SOURCE LINES 141-144 + +.. code-block:: Python + + FD1.to_vtk('FD_one.vtk') + # FD2.to_vtk('FD_two.vtk') + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 145-147 + +Obtain a line from the data set ++++++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 149-150 + +Take a look at the line numbers in the dataset + +.. GENERATED FROM PYTHON SOURCE LINES 150-152 + +.. code-block:: Python + + print(np.unique(FD1.lineNumber)) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [30010 30020 30030 ... 30100 39010 39020] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 153-155 + +.. code-block:: Python + + L = FD1.line(30010.0) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 156-157 + +A summary will now show the properties of the line. + +.. GENERATED FROM PYTHON SOURCE LINES 157-160 + +.. code-block:: Python + + + print(L.summary) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + FdemData + x: + | StatArray + | Name: Easting (m) + | Address:['0x156368350'] + | Shape: (6710,) + | Values: [586852.29 586852.23 586852.17 ... 586123.57 586123.2 586122.82] + | Min: 586122.82 + | Max: 586852.29 + | has_posterior: False + + y: + | StatArray + | Name: Northing (m) + | Address:['0x156369ed0'] + | Shape: (6710,) + | Values: [4639119.38 4639122.68 4639125.98 ... 
4661765.26 4661768.84 4661772.42] + | Min: 4639119.38 + | Max: 4661772.42 + | has_posterior: False + + z: + | StatArray + | Name: Height (m) + | Address:['0x1563681d0'] + | Shape: (6710,) + | Values: [36.115 36.498 36.835 ... 27.799 27.704 27.601] + | Min: 23.830000000000002 + | Max: 50.567 + | has_posterior: False + + elevation: + | StatArray + | Name: Elevation (m) + | Address:['0x15468a450'] + | Shape: (6710,) + | Values: [1246.84 1246.71 1246.61 ... 1337.94 1337.96 1338.02] + | Min: 1213.18 + | Max: 1338.02 + | has_posterior: False + + channel names: + | In_Phase 380.0, In_Phase 1776.0, In_Phase 3345.0, In_Phase 8171.0, In_Phase 41020.0, + | In_Phase 129550.0, Quadrature 380.0, Quadrature 1776.0, Quadrature 3345.0, Quadrature 8171.0, + | Quadrature 41020.0, Quadrature 129550.0 + data: + | DataArray + | Name: Data (ppm) + | Address:['0x17eb61950'] + | Shape: (80520,) + | Values: [145.3 435.8 260.6 ... 749.2 976.5 928.3] + | Min: 37.7 + | Max: 3726.9 + + predicted data: + | DataArray + | Name: Predicted Data (ppm) + | Address:['0x17eb61950'] + | Shape: (80520,) + | Values: [0. 0. 0. ... 0. 0. 0.] + | Min: 0.0 + | Max: 0.0 + + std: + | DataArray + | Name: std (ppm) + | Address:['0x17eb604d0'] + | Shape: (80520,) + | Values: [1.453 4.358 2.606 ... 7.492 9.765 9.283] + | Min: 0.37700000000000006 + | Max: 37.269 + + line number: + | DataArray + | Name: Line number + | Address:['0x17ec01150'] + | Shape: (6710,) + | Values: [30010. 30010. 30010. ... 30010. 30010. 30010.] + | Min: 30010.0 + | Max: 30010.0 + + fiducial: + | DataArray + | Name: Fiducial + | Address:['0x156368250'] + | Shape: (6710,) + | Values: [30000 30000 30000 ... 30670 30670 30670] + | Min: 30000 + | Max: 30670 + + relative error: + | DataArray + | Name: Relative error (%) + | Address:['0x15468ad50'] + | Shape: (6710, 1) + | Values: [[0.01] + | [0.01] + | [0.01] + | ... + | [0.01] + | [0.01] + | [0.01]] + | Min: 0.01 + | Max: 0.01 + + additive error: + | DataArray + | Name: Additive error (ppm) + | Address:['0x15468ac50'] + | Shape: (6710, 1) + | Values: [[0.] + | [0.] + | [0.] + | ... + | [0.] + | [0.] + | [0.]] + | Min: 0.0 + | Max: 0.0 + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 161-162 + +And we can scatter2D the points in the line. + +.. GENERATED FROM PYTHON SOURCE LINES 162-166 + +.. code-block:: Python + + + plt.figure(figsize=(8,6)) + _ = L.scatter2D(); + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_frequency_dataset_005.png + :alt: plot frequency dataset + :srcset: /examples/Data/images/sphx_glr_plot_frequency_dataset_005.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 167-169 + +We can specify the axis along which to plot. +xAxis can be index, x, y, z, r2d, r3d + +.. GENERATED FROM PYTHON SOURCE LINES 169-198 + +.. code-block:: Python + + plt.figure(figsize=(8,6)) + _ = FD1.plot_data(channels=np.r_[0, 11, 8], log=10, linewidth=0.5); + + with h5py.File('fdem.h5', 'w') as f: + FD1.createHdf(f, 'fdem') + FD1.writeHdf(f, 'fdem') + + with h5py.File('fdem.h5', 'r') as f: + FD3 = FdemData.fromHdf(f['fdem']) + + with h5py.File('fdem.h5', 'r') as f: + fdp = FdemData.fromHdf(f['fdem'], index=0) + + + # #%% + # # Obtain a single datapoint from the data set + # # +++++++++++++++++++++++++++++++++++++++++++ + # # + # # Checkout :ref:`Frequency domain datapoint` for an example + # # about how to use a datapoint once it is instantiated. + # dp = FD1.datapoint(0) + + # # Prepare the dataset so that we can read a point at a time. 
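+    # # Sequential reading avoids holding the entire survey in memory; each call to
+    # # _read_record below returns the next data point in the file.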
+ # Dataset = FdemData._initialize_sequential_reading(dataFile, systemFile) + # # Get a datapoint from the file. + # DataPoint = Dataset._read_record() + + plt.show() + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_frequency_dataset_006.png + :alt: plot frequency dataset + :srcset: /examples/Data/images/sphx_glr_plot_frequency_dataset_006.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 199-235 + +File Format for frequency domain data ++++++++++++++++++++++++++++++++++++++ +Here we describe the file format for frequency domain data. + +The data columns are read in according to the column names in the first line. + +In this description, the column name or its alternatives are given followed by what the name represents. +Optional columns are also described. + +Required columns +________________ +line + Line number for the data point +fid + Unique identification number of the data point +x or northing or n + Northing co-ordinate of the data point, (m) +y or easting or e + Easting co-ordinate of the data point, (m) +z or alt + Altitude of the transmitter coil above ground level (m) +elevation + Elevation of the ground at the data point (m) +I_ Q_ ... I_ Q_ - with the number and square brackets + The measurements for each frequency specified in the accompanying system file. + I is the real inphase measurement in (ppm) + Q is the imaginary quadrature measurement in (ppm) +Optional columns +________________ +InphaseErr[0] QuadratureErr[0] ... InphaseErr[nFrequencies] QuadratureErr[nFrequencies] + Estimates of standard deviation for each inphase and quadrature measurement. + These must appear after the data colums. + +Example Header +______________ +Line fid easting northing elevation height I_380 Q_380 ... ... I_129550 Q_129550 + +.. GENERATED FROM PYTHON SOURCE LINES 237-266 + +File Format for a frequency domain system ++++++++++++++++++++++++++++++++++++++++++ +.. role:: raw-html(raw) + :format: html + +The system file is structured using columns with the first line containing header information + +Each subsequent row contains the information for each measurement frequency + +freq + Frequency of the channel +tor + Orientation of the transmitter loop 'x', or 'z' +tmom + Transmitter moment +tx, ty, tx + Offset of the transmitter with respect to the observation locations +ror + Orientation of the receiver loop 'x', or 'z' +rmom + Receiver moment +rx, ry, rz + Offset of the receiver with respect to the transmitter location + +Example system files are contained in +`the supplementary folder`_ in this repository + +.. _the supplementary folder: https://github.com/usgs/geobipy/tree/master/documentation_source/source/examples/supplementary/Data + +See the Resolve.stm files. + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 3.257 seconds) + + +.. _sphx_glr_download_examples_Data_plot_frequency_dataset.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_frequency_dataset.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_frequency_dataset.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_frequency_dataset.zip ` + + +.. only:: html + + .. 
rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Data/plot_pointcloud3d.rst.txt b/docs/_sources/examples/Data/plot_pointcloud3d.rst.txt new file mode 100644 index 00000000..a0424ded --- /dev/null +++ b/docs/_sources/examples/Data/plot_pointcloud3d.rst.txt @@ -0,0 +1,478 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Data/plot_pointcloud3d.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Data_plot_pointcloud3d.py: + + +3D Point Cloud class +-------------------- + +.. GENERATED FROM PYTHON SOURCE LINES 7-16 + +.. code-block:: Python + + + from geobipy import Point + from os.path import join + import numpy as np + import matplotlib.pyplot as plt + import h5py + + nPoints = 200 + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 17-19 + +Create a quick test example using random points +$z=x(1-x)cos(4\pi x)sin(4\pi y^{2})^{2}$ + +.. GENERATED FROM PYTHON SOURCE LINES 19-25 + +.. code-block:: Python + + x = -np.abs((2.0 * np.random.rand(nPoints)) - 1.0) + y = -np.abs((2.0 * np.random.rand(nPoints)) - 1.0) + z = x * (1.0 - x) * np.cos(np.pi * x) * np.sin(np.pi * y) + + PC3D = Point(x=x, y=y, z=z) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 26-27 + +Append pointclouds together + +.. GENERATED FROM PYTHON SOURCE LINES 27-34 + +.. code-block:: Python + + x = np.abs((2.0 * np.random.rand(nPoints)) - 1.0) + y = np.abs((2.0 * np.random.rand(nPoints)) - 1.0) + z = x * (1.0 - x) * np.cos(np.pi * x) * np.sin(np.pi * y) + + Other_PC = Point(x=x, y=y, z=z) + PC3D.append(Other_PC) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 35-36 + +Write a summary of the contents of the point cloud + +.. GENERATED FROM PYTHON SOURCE LINES 36-39 + +.. code-block:: Python + + + print(PC3D.summary) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + Point + x: + | StatArray + | Name: Easting (m) + | Address:['0x1462b7250'] + | Shape: (400,) + | Values: [-0.75575613 -0.37227056 -0.5082486 ... 0.91062364 0.56073342 + | 0.52761544] + | Min: -0.9805553827696698 + | Max: 0.9916814356778278 + | has_posterior: False + + y: + | StatArray + | Name: Northing (m) + | Address:['0x1462b72d0'] + | Shape: (400,) + | Values: [-0.15201495 -0.55717559 -0.53823673 ... 0.30290861 0.17254113 + | 0.23398822] + | Min: -0.9899649834962019 + | Max: 0.9979698382486921 + | has_posterior: False + + z: + | StatArray + | Name: Height (m) + | Address:['0x1462b7350'] + | Shape: (400,) + | Values: [-0.43897973 0.19632561 -0.01971922 ... -0.06368326 -0.02409831 + | -0.01448342] + | Min: -1.9063110648103332 + | Max: 0.22889687797869024 + | has_posterior: False + + elevation: + | StatArray + | Name: Elevation (m) + | Address:['0x1462b73d0'] + | Shape: (400,) + | Values: [0. 0. 0. ... 0. 0. 0.] + | Min: 0.0 + | Max: 0.0 + | has_posterior: False + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 40-41 + +Get a single location from the point as a 3x1 vector + +.. GENERATED FROM PYTHON SOURCE LINES 41-45 + +.. code-block:: Python + + + Point = PC3D[50] + # Print the point to the screen + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 46-47 + +Plot the locations with Height as colour + +.. 
GENERATED FROM PYTHON SOURCE LINES 47-51 + +.. code-block:: Python + + + plt.figure() + PC3D.scatter2D(edgecolor='k') + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_pointcloud3d_001.png + :alt: plot pointcloud3d + :srcset: /examples/Data/images/sphx_glr_plot_pointcloud3d_001.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + (, , ) + + + +.. GENERATED FROM PYTHON SOURCE LINES 52-55 + +Plotting routines take matplotlib arguments for customization + +For example, plotting the size of the points according to the absolute value of height + +.. GENERATED FROM PYTHON SOURCE LINES 55-58 + +.. code-block:: Python + + plt.figure() + ax = PC3D.scatter2D(s=100*np.abs(PC3D.z), edgecolor='k') + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_pointcloud3d_002.png + :alt: plot pointcloud3d + :srcset: /examples/Data/images/sphx_glr_plot_pointcloud3d_002.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 59-60 + +Interpolate the points to a 2D rectilinear mesh + +.. GENERATED FROM PYTHON SOURCE LINES 60-66 + +.. code-block:: Python + + mesh, dum = PC3D.interpolate(0.01, 0.01, values=PC3D.z, method='sibson', mask=0.03) + + # We can save that mesh to VTK + PC3D.to_vtk('pc3d.vtk') + mesh.to_vtk('interpolated_pc3d.vtk') + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 67-68 + +Grid the points using a triangulated CloughTocher, or minimum curvature interpolation + +.. GENERATED FROM PYTHON SOURCE LINES 68-83 + +.. code-block:: Python + + + plt.figure() + plt.subplot(331) + PC3D.map(dx=0.01, dy=0.01, method='ct') + plt.subplot(332) + PC3D.map(dx=0.01, dy=0.01, method='mc') + plt.subplot(333) + PC3D.map(dx=0.01, dy=0.01, method='sibson') + + plt.subplot(334) + PC3D.map(dx=0.01, dy=0.01, method='ct', mask=0.03) + plt.subplot(335) + PC3D.map(dx=0.01, dy=0.01, method='mc', mask=0.3) + plt.subplot(336) + PC3D.map(dx=0.01, dy=0.01, method='sibson', mask=0.03) + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_pointcloud3d_003.png + :alt: plot pointcloud3d + :srcset: /examples/Data/images/sphx_glr_plot_pointcloud3d_003.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + surface [WARNING]: 5 unusable points were supplied; these will be ignored. + surface [WARNING]: You should have pre-processed the data with block-mean, -median, or -mode. + surface [WARNING]: Check that previous processing steps write results with enough decimals. + surface [WARNING]: Possibly some data were half-way between nodes and subject to IEEE 754 rounding. + surface [WARNING]: 5 unusable points were supplied; these will be ignored. + surface [WARNING]: You should have pre-processed the data with block-mean, -median, or -mode. + surface [WARNING]: Check that previous processing steps write results with enough decimals. + surface [WARNING]: Possibly some data were half-way between nodes and subject to IEEE 754 rounding. + + (, , ) + + + +.. GENERATED FROM PYTHON SOURCE LINES 84-85 + +For lots of points, these surfaces can look noisy. Using a block filter will help + +.. GENERATED FROM PYTHON SOURCE LINES 85-94 + +.. code-block:: Python + + PCsub = PC3D.block_median(0.05, 0.05) + plt.subplot(337) + PCsub.map(dx=0.01, dy=0.01, method='ct', mask=0.03) + plt.subplot(338) + PCsub.map(dx=0.01, dy=0.01, method='mc', mask=0.03) + plt.subplot(339) + PCsub.map(dx=0.01, dy=0.01, method='sibson', mask=0.03) + + + + + +.. 
image-sg:: /examples/Data/images/sphx_glr_plot_pointcloud3d_004.png + :alt: plot pointcloud3d + :srcset: /examples/Data/images/sphx_glr_plot_pointcloud3d_004.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + surface [WARNING]: 2 unusable points were supplied; these will be ignored. + surface [WARNING]: You should have pre-processed the data with block-mean, -median, or -mode. + surface [WARNING]: Check that previous processing steps write results with enough decimals. + surface [WARNING]: Possibly some data were half-way between nodes and subject to IEEE 754 rounding. + + (, , ) + + + +.. GENERATED FROM PYTHON SOURCE LINES 95-96 + +We can perform spatial searches on the 3D point cloud + +.. GENERATED FROM PYTHON SOURCE LINES 96-100 + +.. code-block:: Python + + + PC3D.set_kdtree(ndim=2) + p = PC3D.nearest((0.0,0.0), k=200, p=2, radius=0.3) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 101-103 + +.nearest returns the distances and indices into the point cloud of the nearest points. +We can then obtain those points as another point cloud + +.. GENERATED FROM PYTHON SOURCE LINES 103-115 + +.. code-block:: Python + + + # pNear = PC3D[p[1]] + # plt.figure() + # ax1 = plt.subplot(1,2,1) + # pNear.scatter2D() + # plt.plot(0.0, 0.0, 'x') + # plt.subplot(1,2,2, sharex=ax1, sharey=ax1) + # ax, sc, cb = PC3D.scatter2D(edgecolor='k') + # searchRadius = plt.Circle((0.0, 0.0), 0.3, color='b', fill=False) + # ax.add_artist(searchRadius) + # plt.plot(0.0, 0.0, 'x') + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 116-117 + +Read in the xyz co-ordinates in columns 2,3,4 from a file. Skip 1 header line. + +.. GENERATED FROM PYTHON SOURCE LINES 117-123 + +.. code-block:: Python + + + dataFolder = "..//..//supplementary//Data//" + + PC3D.read_csv(filename=dataFolder + 'Resolve1.txt') + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 124-138 + +.. code-block:: Python + + plt.figure() + f = PC3D.scatter2D(s=10) + + with h5py.File('test.h5', 'w') as f: + PC3D.createHdf(f, 'test') + PC3D.writeHdf(f, 'test') + + with h5py.File('test.h5', 'r') as f: + PC3D1 = Point.fromHdf(f['test']) + + with h5py.File('test.h5', 'r') as f: + point = Point.fromHdf(f['test'], index=0) + + plt.show() + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_pointcloud3d_005.png + :alt: plot pointcloud3d + :srcset: /examples/Data/images/sphx_glr_plot_pointcloud3d_005.png + :class: sphx-glr-single-img + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 19.063 seconds) + + +.. _sphx_glr_download_examples_Data_plot_pointcloud3d.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_pointcloud3d.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_pointcloud3d.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_pointcloud3d.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Data/plot_skytem_dataset.rst.txt b/docs/_sources/examples/Data/plot_skytem_dataset.rst.txt new file mode 100644 index 00000000..4aecaba6 --- /dev/null +++ b/docs/_sources/examples/Data/plot_skytem_dataset.rst.txt @@ -0,0 +1,545 @@ + +.. DO NOT EDIT. 
+.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Data/plot_skytem_dataset.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Data_plot_skytem_dataset.py: + + +Skytem dataset +-------------- + +.. GENERATED FROM PYTHON SOURCE LINES 6-14 + +.. code-block:: Python + + from geobipy import plotting as cP + from os.path import join + import matplotlib.pyplot as plt + import numpy as np + from geobipy import StatArray + from geobipy import TdemData + import h5py + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 15-17 + +Reading in the Data ++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 19-31 + +.. code-block:: Python + + dataFolder = "..//..//supplementary//data//" + # The data file name + dataFiles=dataFolder + 'skytem_saline_clay.csv' + # dataFiles = dataFolder + 'Skytem.csv' + # The EM system file name + systemFiles=[dataFolder + 'SkytemHM.stm', dataFolder + 'SkytemLM.stm'] + + from pathlib import Path + for f in systemFiles[:1]: + txt = Path(f).read_text() + print(txt) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + System Begin + Name = SkyTemHighMoment-ElkHills + Type = Time Domain + Transmitter Begin + NumberOfTurns = 1 + PeakCurrent = 1 + LoopArea = 1 + BaseFrequency = 30.0 + WaveformDigitisingFrequency = 491520 + WaveFormCurrent Begin + -4.00E-03 0.00E+00 + -3.91E-03 3.17E-01 + -3.81E-03 6.30E-01 + -3.72E-03 8.79E-01 + -3.68E-03 9.61E-01 + -2.30E-03 9.74E-01 + -1.01E-03 9.88E-01 + 0.00E+00 1.00E+00 + 3.25E-06 9.91E-01 + 1.00E-04 7.02E-01 + 2.02E-04 3.78E-01 + 2.82E-04 1.16E-01 + 3.08E-04 2.79E-02 + 3.13E-04 1.21E-02 + 3.15E-04 6.61E-03 + 3.17E-04 3.03E-03 + 3.19E-04 0.00E+00 + 0.012666667 0.00E+00 + + WaveFormCurrent End + Transmitter End + Receiver Begin + NumberOfWindows = 26 + WindowWeightingScheme = AreaUnderCurve + WindowTimes Begin + 3.796E-04 3.872E-04 + 3.876E-04 3.972E-04 + 3.976E-04 4.102E-04 + 4.106E-04 4.262E-04 + 4.266E-04 4.462E-04 + 4.466E-04 4.712E-04 + 4.716E-04 5.022E-04 + 5.026E-04 5.422E-04 + 5.426E-04 5.932E-04 + 5.936E-04 6.562E-04 + 6.566E-04 7.372E-04 + 7.376E-04 8.382E-04 + 8.386E-04 9.652E-04 + 9.656E-04 1.126E-03 + 1.127E-03 1.328E-03 + 1.329E-03 1.583E-03 + 1.584E-03 1.905E-03 + 1.906E-03 2.311E-03 + 2.312E-03 2.822E-03 + 2.823E-03 3.468E-03 + 3.469E-03 4.260E-03 + 4.261E-03 5.228E-03 + 5.229E-03 6.413E-03 + 6.414E-03 7.865E-03 + 7.866E-03 9.641E-03 + 9.642E-03 1.182E-02 + + WindowTimes End + LowPassFilter Begin + CutOffFrequency = 300000 210000 + Order = 1 2 + LowPassFilter End + Receiver End + ForwardModelling Begin + //TX loop area is was 340.82 m^2 -> r = sqrt(340.82/pi) + ModellingLoopRadius = 10.416 + OutputType = dB/dt + XOutputScaling = 0 + YOutputScaling = 0 + ZOutputScaling = 1 + SecondaryFieldNormalisation = none + FrequenciesPerDecade = 5 + NumberOfAbsiccaInHankelTransformEvaluation = 21 + ForwardModelling End + + System End + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 32-33 + +Read in the data from file + +.. GENERATED FROM PYTHON SOURCE LINES 33-35 + +.. code-block:: Python + + TD = TdemData.read_csv(dataFiles, systemFiles) + + + + + +.. rst-class:: sphx-glr-script-out + + .. 
code-block:: none + + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + + + + +.. GENERATED FROM PYTHON SOURCE LINES 36-37 + +Plot the locations of the data points + +.. GENERATED FROM PYTHON SOURCE LINES 37-40 + +.. code-block:: Python + + plt.figure(1, figsize=(8,6)) + _ = TD.scatter2D() + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_skytem_dataset_001.png + :alt: plot skytem dataset + :srcset: /examples/Data/images/sphx_glr_plot_skytem_dataset_001.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 41-42 + +Plot all the data along the specified line + +.. GENERATED FROM PYTHON SOURCE LINES 42-45 + +.. code-block:: Python + + plt.figure(2, figsize=(8,6)) + _ = TD.plotLine(0.0, log=10) + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_skytem_dataset_002.png + :alt: plot skytem dataset + :srcset: /examples/Data/images/sphx_glr_plot_skytem_dataset_002.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. 
code-block:: none + + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + + + + +.. GENERATED FROM PYTHON SOURCE LINES 46-47 + +Or, plot specific channels in the data + +.. GENERATED FROM PYTHON SOURCE LINES 47-50 + +.. code-block:: Python + + plt.figure(3, figsize=(8,6)) + _ = TD.plot_data(system=0, channels=[1, 3, 5], log=10) + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_skytem_dataset_003.png + :alt: plot skytem dataset + :srcset: /examples/Data/images/sphx_glr_plot_skytem_dataset_003.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + + + + +.. GENERATED FROM PYTHON SOURCE LINES 51-57 + +.. code-block:: Python + + plt.figure(4) + plt.subplot(211) + _ = TD.pcolor(system=0, xscale='log', log=10) + plt.subplot(212) + _ = TD.pcolor(system=1, xscale='log', log=10) + + + + +.. 
image-sg:: /examples/Data/images/sphx_glr_plot_skytem_dataset_004.png + :alt: plot skytem dataset + :srcset: /examples/Data/images/sphx_glr_plot_skytem_dataset_004.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + + + + +.. GENERATED FROM PYTHON SOURCE LINES 58-94 + +.. code-block:: Python + + plt.figure(5) + ax = TD.scatter2D(c=TD.secondary_field[:, TD.channel_index(system=0, channel=6)], log=10) + plt.axis('equal') + + + # with h5py.File('tdem.h5', 'w') as f: + # TD.createHdf(f, 'tdem') + # TD.writeHdf(f, 'tdem') + + # with h5py.File('tdem.h5', 'r') as f: + # TD3 = TdemData.fromHdf(f['tdem']) + + # with h5py.File('tdem.h5', 'r') as f: + # tdp = TdemData.fromHdf(f['tdem'], index=0) + + + # #%% + # # Obtain a line from the data set + # # +++++++++++++++++++++++++++++++ + # line = TD.line(0.0) + + # #%% + # plt.figure(6) + # _ = line.scatter2D(c=line.secondary_field[:, line.channel_index(system=0, channel=6)], log=10) + + # #%% + # plt.figure(7) + # _ = line.plot(xAxis='index', log=10) + + # Prepare the dataset so that we can read a point at a time. + Dataset = TdemData._initialize_sequential_reading(dataFiles, systemFiles) + # Get a datapoint from the file. + DataPoint = Dataset._read_record() + + plt.show() + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_skytem_dataset_005.png + :alt: plot skytem dataset + :srcset: /examples/Data/images/sphx_glr_plot_skytem_dataset_005.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + + + + +.. GENERATED FROM PYTHON SOURCE LINES 95-146 + +File Format for time domain data +++++++++++++++++++++++++++++++++ +Here we describe the file format for time domain data. + +The data columns are read in according to the column names in the first line + +In this description, the column name or its alternatives are given followed by what the name represents +Optional columns are also described. 
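+
+As a quick check before calling TdemData.read_csv, the header line of a data file can
+be compared against the required column names listed below. The snippet that follows is
+a minimal sketch only and is not part of geobipy; the file name is a placeholder, and
+the header is assumed to be whitespace or comma delimited.
+
+.. code-block:: Python
+
+    # Placeholder file name; substitute your own time domain data file.
+    filename = 'my_tdem_data.csv'
+    with open(filename) as f:
+        # Accept either comma or whitespace delimited header lines.
+        header = [name.lower() for name in f.readline().replace(',', ' ').split()]
+
+    # Each tuple holds a required column name followed by its accepted alternatives.
+    # 'height' is included as an alternative because the example header below uses it.
+    required = [('line',), ('fid',), ('x', 'northing', 'n'), ('y', 'easting', 'e'),
+                ('z', 'alt', 'height'), ('elevation',)]
+    missing = [names[0] for names in required if not any(n in header for n in names)]
+    print('Missing required columns:', missing)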
+ +Required columns +________________ +line + Line number for the data point +fid + Unique identification number of the data point +x or northing or n + Northing co-ordinate of the data point, (m) +y or easting or e + Easting co-ordinate of the data point, (m) +z or alt + Altitude of the transmitter coil above ground level (m) +elevation + Elevation of the ground at the data point (m) +txrx_dx + Distance in x between transmitter and reciever (m) +txrx_dy + Distance in y between transmitter and reciever (m) +txrx_dz + Distance in z between transmitter and reciever (m) +Tx_Pitch + Pitch of the transmitter loop +Tx_Roll + Roll of the transmitter loop +Tx_Yaw + Yaw of the transmitter loop +Rx_Pitch + Pitch of the receiver loop +Rx_Roll + Roll of the receiver loop +Rx_Yaw + Yaw of the receiver loop +Off_time[0] Off_time[1] ... Off_time[last] - with the number and square brackets + The measurements for each time gate specified in the accompanying system file under Receiver Window Times + The total number of off_time columns should equal the sum of the receiver windows in all system files. +Optional columns +________________ +Off_time_Error[0] Off_time_Error[1] ... Off_time_Error[last] + Estimates of standard deviation for each off time measurement +Example Header +______________ +Line fid easting northing elevation height txrx_dx txrx_dy txrx_dz TxPitch TxRoll TxYaw RxPitch RxRoll RxYaw Off[0] Off[1] + +.. GENERATED FROM PYTHON SOURCE LINES 148-160 + +File Format for a time domain system +++++++++++++++++++++++++++++++++++++ +Please see Page 13 of Ross Brodie's `instructions`_ + +.. _instructions: https://github.com/GeoscienceAustralia/ga-aem/blob/master/docs/GA%20AEM%20Programs%20User%20Manual.pdf + +We use GA-AEM for our airborne time domain forward modeller. + +Example system files are contained in +`the supplementary folder`_ in this repository + +.. _the supplementary folder: https://github.com/usgs/geobipy/tree/master/documentation_source/source/examples/supplementary/Data + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 2.295 seconds) + + +.. _sphx_glr_download_examples_Data_plot_skytem_dataset.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_skytem_dataset.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_skytem_dataset.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_skytem_dataset.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Data/plot_tempest_dataset.rst.txt b/docs/_sources/examples/Data/plot_tempest_dataset.rst.txt new file mode 100644 index 00000000..446c64a2 --- /dev/null +++ b/docs/_sources/examples/Data/plot_tempest_dataset.rst.txt @@ -0,0 +1,385 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Data/plot_tempest_dataset.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Data_plot_tempest_dataset.py: + + +Tempest dataset +-------------------- + +.. GENERATED FROM PYTHON SOURCE LINES 6-13 + +.. 
code-block:: Python + + import h5py + from geobipy import plotting as cP + from os.path import join + import matplotlib.pyplot as plt + import numpy as np + from geobipy import TempestData + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 14-16 + +Reading in the Data ++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 18-33 + +.. code-block:: Python + + dataFolder = "..//..//supplementary//data//" + + # # The data file name + # dataFiles = dataFolder + 'Tempest.nc' + # # The EM system file name + # systemFiles = dataFolder + 'Tempest.stm' + + # #%% + # # Read in the data from file + # TD = TempestData.read_netcdf(dataFiles, systemFiles) + + # TD.write_csv(dataFolder + 'Tempest.csv') + TD = TempestData.read_csv(dataFolder + 'tempest_saline_clay.csv', system_filename=dataFolder + 'Tempest.stm') + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + + + + +.. GENERATED FROM PYTHON SOURCE LINES 34-35 + +Plot the locations of the data points + +.. GENERATED FROM PYTHON SOURCE LINES 35-39 + +.. code-block:: Python + + plt.figure(figsize=(8,6)) + _ = TD.scatter2D() + plt.title("Scatter plot") + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_tempest_dataset_001.png + :alt: Scatter plot + :srcset: /examples/Data/images/sphx_glr_plot_tempest_dataset_001.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + Text(0.5, 1.0, 'Scatter plot') + + + +.. GENERATED FROM PYTHON SOURCE LINES 40-41 + +Plot all the data along the specified line + +.. GENERATED FROM PYTHON SOURCE LINES 41-45 + +.. code-block:: Python + + plt.figure(figsize=(8,6)) + _ = TD.plotLine(0.0) + plt.title('Line {}'.format(225401.0)) + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_tempest_dataset_002.png + :alt: Line 225401.0 + :srcset: /examples/Data/images/sphx_glr_plot_tempest_dataset_002.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. 
code-block:: none + + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + + Text(0.5, 1.0, 'Line 225401.0') + + + +.. GENERATED FROM PYTHON SOURCE LINES 46-47 + +Or, plot specific channels in the data + +.. GENERATED FROM PYTHON SOURCE LINES 47-51 + +.. code-block:: Python + + plt.figure(figsize=(8,6)) + _ = TD.plot_data(system=0, channels=[0, 6, 18]) + plt.title("3 channels of data") + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_tempest_dataset_003.png + :alt: 3 channels of data + :srcset: /examples/Data/images/sphx_glr_plot_tempest_dataset_003.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + + Text(0.5, 1.0, '3 channels of data') + + + +.. GENERATED FROM PYTHON SOURCE LINES 52-56 + +.. code-block:: Python + + plt.figure() + _ = TD.pcolor(system=0) + plt.title('Data as an array') + + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_tempest_dataset_004.png + :alt: Data as an array + :srcset: /examples/Data/images/sphx_glr_plot_tempest_dataset_004.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + + Text(0.5, 1.0, 'Data as an array') + + + +.. GENERATED FROM PYTHON SOURCE LINES 57-89 + +.. 
code-block:: Python + + plt.figure() + ax = TD.scatter2D(c=TD.data[:, TD.channel_index(system=0, channel=10)], equalize=True) + plt.axis('equal') + plt.title(f"scatter plot of channel {TD.channel_index(system=0, channel=10)}") + + with h5py.File('tdem.h5', 'w') as f: + TD.createHdf(f, 'tdem') + TD.writeHdf(f, 'tdem') + + with h5py.File('tdem.h5', 'r') as f: + TD3 = TempestData.fromHdf(f['tdem']) + + with h5py.File('tdem.h5', 'r') as f: + tdp = TempestData.fromHdf(f['tdem'], index=0) + + + # #%% + # # Obtain a line from the data set + # # +++++++++++++++++++++++++++++++ + # line = TD.line(0.0) + + # #%% + # plt.figure() + # _ = line.scatter2D() + # plt.title('Channel') + + # #%% + # plt.figure() + # _ = line.plot_data(xAxis='index', log=10) + # plt.title("All data along line") + + plt.show() + + + +.. image-sg:: /examples/Data/images/sphx_glr_plot_tempest_dataset_005.png + :alt: scatter plot of channel 10 + :srcset: /examples/Data/images/sphx_glr_plot_tempest_dataset_005.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 4.125 seconds) + + +.. _sphx_glr_download_examples_Data_plot_tempest_dataset.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. 
container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_tempest_dataset.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_tempest_dataset.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_tempest_dataset.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Data/readme.rst.txt b/docs/_sources/examples/Data/readme.rst.txt new file mode 100644 index 00000000..f18be396 --- /dev/null +++ b/docs/_sources/examples/Data/readme.rst.txt @@ -0,0 +1,2 @@ +Data +==== \ No newline at end of file diff --git a/docs/_sources/examples/Data/sg_execution_times.rst.txt b/docs/_sources/examples/Data/sg_execution_times.rst.txt new file mode 100644 index 00000000..fdadb173 --- /dev/null +++ b/docs/_sources/examples/Data/sg_execution_times.rst.txt @@ -0,0 +1,46 @@ + +:orphan: + +.. _sphx_glr_examples_Data_sg_execution_times: + + +Computation times +================= +**00:28.741** total execution time for 4 files **from examples/Data**: + +.. container:: + + .. raw:: html + + + + + + + + .. list-table:: + :header-rows: 1 + :class: table table-striped sg-datatable + + * - Example + - Time + - Mem (MB) + * - :ref:`sphx_glr_examples_Data_plot_pointcloud3d.py` (``plot_pointcloud3d.py``) + - 00:19.063 + - 0.0 + * - :ref:`sphx_glr_examples_Data_plot_tempest_dataset.py` (``plot_tempest_dataset.py``) + - 00:04.125 + - 0.0 + * - :ref:`sphx_glr_examples_Data_plot_frequency_dataset.py` (``plot_frequency_dataset.py``) + - 00:03.257 + - 0.0 + * - :ref:`sphx_glr_examples_Data_plot_skytem_dataset.py` (``plot_skytem_dataset.py``) + - 00:02.295 + - 0.0 diff --git a/docs/_sources/examples/Datapoints/plot_resolve_datapoint.rst.txt b/docs/_sources/examples/Datapoints/plot_resolve_datapoint.rst.txt new file mode 100644 index 00000000..df0df104 --- /dev/null +++ b/docs/_sources/examples/Datapoints/plot_resolve_datapoint.rst.txt @@ -0,0 +1,362 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Datapoints/plot_resolve_datapoint.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Datapoints_plot_resolve_datapoint.py: + + +Frequency domain datapoint +-------------------------- + +.. GENERATED FROM PYTHON SOURCE LINES 6-88 + +.. code-block:: Python + + from os.path import join + import numpy as np + import h5py + import matplotlib.pyplot as plt + from geobipy import CircularLoop + from geobipy import FdemSystem + from geobipy import FdemData + from geobipy import FdemDataPoint + from geobipy import RectilinearMesh1D + from geobipy import Model + from geobipy import StatArray + from geobipy import Distribution + + # Instantiating a frequency domain data point + # +++++++++++++++++++++++++++++++++++++++++++ + # + # To instantiate a frequency domain datapoint we need to define some + # characteristics of the acquisition system. + # + # We need to define the frequencies in Hz of the transmitter, + # and the geometery of the loops used for each frequency. 
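+    # The frequency array and every loop parameter below contain one entry per
+    # frequency, so the six frequencies are paired with six transmitter and six
+    # receiver loop descriptions.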
+ + frequencies = np.asarray([380.0, 1776.0, 3345.0, 8171.0, 41020.0, 129550.0]) + + # Transmitter positions are defined relative to the observation locations in the data + # This is usually a constant offset for all data points. + transmitters = CircularLoop(orientation=['z','z','x','z','z','z'], + moment=np.r_[1, 1, -1, 1, 1, 1], + x = np.r_[0,0,0,0,0,0], + y = np.r_[0,0,0,0,0,0], + z = np.r_[0,0,0,0,0,0], + pitch = np.r_[0,0,0,0,0,0], + roll = np.r_[0,0,0,0,0,0], + yaw = np.r_[0,0,0,0,0,0], + radius = np.r_[1,1,1,1,1,1]) + + # Receiver positions are defined relative to the transmitter + receivers = CircularLoop(orientation=['z','z','x','z','z','z'], + moment=np.r_[1, 1, -1, 1, 1, 1], + x = np.r_[7.91, 7.91, 9.03, 7.91, 7.91, 7.89], + y = np.r_[0,0,0,0,0,0], + z = np.r_[0,0,0,0,0,0], + pitch = np.r_[0,0,0,0,0,0], + roll = np.r_[0,0,0,0,0,0], + yaw = np.r_[0,0,0,0,0,0], + radius = np.r_[1,1,1,1,1,1]) + + # Now we can instantiate the system. + fds = FdemSystem(frequencies, transmitters, receivers) + + # And use the system to instantiate a datapoint + # + # Note the extra arguments that can be used to create the data point. + # data is for any observed data one might have, while std are the estimated standard + # deviations of those observed data. + # + # Define some in-phase then quadrature data for each frequency. + data = np.r_[145.3, 435.8, 260.6, 875.1, 1502.7, 1516.9, + 217.9, 412.5, 178.7, 516.5, 405.7, 255.7] + + fdp = FdemDataPoint(x=0.0, y=0.0, z=30.0, elevation=0.0, + data=data, std=None, predictedData=None, + system=fds, lineNumber=0.0, fiducial=0.0) + + # plt.figure() + # _ = fdp.plot() + + # Obtaining a datapoint from a dataset + # ++++++++++++++++++++++++++++++++++++ + # + # More often than not, our observed data is stored in a file on disk. + # We can read in a dataset and pull datapoints from it. + # + # For more information about the frequency domain data set see :ref:`Frequency domain dataset` + + # Set some paths and file names + dataFolder = "..//..//supplementary//Data//" + # The data file name + dataFile = dataFolder + 'Resolve2.txt' + # The EM system file name + systemFile = dataFolder + 'FdemSystem2.stm' + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 89-91 + +Initialize and read an EM data set +Prepare the dataset so that we can read a point at a time. + +.. GENERATED FROM PYTHON SOURCE LINES 91-94 + +.. code-block:: Python + + Dataset = FdemData._initialize_sequential_reading(dataFile, systemFile) + # Get a datapoint from the file. + fdp = Dataset._read_record() + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 95-227 + +.. 
code-block:: Python + + + # # Initialize and read an EM data set + # D = FdemData.read_csv(dataFile,systemFile) + + # # Get a data point from the dataset + # fdp = D.datapoint(0) + # plt.figure() + # _ = fdp.plot() + + # Using a resolve datapoint + # +++++++++++++++++++++++++ + + # We can define a 1D layered earth model, and use it to predict some data + nCells = 19 + par = StatArray(np.linspace(0.01, 0.1, nCells), "Conductivity", "$\frac{S}{m}$") + depth = StatArray(np.arange(nCells+1) * 10.0, "Depth", 'm') + depth[-1] = np.inf + mod = Model(mesh=RectilinearMesh1D(edges=depth), values=par) + + # Forward model the data + fdp.forward(mod) + + plt.figure() + plt.subplot(121) + _ = mod.pcolor(transpose=True) + plt.subplot(122) + _ = fdp.plot_predicted() + plt.tight_layout() + + # Compute the sensitivity matrix for a given model + J = fdp.sensitivity(mod) + + plt.figure() + _ = np.abs(J).pcolor(equalize=True, log=10, flipY=True) + + # Attaching statistical descriptors to the resolve datapoint + # ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + from numpy.random import Generator + from numpy.random import PCG64DXSM + generator = PCG64DXSM(seed=0) + prng = Generator(generator) + + # Set values of relative and additive error for both systems. + fdp.relative_error = 0.05 + fdp.additive_error = 10.0 + # Define a multivariate log normal distribution as the prior on the predicted data. + fdp.predictedData.prior = Distribution('MvLogNormal', fdp.data[fdp.active], fdp.std[fdp.active]**2.0, prng=prng) + + # This allows us to evaluate the likelihood of the predicted data + print(fdp.likelihood(log=True)) + # Or the misfit + print(fdp.data_misfit()) + + # Plot the misfits for a range of half space conductivities + plt.figure() + _ = fdp.plot_halfspace_responses(-6.0, 4.0, 200) + + plt.title("Halfspace responses"); + + # We can perform a quick search for the best fitting half space + halfspace = fdp.find_best_halfspace() + print('Best half space conductivity is {} $S/m$'.format(halfspace.values)) + plt.figure() + _ = fdp.plot() + _ = fdp.plot_predicted() + + # Compute the misfit between observed and predicted data + print(fdp.data_misfit()) + + # We can attach priors to the height of the datapoint, + # the relative error multiplier, and the additive error noise floor + + + # Define the distributions used as priors. 
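+    # The height prior is a uniform window of +/- 2 m around the measured height,
+    # while the relative and additive error priors are bounded uniform distributions.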
+ zPrior = Distribution('Uniform', min=fdp.z - 2.0, max=fdp.z + 2.0, prng=prng) + relativePrior = Distribution('Uniform', min=0.01, max=0.5, prng=prng) + additivePrior = Distribution('Uniform', min=5, max=15, prng=prng) + fdp.set_priors(z_prior=zPrior, relative_error_prior=relativePrior, additive_error_prior=additivePrior, prng=prng) + + + # In order to perturb our solvable parameters, we need to attach proposal distributions + z_proposal = Distribution('Normal', mean=fdp.z, variance = 0.01, prng=prng) + relativeProposal = Distribution('MvNormal', mean=fdp.relative_error, variance=2.5e-7, prng=prng) + additiveProposal = Distribution('MvLogNormal', mean=fdp.additive_error, variance=1e-4, prng=prng) + fdp.set_proposals(relativeProposal, additiveProposal, z_proposal=z_proposal) + + # With priors set we can auto generate the posteriors + fdp.set_posteriors() + + nCells = 19 + par = StatArray(np.linspace(0.01, 0.1, nCells), "Conductivity", "$\frac{S}{m}$") + depth = StatArray(np.arange(nCells+1) * 10.0, "Depth", 'm') + depth[-1] = np.inf + mod = Model(mesh=RectilinearMesh1D(edges=depth), values=par) + fdp.forward(mod) + + # Perturb the datapoint and record the perturbations + for i in range(10): + fdp.perturb() + fdp.update_posteriors() + + + # Plot the posterior distributions + fig = plt.figure() + fdp.plot_posteriors(overlay=fdp) + + import h5py + with h5py.File('fdp.h5', 'w') as f: + fdp.createHdf(f, 'fdp', withPosterior=True) + fdp.writeHdf(f, 'fdp', withPosterior=True) + + with h5py.File('fdp.h5', 'r') as f: + fdp1 = FdemDataPoint.fromHdf(f['fdp']) + + plt.figure() + fdp1.plot_posteriors(overlay=fdp1) + + import h5py + with h5py.File('fdp.h5', 'w') as f: + fdp.createHdf(f, 'fdp', withPosterior=True, add_axis=np.arange(10.0)) + + for i in range(10): + fdp.writeHdf(f, 'fdp', withPosterior=True, index=i) + + from geobipy import FdemData + with h5py.File('fdp.h5', 'r') as f: + fdp1 = FdemDataPoint.fromHdf(f['fdp'], index=0) + fdp2 = FdemData.fromHdf(f['fdp']) + + fdp1.plot_posteriors(overlay=fdp1) + + plt.show() + # %% + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Datapoints/images/sphx_glr_plot_resolve_datapoint_001.png + :alt: Frequency Domain EM Data + :srcset: /examples/Datapoints/images/sphx_glr_plot_resolve_datapoint_001.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Datapoints/images/sphx_glr_plot_resolve_datapoint_002.png + :alt: plot resolve datapoint + :srcset: /examples/Datapoints/images/sphx_glr_plot_resolve_datapoint_002.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Datapoints/images/sphx_glr_plot_resolve_datapoint_003.png + :alt: Halfspace responses + :srcset: /examples/Datapoints/images/sphx_glr_plot_resolve_datapoint_003.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Datapoints/images/sphx_glr_plot_resolve_datapoint_004.png + :alt: Frequency Domain EM Data + :srcset: /examples/Datapoints/images/sphx_glr_plot_resolve_datapoint_004.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Datapoints/images/sphx_glr_plot_resolve_datapoint_005.png + :alt: Frequency Domain EM Data + :srcset: /examples/Datapoints/images/sphx_glr_plot_resolve_datapoint_005.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Datapoints/images/sphx_glr_plot_resolve_datapoint_006.png + :alt: Frequency Domain EM Data, Frequency Domain EM Data + :srcset: /examples/Datapoints/images/sphx_glr_plot_resolve_datapoint_006.png + :class: sphx-glr-multi-img + + +.. 
rst-class:: sphx-glr-script-out + + .. code-block:: none + + -733.5454886696688 + 1367.81945885548 + Best half space conductivity is [0.097701] $S/m$ + 45286.828623928755 + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 7.271 seconds) + + +.. _sphx_glr_download_examples_Datapoints_plot_resolve_datapoint.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_resolve_datapoint.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_resolve_datapoint.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_resolve_datapoint.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Datapoints/plot_skytem_datapoint.rst.txt b/docs/_sources/examples/Datapoints/plot_skytem_datapoint.rst.txt new file mode 100644 index 00000000..a85ab087 --- /dev/null +++ b/docs/_sources/examples/Datapoints/plot_skytem_datapoint.rst.txt @@ -0,0 +1,596 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Datapoints/plot_skytem_datapoint.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Datapoints_plot_skytem_datapoint.py: + + +Skytem Datapoint Class +---------------------- + +.. GENERATED FROM PYTHON SOURCE LINES 7-15 + +Credits: +We would like to thank Ross Brodie at Geoscience Australia for his airborne time domain forward modeller +https://github.com/GeoscienceAustralia/ga-aem + +For ground-based time domain data, we are using Dieter Werthmuller's python package Empymod +https://empymod.github.io/ + +Thanks to Dieter for his help getting Empymod ready for incorporation into GeoBIPy + +.. GENERATED FROM PYTHON SOURCE LINES 17-46 + +.. code-block:: Python + + from os.path import join + import numpy as np + import h5py + import matplotlib.pyplot as plt + from geobipy import Waveform + from geobipy import SquareLoop, CircularLoop + from geobipy import butterworth + from geobipy import TdemSystem + from geobipy import TdemData + from geobipy import TdemDataPoint + from geobipy import RectilinearMesh1D + from geobipy import Model + from geobipy import StatArray + from geobipy import Distribution + + dataFolder = "..//..//supplementary//data//" + + # Obtaining a datapoint from a dataset + # ++++++++++++++++++++++++++++++++++++ + # More often than not, our observed data is stored in a file on disk. + # We can read in a dataset and pull datapoints from it. + # + # For more information about the time domain data set, see :ref:`Time domain dataset` + + # The data file name + dataFile=dataFolder + 'skytem_saline_clay.csv' + # The EM system file name + systemFile=[dataFolder + 'SkytemHM.stm', dataFolder + 'SkytemLM.stm'] + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 47-49 + +Initialize and read an EM data set +Prepare the dataset so that we can read a point at a time. + +.. GENERATED FROM PYTHON SOURCE LINES 49-55 + +.. code-block:: Python + + Dataset = TdemData._initialize_sequential_reading(dataFile, systemFile) + # Get a datapoint from the file. 
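+    # Each call to _read_record returns the next data point in the file; the file
+    # handle is closed once the single record we need has been read.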
+ tdp = Dataset._read_record() + + Dataset._file.close() + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + + + + +.. GENERATED FROM PYTHON SOURCE LINES 56-58 + +Using a time domain datapoint ++++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 60-61 + +We can define a 1D layered earth model, and use it to predict some data + +.. GENERATED FROM PYTHON SOURCE LINES 61-64 + +.. code-block:: Python + + par = StatArray(np.r_[500.0, 20.0], "Conductivity", "$\frac{S}{m}$") + mod = Model(RectilinearMesh1D(edges=np.r_[0, 75.0, np.inf]), values=par) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 65-66 + +Forward model the data + +.. GENERATED FROM PYTHON SOURCE LINES 66-68 + +.. code-block:: Python + + tdp.forward(mod) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 69-77 + +.. code-block:: Python + + plt.figure() + plt.subplot(121) + _ = mod.pcolor() + plt.subplot(122) + _ = tdp.plot() + _ = tdp.plot_predicted() + plt.tight_layout() + + + + +.. image-sg:: /examples/Datapoints/images/sphx_glr_plot_skytem_datapoint_001.png + :alt: Time Domain EM Data + :srcset: /examples/Datapoints/images/sphx_glr_plot_skytem_datapoint_001.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + /Users/nfoks/codes/repositories/geobipy_docs/geobipy/src/classes/data/datapoint/TdemDataPoint.py:363: RuntimeWarning: divide by zero encountered in log + additive_error = exp(log(self.additive_error[i]) - 0.5 * (log(off_times) - log(1e-3))) + + + + +.. GENERATED FROM PYTHON SOURCE LINES 78-82 + +.. code-block:: Python + + plt.figure() + tdp.plotDataResidual(yscale='log', xscale='log') + plt.title('new') + + + + +.. image-sg:: /examples/Datapoints/images/sphx_glr_plot_skytem_datapoint_002.png + :alt: new + :srcset: /examples/Datapoints/images/sphx_glr_plot_skytem_datapoint_002.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + Text(0.5, 1.0, 'new') + + + +.. GENERATED FROM PYTHON SOURCE LINES 83-84 + +Compute the sensitivity matrix for a given model + +.. GENERATED FROM PYTHON SOURCE LINES 84-88 + +.. code-block:: Python + + J = tdp.sensitivity(mod) + plt.figure() + _ = np.abs(J).pcolor(equalize=True, log=10, flipY=True) + + + + +.. image-sg:: /examples/Datapoints/images/sphx_glr_plot_skytem_datapoint_003.png + :alt: plot skytem datapoint + :srcset: /examples/Datapoints/images/sphx_glr_plot_skytem_datapoint_003.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 89-91 + +Attaching statistical descriptors to the skytem datapoint ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 91-105 + +.. code-block:: Python + + from numpy.random import Generator + from numpy.random import PCG64DXSM + generator = PCG64DXSM(seed=0) + prng = Generator(generator) + + # Set values of relative and additive error for both systems. + tdp.relative_error = np.r_[0.05, 0.05] + tdp.additive_error = np.r_[1e-14, 1e-13] + # Define a multivariate normal distribution as the prior on the predicted data. 
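+    # (Added comment: an illustrative sketch, not the library internals.) The mean of this
+    # prior is the observed data on the active (non-missing) channels and its variance is
+    # the squared data standard deviation, so tdp.likelihood(log=True) below evaluates a
+    # Gaussian log-likelihood. Assuming independent channels, it is roughly:
+    #
+    #     r = tdp.predictedData[tdp.active] - tdp.data[tdp.active]
+    #     v = tdp.std[tdp.active]**2.0
+    #     log_like = -0.5 * np.sum(r**2 / v + np.log(2.0 * np.pi * v))
+    #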
+ data_prior = Distribution('MvNormal', tdp.data[tdp.active], tdp.std[tdp.active]**2.0, prng=prng) + + tdp.set_priors(data_prior=data_prior) + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 106-107 + +This allows us to evaluate the likelihood of the predicted data + +.. GENERATED FROM PYTHON SOURCE LINES 107-111 + +.. code-block:: Python + + print(tdp.likelihood(log=True)) + # Or the misfit + print(tdp.data_misfit()) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + -320327.7331520327 + 643134.8665682999 + + + + +.. GENERATED FROM PYTHON SOURCE LINES 112-113 + +Plot the misfits for a range of half space conductivities + +.. GENERATED FROM PYTHON SOURCE LINES 113-117 + +.. code-block:: Python + + plt.figure() + _ = tdp.plot_halfspace_responses(-6.0, 4.0, 200) + plt.title("Halfspace responses") + + + + +.. image-sg:: /examples/Datapoints/images/sphx_glr_plot_skytem_datapoint_004.png + :alt: Halfspace responses + :srcset: /examples/Datapoints/images/sphx_glr_plot_skytem_datapoint_004.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + Text(0.5, 1.0, 'Halfspace responses') + + + +.. GENERATED FROM PYTHON SOURCE LINES 118-119 + +We can perform a quick search for the best fitting half space + +.. GENERATED FROM PYTHON SOURCE LINES 119-126 + +.. code-block:: Python + + halfspace = tdp.find_best_halfspace() + + print('Best half space conductivity is {} $S/m$'.format(halfspace.values)) + plt.figure() + _ = tdp.plot() + _ = tdp.plot_predicted() + + + + +.. image-sg:: /examples/Datapoints/images/sphx_glr_plot_skytem_datapoint_005.png + :alt: Time Domain EM Data + :srcset: /examples/Datapoints/images/sphx_glr_plot_skytem_datapoint_005.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + Best half space conductivity is [0.01047616] $S/m$ + + + + +.. GENERATED FROM PYTHON SOURCE LINES 127-128 + +Compute the misfit between observed and predicted data + +.. GENERATED FROM PYTHON SOURCE LINES 128-130 + +.. code-block:: Python + + print(tdp.data_misfit()) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + 19656.31514467744 + + + + +.. GENERATED FROM PYTHON SOURCE LINES 131-133 + +We can attach priors to the height of the datapoint, +the relative error multiplier, and the additive error noise floor + +.. GENERATED FROM PYTHON SOURCE LINES 133-140 + +.. code-block:: Python + + + # Define the distributions used as priors. + z_prior = Distribution('Uniform', min=np.float64(tdp.z) - 2.0, max=np.float64(tdp.z) + 2.0, prng=prng) + relativePrior = Distribution('Uniform', min=np.r_[0.01, 0.01], max=np.r_[0.5, 0.5], prng=prng) + additivePrior = Distribution('Uniform', min=np.r_[1e-16, 1e-16], max=np.r_[1e-10, 1e-10], log=True, prng=prng) + tdp.set_priors(relative_error_prior=relativePrior, additive_error_prior=additivePrior, z_prior=z_prior, prng=prng) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + /Users/nfoks/codes/repositories/geobipy_docs/documentation_source/source/examples/Datapoints/plot_skytem_datapoint.py:135: DeprecationWarning: Conversion of an array with ndim > 0 to a scalar is deprecated, and will error in future. Ensure you extract a single element from your array before performing this operation. (Deprecated NumPy 1.25.) + z_prior = Distribution('Uniform', min=np.float64(tdp.z) - 2.0, max=np.float64(tdp.z) + 2.0, prng=prng) + + + + +.. 
GENERATED FROM PYTHON SOURCE LINES 141-142 + +In order to perturb our solvable parameters, we need to attach proposal distributions + +.. GENERATED FROM PYTHON SOURCE LINES 142-147 + +.. code-block:: Python + + z_proposal = Distribution('Normal', mean=tdp.z, variance = 0.01, prng=prng) + relativeProposal = Distribution('MvNormal', mean=tdp.relative_error, variance=2.5e-7, prng=prng) + additiveProposal = Distribution('MvLogNormal', mean=tdp.additive_error, variance=2.5e-3, linearSpace=True, prng=prng) + tdp.set_proposals(relativeProposal, additiveProposal, z_proposal=z_proposal, prng=prng) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 148-149 + +With priors set we can auto generate the posteriors + +.. GENERATED FROM PYTHON SOURCE LINES 149-151 + +.. code-block:: Python + + tdp.set_posteriors() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 152-154 + +Perturb the datapoint and record the perturbations +Note we are not using the priors to accept or reject perturbations. + +.. GENERATED FROM PYTHON SOURCE LINES 154-159 + +.. code-block:: Python + + for i in range(10): + tdp.perturb() + tdp.update_posteriors() + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 160-161 + +Plot the posterior distributions + +.. GENERATED FROM PYTHON SOURCE LINES 161-166 + +.. code-block:: Python + + plt.figure() + tdp.plot_posteriors(overlay=tdp) + + plt.show() + + + + +.. image-sg:: /examples/Datapoints/images/sphx_glr_plot_skytem_datapoint_006.png + :alt: Time Domain EM Data + :srcset: /examples/Datapoints/images/sphx_glr_plot_skytem_datapoint_006.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 167-244 + +File Format for a time domain datapoint ++++++++++++++++++++++++++++++++++++++++ +Here we describe the file format for a time domain datapoint. + +For individual datapoints we are using the AarhusInv data format. + +Here we take the description for the AarhusInv TEM data file, modified to reflect what we can +currently handle in GeoBIPy. + +Line 1 :: string + User-defined label describing the TEM datapoint. + This line must contain the following, separated by semicolons. + XUTM= + YUTM= + Elevation= + StationNumber= + LineNumber= + Current= + +Line 2 :: first integer, sourceType + 7 = Rectangular loop source parallel to the x - y plane +Line 2 :: second integer, polarization + 3 = Vertical magnetic field + +Line 3 :: 6 floats, transmitter and receiver offsets relative to X/Y UTM location. + If sourceType = 7, Position of the center loop sounding. + +Line 4 :: Transmitter loop dimensions + If sourceType = 7, 2 floats. Loop side length in the x and y directions + +Line 5 :: Fixed + 3 3 3 + +Line 6 :: first integer, transmitter waveform type. Fixed + 3 = User defined waveform. + +Line 6 :: second integer, number of transmitter waveforms. Fixed + 1 + +Line 7 :: transmitter waveform definition + A user-defined waveform with piecewise linear segments. + A full transmitter waveform definition consists of a number of linear segments. + This line contains an integer as the first entry, which specifies the number of + segments, followed by each segment with 4 floats each. The 4 floats per segment + are the start and end times, and start and end amplitudes of the waveform. e.g. + 3 -8.333e-03 -8.033e-03 0.0 1.0 -8.033e-03 0.0 1.0 1.0 0.0 5.4e-06 1.0 0.0 + +Line 8 :: On time information. Not used but needs specifying. + 1 1 1 + +Line 9 :: On time low-pass filters. Not used but need specifying. + 0 + +Line 10 :: On time high-pass filters. 
Not used but need specifying. + 0 + +Line 11 :: Front-gate time. Not used but need specifying. + 0.0 + +Line 12 :: first integer, Number of off time filters + Number of filters + +Line 12 :: second integer, Order of the butterworth filter + 1 or 2 + +Line 12 :: cutoff frequencies Hz, one per the number of filters + e.g. 4.5e5 + +Line 13 :: Off time high pass filters. + See Line 12 + +Lines after 13 contain 3 columns that pertain to +Measurement Time, Data Value, Estimated Standard Deviation + +Example data files are contained in +`the supplementary folder`_ in this repository + +.. _the supplementary folder: https://github.com/usgs/geobipy/tree/master/documentation_source/source/examples/supplementary/Data + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 2.306 seconds) + + +.. _sphx_glr_download_examples_Datapoints_plot_skytem_datapoint.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_skytem_datapoint.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_skytem_datapoint.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_skytem_datapoint.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Datapoints/plot_tempest_datapoint.rst.txt b/docs/_sources/examples/Datapoints/plot_tempest_datapoint.rst.txt new file mode 100644 index 00000000..4a96ba66 --- /dev/null +++ b/docs/_sources/examples/Datapoints/plot_tempest_datapoint.rst.txt @@ -0,0 +1,463 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Datapoints/plot_tempest_datapoint.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Datapoints_plot_tempest_datapoint.py: + + +Tempest Datapoint Class +----------------------- + +.. GENERATED FROM PYTHON SOURCE LINES 7-15 + +Credits: +We would like to thank Ross Brodie at Geoscience Australia for his airborne time domain forward modeller +https://github.com/GeoscienceAustralia/ga-aem + +For ground-based time domain data, we are using Dieter Werthmuller's python package Empymod +https://empymod.github.io/ + +Thanks to Dieter for his help getting Empymod ready for incorporation into GeoBIPy + +.. GENERATED FROM PYTHON SOURCE LINES 17-54 + +.. code-block:: Python + + from os.path import join + import numpy as np + import h5py + import matplotlib.pyplot as plt + from geobipy import TempestData + # from geobipy import TemDataPoint + from geobipy import RectilinearMesh1D + from geobipy import Model + from geobipy import StatArray + from geobipy import Distribution + from geobipy import get_prng + + dataFolder = "..//..//supplementary//data//" + # dataFolder = "source//examples//supplementary//Data" + + # Obtaining a tempest datapoint from a dataset + # ++++++++++++++++++++++++++++++++++++++++++++ + # More often than not, our observed data is stored in a file on disk. + # We can read in a dataset and pull datapoints from it. 
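+    # (Added note: illustrative only.) dataFolder is a relative path, so the data and
+    # system files are resolved from the directory this example is run from. A quick
+    # sanity check using only the standard library might be:
+    #
+    #     from os.path import isdir
+    #     assert isdir(dataFolder), "dataFolder does not resolve; adjust the relative path"
+    #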
+ # + # For more information about the time domain data set, see :ref:`Time domain dataset` + + # The data file name + dataFile = dataFolder + 'tempest_saline_clay.csv' + # The EM system file name + systemFile = dataFolder + 'Tempest.stm' + + # Prepare the dataset so that we can read a point at a time. + Dataset = TempestData._initialize_sequential_reading(dataFile, systemFile) + # Get a datapoint from the file. + tdp = Dataset._read_record(0) + + plt.figure() + tdp.plot() + + prng = get_prng(seed=146100583096709124601953385843316024947) + + + + +.. image-sg:: /examples/Datapoints/images/sphx_glr_plot_tempest_datapoint_001.png + :alt: Time Domain EM Data + :srcset: /examples/Datapoints/images/sphx_glr_plot_tempest_datapoint_001.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + + + + +.. GENERATED FROM PYTHON SOURCE LINES 55-57 + +Using a tempest domain datapoint +++++++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 59-60 + +We can define a 1D layered earth model, and use it to predict some data + +.. GENERATED FROM PYTHON SOURCE LINES 60-67 + +.. code-block:: Python + + par = StatArray(np.r_[0.01, 0.1, 1.], "Conductivity", "$\frac{S}{m}$") + mod = Model(mesh=RectilinearMesh1D(edges=np.r_[0.0, 50.0, 75.0, np.inf]), values=par) + + par = StatArray(np.logspace(-3, 3, 30), "Conductivity", "$\frac{S}{m}$") + e = np.linspace(0, 350, 31); e[-1] = np.inf + mod = Model(mesh=RectilinearMesh1D(edges=e), values=par) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 68-69 + +Forward model the data + +.. GENERATED FROM PYTHON SOURCE LINES 69-108 + +.. code-block:: Python + + tdp.forward(mod) + + print('primary', tdp.primary_field) + print('sx', tdp.secondary_field[:15]) + print('sz', tdp.secondary_field[15:]) + + # #%% + # plt.figure() + # plt.subplot(121) + # _ = mod.pcolor(transpose=True) + # plt.subplot(122) + # _ = tdp.plot() + # _ = tdp.plot_predicted() + # plt.tight_layout() + # plt.suptitle('Model and response') + + # #%% + # # plt.figure() + # # tdp.plotDataResidual(xscale='log') + # # plt.title('data residual') + + # #%% + # # Compute the sensitivity matrix for a given model + J = tdp.sensitivity(mod) + # plt.figure() + # _ = np.abs(J).pcolor(equalize=True, log=10, flipY=True) + + print('J', J) + # print('J shape', J.shape) + # print('sx 0', J[:16, 0]) + + tdp.fm_dlogc(mod) + + print('new primary', tdp.primary_field) + print('sx', tdp.secondary_field[:15]) + print('sz', tdp.secondary_field[15:]) + + print('new J', tdp.sensitivity_matrix) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + primary [34.27253219 17.55503397] + sx [4.46362582 2.52720951 2.10544857 ... 0.34346631 0.27359586 0.19875285] + sz [6.47100177 4.53101158 3.87594468 ... 1.05345525 0.79969548 0.56994112] + J [[ 1.13463137e-01 1.49920887e-01 1.76789170e-01 ... -1.01809840e-09 + 1.13341751e-11 7.27489718e-13] + [ 2.09383016e-02 3.20412212e-02 4.74815387e-02 ... 
-1.02489023e-09 + 1.15994185e-11 7.25910166e-13] + [ 1.04188675e-02 1.61552555e-02 2.45575508e-02 ... -1.03167228e-09 + 1.18645190e-11 7.24296662e-13] + ... + [ 7.20880061e-05 1.13758034e-04 1.79138645e-04 ... -6.62044639e-09 + 1.91310127e-10 4.83737910e-13] + [ 3.95655826e-05 6.25753935e-05 9.87713313e-05 ... -6.33418159e-09 + 2.26727066e-10 -1.05451995e-12] + [ 1.60007270e-05 3.08385747e-05 5.05626410e-05 ... -1.28316523e-09 + 1.11041966e-10 -2.41585978e-12]] + new primary [34.27253219 17.55503397] + sx [4.46362582 2.52720951 2.10544857 ... 0.34346631 0.27359586 0.19875285] + sz [6.47100177 4.53101158 3.87594468 ... 1.05345525 0.79969548 0.56994112] + new J [[ 1.13463137e-01 1.49920887e-01 1.76789170e-01 ... -1.01809840e-09 + 1.13341751e-11 7.27489718e-13] + [ 2.09383016e-02 3.20412212e-02 4.74815387e-02 ... -1.02489023e-09 + 1.15994185e-11 7.25910166e-13] + [ 1.04188675e-02 1.61552555e-02 2.45575508e-02 ... -1.03167228e-09 + 1.18645190e-11 7.24296662e-13] + ... + [ 7.20880061e-05 1.13758034e-04 1.79138645e-04 ... -6.62044639e-09 + 1.91310127e-10 4.83737910e-13] + [ 3.95655826e-05 6.25753935e-05 9.87713313e-05 ... -6.33418159e-09 + 2.26727066e-10 -1.05451995e-12] + [ 1.60007270e-05 3.08385747e-05 5.05626410e-05 ... -1.28316523e-09 + 1.11041966e-10 -2.41585978e-12]] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 109-111 + +Attaching statistical descriptors to the tempest datapoint +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 111-125 + +.. code-block:: Python + + from numpy.random import Generator + from numpy.random import PCG64DXSM + generator = PCG64DXSM(seed=0) + prng = Generator(generator) + + # Set relative errors for the primary fields, and secondary fields. + tdp.relative_error = np.r_[0.001, 0.001] + + # Set the additive errors for + tdp.additive_error = np.hstack([[0.011474, 0.012810, 0.008507, 0.005154, 0.004742, 0.004477, 0.004168, 0.003539, 0.003352, 0.003213, 0.003161, 0.003122, 0.002587, 0.002038, 0.002201], + [0.007383, 0.005693, 0.005178, 0.003659, 0.003426, 0.003046, 0.003095, 0.003247, 0.002775, 0.002627, 0.002460, 0.002178, 0.001754, 0.001405, 0.001283]]) + # Define a multivariate log normal distribution as the prior on the predicted data. + tdp.predictedData.prior = Distribution('MvLogNormal', tdp.data[tdp.active], tdp.std[tdp.active]**2.0, prng=prng) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 126-127 + +This allows us to evaluate the likelihood of the predicted data + +.. GENERATED FROM PYTHON SOURCE LINES 127-131 + +.. code-block:: Python + + print(tdp.likelihood(log=True)) + # Or the misfit + print(tdp.data_misfit()) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + -36389.6500813217 + 72940.71365767403 + + + + +.. GENERATED FROM PYTHON SOURCE LINES 132-133 + +Plot the misfits for a range of half space conductivities + +.. GENERATED FROM PYTHON SOURCE LINES 133-138 + +.. code-block:: Python + + plt.figure() + plt.subplot(1, 2, 1) + _ = tdp.plot_halfspace_responses(-6.0, 4.0, 200) + plt.title("Halfspace responses") + + + + +.. image-sg:: /examples/Datapoints/images/sphx_glr_plot_tempest_datapoint_002.png + :alt: Halfspace responses + :srcset: /examples/Datapoints/images/sphx_glr_plot_tempest_datapoint_002.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + Text(0.5, 1.0, 'Halfspace responses') + + + +.. GENERATED FROM PYTHON SOURCE LINES 139-140 + +We can perform a quick search for the best fitting half space + +.. 
GENERATED FROM PYTHON SOURCE LINES 140-161 + +.. code-block:: Python + + halfspace = tdp.find_best_halfspace() + print('Best half space conductivity is {} $S/m$'.format(halfspace.values)) + plt.subplot(1, 2, 2) + _ = tdp.plot() + _ = tdp.plot_predicted() + + plt.figure() + tdp.plot_secondary_field() + tdp.plot_predicted_secondary_field() + + # #%% + # # We can attach priors to the height of the datapoint, + # # the relative error multiplier, and the additive error noise floor + + # Define the distributions used as priors. + relative_prior = Distribution('Uniform', min=np.r_[0.01, 0.01], max=np.r_[0.5, 0.5], prng=prng) + receiver_x_prior = Distribution('Uniform', min=np.float64(tdp.receiver.x) - 1.0, max=np.float64(tdp.receiver.x) + 1.0, prng=prng) + receiver_z_prior = Distribution('Uniform', min=np.float64(tdp.receiver.z) - 1.0, max=np.float64(tdp.receiver.z) + 1.0, prng=prng) + receiver_pitch_prior = Distribution('Uniform', min=tdp.receiver.pitch - 5.0, max=tdp.receiver.pitch + 5.0, prng=prng) + tdp.set_priors(relative_error_prior=relative_prior, receiver_x_prior=receiver_x_prior, receiver_z_prior=receiver_z_prior, receiver_pitch_prior=receiver_pitch_prior, prng=prng) + + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Datapoints/images/sphx_glr_plot_tempest_datapoint_003.png + :alt: Time Domain EM Data + :srcset: /examples/Datapoints/images/sphx_glr_plot_tempest_datapoint_003.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Datapoints/images/sphx_glr_plot_tempest_datapoint_004.png + :alt: plot tempest datapoint + :srcset: /examples/Datapoints/images/sphx_glr_plot_tempest_datapoint_004.png + :class: sphx-glr-multi-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + Best half space conductivity is [0.01830738] $S/m$ + /Users/nfoks/codes/repositories/geobipy_docs/documentation_source/source/examples/Datapoints/plot_tempest_datapoint.py:156: DeprecationWarning: Conversion of an array with ndim > 0 to a scalar is deprecated, and will error in future. Ensure you extract a single element from your array before performing this operation. (Deprecated NumPy 1.25.) + receiver_x_prior = Distribution('Uniform', min=np.float64(tdp.receiver.x) - 1.0, max=np.float64(tdp.receiver.x) + 1.0, prng=prng) + /Users/nfoks/codes/repositories/geobipy_docs/documentation_source/source/examples/Datapoints/plot_tempest_datapoint.py:157: DeprecationWarning: Conversion of an array with ndim > 0 to a scalar is deprecated, and will error in future. Ensure you extract a single element from your array before performing this operation. (Deprecated NumPy 1.25.) + receiver_z_prior = Distribution('Uniform', min=np.float64(tdp.receiver.z) - 1.0, max=np.float64(tdp.receiver.z) + 1.0, prng=prng) + + + + +.. GENERATED FROM PYTHON SOURCE LINES 162-163 + +In order to perturb our solvable parameters, we need to attach proposal distributions + +.. GENERATED FROM PYTHON SOURCE LINES 163-173 + +.. 
code-block:: Python + + relative_proposal = Distribution('MvNormal', mean=tdp.relative_error, variance=2.5e-4, prng=prng) + receiver_x_proposal = Distribution('Normal', mean=tdp.receiver.x, variance = 0.01, prng=prng) + receiver_z_proposal = Distribution('Normal', mean=tdp.receiver.z, variance = 0.01, prng=prng) + receiver_pitch_proposal = Distribution('Normal', mean=tdp.receiver.pitch, variance = 0.01, prng=prng) + tdp.set_proposals(relative_error_proposal=relative_proposal, + receiver_x_proposal=receiver_x_proposal, + receiver_z_proposal=receiver_z_proposal, + receiver_pitch_proposal=receiver_pitch_proposal, + solve_additive_error=True, additive_error_proposal_variance=1e-4, prng=prng) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 174-175 + +With priors set we can auto generate the posteriors + +.. GENERATED FROM PYTHON SOURCE LINES 175-177 + +.. code-block:: Python + + tdp.set_posteriors() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 178-180 + +Perturb the datapoint and record the perturbations +Note we are not using the priors to accept or reject perturbations. + +.. GENERATED FROM PYTHON SOURCE LINES 180-184 + +.. code-block:: Python + + for i in range(10): + tdp.perturb() + tdp.update_posteriors() + + plt.show() + + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 1.653 seconds) + + +.. _sphx_glr_download_examples_Datapoints_plot_tempest_datapoint.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_tempest_datapoint.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_tempest_datapoint.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_tempest_datapoint.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Datapoints/readme.rst.txt b/docs/_sources/examples/Datapoints/readme.rst.txt new file mode 100644 index 00000000..4b1753c1 --- /dev/null +++ b/docs/_sources/examples/Datapoints/readme.rst.txt @@ -0,0 +1,2 @@ +Datapoints +========== \ No newline at end of file diff --git a/docs/_sources/examples/Datapoints/sg_execution_times.rst.txt b/docs/_sources/examples/Datapoints/sg_execution_times.rst.txt new file mode 100644 index 00000000..4415abaf --- /dev/null +++ b/docs/_sources/examples/Datapoints/sg_execution_times.rst.txt @@ -0,0 +1,43 @@ + +:orphan: + +.. _sphx_glr_examples_Datapoints_sg_execution_times: + + +Computation times +================= +**00:11.231** total execution time for 3 files **from examples/Datapoints**: + +.. container:: + + .. raw:: html + + + + + + + + .. 
list-table:: + :header-rows: 1 + :class: table table-striped sg-datatable + + * - Example + - Time + - Mem (MB) + * - :ref:`sphx_glr_examples_Datapoints_plot_resolve_datapoint.py` (``plot_resolve_datapoint.py``) + - 00:07.271 + - 0.0 + * - :ref:`sphx_glr_examples_Datapoints_plot_skytem_datapoint.py` (``plot_skytem_datapoint.py``) + - 00:02.306 + - 0.0 + * - :ref:`sphx_glr_examples_Datapoints_plot_tempest_datapoint.py` (``plot_tempest_datapoint.py``) + - 00:01.653 + - 0.0 diff --git a/docs/_sources/examples/Distributions/plot_distributions.rst.txt b/docs/_sources/examples/Distributions/plot_distributions.rst.txt new file mode 100644 index 00000000..6ea3575f --- /dev/null +++ b/docs/_sources/examples/Distributions/plot_distributions.rst.txt @@ -0,0 +1,168 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Distributions/plot_distributions.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Distributions_plot_distributions.py: + + +Distribution Class +++++++++++++++++++ + +Handles the initialization of different statistical distribution + +.. GENERATED FROM PYTHON SOURCE LINES 9-19 + +.. code-block:: Python + + from geobipy import Distribution + from geobipy import plotting as cP + import matplotlib.pyplot as plt + import numpy as np + + from numpy.random import Generator + from numpy.random import PCG64DXSM + generator = PCG64DXSM(seed=0) + prng = Generator(generator) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 20-22 + +Univariate Normal Distribution +++++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 22-37 + +.. code-block:: Python + + D = Distribution('Normal', 0.0, 1.0, prng=prng) + + # Get the bins of the Distribution from +- 4 standard deviations of the mean + bins = D.bins() + + # Grab random samples from the distribution + D.rng(10) + + # We can then get the Probability Density Function for those bins + pdf = D.probability(bins, log=False) + + # And we can plot that PDF + plt.figure() + plt.plot(bins, pdf) + + + + +.. image-sg:: /examples/Distributions/images/sphx_glr_plot_distributions_001.png + :alt: plot distributions + :srcset: /examples/Distributions/images/sphx_glr_plot_distributions_001.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + [] + + + +.. GENERATED FROM PYTHON SOURCE LINES 39-41 + +Multivariate Normal Distribution +++++++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 41-45 + +.. code-block:: Python + + D = Distribution('MvNormal',[0.0,1.0,2.0],[1.0,1.0,1.0], prng=prng) + D.rng() + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + array([ 0.64050649, 1.77177243, -0.34500474]) + + + +.. GENERATED FROM PYTHON SOURCE LINES 46-47 + +Uniform Distribution + +.. GENERATED FROM PYTHON SOURCE LINES 47-49 + +.. code-block:: Python + + D = Distribution('Uniform', 0.0, 1.0, prng=prng) + D.bins() + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + DataArray([0. , 0.01010101, 0.02020202, ..., 0.97979798, + 0.98989899, 1. ]) + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 0.050 seconds) + + +.. _sphx_glr_download_examples_Distributions_plot_distributions.py: + +.. only:: html + + .. 
container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_distributions.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_distributions.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_distributions.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Distributions/readme.rst.txt b/docs/_sources/examples/Distributions/readme.rst.txt new file mode 100644 index 00000000..c9d04d99 --- /dev/null +++ b/docs/_sources/examples/Distributions/readme.rst.txt @@ -0,0 +1,2 @@ +Distributions +============= \ No newline at end of file diff --git a/docs/_sources/examples/Distributions/sg_execution_times.rst.txt b/docs/_sources/examples/Distributions/sg_execution_times.rst.txt new file mode 100644 index 00000000..47ec0e63 --- /dev/null +++ b/docs/_sources/examples/Distributions/sg_execution_times.rst.txt @@ -0,0 +1,37 @@ + +:orphan: + +.. _sphx_glr_examples_Distributions_sg_execution_times: + + +Computation times +================= +**00:00.050** total execution time for 1 file **from examples/Distributions**: + +.. container:: + + .. raw:: html + + + + + + + + .. list-table:: + :header-rows: 1 + :class: table table-striped sg-datatable + + * - Example + - Time + - Mem (MB) + * - :ref:`sphx_glr_examples_Distributions_plot_distributions.py` (``plot_distributions.py``) + - 00:00.050 + - 0.0 diff --git a/docs/_sources/examples/HDF5/hdf5.rst.txt b/docs/_sources/examples/HDF5/hdf5.rst.txt new file mode 100644 index 00000000..047f1bde --- /dev/null +++ b/docs/_sources/examples/HDF5/hdf5.rst.txt @@ -0,0 +1,182 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/HDF5/hdf5.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_HDF5_hdf5.py: + + +Using HDF5 within GeoBIPy +------------------------- + +Inference for large scale datasets in GeoBIPy is handled using MPI and distributed memory systems. +A common bottleneck with large parallel algorithms is the input and output of information to disk. +We use HDF5 to read and write data in order to leverage the parallel capabilities of the HDF5 API. + +Each object within GeoBIPy has a create_hdf, write_hdf, and read_hdf routine. + +.. GENERATED FROM PYTHON SOURCE LINES 12-16 + +.. code-block:: Python + + import numpy as np + import h5py + from geobipy import StatArray + + +.. GENERATED FROM PYTHON SOURCE LINES 17-18 + +StatArray + +.. GENERATED FROM PYTHON SOURCE LINES 18-33 + +.. code-block:: Python + + + # Instantiate a StatArray + x = StatArray(np.arange(10.0), name = 'an Array', units = 'some units') + + # Write the StatArray to a HDF file. + with h5py.File("x.h5", 'w') as f: + x.toHdf(f, "x") + + # Read the StatArray back in. + with h5py.File("x.h5", 'r') as f: + y = StatArray.fromHdf(f, 'x') + + print('x', x) + print('y', y) + + +.. GENERATED FROM PYTHON SOURCE LINES 34-39 + +There are actually two steps within the "toHdf" function. 
+First, space is created within the HDF file and second, the data is written to that space +These functions are split because during the execution of a parallel enabled program, +all the space within the HDF file needs to be allocated before we can write to the file +using multiple cores. + +.. GENERATED FROM PYTHON SOURCE LINES 39-52 + +.. code-block:: Python + + + # Write the StatArray to a HDF file. + with h5py.File("x.h5", 'w') as f: + x.createHdf(f, "x") + x.writeHdf(f, "x") + + # Read the StatArray back in. + with h5py.File("x.h5", 'r') as f: + y = StatArray.fromHdf(f, 'x') + + print('x', x) + print('y', y) + + +.. GENERATED FROM PYTHON SOURCE LINES 53-57 + +The create and write HDF methods also allow extra space to be allocated so that +the extra memory can be written later, perhaps by multiple cores. +Here we specify space for 2 arrays, the memory is stored contiguously as a numpy array. +We then write to only the first index. + +.. GENERATED FROM PYTHON SOURCE LINES 57-71 + +.. code-block:: Python + + + # Write the StatArray to a HDF file. + with h5py.File("x.h5", 'w') as f: + x.createHdf(f, "x", nRepeats=2) + x.writeHdf(f, "x", index=0) + + # Read the StatArray back in. + with h5py.File("x.h5", 'r') as f: + y = StatArray.fromHdf(f, 'x', index=0) + + print('x', x) + print('y', y) + + + +.. GENERATED FROM PYTHON SOURCE LINES 72-73 + +The duplication can also be a shape. + +.. GENERATED FROM PYTHON SOURCE LINES 73-86 + +.. code-block:: Python + + + # Write the StatArray to a HDF file. + with h5py.File("x.h5", 'w') as f: + x.createHdf(f, "x", nRepeats=(2, 2)) + x.writeHdf(f, "x", index=(0, 0)) + + # Read the StatArray back in. + with h5py.File("x.h5", 'r') as f: + y = StatArray.fromHdf(f, 'x', index=(0, 0)) + + print('x', x) + print('y', y) + + +.. GENERATED FROM PYTHON SOURCE LINES 87-88 + +Similarly, we can duplicate a 2D array with an extra 2D duplication + +.. GENERATED FROM PYTHON SOURCE LINES 88-100 + +.. code-block:: Python + + + x = StatArray(np.random.randn(2, 2), name = 'an Array', units = 'some units') + # Write the StatArray to a HDF file. + with h5py.File("x.h5", 'w') as f: + x.createHdf(f, "x", nRepeats=(2, 2)) + x.writeHdf(f, "x", index=(0, 0)) + + # Read the StatArray back in. + with h5py.File("x.h5", 'r') as f: + y = StatArray.fromHdf(f, 'x', index=(0, 0)) + + print('x', x) + print('y', y) + +.. _sphx_glr_download_examples_HDF5_hdf5.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: hdf5.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: hdf5.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: hdf5.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/HDF5/readme.rst.txt b/docs/_sources/examples/HDF5/readme.rst.txt new file mode 100644 index 00000000..7582962c --- /dev/null +++ b/docs/_sources/examples/HDF5/readme.rst.txt @@ -0,0 +1,2 @@ +HDF 5 +===== \ No newline at end of file diff --git a/docs/_sources/examples/HDF5/sg_execution_times.rst.txt b/docs/_sources/examples/HDF5/sg_execution_times.rst.txt new file mode 100644 index 00000000..c7859313 --- /dev/null +++ b/docs/_sources/examples/HDF5/sg_execution_times.rst.txt @@ -0,0 +1,37 @@ + +:orphan: + +.. 
_sphx_glr_examples_HDF5_sg_execution_times: + + +Computation times +================= +**00:00.000** total execution time for 1 file **from examples/HDF5**: + +.. container:: + + .. raw:: html + + + + + + + + .. list-table:: + :header-rows: 1 + :class: table table-striped sg-datatable + + * - Example + - Time + - Mem (MB) + * - :ref:`sphx_glr_examples_HDF5_hdf5.py` (``hdf5.py``) + - 00:00.000 + - 0.0 diff --git a/docs/_sources/examples/Inference_1D/plot_inference_1d_resolve.rst.txt b/docs/_sources/examples/Inference_1D/plot_inference_1d_resolve.rst.txt new file mode 100644 index 00000000..18c47b34 --- /dev/null +++ b/docs/_sources/examples/Inference_1D/plot_inference_1d_resolve.rst.txt @@ -0,0 +1,191 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Inference_1D/plot_inference_1d_resolve.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Inference_1D_plot_inference_1d_resolve.py: + + +Running GeoBIPy to invert Resolve data +++++++++++++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 5-29 + +.. code-block:: Python + + + import os + import sys + import pathlib + from datetime import timedelta + import time + import numpy as np + from geobipy import Inference3D + from geobipy import user_parameters + from geobipy import get_prng + + def checkCommandArguments(): + """Check the users command line arguments. """ + import argparse + # warnings.filterwarnings('error') + + Parser = argparse.ArgumentParser(description="GeoBIPy", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + Parser.add_argument('--index', default=0, type=int, help='job array index 0-18') + Parser.add_argument('--data', default=None, help="Data type. Choose from ['skytem_512', 'tempest', 'resolve']") + Parser.add_argument('--model', default=None, help="Model type. Choose from ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']") + + return Parser.parse_args() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 30-39 + +.. code-block:: Python + + np.random.seed(0) + + args = checkCommandArguments() + sys.path.append(os.getcwd()) + + models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water'] + data_type = "Resolve" + model_type = models[args.index] + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 40-42 + +The directory where HDF files will be stored +%% + +.. GENERATED FROM PYTHON SOURCE LINES 42-89 + +.. code-block:: Python + + file_path = os.path.join(data_type, model_type) + pathlib.Path(file_path).mkdir(parents=True, exist_ok=True) + + for filename in os.listdir(file_path): + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + except Exception as e: + print('Failed to delete %s. 
Reason: %s' % (file_path, e)) + + output_directory = file_path + + data_filename = data_type + '_' + model_type + + supplementary = "..//..//supplementary//" + + parameter_file = supplementary + "//options_files//{}_options".format(data_type) + inputFile = pathlib.Path(parameter_file) + assert inputFile.exists(), Exception("Cannot find input file {}".format(inputFile)) + + output_directory = pathlib.Path(output_directory) + assert output_directory.exists(), Exception("Make sure the output directory exists {}".format(output_directory)) + + print('Using user input file {}'.format(parameter_file)) + print('Output files will be produced at {}'.format(output_directory)) + + kwargs = user_parameters.read(inputFile) + + kwargs['n_markov_chains'] = 5000 + + kwargs['data_filename'] = supplementary + '//data//' + data_filename + '.csv' + kwargs['system_filename'] = supplementary + "//data//" + kwargs['system_filename'] + + # Everyone needs the system classes read in early. + data = kwargs['data_type']._initialize_sequential_reading(kwargs['data_filename'], kwargs['system_filename']) + + # Start keeping track of time. + t0 = time.time() + + seed = 146100583096709124601953385843316024947 + prng = get_prng(seed=seed) + + inference3d = Inference3D(data, prng=prng) + inference3d.create_hdf5(directory=output_directory, **kwargs) + + print("Created hdf5 files in {} h:m:s".format(str(timedelta(seconds=time.time()-t0)))) + + inference3d.infer(index=30, **kwargs) + + +.. image-sg:: /examples/Inference_1D/images/sphx_glr_plot_inference_1d_resolve_001.png + :alt: Fiducial [30], Frequency Domain EM Data + :srcset: /examples/Inference_1D/images/sphx_glr_plot_inference_1d_resolve_001.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + Using user input file ..//..//supplementary////options_files//Resolve_options + Output files will be produced at Resolve/glacial + Creating HDF5 files, this may take a few minutes... + Files are being created for data files ..//..//supplementary////data//Resolve_glacial.csv and system files ..//..//supplementary////data//..//data/FdemSystem2.stm + Created hdf5 file for line 0.0 with 79 data points + Created hdf5 files 79 total data points + Created hdf5 files in 0:00:00.164874 h:m:s + i=5000, k=1, acc=*49.200, 0.006 s/Model, 28.026 s Elapsed, eta=--:--:-- h:m:s + + Remaining Points -30/1 || Elapsed Time: 0:00:28.993323 h:m:s || ETA 0:00:00.935268 h:m:s + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 29.449 seconds) + + +.. _sphx_glr_download_examples_Inference_1D_plot_inference_1d_resolve.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_inference_1d_resolve.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_inference_1d_resolve.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_inference_1d_resolve.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Inference_1D/plot_inference_1d_skytem.rst.txt b/docs/_sources/examples/Inference_1D/plot_inference_1d_skytem.rst.txt new file mode 100644 index 00000000..c2df5c3a --- /dev/null +++ b/docs/_sources/examples/Inference_1D/plot_inference_1d_skytem.rst.txt @@ -0,0 +1,264 @@ + +.. DO NOT EDIT. +.. 
THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Inference_1D/plot_inference_1d_skytem.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Inference_1D_plot_inference_1d_skytem.py: + + +Running GeoBIPy to invert Skytem data +++++++++++++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 5-29 + +.. code-block:: Python + + + import os + import sys + import pathlib + from datetime import timedelta + import time + import numpy as np + from geobipy import Inference3D + from geobipy import user_parameters + from geobipy import get_prng + + def checkCommandArguments(): + """Check the users command line arguments. """ + import argparse + # warnings.filterwarnings('error') + + Parser = argparse.ArgumentParser(description="GeoBIPy", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + Parser.add_argument('--index', default=0, type=int, help='job array index 0-18') + Parser.add_argument('--data', default=None, help="Data type. Choose from ['skytem', 'tempest', 'resolve']") + Parser.add_argument('--model', default=None, help="Model type. Choose from ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']") + + return Parser.parse_args() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 30-40 + +.. code-block:: Python + + np.random.seed(0) + + args = checkCommandArguments() + sys.path.append(os.getcwd()) + + models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water'] + + data_type = "Skytem" + model_type = models[args.index] + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 41-43 + +The directory where HDF files will be stored +%% + +.. GENERATED FROM PYTHON SOURCE LINES 43-89 + +.. code-block:: Python + + file_path = os.path.join(data_type, model_type) + pathlib.Path(file_path).mkdir(parents=True, exist_ok=True) + + for filename in os.listdir(file_path): + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + except Exception as e: + print('Failed to delete %s. Reason: %s' % (file_path, e)) + + output_directory = file_path + + data_filename = data_type + '_' + model_type + + supplementary = "..//..//supplementary//" + parameter_file = supplementary + "//options_files//{}_options".format(data_type) + inputFile = pathlib.Path(parameter_file) + assert inputFile.exists(), Exception("Cannot find input file {}".format(inputFile)) + + output_directory = pathlib.Path(output_directory) + assert output_directory.exists(), Exception("Make sure the output directory exists {}".format(output_directory)) + + print('Using user input file {}'.format(parameter_file)) + print('Output files will be produced at {}'.format(output_directory)) + + kwargs = user_parameters.read(inputFile) + + kwargs['n_markov_chains'] = 5000 + + kwargs['data_filename'] = supplementary + '//data//' + data_filename + '.csv' + kwargs['system_filename'] = [supplementary + "//data//" + x for x in kwargs['system_filename']] + + # Everyone needs the system classes read in early. + data = kwargs['data_type']._initialize_sequential_reading(kwargs['data_filename'], kwargs['system_filename']) + + # Start keeping track of time. 
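+    # (Added comment.) t0 is compared against time.time() once the HDF5 files have been
+    # created, and the elapsed time is reported through datetime.timedelta, e.g.
+    #
+    #     str(timedelta(seconds=time.time() - t0))   # -> '0:00:00.368412' in the run below
+    #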
+ t0 = time.time() + + seed = 146100583096709124601953385843316024947 + prng = get_prng(seed=seed) + + inference3d = Inference3D(data, prng=prng) + inference3d.create_hdf5(directory=output_directory, **kwargs) + + print("Created hdf5 files in {} h:m:s".format(str(timedelta(seconds=time.time()-t0)))) + + inference3d.infer(index=2, **kwargs) + + +.. image-sg:: /examples/Inference_1D/images/sphx_glr_plot_inference_1d_skytem_001.png + :alt: Fiducial [2.], Time Domain EM Data + :srcset: /examples/Inference_1D/images/sphx_glr_plot_inference_1d_skytem_001.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + Using user input file ..//..//supplementary////options_files//Skytem_options + Output files will be produced at Skytem/glacial + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + Creating HDF5 files, this may take a few minutes... + Files are being created for data files ..//..//supplementary////data//Skytem_glacial.csv and system files ['..//..//supplementary////data//..//data//SkytemHM.stm', '..//..//supplementary////data//..//data//SkytemLM.stm'] + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + Created hdf5 file for line 0.0 with 79 data points + Created hdf5 files 79 total data points + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) 
+ self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + Created hdf5 files in 0:00:00.368412 h:m:s + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + i=5000, k=6, acc=*25.760, 0.019 s/Model, 94.498 s Elapsed, eta=--:--:-- h:m:s + + Remaining Points -2/1 || Elapsed Time: 0:01:36.342864 h:m:s || ETA 0:00:32.114288 h:m:s + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (1 minutes 37.169 seconds) + + +.. _sphx_glr_download_examples_Inference_1D_plot_inference_1d_skytem.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_inference_1d_skytem.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_inference_1d_skytem.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_inference_1d_skytem.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Inference_1D/plot_inference_1d_tempest.rst.txt b/docs/_sources/examples/Inference_1D/plot_inference_1d_tempest.rst.txt new file mode 100644 index 00000000..6be4c2ee --- /dev/null +++ b/docs/_sources/examples/Inference_1D/plot_inference_1d_tempest.rst.txt @@ -0,0 +1,266 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Inference_1D/plot_inference_1d_tempest.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Inference_1D_plot_inference_1d_tempest.py: + + +Running GeoBIPy to invert Tempest data +++++++++++++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 5-29 + +.. code-block:: Python + + + import os + import sys + import pathlib + from datetime import timedelta + import time + import numpy as np + from geobipy import Inference3D + from geobipy import user_parameters + from geobipy import get_prng + + def checkCommandArguments(): + """Check the users command line arguments. 
""" + import argparse + # warnings.filterwarnings('error') + + Parser = argparse.ArgumentParser(description="GeoBIPy", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + Parser.add_argument('--index', default=0, type=int, help='job array index 0-18') + Parser.add_argument('--data', default=None, help="Data type. Choose from ['skytem_512', 'tempest', 'resolve']") + Parser.add_argument('--model', default=None, help="Model type. Choose from ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']") + + return Parser.parse_args() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 30-40 + +.. code-block:: Python + + np.random.seed(0) + + args = checkCommandArguments() + sys.path.append(os.getcwd()) + + models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water'] + + data_type = "Tempest" + model_type = models[args.index] + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 41-43 + +The directory where HDF files will be stored +%% + +.. GENERATED FROM PYTHON SOURCE LINES 43-91 + +.. code-block:: Python + + file_path = os.path.join(data_type, model_type) + pathlib.Path(file_path).mkdir(parents=True, exist_ok=True) + + for filename in os.listdir(file_path): + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + except Exception as e: + print('Failed to delete %s. Reason: %s' % (file_path, e)) + + output_directory = file_path + + data_filename = data_type + '_' + model_type + + supplementary = "..//..//supplementary//" + + parameter_file = supplementary + "//options_files//{}_options".format(data_type) + inputFile = pathlib.Path(parameter_file) + assert inputFile.exists(), Exception("Cannot find input file {}".format(inputFile)) + + output_directory = pathlib.Path(output_directory) + assert output_directory.exists(), Exception("Make sure the output directory exists {}".format(output_directory)) + + print('Using user input file {}'.format(parameter_file)) + print('Output files will be produced at {}'.format(output_directory)) + + kwargs = user_parameters.read(inputFile) + + kwargs['n_markov_chains'] = 5000 + + kwargs['data_filename'] = supplementary + '//data//' + data_filename + '.csv' + kwargs['system_filename'] = supplementary + "//data//" + kwargs['system_filename'] + + # Everyone needs the system classes read in early. + data = kwargs['data_type']._initialize_sequential_reading(kwargs['data_filename'], kwargs['system_filename']) + + # Start keeping track of time. + t0 = time.time() + + seed = 146100583096709124601953385843316024947 + prng = get_prng(seed=seed) + + inference3d = Inference3D(data, prng=prng) + inference3d.create_hdf5(directory=output_directory, **kwargs) + + print("Created hdf5 files in {} h:m:s".format(str(timedelta(seconds=time.time()-t0)))) + + inference3d.infer(index=2, **kwargs) + + + +.. image-sg:: /examples/Inference_1D/images/sphx_glr_plot_inference_1d_tempest_001.png + :alt: Fiducial [2.], Time Domain EM Data + :srcset: /examples/Inference_1D/images/sphx_glr_plot_inference_1d_tempest_001.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. 
code-block:: none + + Using user input file ..//..//supplementary////options_files//Tempest_options + Output files will be produced at Tempest/glacial + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + Creating HDF5 files, this may take a few minutes... + Files are being created for data files ..//..//supplementary////data//Tempest_glacial.csv and system files ..//..//supplementary////data//..//data/tempest.stm + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + Created hdf5 file for line 0.0 with 79 data points + Created hdf5 files 79 total data points + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + Created hdf5 files in 0:00:00.544066 h:m:s + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, 
self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + i=5000, k=5, acc=*24.800, 0.023 s/Model, 113.655 s Elapsed, eta=--:--:-- h:m:s + + Remaining Points -2/1 || Elapsed Time: 0:01:55.563057 h:m:s || ETA 0:00:38.521019 h:m:s + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (1 minutes 56.531 seconds) + + +.. _sphx_glr_download_examples_Inference_1D_plot_inference_1d_tempest.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_inference_1d_tempest.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_inference_1d_tempest.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_inference_1d_tempest.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Inference_1D/readme.rst.txt b/docs/_sources/examples/Inference_1D/readme.rst.txt new file mode 100644 index 00000000..9c93f798 --- /dev/null +++ b/docs/_sources/examples/Inference_1D/readme.rst.txt @@ -0,0 +1,11 @@ +1D Inference +============ + +There are a couple of ways to run an inference using geobipy, the first is via command line using + +.. code-block:: bash + + geobipy skytem_options.py + +The other is with a python script similar to the examples in this folder. +In both cases, you will need to write an options file (also shown in these examples) diff --git a/docs/_sources/examples/Inference_1D/sg_execution_times.rst.txt b/docs/_sources/examples/Inference_1D/sg_execution_times.rst.txt new file mode 100644 index 00000000..ceebf7c0 --- /dev/null +++ b/docs/_sources/examples/Inference_1D/sg_execution_times.rst.txt @@ -0,0 +1,43 @@ + +:orphan: + +.. _sphx_glr_examples_Inference_1D_sg_execution_times: + + +Computation times +================= +**04:03.149** total execution time for 3 files **from examples/Inference_1D**: + +.. container:: + + .. raw:: html + + + + + + + + .. list-table:: + :header-rows: 1 + :class: table table-striped sg-datatable + + * - Example + - Time + - Mem (MB) + * - :ref:`sphx_glr_examples_Inference_1D_plot_inference_1d_tempest.py` (``plot_inference_1d_tempest.py``) + - 01:56.531 + - 0.0 + * - :ref:`sphx_glr_examples_Inference_1D_plot_inference_1d_skytem.py` (``plot_inference_1d_skytem.py``) + - 01:37.169 + - 0.0 + * - :ref:`sphx_glr_examples_Inference_1D_plot_inference_1d_resolve.py` (``plot_inference_1d_resolve.py``) + - 00:29.449 + - 0.0 diff --git a/docs/_sources/examples/Inference_2D/plot_inference_2d_resolve.rst.txt b/docs/_sources/examples/Inference_2D/plot_inference_2d_resolve.rst.txt new file mode 100644 index 00000000..d446af63 --- /dev/null +++ b/docs/_sources/examples/Inference_2D/plot_inference_2d_resolve.rst.txt @@ -0,0 +1,254 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Inference_2D/plot_inference_2d_resolve.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. 
note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Inference_2D_plot_inference_2d_resolve.py: + + +2D Posterior analysis of Resolve inference +------------------------------------------ + +All plotting in GeoBIPy can be carried out using the 3D inference class + +.. GENERATED FROM PYTHON SOURCE LINES 8-151 + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_resolve_001.png + :alt: resolve glacial, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_resolve_001.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_resolve_002.png + :alt: resolve saline_clay, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_resolve_002.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_resolve_003.png + :alt: resolve resistive_dolomites, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_resolve_003.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_resolve_004.png + :alt: resolve resistive_basement, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_resolve_004.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_resolve_005.png + :alt: resolve coastal_salt_water, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_resolve_005.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_resolve_006.png + :alt: resolve ice_over_salt_water, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_resolve_006.png + :class: sphx-glr-multi-img + + + + + +.. code-block:: Python + + + import matplotlib.pyplot as plt + import numpy as np + from geobipy import Model + from geobipy import Inference2D + + def plot_2d_summary(folder, data_type, model_type): + #%% + # Inference for a line of inferences + # ++++++++++++++++++++++++++++++++++ + # + # We can instantiate the inference handler by providing a path to the directory containing + # HDF5 files generated by GeoBIPy. + # + # The InfereceXD classes are low memory. They only read information from the HDF5 files + # as and when it is needed. + # + # The first time you use these classes to create plots, expect longer initial processing times. + # I precompute expensive properties and store them in the HDF5 files for later use. 
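+    # The generator below seeds the analysis reproducibly, and the Inference2D
+    # handler is then attached to a single line's HDF5 file (0.0.h5) produced
+    # by the 1D inference runs.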
+ + from numpy.random import Generator + from numpy.random import PCG64DXSM + generator = PCG64DXSM(seed=0) + prng = Generator(generator) + + #%% + results_2d = Inference2D.fromHdf('{}/{}/{}/0.0.h5'.format(folder, data_type, model_type), prng=prng) + + kwargs = { + "log" : 10, + "cmap" : 'jet' + } + + fig = plt.figure(figsize=(16, 8)) + plt.suptitle("{} {}".format(data_type, model_type)) + gs0 = fig.add_gridspec(6, 2, hspace=1.0) + + true_model = Model.create_synthetic_model(model_type) + true_model.mesh.y_edges = true_model.mesh.y_edges / 10.0 + + kwargs['vmin'] = np.log10(np.min(true_model.values)) + kwargs['vmax'] = np.log10(np.max(true_model.values)) + + ax = fig.add_subplot(gs0[0, 0]) + true_model.pcolor(flipY=True, ax=ax, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False); + results_2d.plot_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False); + + plt.ylim([-160, 60]) + + ax1 = fig.add_subplot(gs0[0, 1], sharex=ax, sharey=ax) + results_2d.plot_mean_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # By adding the useVariance keyword, we can make regions of lower confidence more transparent + ax1 = fig.add_subplot(gs0[1, 1], sharex=ax, sharey=ax) + results_2d.plot_mode_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # # # # # We can also choose to keep parameters above the DOI opaque. + # # # # results_2d.compute_doi() + # # # # plt.subplot(313) + # # # # results_2d.plot_mean_model(use_variance=True, mask_below_doi=True, **kwargs); + # # # # results_2d.plot_data_elevation(linewidth=0.3); + # # # # results_2d.plot_elevation(linewidth=0.3); + + ax1 = fig.add_subplot(gs0[2, 1], sharex=ax, sharey=ax) + results_2d.plot_best_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + ax1.set_title('Best model') + + del kwargs['vmin'] + del kwargs['vmax'] + + ax1 = fig.add_subplot(gs0[3, 1], sharex=ax, sharey=ax); ax1.set_title('5%') + results_2d.plot_percentile(ax=ax1, percent=0.05, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[4, 1], sharex=ax, sharey=ax); ax1.set_title('50%') + results_2d.plot_percentile(ax=ax1, percent=0.5, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[5, 1], sharex=ax, sharey=ax); ax1.set_title('95%') + results_2d.plot_percentile(ax=ax1, percent=0.95, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + #%% + # We can plot the parameter values that produced the highest posterior + ax1 = fig.add_subplot(gs0[2, 0], sharex=ax) + results_2d.plot_k_layers(ax=ax1, wrap_ylabel=True) + + ax1 = fig.add_subplot(gs0[1, 0], sharex=ax) + + ll, bb, ww, hh = ax1.get_position().bounds + ax1.set_position([ll, bb, ww*0.8, hh]) + + results_2d.plot_channel_saturation(ax=ax1, wrap_ylabel=True) + results_2d.plot_burned_in(ax=ax1, underlay=True) + + #%% + # Now we can start plotting some more interesting posterior properties. + # How about the confidence? 
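+    # A sketch (not executed in this example): regions of lower confidence can
+    # also be faded directly on the model panels using the use_variance keyword
+    # shown in the commented-out block above, e.g.
+    #   results_2d.plot_mean_model(ax=ax1, use_variance=True, wrap_clabel=True, **kwargs)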
+ ax1 = fig.add_subplot(gs0[3, 0], sharex=ax, sharey=ax) + results_2d.plot_confidence(ax=ax1); + results_2d.plot_data_elevation(ax=ax1, linewidth=0.3); + results_2d.plot_elevation(ax=ax1, linewidth=0.3); + + #%% + # We can take the interface depth posterior for each data point, + # and display an interface probability cross section + # This posterior can be washed out, so the clim_scaling keyword lets me saturate + # the top and bottom 0.5% of the colour range + ax1 = fig.add_subplot(gs0[4, 0], sharex=ax, sharey=ax) + ax1.set_title('P(Interface)') + results_2d.plot_interfaces(cmap='Greys', clim_scaling=0.5, ax=ax1); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[5, 0], sharex=ax, sharey=ax) + results_2d.plot_entropy(cmap='Greys', clim_scaling=0.5, ax=ax1); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + + plt.show() + # plt.savefig('{}_{}.png'.format(data_type, model_type), dpi=300) + + if __name__ == '__main__': + models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water'] + + for model in models: + try: + plot_2d_summary("../../../Parallel_Inference/", "resolve", model) + except Exception as e: + print(model) + print(e) + pass + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 21.524 seconds) + + +.. _sphx_glr_download_examples_Inference_2D_plot_inference_2d_resolve.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_inference_2d_resolve.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_inference_2d_resolve.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_inference_2d_resolve.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Inference_2D/plot_inference_2d_skytem.rst.txt b/docs/_sources/examples/Inference_2D/plot_inference_2d_skytem.rst.txt new file mode 100644 index 00000000..c43df09b --- /dev/null +++ b/docs/_sources/examples/Inference_2D/plot_inference_2d_skytem.rst.txt @@ -0,0 +1,972 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Inference_2D/plot_inference_2d_skytem.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Inference_2D_plot_inference_2d_skytem.py: + + +2D Posterior analysis of Skytem inference +----------------------------------------- + +All plotting in GeoBIPy can be carried out using the 3D inference class + +.. GENERATED FROM PYTHON SOURCE LINES 8-152 + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_skytem_001.png + :alt: skytem glacial, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_skytem_001.png + :class: sphx-glr-multi-img + + * + + .. 
image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_skytem_002.png + :alt: skytem saline_clay, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_skytem_002.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_skytem_003.png + :alt: skytem resistive_dolomites, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_skytem_003.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_skytem_004.png + :alt: skytem resistive_basement, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_skytem_004.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_skytem_005.png + :alt: skytem coastal_salt_water, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_skytem_005.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_skytem_006.png + :alt: skytem ice_over_salt_water, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_skytem_006.png + :class: sphx-glr-multi-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + Model: + mesh: + | RectilinearMesh2D: + | Shape: : (79, 132) + | x + | RectilinearMesh1D + | Number of Cells: + | | 79 + | Cell Centres: + | | StatArray + | | Name: Easting (m) + | | Address:['0x17f00d0d0'] + | | Shape: (79,) + | | Values: [ 0. 1. 2. ... 76. 77. 78.] + | | Min: 0.0 + | | Max: 78.0 + | | has_posterior: False + | + | Cell Edges: + | | StatArray + | | Name: Easting (m) + | | Address:['0x17f00ebd0'] + | | Shape: (80,) + | | Values: [-0.5 0.5 1.5 ... 76.5 77.5 78.5] + | | Min: -0.5 + | | Max: 78.5 + | | has_posterior: False + | + | log: + | | None + | relative_to: + | | StatArray + | | Name: + | | Address:['0x182cba8d0'] + | | Shape: (1,) + | | Values: [0.] + | | Min: 0.0 + | | Max: 0.0 + | | has_posterior: False + | + | y + | RectilinearMesh1D + | Number of Cells: + | | 132 + | Cell Centres: + | | StatArray + | | Name: elevation (m) + | | Address:['0x17f00fa50'] + | | Shape: (132,) + | | Values: [ -3.1875 -7.7625 -12.3375 ... 
-593.3625 -597.9375 -602.5125] + | | Min: -602.5125 + | | Max: -3.1875 + | | has_posterior: False + | + | Cell Edges: + | | StatArray + | | Name: elevation (m) + | | Address:['0x17f00e9d0'] + | | Shape: (133,) + | | Values: [ -0.9 -5.475 -10.05 ... -595.65 -600.225 -604.8 ] + | | Min: -604.8 + | | Max: -0.9 + | | has_posterior: False + | + | log: + | | None + | relative_to: + | | StatArray + | | Name: Elevation (m) + | | Address:['0x17ec00950'] + | | Shape: (79,) + | | Values: [0. 0. 0. ... 0. 0. 0.] + | | Min: 0.0 + | | Max: 0.0 + | | has_posterior: False + | + values: + | StatArray + | Name: Mean Conductivity ($\frac{S}{m}$) + | Address:['0x17f00d650'] + | Shape: (79, 132) + | Values: [[0.01120778 0.01120778 0.01089737 ... 0.01842322 0.01842322 0.01842322] + | [0.01004315 0.01004315 0.01001518 ... 0.01627435 0.01627435 0.01627435] + | [0.0098209 0.0098209 0.00983342 ... 0.01667789 0.01667789 0.01667789] + | ... + | [0.06755182 0.06755182 0.11932536 ... 0.0915802 0.0915802 0.0915802 ] + | [0.08104576 0.08104576 0.09688295 ... 0.09217019 0.09217019 0.09217019] + | [0.09121028 0.09121028 0.09943253 ... 0.11213815 0.11213815 0.11213815]] + | Min: 0.007485002271267226 + | Max: 0.21717666705189165 + | has_posterior: False + + + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + Model: + mesh: + | RectilinearMesh2D: + | Shape: : (79, 132) + | x + | RectilinearMesh1D + | Number of Cells: + | | 79 + | Cell Centres: + | | StatArray + | | Name: Easting (m) + | | Address:['0x182cb9c50'] + | | Shape: (79,) + | | Values: [ 0. 1. 2. ... 76. 77. 78.] + | | Min: 0.0 + | | Max: 78.0 + | | has_posterior: False + | + | Cell Edges: + | | StatArray + | | Name: Easting (m) + | | Address:['0x17f114d50'] + | | Shape: (80,) + | | Values: [-0.5 0.5 1.5 ... 76.5 77.5 78.5] + | | Min: -0.5 + | | Max: 78.5 + | | has_posterior: False + | + | log: + | | None + | relative_to: + | | StatArray + | | Name: + | | Address:['0x182c2d4d0'] + | | Shape: (1,) + | | Values: [0.] + | | Min: 0.0 + | | Max: 0.0 + | | has_posterior: False + | + | y + | RectilinearMesh1D + | Number of Cells: + | | 132 + | Cell Centres: + | | StatArray + | | Name: elevation (m) + | | Address:['0x17f115b50'] + | | Shape: (132,) + | | Values: [ -3.1875 -7.7625 -12.3375 ... 
-593.3625 -597.9375 -602.5125] + | | Min: -602.5125 + | | Max: -3.1875 + | | has_posterior: False + | + | Cell Edges: + | | StatArray + | | Name: elevation (m) + | | Address:['0x17ec01c50'] + | | Shape: (133,) + | | Values: [ -0.9 -5.475 -10.05 ... -595.65 -600.225 -604.8 ] + | | Min: -604.8 + | | Max: -0.9 + | | has_posterior: False + | + | log: + | | None + | relative_to: + | | StatArray + | | Name: Elevation (m) + | | Address:['0x17ec036d0'] + | | Shape: (79,) + | | Values: [0. 0. 0. ... 0. 0. 0.] + | | Min: 0.0 + | | Max: 0.0 + | | has_posterior: False + | + values: + | StatArray + | Name: Mean Conductivity ($\frac{S}{m}$) + | Address:['0x17ec006d0'] + | Shape: (79, 132) + | Values: [[0.01022279 0.01022279 0.01016321 ... 0.0112021 0.0112021 0.0112021 ] + | [0.01121804 0.01121804 0.01062572 ... 0.0110357 0.0110357 0.0110357 ] + | [0.01049917 0.01049917 0.01007536 ... 0.01354095 0.01354095 0.01354095] + | ... + | [0.06896877 0.06896877 0.082946 ... 0.07112381 0.07112381 0.07112381] + | [0.08055856 0.08055856 0.0985627 ... 0.07903887 0.07903887 0.07903887] + | [0.09166633 0.09166633 0.09878492 ... 0.11154601 0.11154601 0.11154601]] + | Min: 0.006775009348709323 + | Max: 0.8077378262817626 + | has_posterior: False + + + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + Model: + mesh: + | RectilinearMesh2D: + | Shape: : (79, 132) + | x + | RectilinearMesh1D + | Number of Cells: + | | 79 + | Cell Centres: + | | StatArray + | | Name: Easting (m) + | | Address:['0x18229f350'] + | | Shape: (79,) + | | Values: [ 0. 1. 2. ... 76. 77. 78.] + | | Min: 0.0 + | | Max: 78.0 + | | has_posterior: False + | + | Cell Edges: + | | StatArray + | | Name: Easting (m) + | | Address:['0x17f21de50'] + | | Shape: (80,) + | | Values: [-0.5 0.5 1.5 ... 76.5 77.5 78.5] + | | Min: -0.5 + | | Max: 78.5 + | | has_posterior: False + | + | log: + | | None + | relative_to: + | | StatArray + | | Name: + | | Address:['0x182600c50'] + | | Shape: (1,) + | | Values: [0.] + | | Min: 0.0 + | | Max: 0.0 + | | has_posterior: False + | + | y + | RectilinearMesh1D + | Number of Cells: + | | 132 + | Cell Centres: + | | StatArray + | | Name: elevation (m) + | | Address:['0x18229d5d0'] + | | Shape: (132,) + | | Values: [ -3.1875 -7.7625 -12.3375 ... 
-593.3625 -597.9375 -602.5125] + | | Min: -602.5125 + | | Max: -3.1875 + | | has_posterior: False + | + | Cell Edges: + | | StatArray + | | Name: elevation (m) + | | Address:['0x18229dbd0'] + | | Shape: (133,) + | | Values: [ -0.9 -5.475 -10.05 ... -595.65 -600.225 -604.8 ] + | | Min: -604.8 + | | Max: -0.9 + | | has_posterior: False + | + | log: + | | None + | relative_to: + | | StatArray + | | Name: Elevation (m) + | | Address:['0x18229e6d0'] + | | Shape: (79,) + | | Values: [0. 0. 0. ... 0. 0. 0.] + | | Min: 0.0 + | | Max: 0.0 + | | has_posterior: False + | + values: + | StatArray + | Name: Mean Conductivity ($\frac{S}{m}$) + | Address:['0x18229cb50'] + | Shape: (79, 132) + | Values: [[0.02104953 0.02104953 0.02099504 ... 0.01952426 0.01952426 0.01952426] + | [0.02008789 0.02008789 0.01997577 ... 0.02017019 0.02017019 0.02017019] + | [0.0198301 0.0198301 0.01986773 ... 0.0145253 0.0145253 0.0145253 ] + | ... + | [0.00511314 0.00511314 0.0029654 ... 0.01258484 0.01258484 0.01258484] + | [0.00392961 0.00392961 0.00385047 ... 0.00870505 0.00870505 0.00870505] + | [0.00279718 0.00279718 0.00269725 ... 0.00924302 0.00924302 0.00924302]] + | Min: 0.0008600086897131668 + | Max: 9.774547726888374 + | has_posterior: False + + + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + Model: + mesh: + | RectilinearMesh2D: + | Shape: : (79, 132) + | x + | RectilinearMesh1D + | Number of Cells: + | | 79 + | Cell Centres: + | | StatArray + | | Name: Easting (m) + | | Address:['0x17eab3d50'] + | | Shape: (79,) + | | Values: [ 0. 1. 2. ... 76. 77. 78.] + | | Min: 0.0 + | | Max: 78.0 + | | has_posterior: False + | + | Cell Edges: + | | StatArray + | | Name: Easting (m) + | | Address:['0x17eab07d0'] + | | Shape: (80,) + | | Values: [-0.5 0.5 1.5 ... 76.5 77.5 78.5] + | | Min: -0.5 + | | Max: 78.5 + | | has_posterior: False + | + | log: + | | None + | relative_to: + | | StatArray + | | Name: + | | Address:['0x17f2d60d0'] + | | Shape: (1,) + | | Values: [0.] + | | Min: 0.0 + | | Max: 0.0 + | | has_posterior: False + | + | y + | RectilinearMesh1D + | Number of Cells: + | | 132 + | Cell Centres: + | | StatArray + | | Name: elevation (m) + | | Address:['0x17eab31d0'] + | | Shape: (132,) + | | Values: [ -3.1875 -7.7625 -12.3375 ... 
-593.3625 -597.9375 -602.5125] + | | Min: -602.5125 + | | Max: -3.1875 + | | has_posterior: False + | + | Cell Edges: + | | StatArray + | | Name: elevation (m) + | | Address:['0x17eab0bd0'] + | | Shape: (133,) + | | Values: [ -0.9 -5.475 -10.05 ... -595.65 -600.225 -604.8 ] + | | Min: -604.8 + | | Max: -0.9 + | | has_posterior: False + | + | log: + | | None + | relative_to: + | | StatArray + | | Name: Elevation (m) + | | Address:['0x182fd6bd0'] + | | Shape: (79,) + | | Values: [0. 0. 0. ... 0. 0. 0.] + | | Min: 0.0 + | | Max: 0.0 + | | has_posterior: False + | + values: + | StatArray + | Name: Mean Conductivity ($\frac{S}{m}$) + | Address:['0x182fd5650'] + | Shape: (79, 132) + | Values: [[0.01147797 0.01147797 0.01033699 ... 0.00131732 0.00131732 0.00131732] + | [0.00999299 0.00999299 0.01000167 ... 0.00120333 0.00120333 0.00120333] + | [0.01007022 0.01007022 0.01005285 ... 0.00844038 0.00844038 0.00844038] + | ... + | [0.06763163 0.06763163 0.11942951 ... 0.09924038 0.09924038 0.09924038] + | [0.08122468 0.08122468 0.09651811 ... 0.09608009 0.09608009 0.09608009] + | [0.09162505 0.09162505 0.09883382 ... 0.11272764 0.11272764 0.11272764]] + | Min: 0.0007661982574834428 + | Max: 0.21045876958147633 + | has_posterior: False + + + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + Model: + mesh: + | RectilinearMesh2D: + | Shape: : (79, 132) + | x + | RectilinearMesh1D + | Number of Cells: + | | 79 + | Cell Centres: + | | StatArray + | | Name: Easting (m) + | | Address:['0x181fa95d0'] + | | Shape: (79,) + | | Values: [ 0. 1. 2. ... 76. 77. 78.] + | | Min: 0.0 + | | Max: 78.0 + | | has_posterior: False + | + | Cell Edges: + | | StatArray + | | Name: Easting (m) + | | Address:['0x181fa88d0'] + | | Shape: (80,) + | | Values: [-0.5 0.5 1.5 ... 76.5 77.5 78.5] + | | Min: -0.5 + | | Max: 78.5 + | | has_posterior: False + | + | log: + | | None + | relative_to: + | | StatArray + | | Name: + | | Address:['0x181f49a50'] + | | Shape: (1,) + | | Values: [0.] + | | Min: 0.0 + | | Max: 0.0 + | | has_posterior: False + | + | y + | RectilinearMesh1D + | Number of Cells: + | | 132 + | Cell Centres: + | | StatArray + | | Name: elevation (m) + | | Address:['0x181fa8450'] + | | Shape: (132,) + | | Values: [ -3.1875 -7.7625 -12.3375 ... 
-593.3625 -597.9375 -602.5125] + | | Min: -602.5125 + | | Max: -3.1875 + | | has_posterior: False + | + | Cell Edges: + | | StatArray + | | Name: elevation (m) + | | Address:['0x181fa8150'] + | | Shape: (133,) + | | Values: [ -0.9 -5.475 -10.05 ... -595.65 -600.225 -604.8 ] + | | Min: -604.8 + | | Max: -0.9 + | | has_posterior: False + | + | log: + | | None + | relative_to: + | | StatArray + | | Name: Elevation (m) + | | Address:['0x181fa9150'] + | | Shape: (79,) + | | Values: [0. 0. 0. ... 0. 0. 0.] + | | Min: 0.0 + | | Max: 0.0 + | | has_posterior: False + | + values: + | StatArray + | Name: Mean Conductivity ($\frac{S}{m}$) + | Address:['0x181fa87d0'] + | Shape: (79, 132) + | Values: [[1.02310541 1.02310541 1.02438661 ... 1.05490484 1.05490484 1.05490484] + | [1.02229478 1.02229478 1.02483553 ... 0.83077926 0.83077926 0.83077926] + | [1.02256309 1.02256309 1.02298923 ... 1.16206135 1.16206135 1.16206135] + | ... + | [0.18317234 0.18317234 0.18317234 ... 0.04611609 0.04611609 0.04611609] + | [0.13296225 0.13296225 0.13296225 ... 0.02326468 0.02326468 0.02326468] + | [0.08897776 0.08897776 0.08897776 ... 0.02807133 0.02807133 0.02807133]] + | Min: 0.005922638381206899 + | Max: 15.778868401978407 + | has_posterior: False + + + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + Model: + mesh: + | RectilinearMesh2D: + | Shape: : (79, 132) + | x + | RectilinearMesh1D + | Number of Cells: + | | 79 + | Cell Centres: + | | StatArray + | | Name: Easting (m) + | | Address:['0x180d2a5d0'] + | | Shape: (79,) + | | Values: [ 0. 1. 2. ... 76. 77. 78.] + | | Min: 0.0 + | | Max: 78.0 + | | has_posterior: False + | + | Cell Edges: + | | StatArray + | | Name: Easting (m) + | | Address:['0x180d2aad0'] + | | Shape: (80,) + | | Values: [-0.5 0.5 1.5 ... 76.5 77.5 78.5] + | | Min: -0.5 + | | Max: 78.5 + | | has_posterior: False + | + | log: + | | None + | relative_to: + | | StatArray + | | Name: + | | Address:['0x17f866cd0'] + | | Shape: (1,) + | | Values: [0.] + | | Min: 0.0 + | | Max: 0.0 + | | has_posterior: False + | + | y + | RectilinearMesh1D + | Number of Cells: + | | 132 + | Cell Centres: + | | StatArray + | | Name: elevation (m) + | | Address:['0x180d2a750'] + | | Shape: (132,) + | | Values: [ -3.1875 -7.7625 -12.3375 ... 
-593.3625 -597.9375 -602.5125] + | | Min: -602.5125 + | | Max: -3.1875 + | | has_posterior: False + | + | Cell Edges: + | | StatArray + | | Name: elevation (m) + | | Address:['0x180d2ac50'] + | | Shape: (133,) + | | Values: [ -0.9 -5.475 -10.05 ... -595.65 -600.225 -604.8 ] + | | Min: -604.8 + | | Max: -0.9 + | | has_posterior: False + | + | log: + | | None + | relative_to: + | | StatArray + | | Name: Elevation (m) + | | Address:['0x180d216d0'] + | | Shape: (79,) + | | Values: [0. 0. 0. ... 0. 0. 0.] + | | Min: 0.0 + | | Max: 0.0 + | | has_posterior: False + | + values: + | StatArray + | Name: Mean Conductivity ($\frac{S}{m}$) + | Address:['0x180d21350'] + | Shape: (79, 132) + | Values: [[0.00056777 0.00056777 0.00056795 ... 0.00059446 0.00059446 0.00059446] + | [0.00052266 0.00052266 0.00052266 ... 0.00071041 0.00071041 0.00071041] + | [0.00080071 0.00080071 0.00080071 ... 0.00104502 0.00104502 0.00104502] + | ... + | [0.00854703 0.00854703 0.00857136 ... 0.00167267 0.00167267 0.00167267] + | [0.00894549 0.00894549 0.00908742 ... 0.00255047 0.00255047 0.00255047] + | [0.00925526 0.00925526 0.00948505 ... 0.00507188 0.00507188 0.00507188]] + | Min: 0.0004901610032814771 + | Max: 0.7309893437585234 + | has_posterior: False + + + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + self.n_components=1, self.nTimes=array([26, 19]) + + + + + + +| + +.. code-block:: Python + + + import argparse + import matplotlib.pyplot as plt + import numpy as np + from geobipy import Model + from geobipy import Inference2D + + def plot_2d_summary(folder, data_type, model_type): + #%% + # Inference for a line of inferences + # ++++++++++++++++++++++++++++++++++ + # + # We can instantiate the inference handler by providing a path to the directory containing + # HDF5 files generated by GeoBIPy. + # + # The InfereceXD classes are low memory. They only read information from the HDF5 files + # as and when it is needed. + # + # The first time you use these classes to create plots, expect longer initial processing times. + # I precompute expensive properties and store them in the HDF5 files for later use. 
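+    # The verbose "Model:" summaries in the captured output above come from the
+    # print(results_2d.mean_parameters().summary) call a few lines below.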
+ + from numpy.random import Generator + from numpy.random import PCG64DXSM + generator = PCG64DXSM(seed=0) + prng = Generator(generator) + + #%% + results_2d = Inference2D.fromHdf('{}/{}/{}/0.0.h5'.format(folder, data_type, model_type), prng=prng) + + kwargs = { + "log" : 10, + "cmap" : 'jet' + } + + fig = plt.figure(figsize=(16, 8)) + plt.suptitle("{} {}".format(data_type, model_type)) + gs0 = fig.add_gridspec(6, 2, hspace=1.0) + + true_model = Model.create_synthetic_model(model_type) + + kwargs['vmin'] = np.log10(np.min(true_model.values)) + kwargs['vmax'] = np.log10(np.max(true_model.values)) + + ax = fig.add_subplot(gs0[0, 0]) + true_model.pcolor(flipY=True, ax=ax, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False); + results_2d.plot_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False); + + plt.ylim([-550, 60]) + + print(results_2d.mean_parameters().summary) + + ax1 = fig.add_subplot(gs0[0, 1], sharex=ax, sharey=ax) + results_2d.plot_mean_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # By adding the useVariance keyword, we can make regions of lower confidence more transparent + ax1 = fig.add_subplot(gs0[1, 1], sharex=ax, sharey=ax) + results_2d.plot_mode_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # # # # # We can also choose to keep parameters above the DOI opaque. + # # # # results_2d.compute_doi() + # # # # plt.subplot(313) + # # # # results_2d.plot_mean_model(use_variance=True, mask_below_doi=True, **kwargs); + # # # # results_2d.plot_data_elevation(linewidth=0.3); + # # # # results_2d.plot_elevation(linewidth=0.3); + + ax1 = fig.add_subplot(gs0[2, 1], sharex=ax, sharey=ax) + results_2d.plot_best_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + ax1.set_title('Best model') + + del kwargs['vmin'] + del kwargs['vmax'] + + ax1 = fig.add_subplot(gs0[3, 1], sharex=ax, sharey=ax); ax1.set_title('5%') + results_2d.plot_percentile(ax=ax1, percent=0.05, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[4, 1], sharex=ax, sharey=ax); ax1.set_title('50%') + results_2d.plot_percentile(ax=ax1, percent=0.5, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[5, 1], sharex=ax, sharey=ax); ax1.set_title('95%') + results_2d.plot_percentile(ax=ax1, percent=0.95, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + #%% + # We can plot the parameter values that produced the highest posterior + ax1 = fig.add_subplot(gs0[2, 0], sharex=ax) + results_2d.plot_k_layers(ax=ax1, wrap_ylabel=True) + + ax1 = fig.add_subplot(gs0[1, 0], sharex=ax) + + ll, bb, ww, hh = ax1.get_position().bounds + ax1.set_position([ll, bb, ww*0.8, hh]) + + results_2d.plot_channel_saturation(ax=ax1, wrap_ylabel=True) + results_2d.plot_burned_in(ax=ax1, underlay=True) + + #%% + # Now we can start plotting some more interesting posterior properties. + # How about the confidence? 
+ ax1 = fig.add_subplot(gs0[3, 0], sharex=ax, sharey=ax) + results_2d.plot_confidence(ax=ax1); + results_2d.plot_data_elevation(ax=ax1, linewidth=0.3); + results_2d.plot_elevation(ax=ax1, linewidth=0.3); + + #%% + # We can take the interface depth posterior for each data point, + # and display an interface probability cross section + # This posterior can be washed out, so the clim_scaling keyword lets me saturate + # the top and bottom 0.5% of the colour range + ax1 = fig.add_subplot(gs0[4, 0], sharex=ax, sharey=ax) + ax1.set_title('P(Interface)') + results_2d.plot_interfaces(cmap='Greys', clim_scaling=0.5, ax=ax1); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[5, 0], sharex=ax, sharey=ax) + results_2d.plot_entropy(cmap='Greys', clim_scaling=0.5, ax=ax1); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # plt.show() + plt.savefig('{}_{}.png'.format(data_type, model_type), dpi=300) + + + if __name__ == '__main__': + types = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water'] + + for model in types: + # try: + plot_2d_summary('../../../Parallel_Inference/', "skytem", model) + # except Exception as e: + # print(model) + # print(e) + # pass + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 18.965 seconds) + + +.. _sphx_glr_download_examples_Inference_2D_plot_inference_2d_skytem.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_inference_2d_skytem.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_inference_2d_skytem.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_inference_2d_skytem.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Inference_2D/plot_inference_2d_tempest.rst.txt b/docs/_sources/examples/Inference_2D/plot_inference_2d_tempest.rst.txt new file mode 100644 index 00000000..a8127abf --- /dev/null +++ b/docs/_sources/examples/Inference_2D/plot_inference_2d_tempest.rst.txt @@ -0,0 +1,445 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Inference_2D/plot_inference_2d_tempest.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Inference_2D_plot_inference_2d_tempest.py: + + +2D Posterior analysis of Tempest inference +------------------------------------------ + +All plotting in GeoBIPy can be carried out using the 3D inference class + +.. GENERATED FROM PYTHON SOURCE LINES 8-153 + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_tempest_001.png + :alt: tempest glacial, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_tempest_001.png + :class: sphx-glr-multi-img + + * + + .. 
image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_tempest_002.png + :alt: tempest saline_clay, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_tempest_002.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_tempest_003.png + :alt: tempest resistive_dolomites, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_tempest_003.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_tempest_004.png + :alt: tempest resistive_basement, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_tempest_004.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_tempest_005.png + :alt: tempest coastal_salt_water, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_tempest_005.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_tempest_006.png + :alt: tempest ice_over_salt_water, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface) + :srcset: /examples/Inference_2D/images/sphx_glr_plot_inference_2d_tempest_006.png + :class: sphx-glr-multi-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, 
self.nTimes=array([15]) +
self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + self.n_components=2, self.nTimes=array([15]) + + + + + + +| + +.. code-block:: Python + + + import argparse + import matplotlib.pyplot as plt + import numpy as np + from geobipy import Model + from geobipy import Inference2D + + def plot_2d_summary(folder, data_type, model_type): + #%% + # Inference for a line of inferences + # ++++++++++++++++++++++++++++++++++ + # + # We can instantiate the inference handler by providing a path to the directory containing + # HDF5 files generated by GeoBIPy. + # + # The InfereceXD classes are low memory. They only read information from the HDF5 files + # as and when it is needed. 
+ # + # The first time you use these classes to create plots, expect longer initial processing times. + # I precompute expensive properties and store them in the HDF5 files for later use. + + from numpy.random import Generator + from numpy.random import PCG64DXSM + generator = PCG64DXSM(seed=0) + prng = Generator(generator) + + #%% + results_2d = Inference2D.fromHdf('{}/{}/{}/0.0.h5'.format(folder, data_type, model_type), prng=prng) + + kwargs = { + "log" : 10, + "cmap" : 'jet' + } + + fig = plt.figure(figsize=(16, 8)) + plt.suptitle("{} {}".format(data_type, model_type)) + gs0 = fig.add_gridspec(6, 2, hspace=1.0) + + true_model = Model.create_synthetic_model(model_type) + + kwargs['vmin'] = np.log10(np.min(true_model.values)) + kwargs['vmax'] = np.log10(np.max(true_model.values)) + + ax = fig.add_subplot(gs0[0, 0]) + true_model.pcolor(flipY=True, ax=ax, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False); + results_2d.plot_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False); + + plt.ylim([-550, 60]) + + ax1 = fig.add_subplot(gs0[0, 1], sharex=ax, sharey=ax) + results_2d.plot_mean_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # By adding the useVariance keyword, we can make regions of lower confidence more transparent + ax1 = fig.add_subplot(gs0[1, 1], sharex=ax, sharey=ax) + results_2d.plot_mode_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # # # # # We can also choose to keep parameters above the DOI opaque. + # # # # results_2d.compute_doi() + # # # # plt.subplot(313) + # # # # results_2d.plot_mean_model(use_variance=True, mask_below_doi=True, **kwargs); + # # # # results_2d.plot_data_elevation(linewidth=0.3); + # # # # results_2d.plot_elevation(linewidth=0.3); + + ax1 = fig.add_subplot(gs0[2, 1], sharex=ax, sharey=ax) + results_2d.plot_best_model(ax=ax1, wrap_clabel=True, **kwargs); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + ax1.set_title('Best model') + + del kwargs['vmin'] + del kwargs['vmax'] + + ax1 = fig.add_subplot(gs0[3, 1], sharex=ax, sharey=ax); ax1.set_title('5%') + results_2d.plot_percentile(ax=ax1, percent=0.05, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[4, 1], sharex=ax, sharey=ax); ax1.set_title('50%') + results_2d.plot_percentile(ax=ax1, percent=0.5, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[5, 1], sharex=ax, sharey=ax); ax1.set_title('95%') + results_2d.plot_percentile(ax=ax1, percent=0.95, wrap_clabel=True, **kwargs) + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + #%% + # We can plot the parameter values that produced the highest posterior + ax1 = fig.add_subplot(gs0[2, 0], sharex=ax) + results_2d.plot_k_layers(ax=ax1, wrap_ylabel=True) + + ax1 = fig.add_subplot(gs0[1, 0], sharex=ax) + + ll, bb, ww, hh = ax1.get_position().bounds + ax1.set_position([ll, bb, ww*0.8, hh]) + + results_2d.plot_channel_saturation(ax=ax1, wrap_ylabel=True) + results_2d.plot_burned_in(ax=ax1, underlay=True) + + #%% + # Now we can start 
plotting some more interesting posterior properties. + # How about the confidence? + ax1 = fig.add_subplot(gs0[3, 0], sharex=ax, sharey=ax) + results_2d.plot_confidence(ax=ax1); + results_2d.plot_data_elevation(ax=ax1, linewidth=0.3); + results_2d.plot_elevation(ax=ax1, linewidth=0.3); + + #%% + # We can take the interface depth posterior for each data point, + # and display an interface probability cross section + # This posterior can be washed out, so the clim_scaling keyword lets me saturate + # the top and bottom 0.5% of the colour range + ax1 = fig.add_subplot(gs0[4, 0], sharex=ax, sharey=ax) + ax1.set_title('P(Interface)') + results_2d.plot_interfaces(cmap='Greys', clim_scaling=0.5, ax=ax1); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + ax1 = fig.add_subplot(gs0[5, 0], sharex=ax, sharey=ax) + results_2d.plot_entropy(cmap='Greys', clim_scaling=0.5, ax=ax1); + results_2d.plot_data_elevation(linewidth=0.3, ax=ax1); + results_2d.plot_elevation(linewidth=0.3, ax=ax1); + + # plt.show() + plt.savefig('{}_{}.png'.format(data_type, model_type), dpi=300) + + + if __name__ == '__main__': + models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water'] + + # import warnings + # warnings.filterwarnings('error') + for model in models: + try: + plot_2d_summary('../../../Parallel_Inference/', "tempest", model) + except Exception as e: + print(model) + print(e) + pass + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 19.999 seconds) + + +.. _sphx_glr_download_examples_Inference_2D_plot_inference_2d_tempest.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_inference_2d_tempest.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_inference_2d_tempest.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_inference_2d_tempest.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Inference_2D/readme.rst.txt b/docs/_sources/examples/Inference_2D/readme.rst.txt new file mode 100644 index 00000000..669f32ab --- /dev/null +++ b/docs/_sources/examples/Inference_2D/readme.rst.txt @@ -0,0 +1,2 @@ +2D Inference +============ \ No newline at end of file diff --git a/docs/_sources/examples/Inference_2D/sg_execution_times.rst.txt b/docs/_sources/examples/Inference_2D/sg_execution_times.rst.txt new file mode 100644 index 00000000..ff3edf14 --- /dev/null +++ b/docs/_sources/examples/Inference_2D/sg_execution_times.rst.txt @@ -0,0 +1,43 @@ + +:orphan: + +.. _sphx_glr_examples_Inference_2D_sg_execution_times: + + +Computation times +================= +**01:00.488** total execution time for 3 files **from examples/Inference_2D**: + +.. container:: + + .. raw:: html + + + + + + + + .. 
list-table:: + :header-rows: 1 + :class: table table-striped sg-datatable + + * - Example + - Time + - Mem (MB) + * - :ref:`sphx_glr_examples_Inference_2D_plot_inference_2d_resolve.py` (``plot_inference_2d_resolve.py``) + - 00:21.524 + - 0.0 + * - :ref:`sphx_glr_examples_Inference_2D_plot_inference_2d_tempest.py` (``plot_inference_2d_tempest.py``) + - 00:19.999 + - 0.0 + * - :ref:`sphx_glr_examples_Inference_2D_plot_inference_2d_skytem.py` (``plot_inference_2d_skytem.py``) + - 00:18.965 + - 0.0 diff --git a/docs/_sources/examples/Meshes/plot_rectilinear_mesh_1d.rst.txt b/docs/_sources/examples/Meshes/plot_rectilinear_mesh_1d.rst.txt new file mode 100644 index 00000000..c3682320 --- /dev/null +++ b/docs/_sources/examples/Meshes/plot_rectilinear_mesh_1d.rst.txt @@ -0,0 +1,716 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Meshes/plot_rectilinear_mesh_1d.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Meshes_plot_rectilinear_mesh_1d.py: + + +1D Rectilinear Mesh +------------------- + +.. GENERATED FROM PYTHON SOURCE LINES 6-14 + +.. code-block:: Python + + from copy import deepcopy + from geobipy import DataArray, StatArray + from geobipy import RectilinearMesh1D, RectilinearMesh2D, RectilinearMesh2D_stitched + import matplotlib.gridspec as gridspec + import matplotlib.pyplot as plt + import numpy as np + import h5py + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 15-18 + +The basics +++++++++++ +Instantiate a new 1D rectilinear mesh by specifying cell centres, edges, or widths. + +.. GENERATED FROM PYTHON SOURCE LINES 18-20 + +.. code-block:: Python + + x = StatArray(np.cumsum(np.arange(0.0, 10.0)), 'Depth', 'm') + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 21-22 + +Cell edges + +.. GENERATED FROM PYTHON SOURCE LINES 22-24 + +.. code-block:: Python + + rm = RectilinearMesh1D(edges=x, centres=None, widths=None) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 25-27 + +We can plot the grid of the mesh +Or Pcolor the mesh showing. An array of cell values is used as the colour. + +.. GENERATED FROM PYTHON SOURCE LINES 27-72 + +.. 
code-block:: Python + + arr = StatArray(np.random.randn(*rm.shape), "Name", "Units") + p=0; plt.figure(p) + plt.subplot(121) + _ = rm.plot_grid(transpose=True, flip=True) + plt.subplot(122) + _ = rm.pcolor(arr, grid=True, transpose=True, flip=True) + + # Mask the mesh cells by a distance + rm_masked, indices, arr2 = rm.mask_cells(2.0, values=arr) + p+=1; plt.figure(p) + _ = rm_masked.pcolor(StatArray(arr2), grid=True, transpose=True, flip=True) + + # Writing and reading to/from HDF5 + # ++++++++++++++++++++++++++++++++ + with h5py.File('rm1d.h5', 'w') as f: + rm.toHdf(f, 'rm1d') + + with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d']) + + p+=1; plt.figure(p) + plt.subplot(121) + _ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) + plt.subplot(122) + _ = rm1.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) + + with h5py.File('rm1d.h5', 'w') as f: + rm.createHdf(f, 'rm1d', add_axis=10) + for i in range(10): + rm.writeHdf(f, 'rm1d', index=i) + + with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d'], index=0) + with h5py.File('rm1d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['rm1d']) + + p+=1; plt.figure(p) + plt.subplot(131) + _ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) + plt.subplot(132) + _ = rm1.pcolor(arr, grid=True, transpose=True, flip=True) + plt.subplot(133) + _ = rm2.pcolor(np.repeat(arr[None, :], 10, 0), grid=True, flipY=True) + + + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_001.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_001.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_002.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_002.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_003.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_003.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_004.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_004.png + :class: sphx-glr-multi-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 73-77 + +Log-space rectilinear mesh +++++++++++++++++++++++++++ +Instantiate a new 1D rectilinear mesh by specifying cell centres or edges. +Here we use edges + +.. GENERATED FROM PYTHON SOURCE LINES 77-79 + +.. code-block:: Python + + x = StatArray(np.logspace(-3, 3, 10), 'Depth', 'm') + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 80-123 + +.. code-block:: Python + + rm = RectilinearMesh1D(edges=x, log=10) + + # We can plot the grid of the mesh + # Or Pcolor the mesh showing. An array of cell values is used as the colour. 
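+ # Note: the edges above span 10^-3 to 10^3 m, so with log=10 the mesh is treated in + # log10 space and the cells are evenly spaced on the log axis rather than in metres.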
+ p+=1; plt.figure(p) + plt.subplot(121) + _ = rm.plot_grid(transpose=True, flip=True) + plt.subplot(122) + arr = StatArray(np.random.randn(rm.nCells), "Name", "Units") + _ = rm.pcolor(arr, grid=True, transpose=True, flip=True) + + # Writing and reading to/from HDF5 + # ++++++++++++++++++++++++++++++++ + with h5py.File('rm1d.h5', 'w') as f: + rm.toHdf(f, 'rm1d') + + with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d']) + + p+=1; plt.figure(p) + plt.subplot(121) + _ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) + plt.subplot(122) + _ = rm1.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) + + with h5py.File('rm1d.h5', 'w') as f: + rm.createHdf(f, 'rm1d', add_axis=10) + for i in range(10): + rm.writeHdf(f, 'rm1d', index=i) + + with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d'], index=0) + with h5py.File('rm1d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['rm1d']) + + p+=1; plt.figure(p) + plt.subplot(131) + _ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) + plt.subplot(132) + _ = rm1.pcolor(arr, grid=True, transpose=True, flip=True) + plt.subplot(133) + _ = rm2.pcolor(np.repeat(arr[None, :], 10, 0), grid=True, flipY=True) + + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_005.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_005.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_006.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_006.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_007.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_007.png + :class: sphx-glr-multi-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 124-128 + +relative_to +++++++++++ +Instantiate a new 1D rectilinear mesh by specifying cell centres or edges. +Here we use edges + +.. GENERATED FROM PYTHON SOURCE LINES 128-130 + +.. code-block:: Python + + x = StatArray(np.arange(11.0), 'Deviation', 'm') + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 131-133 + +.. code-block:: Python + + rm = RectilinearMesh1D(edges=x, relative_to=5.0) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 134-136 + +We can plot the grid of the mesh +Or Pcolor the mesh showing. An array of cell values is used as the colour. + +.. GENERATED FROM PYTHON SOURCE LINES 136-184 + +.. 
code-block:: Python + + p+=1; plt.figure(p) + plt.subplot(121) + _ = rm.plot_grid(transpose=True, flip=True) + plt.subplot(122) + arr = StatArray(np.random.randn(rm.nCells), "Name", "Units") + _ = rm.pcolor(arr, grid=True, transpose=True, flip=True) + + # Writing and reading to/from HDF5 + # ++++++++++++++++++++++++++++++++ + with h5py.File('rm1d.h5', 'w') as f: + rm.createHdf(f, 'rm1d') + rm.writeHdf(f, 'rm1d') + + with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d']) + + p+=1; plt.figure(p) + plt.subplot(121) + _ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) + plt.subplot(122) + _ = rm1.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) + + with h5py.File('rm1d.h5', 'w') as f: + rm.createHdf(f, 'rm1d', add_axis=3) + for i in range(3): + rm.relative_to += 0.5 + rm.writeHdf(f, 'rm1d', index=i) + + with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d'], index=0) + with h5py.File('rm1d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['rm1d']) + + p+=1; plt.figure(p) + plt.subplot(131) + _ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True) + plt.subplot(132) + _ = rm1.pcolor(arr, grid=True, transpose=True, flip=True) + plt.subplot(133) + _ = rm2.pcolor(np.repeat(arr[None, :], 3, 0), grid=True, flipY=True) + + + # Making a mesh perturbable + # +++++++++++++++++++++++++ + n_cells = 2 + widths = DataArray(np.full(n_cells, fill_value=10.0), 'test') + rm = RectilinearMesh1D(widths=widths, relative_to=0.0) + + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_008.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_008.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_009.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_009.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_010.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_010.png + :class: sphx-glr-multi-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 185-192 + +Randomness and Model Perturbations +++++++++++++++++++++++++++++++++++ +We can set the priors on the 1D model by assigning minimum and maximum layer +depths and a maximum number of layers. These are used to create priors on +the number of cells in the model, a new depth interface, new parameter values +and the vertical gradient of those parameters. +The halfSpaceValue is used as a reference value for the parameter prior. + +.. GENERATED FROM PYTHON SOURCE LINES 192-203 + +.. code-block:: Python + + from numpy.random import Generator + from numpy.random import PCG64DXSM + generator = PCG64DXSM(seed=0) + prng = Generator(generator) + + # Set the priors + rm.set_priors(min_edge = 1.0, + max_edge = 150.0, + max_cells = 30, + prng = prng) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 204-205 + +We can evaluate the prior of the model using depths only + +.. GENERATED FROM PYTHON SOURCE LINES 205-207 + +.. code-block:: Python + + print('Log probability of the Mesh given its priors: ', rm.probability) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + Log probability of the Mesh given its priors: -3.367295829986474 + + + + +.. 
GENERATED FROM PYTHON SOURCE LINES 208-211 + +To propose new meshes, we specify the probabilities of creating, removing, perturbing, and not changing +an edge interface +Here we force the creation of a layer. + +.. GENERATED FROM PYTHON SOURCE LINES 211-216 + +.. code-block:: Python + + rm.set_proposals(probabilities = [0.25, 0.25, 0.25, 0.25], prng=prng) + rm.set_posteriors() + + rm0 = deepcopy(rm) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 217-218 + +We can then perturb the layers of the model + +.. GENERATED FROM PYTHON SOURCE LINES 218-222 + +.. code-block:: Python + + for i in range(1000): + rm = rm.perturb() + rm.update_posteriors() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 223-245 + +.. code-block:: Python + + p+=1; fig = plt.figure(p) + ax = rm._init_posterior_plots(fig) + + rm.plot_posteriors(axes=ax) + + with h5py.File('rm1d.h5', 'w') as f: + rm.createHdf(f, 'rm1d', withPosterior = True) + rm.writeHdf(f, 'rm1d', withPosterior = True) + + with h5py.File('rm1d.h5', 'r') as f: + rm1 = RectilinearMesh1D.fromHdf(f['rm1d']) + + p+=1; plt.figure(p) + plt.subplot(121) + _ = rm.pcolor(StatArray(rm.shape), grid=True, transpose=True, flip=True) + plt.subplot(122) + _ = rm1.pcolor(StatArray(rm1.shape), grid=True, transpose=True, flip=True) + + p+=1; fig = plt.figure(p) + ax = rm1._init_posterior_plots(fig) + rm1.plot_posteriors(axes=ax) + + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_011.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_011.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_012.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_012.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_013.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_013.png + :class: sphx-glr-multi-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + [, ] + + + +.. GENERATED FROM PYTHON SOURCE LINES 246-247 + +Expanded + +.. GENERATED FROM PYTHON SOURCE LINES 247-289 + +.. 
code-block:: Python + + with h5py.File('rm1d.h5', 'w') as f: + tmp = rm.pad(rm.max_cells) + tmp.createHdf(f, 'rm1d', withPosterior=True, add_axis=DataArray(np.arange(3.0), name='Easting', units="m")) + + print(list(f['rm1d'].keys())) + + rm.relative_to = 5.0 + print(rm.summary) + rm.writeHdf(f, 'rm1d', withPosterior = True, index=0) + + rm = deepcopy(rm0) + for i in range(1000): + rm = rm.perturb(); rm.update_posteriors() + rm.relative_to = 10.0 + rm.writeHdf(f, 'rm1d', withPosterior = True, index=1) + + rm = deepcopy(rm0) + for i in range(1000): + rm = rm.perturb(); rm.update_posteriors() + rm.relative_to = 25.0 + rm.writeHdf(f, 'rm1d', withPosterior = True, index=2) + + with h5py.File('rm1d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['rm1d']) + + p+=1; plt.figure(p) + plt.subplot(121) + arr = np.random.randn(3, rm.max_cells) * 10 + _ = rm0.pcolor(arr[0, :rm0.nCells.item()], grid=True, transpose=True, flip=True) + plt.subplot(122) + _ = rm2.pcolor(arr, grid=True, flipY=True, equalize=True) + + from geobipy import RectilinearMesh2D + with h5py.File('rm1d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['rm1d'], index=0) + + plt.figure() + plt.subplot(121) + rm2.plot_grid(transpose=True, flip=True) + plt.subplot(122) + rm2.edges.posterior.pcolor(transpose=True, flip=True) + + plt.show() + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_014.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_014.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_015.png + :alt: plot rectilinear mesh 1d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_1d_015.png + :class: sphx-glr-multi-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + ['nCells', 'x', 'y'] + RectilinearMesh1D + Number of Cells: + | StatArray + | Name: Number of cells + | Address:['0x154b0fd50' '0x182c1a750' '0x180970e10' '0x1809733f0'] + | Shape: (1,) + | Values: [15] + | Min: 15 + | Max: 15 + | Prior: + | | Uniform Distribution: + | | Min: 1 + | | Max: 30 + | has_posterior: True + + Cell Centres: + | StatArray + | Name: test + | Address:['0x17f3b4850'] + | Shape: (15,) + | Values: [ 1.22331766 3.28906551 6.36608642 ... 66.85590133 108.99206549 + | 148.449825 ] + | Min: 1.2233176602046558 + | Max: 148.4498250024555 + | has_posterior: False + + Cell Edges: + | StatArray + | Name: test + | Address:['0x1834e3950' '0x17ebbc1a0' '0x182174450' '0x17f387aa0' '0x1808cd750' + | '0x1808ced70'] + | Shape: (16,) + | Values: [ 0. 2.44663532 4.13149571 ... 70.19008745 147.79404353 + | 149.10560647] + | Min: 0.0 + | Max: 149.1056064716983 + | Prior: + | | Order Statistics: + | | None + | Proposal: + | | Uniform Distribution: + | | Min: 1.0 + | | Max: 149.99999999999997 + | has_posterior: True + + log: + | None + relative_to: + | StatArray + | Name: + | Address:['0x17ef83e50'] + | Shape: (1,) + | Values: [5.] + | Min: 5.0 + | Max: 5.0 + | has_posterior: False + + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 4.585 seconds) + + +.. _sphx_glr_download_examples_Meshes_plot_rectilinear_mesh_1d.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_rectilinear_mesh_1d.ipynb ` + + .. 
container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_rectilinear_mesh_1d.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_rectilinear_mesh_1d.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Meshes/plot_rectilinear_mesh_2d.rst.txt b/docs/_sources/examples/Meshes/plot_rectilinear_mesh_2d.rst.txt new file mode 100644 index 00000000..b96baffa --- /dev/null +++ b/docs/_sources/examples/Meshes/plot_rectilinear_mesh_2d.rst.txt @@ -0,0 +1,613 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Meshes/plot_rectilinear_mesh_2d.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Meshes_plot_rectilinear_mesh_2d.py: + + +2D Rectilinear Mesh +------------------- +This 2D rectilinear mesh defines a grid with straight cell boundaries. + +It can be instantiated in two ways. + +The first is by providing the cell centres or +cell edges in two dimensions. + +The second embeds the 2D mesh in 3D by providing the cell centres or edges in three dimensions. +The first two dimensions specify the mesh coordinates in the horiztontal cartesian plane +while the third discretizes in depth. This allows us to characterize a mesh whose horizontal coordinates +do not follow a line that is parallel to either the "x" or "y" axis. + +.. GENERATED FROM PYTHON SOURCE LINES 19-26 + +.. code-block:: Python + + import h5py + from geobipy import StatArray + from geobipy import RectilinearMesh1D, RectilinearMesh2D, RectilinearMesh3D + import matplotlib.pyplot as plt + import numpy as np + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 27-28 + +Specify some cell centres in x and y + +.. GENERATED FROM PYTHON SOURCE LINES 28-32 + +.. code-block:: Python + + x = StatArray(np.arange(10.0), 'Easting', 'm') + y = StatArray(np.arange(20.0), 'Depth', 'm') + rm = RectilinearMesh2D(x_centres=x, y_centres=y) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 33-34 + +We can plot the grid lines of the mesh. + +.. GENERATED FROM PYTHON SOURCE LINES 34-45 + +.. code-block:: Python + + p=0; + plt.figure(p) + _ = rm.plot_grid(flipY=True, linewidth=0.5) + + # Intersecting multisegment lines with a mesh + arr = np.zeros(rm.shape) + i = rm.line_indices([0.0, 3.0, 6.0, 9], [2.0, 6.0, 0.0, 10]) + arr[i[:, 0], i[:, 1]] = 1 + p += 1; plt.figure(p) + rm.pcolor(values = arr) + + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_001.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_001.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_002.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_002.png + :class: sphx-glr-multi-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + (, , ) + + + +.. GENERATED FROM PYTHON SOURCE LINES 46-47 + +We can pcolor the mesh by providing cell values. + +.. GENERATED FROM PYTHON SOURCE LINES 47-58 + +.. 
code-block:: Python + + xx, yy = np.meshgrid(rm.y.centres, rm.x.centres) + arr = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "Values") + + rm2, values2 = rm.resample(0.5, 0.5, arr, method='linear') + + + p += 1; plt.figure(p) + _ = rm.pcolor(arr, grid=True, flipY=True, linewidth=0.5) + + + + + + +.. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_003.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_003.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 59-60 + +Mask the x axis cells by a distance + +.. GENERATED FROM PYTHON SOURCE LINES 60-64 + +.. code-block:: Python + + rm_masked, x_indices, z_indices, arr2 = rm.mask_cells(x_distance=0.4, values=arr) + p += 1; plt.figure(p) + _ = rm_masked.pcolor(StatArray(arr2), grid=True, flipY=True) + + + + +.. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_004.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_004.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 65-66 + +Mask the z axis cells by a distance + +.. GENERATED FROM PYTHON SOURCE LINES 66-70 + +.. code-block:: Python + + rm_masked, x_indices, z_indices, arr2 = rm.mask_cells(y_distance=0.2, values=arr) + p += 1; plt.figure(p) + _ = rm_masked.pcolor(StatArray(arr2), grid=True, flipY=True) + + + + +.. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_005.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_005.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 71-72 + +Mask axes by a distance + +.. GENERATED FROM PYTHON SOURCE LINES 72-80 + +.. code-block:: Python + + rm_masked, x_indices, z_indices, arr2 = rm.mask_cells(x_distance=0.4, y_distance=0.2, values=arr) + p += 1; plt.figure(p) + _ = rm_masked.pcolor(StatArray(arr2), grid=True, flipY=True) + + x = StatArray(np.arange(10.0), 'Easting', 'm') + y = StatArray(np.cumsum(np.arange(15.0)), 'Depth', 'm') + rm = RectilinearMesh2D(x_centres=x, y_centres=y) + + + + +.. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_006.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_006.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 81-83 + +We can perform some interval statistics on the cell values of the mesh +Generate some values + +.. GENERATED FROM PYTHON SOURCE LINES 83-85 + +.. code-block:: Python + + a = np.repeat(np.arange(1.0, np.float64(rm.x.nCells+1))[:, np.newaxis], rm.y.nCells, 1) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 86-87 + +Compute the mean over an interval for the mesh. + +.. GENERATED FROM PYTHON SOURCE LINES 87-89 + +.. code-block:: Python + + rm.intervalStatistic(a, intervals=[6.8, 12.4], axis=0, statistic='mean') + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + (array([[9., 9., 9., ..., 9., 9., 9.]]), [6.8, 12.4]) + + + +.. GENERATED FROM PYTHON SOURCE LINES 90-91 + +Compute the mean over multiple intervals for the mesh. + +.. GENERATED FROM PYTHON SOURCE LINES 91-93 + +.. code-block:: Python + + rm.intervalStatistic(a, intervals=[6.8, 12.4, 20.0, 40.0], axis=0, statistic='mean') + + + + + +.. rst-class:: sphx-glr-script-out + + .. 
code-block:: none + + + (array([[ 9., 9., 9., ..., 9., 9., 9.], + [nan, nan, nan, ..., nan, nan, nan], + [nan, nan, nan, ..., nan, nan, nan]]), [6.8, 12.4, 20.0, 40.0]) + + + +.. GENERATED FROM PYTHON SOURCE LINES 94-95 + +We can specify either axis + +.. GENERATED FROM PYTHON SOURCE LINES 95-97 + +.. code-block:: Python + + rm.intervalStatistic(a, intervals=[2.8, 4.2], axis=1, statistic='mean') + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + (array([[ 1.], + [ 2.], + [ 3.], + ..., + [ 8.], + [ 9.], + [10.]]), [2.8, 4.2]) + + + +.. GENERATED FROM PYTHON SOURCE LINES 98-100 + +.. code-block:: Python + + rm.intervalStatistic(a, intervals=[2.8, 4.2, 5.1, 8.4], axis=1, statistic='mean') + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + (array([[ 1., nan, 1.], + [ 2., nan, 2.], + [ 3., nan, 3.], + ..., + [ 8., nan, 8.], + [ 9., nan, 9.], + [10., nan, 10.]]), [2.8, 4.2, 5.1, 8.4]) + + + +.. GENERATED FROM PYTHON SOURCE LINES 101-102 + +Slice the 2D mesh to retrieve either a 2D mesh or 1D mesh + +.. GENERATED FROM PYTHON SOURCE LINES 102-114 + +.. code-block:: Python + + rm2 = rm[:5, :5] + rm3 = rm[:5, 5] + rm4 = rm[5, :5] + + p += 1; plt.figure(p) + plt.subplot(131) + rm2.plot_grid() + plt.subplot(132) + rm3.plot_grid() + plt.subplot(133) + rm4.plot_grid(transpose=True) + + + + +.. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_007.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_007.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 115-116 + +Resample a grid + +.. GENERATED FROM PYTHON SOURCE LINES 116-125 + +.. code-block:: Python + + values = StatArray(np.random.randn(*rm.shape)) + rm2, values2 = rm.resample(0.5, 0.5, values) + + p += 1; plt.figure(p) + plt.subplot(121) + rm.pcolor(values) + plt.subplot(122) + rm2.pcolor(values2) + + + + +.. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_008.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_008.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + (, , ) + + + +.. GENERATED FROM PYTHON SOURCE LINES 126-128 + +Axes in log space ++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 128-136 + +.. code-block:: Python + + x = StatArray(np.logspace(-1, 4, 10), 'x') + y = StatArray(np.logspace(0, 3, 10), 'y') + rm = RectilinearMesh2D(x_edges=x, x_log=10, y_edges=y, y_log=10) + + # We can plot the grid lines of the mesh. + p += 1; plt.figure(p) + _ = rm.plot_grid(linewidth=0.5) + + + + +.. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_009.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_009.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 137-150 + +.. code-block:: Python + + with h5py.File('rm2d.h5', 'w') as f: + rm.toHdf(f, 'test') + + with h5py.File('rm2d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['test']) + + arr = np.random.randn(*rm.shape) + p += 1; plt.figure(p) + plt.subplot(211) + rm.pcolor(arr) + plt.subplot(212) + rm2.pcolor(arr) + + + + +.. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_010.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_010.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. 
code-block:: none + + + (, , ) + + + +.. GENERATED FROM PYTHON SOURCE LINES 151-153 + +relative_to +++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 153-211 + +.. code-block:: Python + + x = StatArray(np.arange(10.0), 'Northing', 'm') + y = StatArray(np.arange(20.0), 'Depth', 'm') + + rm = RectilinearMesh2D(x_centres=x, y_centres=y) + + p += 1; plt.figure(p) + plt.subplot(121) + _ = rm.plot_grid(linewidth=0.5, flipY=True) + rm = RectilinearMesh2D(x_centres=x, x_relative_to=0.2*np.random.randn(y.size), y_centres=y, y_relative_to=0.2*np.random.randn(x.size)) + plt.subplot(122) + _ = rm.plot_grid(linewidth=0.5, flipY=True) + + # relative_to single + with h5py.File('rm2d.h5', 'w') as f: + rm.toHdf(f, 'test') + + with h5py.File('rm2d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['test']) + + arr = np.random.randn(*rm.shape) + p += 1; plt.figure(p) + plt.subplot(211) + rm.pcolor(arr, flipY=True) + plt.subplot(212) + rm2.pcolor(arr, flipY=True) + + # relative_to expanded + with h5py.File('rm2d.h5', 'w') as f: + rm.createHdf(f, 'test', add_axis=RectilinearMesh1D(centres=StatArray(np.arange(3.0), name='Easting', units="m"), relative_to = 0.2*np.random.randn(x.size, y.size))) + for i in range(3): + rm.x.relative_to += 0.5 + rm.y.relative_to += 0.5 + rm.writeHdf(f, 'test', index=i) + + with h5py.File('rm2d.h5', 'r') as f: + rm2 = RectilinearMesh2D.fromHdf(f['test'], index=0) + + with h5py.File('rm2d.h5', 'r') as f: + rm3 = RectilinearMesh3D.fromHdf(f['test']) + + p += 1; plt.figure(p) + plt.subplot(311) + rm.pcolor(arr, flipY=True) + plt.subplot(312) + rm2.pcolor(arr, flipY=True) + + p += 1; plt.figure(p) + arr = np.random.randn(*rm3.shape) + plt.subplot(311) + mesh = rm3[0, :, :] + mesh.pcolor(arr[0, :, :], flipY=True) + plt.subplot(312) + mesh = rm3[:, 0, :] + mesh.pcolor(arr[:, 0, :], flipY=True) + plt.subplot(313) + rm3[:, :, 0].pcolor(arr[:, :, 0]) + + plt.show() + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_011.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_011.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_012.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_012.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_013.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_013.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_014.png + :alt: plot rectilinear mesh 2d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_2d_014.png + :class: sphx-glr-multi-img + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 1.747 seconds) + + +.. _sphx_glr_download_examples_Meshes_plot_rectilinear_mesh_2d.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_rectilinear_mesh_2d.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_rectilinear_mesh_2d.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_rectilinear_mesh_2d.zip ` + + +.. only:: html + + .. 
rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Meshes/plot_rectilinear_mesh_3d.rst.txt b/docs/_sources/examples/Meshes/plot_rectilinear_mesh_3d.rst.txt new file mode 100644 index 00000000..f6597fde --- /dev/null +++ b/docs/_sources/examples/Meshes/plot_rectilinear_mesh_3d.rst.txt @@ -0,0 +1,345 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Meshes/plot_rectilinear_mesh_3d.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Meshes_plot_rectilinear_mesh_3d.py: + + +3D Rectilinear Mesh +------------------- +This 3D rectilinear mesh defines a grid with straight cell boundaries. + +.. GENERATED FROM PYTHON SOURCE LINES 9-16 + +.. code-block:: Python + + from geobipy import StatArray + from geobipy import RectilinearMesh3D + import matplotlib.pyplot as plt + import numpy as np + import h5py + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 17-18 + +Specify some cell centres in x and y + +.. GENERATED FROM PYTHON SOURCE LINES 18-37 + +.. code-block:: Python + + x = StatArray(np.arange(10.0), 'Easting', 'm') + y = StatArray(np.arange(15.0), 'Northing', 'm') + z = StatArray(np.arange(20.0), 'Depth', 'm') + + rm = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z) + + rm1 = rm[:5, :5, :5] + rm2 = rm[:, :, 5] + rm3 = rm[:, 5, :] + rm4 = rm[5, :, :] + + plt.figure() + plt.subplot(231) + rm2.plot_grid() + plt.subplot(232) + rm3.plot_grid() + plt.subplot(233) + rm4.plot_grid() + + + + +.. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_3d_001.png + :alt: plot rectilinear mesh 3d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_3d_001.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 38-49 + +.. code-block:: Python + + rm2 = rm[:, 5, 5] + rm3 = rm[5, :, 5] + rm4 = rm[5, 5, :] + + plt.subplot(234) + rm2.plot_grid() + plt.subplot(235) + rm3.plot_grid() + plt.subplot(236) + rm4.plot_grid() + + + + +.. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_3d_002.png + :alt: plot rectilinear mesh 3d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_3d_002.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 50-77 + +.. code-block:: Python + + with h5py.File('rm3d.h5', 'w') as f: + rm.createHdf(f, 'test') + rm.writeHdf(f, 'test') + + with h5py.File('rm3d.h5', 'r') as f: + rm2 = RectilinearMesh3D.fromHdf(f['test']) + + rm.pyvista_mesh().save('rm3d.vtk') + + + xx, yy = np.meshgrid(rm.y.centres, rm.x.centres) + z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re") + rm = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z, z_relative_to=z_re) + + rm1 = rm[:5, :5, :5] + rm2 = rm[:, :, 5] + rm3 = rm[:, 5, :] + rm4 = rm[5, :, :] + + plt.figure() + plt.subplot(231) + rm2.plot_grid() + plt.subplot(232) + rm3.plot_grid() + plt.subplot(233) + rm4.plot_grid() + + + + +.. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_3d_003.png + :alt: plot rectilinear mesh 3d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_3d_003.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 78-79 + +We can plot the mesh in 3D! + +.. GENERATED FROM PYTHON SOURCE LINES 79-81 + +.. 
code-block:: Python + + pv = rm.pyvista_plotter() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 82-83 + +We can plot the mesh in 3D! + +.. GENERATED FROM PYTHON SOURCE LINES 83-104 + +.. code-block:: Python + + mesh = rm.pyvista_mesh().save('rm3d_re1.vtk') + + x_re = StatArray(np.sin(np.repeat(rm.y.centres[:, None], rm.z.nCells, 1)), "x_re") + + xx, yy = np.meshgrid(rm.y.centres, rm.x.centres) + z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re") + rm = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, z_edges=z, z_relative_to=z_re) + + rm1 = rm[:5, :5, :5] + rm2 = rm[:, :, 5] + rm3 = rm[:, 5, :] + rm4 = rm[5, :, :] + + plt.figure() + plt.subplot(231) + rm2.plot_grid() + plt.subplot(232) + rm3.plot_grid() + plt.subplot(233) + rm4.plot_grid() + + + + +.. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_3d_004.png + :alt: plot rectilinear mesh 3d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_3d_004.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 105-106 + +We can plot the mesh in 3D! + +.. GENERATED FROM PYTHON SOURCE LINES 106-108 + +.. code-block:: Python + + pv = rm.pyvista_plotter() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 109-110 + +We can plot the mesh in 3D! + +.. GENERATED FROM PYTHON SOURCE LINES 110-136 + +.. code-block:: Python + + mesh = rm.pyvista_mesh().save('rm3d_re2.vtk') + + + xx, yy = np.meshgrid(rm.z.centres, rm.y.centres) + x_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "x_re") + + xx, yy = np.meshgrid(rm.z.centres, rm.x.centres) + y_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "y_re") + + xx, yy = np.meshgrid(rm.y.centres, rm.x.centres) + z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re") + rm = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, y_relative_to=y_re, z_edges=z, z_relative_to=z_re) + + rm1 = rm[:5, :5, :5] + rm2 = rm[:, :, 5] + rm3 = rm[:, 5, :] + rm4 = rm[5, :, :] + + plt.figure() + plt.subplot(231) + rm2.plot_grid() + plt.subplot(232) + rm3.plot_grid() + plt.subplot(233) + rm4.plot_grid() + + + + +.. image-sg:: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_3d_005.png + :alt: plot rectilinear mesh 3d + :srcset: /examples/Meshes/images/sphx_glr_plot_rectilinear_mesh_3d_005.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 137-138 + +We can plot the mesh in 3D! + +.. GENERATED FROM PYTHON SOURCE LINES 138-140 + +.. code-block:: Python + + pv = rm.pyvista_plotter() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 141-142 + +We can plot the mesh in 3D! + +.. GENERATED FROM PYTHON SOURCE LINES 142-153 + +.. code-block:: Python + + mesh = rm.pyvista_mesh().save('rm3d_re3.vtk') + + with h5py.File('rm3d.h5', 'w') as f: + rm.toHdf(f, 'test') + + with h5py.File('rm3d.h5', 'r') as f: + rm2 = RectilinearMesh3D.fromHdf(f['test']) + + rm2.pyvista_mesh().save('rm3d_read.vtk') + + plt.show() + + + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 1.378 seconds) + + +.. _sphx_glr_download_examples_Meshes_plot_rectilinear_mesh_3d.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_rectilinear_mesh_3d.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_rectilinear_mesh_3d.py ` + + .. 
container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_rectilinear_mesh_3d.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Meshes/readme.rst.txt b/docs/_sources/examples/Meshes/readme.rst.txt new file mode 100644 index 00000000..781066d7 --- /dev/null +++ b/docs/_sources/examples/Meshes/readme.rst.txt @@ -0,0 +1,2 @@ +Meshes +====== \ No newline at end of file diff --git a/docs/_sources/examples/Meshes/sg_execution_times.rst.txt b/docs/_sources/examples/Meshes/sg_execution_times.rst.txt new file mode 100644 index 00000000..c05fc45b --- /dev/null +++ b/docs/_sources/examples/Meshes/sg_execution_times.rst.txt @@ -0,0 +1,43 @@ + +:orphan: + +.. _sphx_glr_examples_Meshes_sg_execution_times: + + +Computation times +================= +**00:07.710** total execution time for 3 files **from examples/Meshes**: + +.. container:: + + .. raw:: html + + + + + + + + .. list-table:: + :header-rows: 1 + :class: table table-striped sg-datatable + + * - Example + - Time + - Mem (MB) + * - :ref:`sphx_glr_examples_Meshes_plot_rectilinear_mesh_1d.py` (``plot_rectilinear_mesh_1d.py``) + - 00:04.585 + - 0.0 + * - :ref:`sphx_glr_examples_Meshes_plot_rectilinear_mesh_2d.py` (``plot_rectilinear_mesh_2d.py``) + - 00:01.747 + - 0.0 + * - :ref:`sphx_glr_examples_Meshes_plot_rectilinear_mesh_3d.py` (``plot_rectilinear_mesh_3d.py``) + - 00:01.378 + - 0.0 diff --git a/docs/_sources/examples/Models/plot_model_1d.rst.txt b/docs/_sources/examples/Models/plot_model_1d.rst.txt new file mode 100644 index 00000000..5526b3fd --- /dev/null +++ b/docs/_sources/examples/Models/plot_model_1d.rst.txt @@ -0,0 +1,380 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Models/plot_model_1d.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Models_plot_model_1d.py: + + +1D Model with an infinite halfspace +----------------------------------- + +.. GENERATED FROM PYTHON SOURCE LINES 7-15 + +.. code-block:: Python + + from copy import deepcopy + from geobipy import StatArray + from geobipy import RectilinearMesh1D + from geobipy import Model + from geobipy import Distribution + import matplotlib.pyplot as plt + import numpy as np + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 16-18 + +Instantiate the 1D Model with a Half Space +++++++++++++++++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 18-31 + +.. code-block:: Python + + + # Make a test model with 10 layers, and increasing parameter values + nLayers = 2 + par = StatArray(np.linspace(0.001, 0.02, nLayers), "Conductivity", "$\\frac{S}{m}$") + thk = StatArray(np.full(nLayers, fill_value=10.0)) + thk[-1] = np.inf + mesh = RectilinearMesh1D(widths = thk) + + mod = Model(mesh = mesh, values=par) + + plt.figure() + mod.plot_grid(transpose=True, flip=True) + + + + +.. image-sg:: /examples/Models/images/sphx_glr_plot_model_1d_001.png + :alt: plot model 1d + :srcset: /examples/Models/images/sphx_glr_plot_model_1d_001.png + :class: sphx-glr-single-img + + + + + +.. 
GENERATED FROM PYTHON SOURCE LINES 32-39 + +Randomness and Model Perturbations +++++++++++++++++++++++++++++++++++ +We can set the priors on the 1D model by assigning minimum and maximum layer +depths and a maximum number of layers. These are used to create priors on +the number of cells in the model, a new depth interface, new parameter values +and the vertical gradient of those parameters. +The halfSpaceValue is used as a reference value for the parameter prior. + +.. GENERATED FROM PYTHON SOURCE LINES 39-53 + +.. code-block:: Python + + from numpy.random import Generator + from numpy.random import PCG64DXSM + generator = PCG64DXSM(seed=0) + prng = Generator(generator) + + # Set the priors + mod.set_priors(value_mean=0.01, + min_edge=1.0, + max_edge=150.0, + max_cells=30, + solve_value=True, + solve_gradient=True, + prng=prng) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 54-55 + +We can evaluate the prior of the model using depths only + +.. GENERATED FROM PYTHON SOURCE LINES 55-59 + +.. code-block:: Python + + print('Log probability of the Model given its priors: ', mod.probability(False, False)) + # Or with priors on its parameters, and parameter gradient with depth. + print('Log probability of the Model given its priors: ', mod.probability(True, True)) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + Log probability of the Model given its priors: -3.367295829986474 + Log probability of the Model given its priors: -9.157731937350919 + + + + +.. GENERATED FROM PYTHON SOURCE LINES 60-62 + +To propose new models, we specify the probabilities of creating, removing, perturbing, and not changing +a layer interface + +.. GENERATED FROM PYTHON SOURCE LINES 62-65 + +.. code-block:: Python + + pProposal = Distribution('LogNormal', 0.01, np.log(2.0)**2.0, linearSpace=True, prng=prng) + mod.set_proposals(probabilities=[0.25, 0.25, 0.5, 0.25], proposal=pProposal, prng=prng) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 66-67 + +We can then perturb the layers of the model + +.. GENERATED FROM PYTHON SOURCE LINES 67-69 + +.. code-block:: Python + + remapped, perturbed = mod.perturb() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 70-76 + +.. code-block:: Python + + fig = plt.figure(figsize=(8, 6)) + ax = plt.subplot(121) + mod.pcolor(transpose=True, flip=True, log=10) # , grid=True) + ax = plt.subplot(122) + perturbed.pcolor(transpose=True, flip=True, log=10) # , grid=True) + + + + +.. image-sg:: /examples/Models/images/sphx_glr_plot_model_1d_002.png + :alt: plot model 1d + :srcset: /examples/Models/images/sphx_glr_plot_model_1d_002.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 77-78 + +We can evaluate the prior of the model using depths only + +.. GENERATED FROM PYTHON SOURCE LINES 78-83 + +.. code-block:: Python + + print('Log probability of the Model given its priors: ',perturbed.probability(False, False)) + # Or with priors on its parameters, and parameter gradient with depth. + print('Log probability of the Model given its priors: ',perturbed.probability(True, True)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + Log probability of the Model given its priors: -3.367295829986474 + Log probability of the Model given its priors: -8.559817917094882 + + + + +.. 
GENERATED FROM PYTHON SOURCE LINES 84-98 + +Perturbing a model multiple times ++++++++++++++++++++++++++++++++++ +In the stochasitic inference process, we perturb the model structure, +and parameter values, multiple times. +Each time the model is perturbed, we can record its state +in a posterior distribution. + +For a 1D model, the parameter posterior is a 2D hitmap with depth in one dimension +and the parameter value in the other. +We also attach a 1D histogram for the number of layers, +and a 1D histogram for the locations of interfaces. + +Since we have already set the priors on the Model, we can set the posteriors +based on bins from from the priors. + +.. GENERATED FROM PYTHON SOURCE LINES 98-103 + +.. code-block:: Python + + + mod.set_posteriors() + + mod0 = deepcopy(mod) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 104-105 + +Now we randomly perturb the model, and update its posteriors. + +.. GENERATED FROM PYTHON SOURCE LINES 105-114 + +.. code-block:: Python + + mod.update_posteriors() + for i in range(1001): + remapped, perturbed = mod.perturb() + + # And update the model posteriors + perturbed.update_posteriors() + + mod = perturbed + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 115-120 + +We can now plot the posteriors of the model. + +Remember in this case, we are simply perturbing the model structure and parameter values +The proposal for the parameter values is fixed and centred around a single value. +fig = plt.figure(figsize=(8, 6)) + +.. GENERATED FROM PYTHON SOURCE LINES 120-161 + +.. code-block:: Python + + + # plt.subplot(131) + # mod.nCells.posterior.plot() + # ax = plt.subplot(132) + # mod.values.posterior.pcolor(cmap='gray_r', colorbar=False, flipY=True, logX=10) + # plt.subplot(133, sharey=ax) + # mod.mesh.edges.posterior.plot(transpose=True, flipY=True) + + # plt.figure() + # mod.plot_posteriors(**{"cmap": 'gray_r', + # "xscale": 'log', + # "noColorbar": True, + # "flipY": True, + # 'credible_interval_kwargs':{'axis': 1, + # 'reciprocate': True, + # 'xscale': 'log'}}) + # mod.par.posterior.plotCredibleIntervals(xscale='log', axis=1) + + + fig = plt.figure(figsize=(8, 6)) + # gs = fig.add_gridspec(nrows=1, ncols=1) + mod.plot_posteriors(axes=fig, + edges_kwargs = { + "transpose":True, + "flipY":True + }, + parameter_kwargs = { + "cmap": 'gray_r', + "xscale": 'log', + "colorbar": False, + "flipY": True, + 'credible_interval_kwargs':{ + 'reciprocate':True, + # 'axis': 1, + 'xscale': 'log' + } + }, + best = mod) + + + plt.show() + + + +.. image-sg:: /examples/Models/images/sphx_glr_plot_model_1d_003.png + :alt: plot model 1d + :srcset: /examples/Models/images/sphx_glr_plot_model_1d_003.png + :class: sphx-glr-single-img + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 3.051 seconds) + + +.. _sphx_glr_download_examples_Models_plot_model_1d.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_model_1d.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_model_1d.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_model_1d.zip ` + + +.. only:: html + + .. 
rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Models/plot_model_2d.rst.txt b/docs/_sources/examples/Models/plot_model_2d.rst.txt new file mode 100644 index 00000000..85a85b61 --- /dev/null +++ b/docs/_sources/examples/Models/plot_model_2d.rst.txt @@ -0,0 +1,142 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Models/plot_model_2d.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Models_plot_model_2d.py: + + +2D Rectilinear Model +-------------------- +This 2D rectilinear model defines a grid with straight cell boundaries. + +.. GENERATED FROM PYTHON SOURCE LINES 9-17 + +.. code-block:: Python + + from geobipy import StatArray + from geobipy import RectilinearMesh2D + from geobipy import Model + import h5py + import matplotlib.pyplot as plt + import numpy as np + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 18-19 + +Specify some cell centres in x and y + +.. GENERATED FROM PYTHON SOURCE LINES 19-56 + +.. code-block:: Python + + x = StatArray(np.arange(11.0), 'Easting', 'm') + y = StatArray(np.arange(11.0), 'Northing', 'm') + mesh = RectilinearMesh2D(x_edges=x, y_edges=y) + + xx, yy = np.meshgrid(mesh.x.centres, mesh.y.centres) + values = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "Values") + + mod = Model(mesh=mesh, values = values) + + plt.figure() + mod.pcolor() + + mod2 = mod.resample(0.5, 0.5) + mod3 = mod.resample(1.5, 1.5) + plt.figure() + plt.subplot(121) + mod2.pcolor() + plt.axis('equal') + plt.subplot(122) + mod3.pcolor() + plt.axis('equal') + + + # #%% + # # We can plot the mesh in 3D! + # pv = rm.pyvista_plotter() + # pv.show() + + # rm.to_vtk('Model3D.vtk') + + with h5py.File('Model2D.h5', 'w') as f: + mod.toHdf(f, 'model') + + with h5py.File('Model2D.h5', 'r') as f: + mod2 = Model.fromHdf(f['model']) + + + plt.show() + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Models/images/sphx_glr_plot_model_2d_001.png + :alt: plot model 2d + :srcset: /examples/Models/images/sphx_glr_plot_model_2d_001.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Models/images/sphx_glr_plot_model_2d_002.png + :alt: plot model 2d + :srcset: /examples/Models/images/sphx_glr_plot_model_2d_002.png + :class: sphx-glr-multi-img + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 0.212 seconds) + + +.. _sphx_glr_download_examples_Models_plot_model_2d.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_model_2d.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_model_2d.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_model_2d.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Models/plot_model_3d.rst.txt b/docs/_sources/examples/Models/plot_model_3d.rst.txt new file mode 100644 index 00000000..4cebc3ff --- /dev/null +++ b/docs/_sources/examples/Models/plot_model_3d.rst.txt @@ -0,0 +1,410 @@ + +.. DO NOT EDIT. +.. 
THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Models/plot_model_3d.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Models_plot_model_3d.py: + + +3D Rectilinear Model +-------------------- +This 3D rectilinear model defines a grid with straight cell boundaries. + +.. GENERATED FROM PYTHON SOURCE LINES 9-24 + +.. code-block:: Python + + from geobipy import StatArray + from geobipy import RectilinearMesh3D + from geobipy import Model + import matplotlib.pyplot as plt + import numpy as np + import h5py + + + """ + 3D Rectilinear Mesh + ------------------- + This 3D rectilinear mesh defines a grid with straight cell boundaries. + + """ + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + '\n3D Rectilinear Mesh\n-------------------\nThis 3D rectilinear mesh defines a grid with straight cell boundaries.\n\n' + + + +.. GENERATED FROM PYTHON SOURCE LINES 25-33 + +.. code-block:: Python + + from geobipy import StatArray + from geobipy import RectilinearMesh3D + from geobipy import Model + import matplotlib.pyplot as plt + import numpy as np + import h5py + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 34-35 + +Specify some cell centres in x and y + +.. GENERATED FROM PYTHON SOURCE LINES 35-60 + +.. code-block:: Python + + x = StatArray(np.arange(10.0), 'Easting', 'm') + y = StatArray(np.arange(15.0), 'Northing', 'm') + z = StatArray(np.arange(20.0), 'Depth', 'm') + + mesh = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z) + + xx, yy = np.meshgrid(mesh.y.centres, mesh.x.centres) + values = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "Height") + values = np.repeat(values[:, :, None], mesh.z.nCells, 2) + + model = Model(mesh=mesh, values=values) + + model1 = model[:5, :5, :5] + model2 = model[:, :, 5] + model3 = model[:, 5, :] + model4 = model[5, :, :] + + plt.figure() + plt.subplot(231) + model2.pcolor() + plt.subplot(232) + model3.pcolor() + plt.subplot(233) + model4.pcolor() + + + + +.. image-sg:: /examples/Models/images/sphx_glr_plot_model_3d_001.png + :alt: plot model 3d + :srcset: /examples/Models/images/sphx_glr_plot_model_3d_001.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + (, , ) + + + +.. GENERATED FROM PYTHON SOURCE LINES 61-72 + +.. code-block:: Python + + model2 = model[:, 5, 5] + model3 = model[5, :, 5] + model4 = model[5, 5, :] + + plt.subplot(234) + model2.pcolor() + plt.subplot(235) + model3.pcolor() + plt.subplot(236) + model4.pcolor() + + + + +.. image-sg:: /examples/Models/images/sphx_glr_plot_model_3d_002.png + :alt: plot model 3d + :srcset: /examples/Models/images/sphx_glr_plot_model_3d_002.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 73-101 + +.. 
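code-block:: Python
+
+    # A hedged sanity check, not part of the generated example: slicing the Model
+    # is expected to slice the values it was built from in the same way. This
+    # assumes the sliced Model still exposes a `values` attribute.
+    assert np.allclose(model[:, :, 5].values, values[:, :, 5])
+
+.. 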
code-block:: Python + + with h5py.File('model3d.h5', 'w') as f: + model.createHdf(f, 'test') + model.writeHdf(f, 'test') + + with h5py.File('model3d.h5', 'r') as f: + model2 = Model.fromHdf(f['test']) + + model.pyvista_mesh().save('model3d.vtk') + + + xx, yy = np.meshgrid(mesh.y.centres, mesh.x.centres) + z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re") + mesh = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z, z_relative_to=z_re) + model = Model(mesh=mesh, values=values) + + model1 = model[:5, :5, :5] + model2 = model[:, :, 5] + model3 = model[:, 5, :] + model4 = model[5, :, :] + + plt.figure() + plt.subplot(231) + model2.pcolor() + plt.subplot(232) + model3.pcolor() + plt.subplot(233) + model4.pcolor() + + + + +.. image-sg:: /examples/Models/images/sphx_glr_plot_model_3d_003.png + :alt: plot model 3d + :srcset: /examples/Models/images/sphx_glr_plot_model_3d_003.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + (, , ) + + + +.. GENERATED FROM PYTHON SOURCE LINES 102-103 + +We can plot the mesh in 3D! + +.. GENERATED FROM PYTHON SOURCE LINES 103-105 + +.. code-block:: Python + + pv = model.pyvista_plotter() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 106-107 + +We can plot the mesh in 3D! + +.. GENERATED FROM PYTHON SOURCE LINES 107-127 + +.. code-block:: Python + + model.pyvista_mesh().save('model3d_re1.vtk') + + + x_re = StatArray(np.sin(np.repeat(mesh.y.centres[:, None], mesh.z.nCells, 1)), "x_re") + mesh = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, z_edges=z, z_relative_to=z_re) + model = Model(mesh=mesh, values=values) + + model1 = model[:5, :5, :5] + model2 = model[:, :, 5] + model3 = model[:, 5, :] + model4 = model[5, :, :] + + plt.figure() + plt.subplot(231) + model2.pcolor() + plt.subplot(232) + model3.pcolor() + plt.subplot(233) + model4.pcolor() + + + + +.. image-sg:: /examples/Models/images/sphx_glr_plot_model_3d_004.png + :alt: plot model 3d + :srcset: /examples/Models/images/sphx_glr_plot_model_3d_004.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + (, , ) + + + +.. GENERATED FROM PYTHON SOURCE LINES 128-129 + +We can plot the mesh in 3D! + +.. GENERATED FROM PYTHON SOURCE LINES 129-131 + +.. code-block:: Python + + pv = model.pyvista_plotter() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 132-133 + +We can plot the mesh in 3D! + +.. GENERATED FROM PYTHON SOURCE LINES 133-155 + +.. code-block:: Python + + model.pyvista_mesh().save('model3d_re2.vtk') + + + xx, yy = np.meshgrid(mesh.z.centres, mesh.x.centres) + y_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "y_re") + + mesh = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, y_relative_to=y_re, z_edges=z, z_relative_to=z_re) + model = Model(mesh=mesh, values=values) + + model1 = model[:5, :5, :5] + model2 = model[:, :, 5] + model3 = model[:, 5, :] + model4 = model[5, :, :] + + plt.figure() + plt.subplot(231) + model2.pcolor() + plt.subplot(232) + model3.pcolor() + plt.subplot(233) + model4.pcolor() + + + + +.. image-sg:: /examples/Models/images/sphx_glr_plot_model_3d_005.png + :alt: plot model 3d + :srcset: /examples/Models/images/sphx_glr_plot_model_3d_005.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + (, , ) + + + +.. GENERATED FROM PYTHON SOURCE LINES 156-157 + +We can plot the mesh in 3D! + +.. GENERATED FROM PYTHON SOURCE LINES 157-159 + +.. 
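code-block:: Python
+
+    # A hedged aside, not part of the generated example: instead of an interactive
+    # window, the mesh can be exported to VTK (reusing pyvista_mesh as above) and
+    # inspected in an external viewer such as Paraview. The plotter returned by
+    # pyvista_plotter below can typically be displayed with pv.show().
+    model.pyvista_mesh().save('model3d_relative.vtk')
+
+.. 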
code-block:: Python + + pv = model.pyvista_plotter() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 160-161 + +We can plot the mesh in 3D! + +.. GENERATED FROM PYTHON SOURCE LINES 161-172 + +.. code-block:: Python + + model.pyvista_mesh().save('model3d_re3.vtk') + + # with h5py.File('mesh3d.h5', 'w') as f: + # mesh.toHdf(f, 'test') + + # with h5py.File('mesh3d.h5', 'r') as f: + # mesh2 = RectilinearMesh3D.fromHdf(f['test']) + + # mesh2.pyvista_mesh().save('mesh3d_read.vtk') + + plt.show() + + + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 0.788 seconds) + + +.. _sphx_glr_download_examples_Models_plot_model_3d.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_model_3d.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_model_3d.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_model_3d.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Models/readme.rst.txt b/docs/_sources/examples/Models/readme.rst.txt new file mode 100644 index 00000000..f6155583 --- /dev/null +++ b/docs/_sources/examples/Models/readme.rst.txt @@ -0,0 +1,2 @@ +Models +====== \ No newline at end of file diff --git a/docs/_sources/examples/Models/sg_execution_times.rst.txt b/docs/_sources/examples/Models/sg_execution_times.rst.txt new file mode 100644 index 00000000..02ca942f --- /dev/null +++ b/docs/_sources/examples/Models/sg_execution_times.rst.txt @@ -0,0 +1,43 @@ + +:orphan: + +.. _sphx_glr_examples_Models_sg_execution_times: + + +Computation times +================= +**00:04.051** total execution time for 3 files **from examples/Models**: + +.. container:: + + .. raw:: html + + + + + + + + .. list-table:: + :header-rows: 1 + :class: table table-striped sg-datatable + + * - Example + - Time + - Mem (MB) + * - :ref:`sphx_glr_examples_Models_plot_model_1d.py` (``plot_model_1d.py``) + - 00:03.051 + - 0.0 + * - :ref:`sphx_glr_examples_Models_plot_model_3d.py` (``plot_model_3d.py``) + - 00:00.788 + - 0.0 + * - :ref:`sphx_glr_examples_Models_plot_model_2d.py` (``plot_model_2d.py``) + - 00:00.212 + - 0.0 diff --git a/docs/_sources/examples/Statistics/plot_DataArray.rst.txt b/docs/_sources/examples/Statistics/plot_DataArray.rst.txt new file mode 100644 index 00000000..d739d765 --- /dev/null +++ b/docs/_sources/examples/Statistics/plot_DataArray.rst.txt @@ -0,0 +1,188 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Statistics/plot_DataArray.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Statistics_plot_DataArray.py: + + +DataArray Class +---------------- + +Extends the numpy ndarray class to add extra attributes such as names, and +units, and allows us to attach statistical descriptors of the array. +The direct extension to numpy maintains speed and functionality of numpy arrays. + +.. GENERATED FROM PYTHON SOURCE LINES 10-48 + + + + +.. rst-class:: sphx-glr-script-out + + .. 
code-block:: none + + DataArray + Name: 1 + Address:['0x17f629d50'] + Shape: (1,) + Values: [0.] + Min: 0.0 + Max: 0.0 + + DataArray + Name: 10 + Address:['0x17f629450'] + Shape: (10,) + Values: [0. 0. 0. ... 0. 0. 0.] + Min: 0.0 + Max: 0.0 + + DataArray + Name: (2, 10) + Address:['0x17f62aad0'] + Shape: (2, 10) + Values: [[0. 0. 0. ... 0. 0. 0.] + [0. 0. 0. ... 0. 0. 0.]] + Min: 0.0 + Max: 0.0 + + DataArray + Name: (2, 10) + Address:['0x154ab7a50'] + Shape: (2,) + Values: [ 2 10] + Min: 2 + Max: 10 + + DataArray + Name: 45.454 + Address:['0x154ab6dd0'] + Shape: (1,) + Values: [45.454] + Min: 45.454 + Max: 45.454 + + DataArray + Name: 45.454 + Address:['0x17f62aad0'] + Shape: (1,) + Values: [45.454] + Min: 45.454 + Max: 45.454 + + DataArray + Name: test ($\frac{g}{cc}$) + Address:['0x154ab6dd0'] + Shape: (1,) + Values: [0.00257118] + Min: 0.002571182431510025 + Max: 0.002571182431510025 + + DataArray + Name: test ($\frac{g}{cc}$) + Address:['0x17f62aad0'] + Shape: (10,) + Values: [0. 1. 2. ... 7. 8. 9.] + Min: 0.0 + Max: 9.0 + + DataArray + Name: test ($\frac{g}{cc}$) + Address:['0x154ab7a50'] + Shape: (10,) + Values: [0. 1. 2. ... 7. 8. 9.] + Min: 0.0 + Max: 9.0 + + + + + + + +| + +.. code-block:: Python + + import numpy as np + from geobipy import DataArray, StatArray + + # Integer + test = DataArray(1, name='1') + assert isinstance(test, DataArray) and test.size == 1 and test.item() == 0.0, TypeError("da 0") + print(test.summary) + test = DataArray(10, name='10') + assert isinstance(test, DataArray) and test.size == 10 and np.all(test == 0.0), TypeError("da 1") + print(test.summary) + # tuple/Shape + test = DataArray((2, 10), name='(2, 10)') + assert isinstance(test, DataArray) and np.all(test.shape == (2, 10)) and np.all(test == 0.0), TypeError("da 2") + print(test.summary) + + test = DataArray([2, 10], name='(2, 10)') + assert isinstance(test, DataArray) and np.all(test == [2, 10]), TypeError("da 2") + print(test.summary) + + # float + test = DataArray(45.454, name='45.454') + assert isinstance(test, DataArray) and test.size == 1 and test.item() == 45.454, TypeError("da 3") + print(test.summary) + test = DataArray(np.float64(45.454), name='45.454') + assert isinstance(test, DataArray) and test.size == 1 and test.item() == 45.454, TypeError("da 4") + print(test.summary) + + # array + test = DataArray(np.random.randn(1), name="test", units="$\frac{g}{cc}$") + assert isinstance(test, DataArray) and test.size == 1, TypeError("da 5") + print(test.summary) + + test = DataArray(np.arange(10.0), name="test", units="$\frac{g}{cc}$") + assert isinstance(test, DataArray) and test.size == 10, TypeError("da 6") + print(test.summary) + + test = DataArray(test) + assert isinstance(test, DataArray) and test.size == 10, TypeError("da 6") + print(test.summary) + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 0.003 seconds) + + +.. _sphx_glr_download_examples_Statistics_plot_DataArray.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_DataArray.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_DataArray.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_DataArray.zip ` + + +.. only:: html + + .. 
rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Statistics/plot_StatArray.rst.txt b/docs/_sources/examples/Statistics/plot_StatArray.rst.txt new file mode 100644 index 00000000..30e50ab1 --- /dev/null +++ b/docs/_sources/examples/Statistics/plot_StatArray.rst.txt @@ -0,0 +1,1960 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Statistics/plot_StatArray.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Statistics_plot_StatArray.py: + + +StatArray Class +---------------- + +Extends the numpy ndarray class to add extra attributes such as names, and +units, and allows us to attach statistical descriptors of the array. +The direct extension to numpy maintains speed and functionality of numpy arrays. + +.. GENERATED FROM PYTHON SOURCE LINES 11-19 + +.. code-block:: Python + + import numpy as np + import matplotlib.pyplot as plt + import h5py + from geobipy import DataArray, StatArray, Histogram, Distribution, RectilinearMesh1D + + + # plt.style.use('seaborn-pastel') + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 20-23 + +Instantiating a new StatArray class ++++++++++++++++++++++++++++++++++++ + + +.. GENERATED FROM PYTHON SOURCE LINES 23-69 + +.. code-block:: Python + + + # Integer + test = StatArray(1, name='1') + assert isinstance(test, StatArray) and test.size == 1 and test.item() == 0.0, TypeError("da 0") + print(test.summary) + test = StatArray(10, name='10') + assert isinstance(test, StatArray) and test.size == 10 and np.all(test == 0.0), TypeError("da 1") + print(test.summary) + # tuple/Shape + test = StatArray((2, 10), name='(2, 10)') + assert isinstance(test, StatArray) and np.all(test.shape == (2, 10)) and np.all(test == 0.0), TypeError("da 2") + print(test.summary) + + test = StatArray([2, 10], name='(2, 10)') + assert isinstance(test, StatArray) and np.all(test == [2, 10]), TypeError("da 2") + print(test.summary) + + # float + test = StatArray(45.454, name='45.454') + assert isinstance(test, StatArray) and test.size == 1 and test.item() == 45.454, TypeError("da 3") + print(test.summary) + test = StatArray(np.float64(45.454), name='45.454') + assert isinstance(test, StatArray) and test.size == 1 and test.item() == 45.454, TypeError("da 4") + print(test.summary) + + # array + test = StatArray(np.random.randn(1), name="test", units="$\frac{g}{cc}$") + assert isinstance(test, StatArray) and test.size == 1, TypeError("da 5") + print(test.summary) + + test = StatArray(np.arange(10.0), name="test", units="$\frac{g}{cc}$") + assert isinstance(test, StatArray) and test.size == 10, TypeError("da 6") + print(test.summary) + + + test = DataArray(np.arange(10.0), name="test", units="$\frac{g}{cc}$") + test = StatArray(test) + assert isinstance(test, StatArray) and test.size == 10, TypeError("da 6") + print(test.summary) + + + + + # The StatArray can take any numpy function that returns an array as an input. + # The name and units of the variable can be assigned to the StatArray. + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + StatArray + Name: 1 + Address:['0x154bfe950'] + Shape: (1,) + Values: [0.] 
+ Min: 0.0 + Max: 0.0 + has_posterior: False + + StatArray + Name: 10 + Address:['0x17f089a50'] + Shape: (10,) + Values: [0. 0. 0. ... 0. 0. 0.] + Min: 0.0 + Max: 0.0 + has_posterior: False + + StatArray + Name: (2, 10) + Address:['0x17f08a7d0'] + Shape: (2, 10) + Values: [[0. 0. 0. ... 0. 0. 0.] + [0. 0. 0. ... 0. 0. 0.]] + Min: 0.0 + Max: 0.0 + has_posterior: False + + StatArray + Name: (2, 10) + Address:['0x17f6825d0'] + Shape: (2,) + Values: [ 2 10] + Min: 2 + Max: 10 + has_posterior: False + + StatArray + Name: 45.454 + Address:['0x17f08a7d0'] + Shape: (1,) + Values: [45.454] + Min: 45.454 + Max: 45.454 + has_posterior: False + + StatArray + Name: 45.454 + Address:['0x17f680a50'] + Shape: (1,) + Values: [45.454] + Min: 45.454 + Max: 45.454 + has_posterior: False + + StatArray + Name: test ($\frac{g}{cc}$) + Address:['0x17f08a7d0'] + Shape: (1,) + Values: [-0.70419088] + Min: -0.7041908829549965 + Max: -0.7041908829549965 + has_posterior: False + + StatArray + Name: test ($\frac{g}{cc}$) + Address:['0x17f6819d0'] + Shape: (10,) + Values: [0. 1. 2. ... 7. 8. 9.] + Min: 0.0 + Max: 9.0 + has_posterior: False + + StatArray + Name: test ($\frac{g}{cc}$) + Address:['0x17f680a50'] + Shape: (10,) + Values: [0. 1. 2. ... 7. 8. 9.] + Min: 0.0 + Max: 9.0 + has_posterior: False + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 70-89 + +Attaching Prior and Proposal Distributions to a StatArray ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +The StatArray class has been built so that we may easily +attach not only names and units, but statistical distributions too. +We won't go into too much detail about the different distribution + +Two types of distributions can be attached to the StatArray. + +* Prior Distribution + The prior represents how the user believes the variable should + behave from a statistical standpoint. + The values of the variable can be evaluated against the attached prior, + to determine how likely they are to have occured https://en.wikipedia.org/wiki/Prior_probability + +* Proposal Distribution + The proposal describes a probability distribution from which to + sample when we wish to perturb the variable + https://en.wikipedia.org/wiki/Metropolis%E2%80%93Hastings_algorithm + +.. GENERATED FROM PYTHON SOURCE LINES 89-101 + +.. code-block:: Python + + + # Obtain an instantiation of a random number generator. + # This is optional, but is an important consideration for parallel programming. + from numpy.random import Generator + from numpy.random import PCG64DXSM + generator = PCG64DXSM(seed=0) + prng = Generator(generator) + + Density = StatArray(10.0, name="test", units="$\frac{g}{cc}$") + + Density.prior = Distribution('Uniform', -2.0, 2.0, prng=prng) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 102-103 + +We can also attach a proposal distribution + +.. GENERATED FROM PYTHON SOURCE LINES 103-109 + +.. code-block:: Python + + Density.proposal = Distribution('Normal', 0.0, 1.0, prng=prng) + print(Density.summary) + print("Class type of the prior: ",type(Density.prior)) + print("Class type of the proposal: ",type(Density.proposal)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + StatArray + Name: test ($\frac{g}{cc}$) + Address:['0x154bfd7d0' '0x17f652360' '0x181310930' ... '0x17f652180' '0x181313b10' + '0x1813134b0'] + Shape: (1,) + Values: [10.] 
+ Min: 10.0 + Max: 10.0 + Prior: + | Uniform Distribution: + | Min: -2.0 + | Max: 2.0 + Proposal: + | Normal + | Mean:0.0 + | Variance:1.0 + has_posterior: False + + Class type of the prior: + Class type of the proposal: + + + + +.. GENERATED FROM PYTHON SOURCE LINES 110-113 + +The values in the variable can be evaluated against the prior. +In this case, we have 3 elements in the variable, and a univariate Normal for the prior. +Therefore each element is evaluated to get 3 probabilities, one for each element. + +.. GENERATED FROM PYTHON SOURCE LINES 113-115 + +.. code-block:: Python + + print(Density.probability(log=False)) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + 0.0 + + + + +.. GENERATED FROM PYTHON SOURCE LINES 116-117 + +The univariate proposal distribution can generate random samples from itself. + +.. GENERATED FROM PYTHON SOURCE LINES 117-119 + +.. code-block:: Python + + print(Density.propose()) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + 1.1375024404290368 + + + + +.. GENERATED FROM PYTHON SOURCE LINES 120-122 + +From a sampling stand point we can either sample using only the proposal +Or we can only generate samples that simultaneously satisfy the prior. + +.. GENERATED FROM PYTHON SOURCE LINES 122-124 + +.. code-block:: Python + + print(Density.propose(relative=True)) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [10.53816627] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 125-126 + +We can perturb the variable by drawing from the attached proposal distribution. + +.. GENERATED FROM PYTHON SOURCE LINES 126-130 + +.. code-block:: Python + + + Density.perturb() + print(Density.summary) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + StatArray + Name: test ($\frac{g}{cc}$) + Address:['0x154bfd7d0' '0x17f652360' '0x181310930' ... '0x17f652180' '0x181313b10' + '0x1813134b0'] + Shape: (1,) + Values: [0.38188467] + Min: 0.38188466718060166 + Max: 0.38188466718060166 + Prior: + | Uniform Distribution: + | Min: -2.0 + | Max: 2.0 + Proposal: + | Normal + | Mean:0.0 + | Variance:1.0 + has_posterior: False + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 131-136 + +Attaching a Histogram to capture the posterior distribution ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +The StatArray can perturb itself, evaluate its current probability given its priors +and a histogram can be attached to capture its posterior distribution. +As an example, lets create a Histogram class with bins generated from the prior. + +.. GENERATED FROM PYTHON SOURCE LINES 136-137 + +.. code-block:: Python + + bins = Density.prior.bins() + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 138-139 + +Attach the histogram + +.. GENERATED FROM PYTHON SOURCE LINES 139-141 + +.. code-block:: Python + + Density.posterior = Histogram(mesh = RectilinearMesh1D(edges=bins)) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 142-143 + +In an iterative sense, we can propose and evaluate new values, and update the posterior + +.. GENERATED FROM PYTHON SOURCE LINES 143-150 + +.. code-block:: Python + + for i in range(1000): + Density.perturb() + p = Density.probability(log=False) + + if p > 0.0: # This is a simple example! + Density.update_posterior() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 151-154 + +.. code-block:: Python + + plt.figure() + Density.summaryPlot() + + + + +.. 
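code-block:: Python
+
+    # A hedged sketch, not part of the generated example: the loop above accepts
+    # any perturbation with non-zero prior probability. A Metropolis-style step
+    # would instead compare the prior probabilities of the old and new values,
+    # reusing only the methods already shown here.
+    old_value = np.float64(Density.item())
+    p_old = Density.probability(log=False)
+    Density.perturb()                        # propose a new value
+    p_new = Density.probability(log=False)   # evaluate it against the prior
+    if p_old > 0.0 and np.random.rand() > p_new / p_old:
+        Density[:] = old_value               # reject: restore the previous value
+    else:
+        Density.update_posterior()           # accept: record it in the posterior
+
+.. 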
image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_001.png + :alt: Prior, Proposal, Posterior + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_001.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 155-159 + +Attach a multivariate normal distribution as the prior and proposal ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +Attach the multivariate prior + +.. GENERATED FROM PYTHON SOURCE LINES 159-165 + +.. code-block:: Python + + + mean = np.random.randn(Density.size) + variance = np.ones(Density.size) + Density.prior = Distribution('MvNormal', mean, variance, prng=prng) + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 166-169 + +Since the prior is multivariate, the appropriate equations are used to +evaluate the probability for all elements in the StatArray. +This produces a single probability. + +.. GENERATED FROM PYTHON SOURCE LINES 169-172 + +.. code-block:: Python + + + print(Density.probability(log=False)) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + 0.16541198455442488 + + + + +.. GENERATED FROM PYTHON SOURCE LINES 173-174 + +Attach the multivariate proposal + +.. GENERATED FROM PYTHON SOURCE LINES 174-180 + +.. code-block:: Python + + + mean = np.random.randn(Density.size) + variance = np.ones(Density.size) + Density.proposal = Distribution('MvNormal', mean, variance, prng=prng) + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 181-182 + +Perturb the variables using the multivariate proposal. + +.. GENERATED FROM PYTHON SOURCE LINES 182-197 + +.. code-block:: Python + + + Density.perturb() + Density.summary + + with h5py.File('statarray.h5', 'w') as f: + Density.createHdf(f, 'statarray', withPosterior=True, add_axis=3) + Density.writeHdf(f, 'statarray', withPosterior=True, index=0) + + with h5py.File('statarray.h5', 'r') as f: + tmp = StatArray.fromHdf(f, 'statarray', index=0, skip_posterior=False) + + with h5py.File('statarray.h5', 'r') as f: + tmp = StatArray.fromHdf(f, 'statarray', skip_posterior=False) + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 198-209 + +Basic manipulation +++++++++++++++++++ + +The StatArray contains other functions to perform basic array manipulations + +These routines essentially wrap around numpy functions, +but the result will have the same name and units, +and if any prior or proposal are set, those will be carried through too. + +1D example +__________ + +.. GENERATED FROM PYTHON SOURCE LINES 209-213 + +.. code-block:: Python + + + x = StatArray(-np.cumsum(np.arange(10.0))) + print(x) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [ -0. -1. -3. ... -28. -36. -45.] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 214-219 + +.. code-block:: Python + + + + print(x.insert(i=[0, 9], values=[999.0, 999.0])) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [999. -0. -1. ... -36. 999. -45.] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 220-225 + +.. code-block:: Python + + + + print(x.prepend(999.0)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [999. -0. -1. ... -28. -36. -45.] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 226-231 + +.. code-block:: Python + + + + print(x.prepend([998.0, 999.0])) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [998. 999. -0. ... -28. -36. -45.] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 232-237 + +.. 
code-block:: Python + + + + print(x.append([998.0, 999.0])) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [ -0. -1. -3. ... -45. 998. 999.] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 238-243 + +.. code-block:: Python + + + + print(x.resize(14)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [-0. -1. -3. ... -1. -3. -6.] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 244-249 + +.. code-block:: Python + + + + print(x.delete([5,8])) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [ -0. -1. -3. ... -21. -28. -45.] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 250-255 + +.. code-block:: Python + + + + print(x.edges()) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [ 0.5 -0.5 -2. ... -32. -40.5 -49.5] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 256-261 + +.. code-block:: Python + + + + print(x.internalEdges()) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [ -0.5 -2. -4.5 ... -24.5 -32. -40.5] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 262-267 + +.. code-block:: Python + + + + print(x.firstNonZero()) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + 1 + + + + +.. GENERATED FROM PYTHON SOURCE LINES 268-273 + +.. code-block:: Python + + + + print(x.lastNonZero()) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + 10 + + + + +.. GENERATED FROM PYTHON SOURCE LINES 274-279 + +.. code-block:: Python + + + + print(x.abs()) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [ 0. 1. 3. ... 28. 36. 45.] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 280-282 + +2D example +__________ + +.. GENERATED FROM PYTHON SOURCE LINES 282-287 + +.. code-block:: Python + + + x = StatArray(np.asarray([[0, -2, 3],[3, 0, -1],[1, 2, 0]])) + print(x) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [[ 0 -2 3] + [ 3 0 -1] + [ 1 2 0]] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 288-293 + +.. code-block:: Python + + + + print(x.insert(i=0, values=4)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [[ 4 4 4] + [ 0 -2 3] + [ 3 0 -1] + [ 1 2 0]] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 294-299 + +.. code-block:: Python + + + + print(x.insert(i=[2, 3], values=5, axis=1)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [[ 0 -2 5 3 5] + [ 3 0 5 -1 5] + [ 1 2 5 0 5]] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 300-305 + +.. code-block:: Python + + + + print(x.insert(i=2, values=[10, 11, 12], axis=1)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [[ 0 -2 10 3] + [ 3 0 11 -1] + [ 1 2 12 0]] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 306-311 + +.. code-block:: Python + + + + print(x.prepend(999)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [[999 999 999] + [ 0 -2 3] + [ 3 0 -1] + [ 1 2 0]] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 312-317 + +.. code-block:: Python + + + + print(x.prepend([999, 998, 997], axis=1)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [[999 998 997 0 -2 3] + [999 998 997 3 0 -1] + [999 998 997 1 2 0]] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 318-323 + +.. code-block:: Python + + + + print(x.append([[999, 998, 997]])) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [[ 0 -2 3] + [ 3 0 -1] + [ 1 2 0] + [999 998 997]] + + + + +.. 
GENERATED FROM PYTHON SOURCE LINES 324-329 + +.. code-block:: Python + + + + print(x.resize([5,5])) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [[ 0 -2 3 3 0] + [-1 1 2 0 0] + [-2 3 3 0 -1] + [ 1 2 0 0 -2] + [ 3 3 0 -1 1]] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 330-335 + +.. code-block:: Python + + + + print(x.delete(5)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [ 0 -2 3 ... 1 2 0] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 336-341 + +.. code-block:: Python + + + + print(x.delete(2, axis=0)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [[ 0 -2 3] + [ 3 0 -1]] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 342-347 + +.. code-block:: Python + + + + print(x.firstNonZero(axis=0)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [1 0 0] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 348-353 + +.. code-block:: Python + + + + print(x.lastNonZero(axis=0)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [3 3 2] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 354-359 + +.. code-block:: Python + + + + print(x.firstNonZero(axis=1)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [1 0 0] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 360-365 + +.. code-block:: Python + + + + print(x.lastNonZero(axis=1)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [3 3 2] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 366-371 + +.. code-block:: Python + + + + print(x.abs()) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + [[0 2 3] + [3 0 1] + [1 2 0]] + + + + +.. GENERATED FROM PYTHON SOURCE LINES 372-377 + +Plotting +++++++++ + +We can easily plot the StatArray with its built in plotting functions. +All plotting functions can take matplotlib keywords + +.. GENERATED FROM PYTHON SOURCE LINES 377-385 + +.. code-block:: Python + + + # The simplest is to just plot the array + + Density = StatArray(np.random.randn(100),name="Density",units="$\frac{g}{cc}$") + Time = StatArray(np.linspace(0, 100, Density.size), name='Time', units='s') + Depth = StatArray(np.random.exponential(size=Density.size), name='Depth', units='m') + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 386-391 + +.. code-block:: Python + + + + plt.figure() + _ = Density.plot(linewidth=0.5, marker='x', markersize=1.0) + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_002.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_002.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 392-393 + +We can quickly plot a bar graph. + +.. GENERATED FROM PYTHON SOURCE LINES 393-398 + +.. code-block:: Python + + + plt.figure() + _ = Density.bar() + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_003.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_003.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 399-400 + +We can scatter the contents of the StatArray if it is 1D + +.. GENERATED FROM PYTHON SOURCE LINES 400-405 + +.. code-block:: Python + + + plt.figure() + _ = Density.scatter(alpha=0.7) + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_004.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_004.png + :class: sphx-glr-single-img + + + + + +.. 
GENERATED FROM PYTHON SOURCE LINES 406-413 + +Histogram Equalization +______________________ + +A neat trick with colourmaps is histogram equalization. +This approach forces all colours in the images to have an equal weight. +This distorts the colour bar, but can really highlight the lower and higher +ends of whatever you are plotting. Just add the equalize keyword! + +.. GENERATED FROM PYTHON SOURCE LINES 413-418 + +.. code-block:: Python + + + plt.figure() + _ = Density.scatter(alpha=0.7, equalize=True) + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_005.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_005.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 419-422 + +Take the log base(x) of the data + +We can also take the data to a log, log10, log2, or a custom number! + +.. GENERATED FROM PYTHON SOURCE LINES 422-426 + +.. code-block:: Python + + + plt.figure() + _ = Density.scatter(alpha=0.7,edgecolor='k',log='e') # could also use log='e', log=2, log=x) where x is the base you require + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_006.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_006.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 427-430 + +X and Y axes + +We can specify the x axis of the scatter plot. + +.. GENERATED FROM PYTHON SOURCE LINES 430-436 + +.. code-block:: Python + + + + plt.figure() + _ = Density.scatter(x=Time, alpha=0.7, edgecolor='k') + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_007.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_007.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 437-444 + +Notice that I never specified the y axis, so the y axis defaulted to the values in the StatArray. +In this case, any operations applied to the colours, are also applied to the y axis, e.g. log=10. +When I take the values of Density to log base 10, because I do not specify the y plotting locations, those locations are similarly affected. + +I can however force the y co-ordinates by specifying it as input. +In the second subplot I explicitly plot distance on the y axis. +In the first subplot, the y axis is the same as the colourbar. + +.. GENERATED FROM PYTHON SOURCE LINES 444-453 + +.. code-block:: Python + + + + plt.figure() + ax1 = plt.subplot(211) + Density.scatter(x=Time, alpha=0.7, edgecolor='k', log=10) + plt.subplot(212, sharex=ax1) + _ = Density.scatter(x=Time, y=Depth, alpha=0.7, edgecolor='k', log=10) + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_008.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_008.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 454-457 + +Point sizes + +Since the plotting functions take matplotlib keywords, I can also specify the size of each points. + +.. GENERATED FROM PYTHON SOURCE LINES 459-474 + +.. 
code-block:: Python + + + + s = np.ceil(100*(np.abs(np.random.randn(Density.size)))) + plt.figure() + plt.tight_layout() + ax1 = plt.subplot(211) + Density.scatter(x=Time, y=Depth, s=s, alpha=0.7,edgecolor='k', legend_size=2) + plt.subplot(212, sharex=ax1) + #Density.scatter(x=Time, y=Depth, s=s, alpha=0.7,edgecolor='k', sizeLegend=[1.0, 100, 200, 300]) + v = np.abs(Density)+1.0 + _ = Density.scatter(x=Time, y=Depth, s=s, alpha=0.7,edgecolor='k', legend_size=[1.0, 100, 200, 300], log=10) + + + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_009.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_009.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 475-476 + +Of course we can still take the log, or equalize the colour histogram + +.. GENERATED FROM PYTHON SOURCE LINES 476-481 + +.. code-block:: Python + + + plt.figure() + _ = Density.scatter(x=Time, y=Depth, s=s, alpha=0.7,edgecolor='k',equalize=True,log=10) + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_010.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_010.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 482-483 + +Typically pcolor only works with 2D arrays. The StatArray has a pcolor method that will pcolor a 1D array + +.. GENERATED FROM PYTHON SOURCE LINES 483-495 + +.. code-block:: Python + + + plt.figure() + plt.subplot(221) + Density.pcolor() + plt.subplot(222) + Density.pcolor(y=Time) + plt.subplot(223) + Density.pcolor(y=Time, flip=True) + plt.subplot(224) + _ = Density.pcolor(y=Time, log=10, equalize=True) + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_011.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_011.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 496-499 + +We can add grid lines, and add opacity to each element in the pcolor image + +This is useful if the colour values need to be scaled by another variable e.g. variance. + +.. GENERATED FROM PYTHON SOURCE LINES 499-509 + +.. code-block:: Python + + + + plt.figure() + plt.subplot(121) + Density.pcolor(grid=True, cmap='jet') + plt.subplot(122) + a = np.linspace(1.0, 0.0, Density.size) + _ = Density.pcolor(grid=True, alpha=a, cmap='jet') + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_012.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_012.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 510-511 + +We can plot a histogram of the StatArray + +.. GENERATED FROM PYTHON SOURCE LINES 511-516 + +.. code-block:: Python + + + plt.figure() + _ = Density.hist(100) + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_013.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_013.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 517-518 + +We can write the StatArray to a HDF5 file. HDF5 files are binary files that can include compression. They allow quick and easy access to parts of the file, and can also be written to and read from in parallel! + +.. GENERATED FROM PYTHON SOURCE LINES 518-523 + +.. code-block:: Python + + + with h5py.File('1Dtest.h5','w') as f: + Density.toHdf(f,'test') + + + + + + + + + +.. 
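code-block:: Python
+
+    # A hedged sketch, not part of the generated example: peek inside the file we
+    # just wrote using plain h5py calls. The exact layout under 'test' is whatever
+    # geobipy chose to write; we simply list every group and dataset by name.
+    with h5py.File('1Dtest.h5', 'r') as f:
+        f.visit(print)
+
+.. 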
GENERATED FROM PYTHON SOURCE LINES 524-526 + +We can then read the StatArray from the file +Here x is a new variable, that is read in from the hdf5 file we just wrote. + +.. GENERATED FROM PYTHON SOURCE LINES 526-533 + +.. code-block:: Python + + + x = StatArray.fromHdf('1Dtest.h5', 'test') + print('x has the same values as Density? ',np.all(x == Density)) + x[2] = 5.0 # Change one of the values in x + print('x has its own memory allocated (not a reference/pointer)? ', id(x) != id(Density)) + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + x has the same values as Density? True + x has its own memory allocated (not a reference/pointer)? True + + + + +.. GENERATED FROM PYTHON SOURCE LINES 534-535 + +We can also define a 2D array + +.. GENERATED FROM PYTHON SOURCE LINES 535-540 + +.. code-block:: Python + + + Density = StatArray(np.random.randn(50,100),"Density","$\frac{g}{cc}$") + Density.summary + + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + "StatArray\nName: Density ($\\frac{g}{cc}$)\nAddress:['0x17f4e05d0']\nShape: (50, 100)\nValues: [[ 0.14104158 0.47155933 1.95502179 ... 1.76214533 1.24396239\n 0.1215391 ]\n [-1.85725407 -1.83481902 1.04987135 ... 0.85985878 -0.34529634\n -0.85558253]\n [ 0.76292177 1.13113089 -1.35816931 ... 1.31159774 0.46698778\n 1.68630769]\n ...\n [ 0.48384681 0.22774246 -0.51285518 ... -0.24617681 0.81498656\n 1.57095942]\n [-0.13056782 1.72508382 -1.33506224 ... -1.17319374 0.27482725\n 0.39378825]\n [ 1.36799972 0.26714623 -0.02588039 ... -0.28979426 -0.8747471\n -0.98512505]]\nMin: -3.2750329304041186\nMax: 3.445969826454679\nhas_posterior: False\n" + + + +.. GENERATED FROM PYTHON SOURCE LINES 541-544 + +The StatArray Class's functions work whether it is 1D or 2D + +We can still do a histogram + +.. GENERATED FROM PYTHON SOURCE LINES 544-549 + +.. code-block:: Python + + + plt.figure() + _ = Density.hist() + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_014.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_014.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 550-551 + +And we can use pcolor to plot the 2D array + +.. GENERATED FROM PYTHON SOURCE LINES 551-556 + +.. code-block:: Python + + + plt.figure() + _ = Density.pcolor() + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_015.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_015.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 557-560 + +The StatArray comes with extra plotting options + +Here we specify the x and y axes for the 2D array using two other 1D StatArrays + +.. GENERATED FROM PYTHON SOURCE LINES 560-567 + +.. code-block:: Python + + + plt.figure() + x = StatArray(np.arange(101),name='x Axis',units = 'mm') + y = StatArray(np.arange(51),name='y Axis',units = 'elephants') + _ = Density.pcolor(x=x, y=y) + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_016.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_016.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 568-571 + +We can plot using a log10 scale, in this case, we have values that are less +than or equal to 0.0. Plotting with the log option will by default mask any +of those values, and will let you know that it has done so! + +.. GENERATED FROM PYTHON SOURCE LINES 571-576 + +.. 
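code-block:: Python
+
+    # A hedged illustration of the masking described above, not part of the
+    # generated example: non-positive values have no logarithm, so they are
+    # expected to be masked out of a log-scaled image rather than plotted.
+    print((Density <= 0.0).sum(), "values would be masked on a log colour scale")
+
+.. 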
code-block:: Python + + + plt.figure() + _ = Density.pcolor(x=x,y=y,log=2) + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_017.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_017.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 577-581 + +A neat trick with colourmaps is histogram equalization. +This approach forces all colours in the image to have an equal amount. +This distorts the colours, but can really highlight the lower and higher +ends of whatever you are plotting + +.. GENERATED FROM PYTHON SOURCE LINES 581-586 + +.. code-block:: Python + + + plt.figure() + _ = Density.pcolor(x=x, y=y, equalize=True) + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_018.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_018.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 587-588 + +We can equalize the log10 plot too :) + +.. GENERATED FROM PYTHON SOURCE LINES 588-593 + +.. code-block:: Python + + + plt.figure() + _ = Density.pcolor(x=x,y=y,equalize=True, log=10) + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_019.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_019.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 594-595 + +We can add opacity to each pixel in the image + +.. GENERATED FROM PYTHON SOURCE LINES 595-599 + +.. code-block:: Python + + + a = StatArray(np.random.random(Density.shape), 'Opacity from 0.0 to 1.0') + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 600-611 + +.. code-block:: Python + + + + plt.figure() + ax1 = plt.subplot(131) + ax = Density.pcolor(x=x, y=y, flipY=True, linewidth=0.1, colorbar=False) + plt.subplot(132, sharex=ax1, sharey=ax1) + ax = Density.pcolor(x=x, y=y, alpha=a, flipY=True, linewidth=0.1, colorbar=False) + plt.subplot(133, sharex=ax1, sharey=ax1) + _ = a.pcolor(x=x, y=y, flipY=True) + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_020.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_020.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 612-613 + +If the array potentially has a lot of white space around the edges, we can trim the image + +.. GENERATED FROM PYTHON SOURCE LINES 613-625 + +.. code-block:: Python + + + Density[:10, :] = 0.0 + Density[-10:, :] = 0.0 + Density[:, :10] = 0.0 + Density[:, -10:] = 0.0 + plt.figure() + plt.subplot(121) + Density.pcolor() + plt.subplot(122) + _ = Density.pcolor(trim=0.0) + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_021.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_021.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 626-627 + +Create a stacked area plot of a 2D StatArray + +.. GENERATED FROM PYTHON SOURCE LINES 627-637 + +.. code-block:: Python + + + A = StatArray(np.abs(np.random.randn(13,100)), name='Variable', units="units") + x = StatArray(np.arange(100),name='x Axis',units = 'mm') + plt.figure() + ax1 = plt.subplot(211) + A.stackedAreaPlot(x=x, axis=1) + plt.subplot(212, sharex=ax1) + _ = A.stackedAreaPlot(x=x, i=np.s_[[1,3,4],:], axis=1, labels=['a','b','c']) + + plt.show() + + + +.. 
image-sg:: /examples/Statistics/images/sphx_glr_plot_StatArray_022.png + :alt: plot StatArray + :srcset: /examples/Statistics/images/sphx_glr_plot_StatArray_022.png + :class: sphx-glr-single-img + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 2.767 seconds) + + +.. _sphx_glr_download_examples_Statistics_plot_StatArray.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_StatArray.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_StatArray.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_StatArray.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Statistics/plot_histogram_1d.rst.txt b/docs/_sources/examples/Statistics/plot_histogram_1d.rst.txt new file mode 100644 index 00000000..2cca20da --- /dev/null +++ b/docs/_sources/examples/Statistics/plot_histogram_1d.rst.txt @@ -0,0 +1,516 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Statistics/plot_histogram_1d.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Statistics_plot_histogram_1d.py: + + +Histogram 1D +------------ + +This histogram class allows efficient updating of histograms, plotting and +saving as HDF5 + +.. GENERATED FROM PYTHON SOURCE LINES 10-17 + +.. code-block:: Python + + from geobipy.src.classes.mesh.RectilinearMesh1D import RectilinearMesh1D + import h5py + from geobipy import StatArray + from geobipy import Histogram + import numpy as np + import matplotlib.pyplot as plt + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 18-20 + +Histogram with regular bins ++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 20-24 + +.. code-block:: Python + + + # Create regularly spaced bins + mesh = RectilinearMesh1D(edges=StatArray(np.linspace(-3.0, 3.0, 101), 'bins', 'm')) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 25-26 + +Set the histogram using the bins, and update + +.. GENERATED FROM PYTHON SOURCE LINES 26-28 + +.. code-block:: Python + + H = Histogram(mesh=mesh) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 29-30 + +We can update the histogram with some new values + +.. GENERATED FROM PYTHON SOURCE LINES 30-43 + +.. code-block:: Python + + H.update(np.random.randn(1000), trim=True) + + # Plot the histogram + plt.figure() + plt.subplot(221) + _ = H.plot() + plt.subplot(222) + _ = H.pdf.bar() + plt.subplot(223) + H.pmf.bar() + plt.subplot(224) + H.cdf().bar() + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_1d_001.png + :alt: plot histogram 1d + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_1d_001.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 44-45 + +Get the median, and 95% confidence values + +.. GENERATED FROM PYTHON SOURCE LINES 45-53 + +.. 
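code-block:: Python
+
+    # A hedged sketch, not part of the generated example: numeric point estimates
+    # from the histogram. mean() and median() are used on the 2D histogram later
+    # in this gallery and are assumed to behave the same way for the 1D case.
+    print(H.mean(), H.median())
+
+.. 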
code-block:: Python + + print(H.credible_intervals(percent=95.0)) + + plt.figure() + H.plot() + H.plotCredibleIntervals() + H.plotMean() + H.plotMedian() + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_1d_002.png + :alt: plot histogram 1d + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_1d_002.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + (np.float64(-0.030000000000000027), np.float64(-1.83), np.float64(1.8299999999999996)) + + + + +.. GENERATED FROM PYTHON SOURCE LINES 54-56 + +Histogram with irregular bins ++++++++++++++++++++++++++++++ + +.. GENERATED FROM PYTHON SOURCE LINES 56-61 + +.. code-block:: Python + + + # Create irregularly spaced bins + x = np.cumsum(np.arange(10, dtype=np.float64)) + irregularBins = np.hstack([-x[::-1], x[1:]]) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 62-63 + +Create a named StatArray + +.. GENERATED FROM PYTHON SOURCE LINES 63-66 + +.. code-block:: Python + + edges = StatArray(irregularBins, 'irregular bins') + mesh = RectilinearMesh1D(edges = edges) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 67-68 + +Instantiate the histogram with bin edges + +.. GENERATED FROM PYTHON SOURCE LINES 68-73 + +.. code-block:: Python + + H = Histogram(mesh=mesh) + + # Update the histogram + H.update((np.random.randn(10000)*20.0) - 10.0) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 74-75 + +Plot the histogram + +.. GENERATED FROM PYTHON SOURCE LINES 75-87 + +.. code-block:: Python + + plt.figure() + plt.subplot(211) + _ = H.plot() + plt.subplot(212) + _ = H.plot(normalize=True) + + plt.figure() + H.plot() + H.plotCredibleIntervals() + H.plotMean() + H.plotMedian() + + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_1d_003.png + :alt: plot histogram 1d + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_1d_003.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_1d_004.png + :alt: plot histogram 1d + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_1d_004.png + :class: sphx-glr-multi-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 88-89 + +We can plot the histogram as a pcolor plot + +.. GENERATED FROM PYTHON SOURCE LINES 89-92 + +.. code-block:: Python + + plt.figure() + _ = H.pcolor(grid=True, transpose=True) + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_1d_005.png + :alt: plot histogram 1d + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_1d_005.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 93-96 + +Histogram with linear space entries that are logged internally +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ +Create some bins spaced logarithmically + +.. GENERATED FROM PYTHON SOURCE LINES 96-98 + +.. code-block:: Python + + mesh = RectilinearMesh1D(edges = StatArray(np.logspace(-5, 3), 'positive bins'), log=10) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 99-100 + +Instantiate the Histogram with log=10 + +.. GENERATED FROM PYTHON SOURCE LINES 100-102 + +.. code-block:: Python + + H = Histogram(mesh) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 103-104 + +The update takes in the numbers in linear space and takes their log=10 + +.. GENERATED FROM PYTHON SOURCE LINES 104-106 + +.. code-block:: Python + + H.update(10.0**(np.random.randn(1000)*2.0), trim=True) + + + + + + + + +.. 
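code-block:: Python
+
+    # A hedged illustration of the statement above, not part of the generated
+    # example: update() receives values in linear space, so a value of 100.0 is
+    # binned according to its base-10 logarithm on the 'positive bins' axis.
+    print(np.log10(100.0))   # 2.0
+
+.. 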
GENERATED FROM PYTHON SOURCE LINES 107-122 + +.. code-block:: Python + + plt.figure() + plt.subplot(211) + _ = H.plot() + + import h5py + with h5py.File('h1d.h5', 'w') as f: + H.toHdf(f, 'h1d') + + with h5py.File('h1d.h5', 'r') as f: + H1 = Histogram.fromHdf(f['h1d']) + + plt.subplot(212) + _ = H1.plot() + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_1d_006.png + :alt: plot histogram 1d + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_1d_006.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 123-124 + +.. code-block:: Python + + mesh = RectilinearMesh1D(edges=StatArray(np.linspace(-3.0, 3.0, 101), 'bins', 'm')) + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 125-126 + +Set the histogram using the bins, and update + +.. GENERATED FROM PYTHON SOURCE LINES 126-128 + +.. code-block:: Python + + H = Histogram(mesh=mesh) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 129-130 + +We can update the histogram with some new values + +.. GENERATED FROM PYTHON SOURCE LINES 130-156 + +.. code-block:: Python + + H.update(np.random.randn(1000), trim=True) + + import h5py + with h5py.File('h1d.h5', 'w') as f: + H.createHdf(f, 'h1d', add_axis=StatArray(np.arange(3.0), "Name", "Units")) + H.writeHdf(f, 'h1d', index=0) + H.update(np.random.randn(1000), trim=True) + H.writeHdf(f, 'h1d', index=1) + H.update(np.random.randn(1000), trim=True) + H.writeHdf(f, 'h1d', index=2) + + with h5py.File('h1d.h5', 'r') as f: + H1 = Histogram.fromHdf(f['h1d']) + H2 = Histogram.fromHdf(f['h1d'], index=0) + H3 = Histogram.fromHdf(f['h1d'], index=1) + H4 = Histogram.fromHdf(f['h1d'], index=2) + + + print(H4.summary) + + # plt.figure() + # plt.subplot(211) + # _ = H1.plot() + # plt.subplot(212) + # _ = H4.plot() + + plt.show() + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + Histogram: + mesh: + | RectilinearMesh1D + | Number of Cells: + | | 100 + | Cell Centres: + | | StatArray + | | Name: bins (m) + | | Address:['0x17f6bebd0'] + | | Shape: (100,) + | | Values: [-2.97 -2.91 -2.85 ... 2.85 2.91 2.97] + | | Min: -2.9699999999999998 + | | Max: 2.9699999999999998 + | | has_posterior: False + | + | Cell Edges: + | | StatArray + | | Name: bins (m) + | | Address:['0x17f6bfad0'] + | | Shape: (101,) + | | Values: [-3. -2.94 -2.88 ... 2.88 2.94 3. ] + | | Min: -3.0 + | | Max: 3.0 + | | has_posterior: False + | + | log: + | | None + | relative_to: + | | StatArray + | | Name: + | | Address:['0x17f088dd0'] + | | Shape: (1,) + | | Values: [0.] + | | Min: 0.0 + | | Max: 0.0 + | | has_posterior: False + | + values: + | DataArray + | Name: Frequency + | Address:['0x17f08a550'] + | Shape: (100,) + | Values: [2 0 0 ... 0 2 1] + | Min: 0 + | Max: 88 + + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 0.972 seconds) + + +.. _sphx_glr_download_examples_Statistics_plot_histogram_1d.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_histogram_1d.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_histogram_1d.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_histogram_1d.zip ` + + +.. only:: html + + .. 
rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Statistics/plot_histogram_2d.rst.txt b/docs/_sources/examples/Statistics/plot_histogram_2d.rst.txt new file mode 100644 index 00000000..4937c05b --- /dev/null +++ b/docs/_sources/examples/Statistics/plot_histogram_2d.rst.txt @@ -0,0 +1,791 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Statistics/plot_histogram_2d.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. _sphx_glr_examples_Statistics_plot_histogram_2d.py: + + +Histogram 2D +------------ + +This 2D histogram class allows efficient updating of histograms, plotting and +saving as HDF5. + +.. GENERATED FROM PYTHON SOURCE LINES 11-21 + +.. code-block:: Python + + import h5py + import geobipy + from geobipy import StatArray + from geobipy import Histogram + import matplotlib.pyplot as plt + import matplotlib.gridspec as gridspec + from geobipy import RectilinearMesh2D + import numpy as np + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 22-23 + +Create some histogram bins in x and y + +.. GENERATED FROM PYTHON SOURCE LINES 23-27 + +.. code-block:: Python + + x = StatArray(np.linspace(-4.0, 4.0, 100), 'Variable 1') + y = StatArray(np.linspace(-4.0, 4.0, 105), 'Variable 2') + + mesh = RectilinearMesh2D(x_edges=x, y_edges=y) + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 28-29 + +Instantiate + +.. GENERATED FROM PYTHON SOURCE LINES 29-31 + +.. code-block:: Python + + H = Histogram(mesh) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 32-33 + +Generate some random numbers + +.. GENERATED FROM PYTHON SOURCE LINES 33-36 + +.. code-block:: Python + + a = np.random.randn(1000000) + b = np.random.randn(1000000) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 37-38 + +Update the histogram counts + +.. GENERATED FROM PYTHON SOURCE LINES 38-40 + +.. code-block:: Python + + H.update(a, b) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 41-59 + +.. code-block:: Python + + plt.figure() + plt.subplot(131) + plt.title("2D Histogram") + _ = H.plot(cmap='gray_r') + plt.subplot(132) + H.pdf.plot(cmap='gray_r') + plt.subplot(133) + H.pmf.plot(cmap='gray_r') + + + plt.figure() + plt.subplot(131) + H.cdf(axis=0).plot() + plt.subplot(132) + H.cdf(axis=1).plot() + plt.subplot(133) + H.cdf().plot() + + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_001.png + :alt: 2D Histogram + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_001.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_002.png + :alt: plot histogram 2d + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_002.png + :class: sphx-glr-multi-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + (, , ) + + + +.. GENERATED FROM PYTHON SOURCE LINES 60-61 + +We can overlay the histogram with its credible intervals + +.. GENERATED FROM PYTHON SOURCE LINES 61-67 + +.. code-block:: Python + + plt.figure() + plt.title("90% credible intervals overlain") + H.pcolor(cmap='gray_r') + H.plotCredibleIntervals(axis=0, percent=95.0) + _ = H.plotCredibleIntervals(axis=1, percent=95.0) + + + + +.. 
image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_003.png + :alt: 90% credible intervals overlain + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_003.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 68-69 + +Generate marginal histograms along an axis + +.. GENERATED FROM PYTHON SOURCE LINES 69-72 + +.. code-block:: Python + + h1 = H.marginalize(axis=0) + h2 = H.marginalize(axis=1) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 73-74 + +Note that the names of the variables are automatically displayed + +.. GENERATED FROM PYTHON SOURCE LINES 74-81 + +.. code-block:: Python + + plt.figure() + plt.suptitle("Marginals along each axis") + plt.subplot(121) + h1.plot() + plt.subplot(122) + _ = h2.plot() + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_004.png + :alt: Marginals along each axis + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_004.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 82-84 + +Create a combination plot with marginal histograms. +sphinx_gallery_thumbnail_number = 3 + +.. GENERATED FROM PYTHON SOURCE LINES 84-102 + +.. code-block:: Python + + plt.figure() + gs = gridspec.GridSpec(5, 5) + gs.update(wspace=0.3, hspace=0.3) + ax = [plt.subplot(gs[1:, :4])] + H.pcolor(colorbar = False) + + ax.append(plt.subplot(gs[:1, :4])) + h = H.marginalize(axis=0).plot() + plt.xlabel(''); plt.ylabel('') + plt.xticks([]); plt.yticks([]) + ax[-1].spines["left"].set_visible(False) + + ax.append(plt.subplot(gs[1:, 4:])) + h = H.marginalize(axis=1).plot(transpose=True) + plt.ylabel(''); plt.xlabel('') + plt.yticks([]); plt.xticks([]) + ax[-1].spines["bottom"].set_visible(False) + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_005.png + :alt: plot histogram 2d + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_005.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 103-104 + +Take the mean or median estimates from the histogram + +.. GENERATED FROM PYTHON SOURCE LINES 104-107 + +.. code-block:: Python + + mean = H.mean() + median = H.median() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 108-124 + +.. code-block:: Python + + plt.figure(figsize=(9.5, 5)) + plt.suptitle("Mean, median, and credible interval overlain") + ax = plt.subplot(121) + H.pcolor(cmap='gray_r', colorbar=False) + H.plotCredibleIntervals(axis=0) + H.plotMedian(axis=0, color='g') + H.plotMean(axis=0, color='y') + plt.legend() + + plt.subplot(122, sharex=ax, sharey=ax) + H.pcolor(cmap='gray_r', colorbar=False) + H.plotCredibleIntervals(axis=1) + H.plotMedian(axis=1, color='g') + H.plotMean(axis=1, color='y') + plt.legend() + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_006.png + :alt: Mean, median, and credible interval overlain + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_006.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 125-126 + +Get the range between credible intervals + +.. GENERATED FROM PYTHON SOURCE LINES 126-128 + +.. code-block:: Python + + H.credible_range(percent=95.0) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + StatArray([3.47474747, 4.44444444, 3.71717172, ..., 3.47474747, + 3.47474747, 4.2020202 ]) + + + +.. 
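The credible range printed above is taken, per column of the 2D histogram, to be the distance between the lower and upper credible bounds of that column's distribution (consistent with the 1D credible_intervals output earlier). A numpy-only sketch of that idea, under this assumption and not the geobipy internals, is:

.. code-block:: Python

    import numpy as np

    rng = np.random.default_rng(0)

    # Stand-in for the histogram's count array: columns of a unit normal.
    counts, x_edges, y_edges = np.histogram2d(rng.normal(size=1_000_000),
                                              rng.normal(size=1_000_000),
                                              bins=(100, 105),
                                              range=((-3.5, 3.5), (-3.5, 3.5)))
    y_centres = 0.5 * (y_edges[:-1] + y_edges[1:])

    def credible_range(column, centres, percent=95.0):
        # Width between the lower and upper credible bounds of one column.
        cdf = np.cumsum(column) / column.sum()
        tail = (100.0 - percent) / 200.0
        lower = centres[np.searchsorted(cdf, tail)]
        upper = centres[np.searchsorted(cdf, 1.0 - tail)]
        return upper - lower

    ranges = np.array([credible_range(col, y_centres) for col in counts])
    print(ranges[:5])  # roughly 4, comparable to the values printed above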
GENERATED FROM PYTHON SOURCE LINES 129-130 + +We can map the credible range to an opacity or transparency + +.. GENERATED FROM PYTHON SOURCE LINES 130-148 + +.. code-block:: Python + + H.opacity() + H.transparency() + + # H.animate(0, 'test.mp4') + + import h5py + with h5py.File('h2d.h5', 'w') as f: + H.toHdf(f, 'h2d') + + with h5py.File('h2d.h5', 'r') as f: + H1 = Histogram.fromHdf(f['h2d']) + + plt.close('all') + + x = StatArray(5.0 + np.linspace(-4.0, 4.0, 100), 'Variable 1') + y = StatArray(10.0 + np.linspace(-4.0, 4.0, 105), 'Variable 2') + + mesh = RectilinearMesh2D(x_edges=x, x_relative_to=5.0, y_edges=y, y_relative_to=10.0) + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 149-150 + +Instantiate + +.. GENERATED FROM PYTHON SOURCE LINES 150-152 + +.. code-block:: Python + + H = Histogram(mesh) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 153-154 + +Generate some random numbers + +.. GENERATED FROM PYTHON SOURCE LINES 154-157 + +.. code-block:: Python + + a = np.random.randn(1000000) + 5.0 + b = np.random.randn(1000000) + 10.0 + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 158-159 + +Update the histogram counts + +.. GENERATED FROM PYTHON SOURCE LINES 159-161 + +.. code-block:: Python + + H.update(a, b) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 162-179 + +.. code-block:: Python + + plt.figure() + plt.subplot(131) + plt.title("2D Histogram") + _ = H.plot(cmap='gray_r') + plt.subplot(132) + H.pdf.plot(cmap='gray_r') + plt.subplot(133) + H.pmf.plot(cmap='gray_r') + + plt.figure() + plt.subplot(131) + H.cdf(axis=0).plot() + plt.subplot(132) + H.cdf(axis=1).plot() + plt.subplot(133) + H.cdf().plot() + + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_007.png + :alt: 2D Histogram + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_007.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_008.png + :alt: plot histogram 2d + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_008.png + :class: sphx-glr-multi-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + (, , ) + + + +.. GENERATED FROM PYTHON SOURCE LINES 180-181 + +We can overlay the histogram with its credible intervals + +.. GENERATED FROM PYTHON SOURCE LINES 181-191 + +.. code-block:: Python + + plt.figure() + plt.title("90% credible intervals overlain") + H.pcolor(cmap='gray_r') + H.plotCredibleIntervals(axis=0, percent=95.0) + _ = H.plotCredibleIntervals(axis=1, percent=95.0) + + # Generate marginal histograms along an axis + h1 = H.marginalize(axis=0) + h2 = H.marginalize(axis=1) + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_009.png + :alt: 90% credible intervals overlain + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_009.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 192-193 + +Note that the names of the variables are automatically displayed + +.. GENERATED FROM PYTHON SOURCE LINES 193-200 + +.. code-block:: Python + + plt.figure() + plt.suptitle("Marginals along each axis") + plt.subplot(121) + h1.plot() + plt.subplot(122) + _ = h2.plot() + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_010.png + :alt: Marginals along each axis + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_010.png + :class: sphx-glr-single-img + + + + + +.. 
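A common convention for the opacity and transparency mapping shown earlier, and only an assumption about the behaviour rather than the exact formula geobipy uses, is to rescale the credible range onto [0, 1] and invert it, so tightly constrained cells plot opaque and poorly constrained cells fade out; transparency is then one minus the opacity:

.. code-block:: Python

    import numpy as np

    def opacity_from_range(credible_range):
        # Rescale to [0, 1] and invert: a narrow credible range -> opaque.
        r = np.asarray(credible_range, dtype=float)
        scaled = (r - r.min()) / (r.max() - r.min())
        return 1.0 - scaled

    # Values like those returned by H.credible_range(percent=95.0) above.
    ranges = np.array([3.47, 4.44, 3.72, 3.47, 4.20])
    opacity = opacity_from_range(ranges)
    transparency = 1.0 - opacity
    print(opacity)  # 1.0 for the best constrained cell, 0.0 for the worst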
GENERATED FROM PYTHON SOURCE LINES 201-203 + +Create a combination plot with marginal histograms. +sphinx_gallery_thumbnail_number = 3 + +.. GENERATED FROM PYTHON SOURCE LINES 203-221 + +.. code-block:: Python + + plt.figure() + gs = gridspec.GridSpec(5, 5) + gs.update(wspace=0.3, hspace=0.3) + ax = [plt.subplot(gs[1:, :4])] + H.pcolor(colorbar = False) + + ax.append(plt.subplot(gs[:1, :4])) + h = H.marginalize(axis=0).plot() + plt.xlabel(''); plt.ylabel('') + plt.xticks([]); plt.yticks([]) + ax[-1].spines["left"].set_visible(False) + + ax.append(plt.subplot(gs[1:, 4:])) + h = H.marginalize(axis=1).plot(transpose=True) + plt.ylabel(''); plt.xlabel('') + plt.yticks([]); plt.xticks([]) + ax[-1].spines["bottom"].set_visible(False) + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_011.png + :alt: plot histogram 2d + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_011.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 222-223 + +Take the mean or median estimates from the histogram + +.. GENERATED FROM PYTHON SOURCE LINES 223-226 + +.. code-block:: Python + + mean = H.mean() + median = H.median() + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 227-243 + +.. code-block:: Python + + plt.figure(figsize=(9.5, 5)) + plt.suptitle("Mean, median, and credible interval overlain") + ax = plt.subplot(121) + H.pcolor(cmap='gray_r', colorbar=False) + H.plotCredibleIntervals(axis=0) + H.plotMedian(axis=0, color='g') + H.plotMean(axis=0, color='y') + plt.legend() + + plt.subplot(122, sharex=ax, sharey=ax) + H.pcolor(cmap='gray_r', colorbar=False) + H.plotCredibleIntervals(axis=1) + H.plotMedian(axis=1, color='g') + H.plotMean(axis=1, color='y') + plt.legend() + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_012.png + :alt: Mean, median, and credible interval overlain + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_012.png + :class: sphx-glr-single-img + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 244-245 + +Get the range between credible intervals + +.. GENERATED FROM PYTHON SOURCE LINES 245-247 + +.. code-block:: Python + + H.credible_range(percent=95.0) + + + + + +.. rst-class:: sphx-glr-script-out + + .. code-block:: none + + + StatArray([4.2020202 , 4.44444444, 3.31313131, ..., 2.50505051, + 4.52525253, 4.12121212]) + + + +.. GENERATED FROM PYTHON SOURCE LINES 248-249 + +We can map the credible range to an opacity or transparency + +.. GENERATED FROM PYTHON SOURCE LINES 249-306 + +.. 
code-block:: Python + + H.opacity() + H.transparency() + + # # H.animate(0, 'test.mp4') + + with h5py.File('h2d.h5', 'w') as f: + H.toHdf(f, 'h2d') + + with h5py.File('h2d.h5', 'r') as f: + H1 = Histogram.fromHdf(f['h2d']) + + plt.figure(figsize=(9.5, 5)) + plt.suptitle("Mean, median, and credible interval overlain") + ax = plt.subplot(121) + H1.pcolor(cmap='gray_r', colorbar=False) + H1.plotCredibleIntervals(axis=0) + H1.plotMedian(axis=0, color='g') + H1.plotMean(axis=0, color='y') + plt.legend() + + plt.subplot(122, sharex=ax, sharey=ax) + H1.pcolor(cmap='gray_r', colorbar=False) + H1.plotCredibleIntervals(axis=1) + H1.plotMedian(axis=1, color='g') + H1.plotMean(axis=1, color='y') + plt.legend() + + with h5py.File('h2d.h5', 'w') as f: + H.createHdf(f, 'h2d', add_axis=StatArray(np.arange(3.0), name='Easting', units="m")) + for i in range(3): + H.writeHdf(f, 'h2d', index=i) + + with h5py.File('h2d.h5', 'r') as f: + H1 = Histogram.fromHdf(f['h2d'], index=0) + + plt.figure(figsize=(9.5, 5)) + plt.suptitle("Mean, median, and credible interval overlain") + ax = plt.subplot(121) + H1.pcolor(cmap='gray_r', colorbar=False) + H1.plotCredibleIntervals(axis=0) + H1.plotMedian(axis=0, color='g') + H1.plotMean(axis=0, color='y') + plt.legend() + + plt.subplot(122, sharex=ax, sharey=ax) + H1.pcolor(cmap='gray_r', colorbar=False) + H1.plotCredibleIntervals(axis=1) + H1.plotMedian(axis=1, color='g') + H1.plotMean(axis=1, color='y') + plt.legend() + + with h5py.File('h2d.h5', 'r') as f: + H1 = Histogram.fromHdf(f['h2d']) + + # H1.pyvista_mesh().save('h3d_read.vtk') + + plt.show() + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_013.png + :alt: Mean, median, and credible interval overlain + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_013.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_2d_014.png + :alt: Mean, median, and credible interval overlain + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_2d_014.png + :class: sphx-glr-multi-img + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 5.012 seconds) + + +.. _sphx_glr_download_examples_Statistics_plot_histogram_2d.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_histogram_2d.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_histogram_2d.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_histogram_2d.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Statistics/plot_histogram_3d.rst.txt b/docs/_sources/examples/Statistics/plot_histogram_3d.rst.txt new file mode 100644 index 00000000..5121aeae --- /dev/null +++ b/docs/_sources/examples/Statistics/plot_histogram_3d.rst.txt @@ -0,0 +1,467 @@ + +.. DO NOT EDIT. +.. THIS FILE WAS AUTOMATICALLY GENERATED BY SPHINX-GALLERY. +.. TO MAKE CHANGES, EDIT THE SOURCE PYTHON FILE: +.. "examples/Statistics/plot_histogram_3d.py" +.. LINE NUMBERS ARE GIVEN BELOW. + +.. only:: html + + .. note:: + :class: sphx-glr-download-link-note + + :ref:`Go to the end ` + to download the full example code. + +.. rst-class:: sphx-glr-example-title + +.. 
_sphx_glr_examples_Statistics_plot_histogram_3d.py: + + +Histogram 3D +------------ + +This 3D histogram class allows efficient updating of histograms, plotting and +saving as HDF5. + +.. GENERATED FROM PYTHON SOURCE LINES 11-19 + +.. code-block:: Python + + import geobipy + from geobipy import StatArray + from geobipy import Histogram + import matplotlib.pyplot as plt + from geobipy import RectilinearMesh3D + import numpy as np + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 20-21 + +Create some histogram bins in x and y + +.. GENERATED FROM PYTHON SOURCE LINES 21-27 + +.. code-block:: Python + + x = StatArray(np.linspace(-4.0, 4.0, 11), 'Variable 1') + y = StatArray(np.linspace(-4.0, 4.0, 21), 'Variable 2') + z = StatArray(np.linspace(-4.0, 4.0, 31), 'Variable 3') + + mesh = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 28-29 + +Instantiate + +.. GENERATED FROM PYTHON SOURCE LINES 29-31 + +.. code-block:: Python + + H = Histogram(mesh=mesh) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 32-33 + +Generate some random numbers + +.. GENERATED FROM PYTHON SOURCE LINES 33-39 + +.. code-block:: Python + + a = np.random.randn(100000) + b = np.random.randn(100000) + c = np.random.randn(100000) + # x = np.asarray([a, b, c]) + + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 40-41 + +Update the histogram counts + +.. GENERATED FROM PYTHON SOURCE LINES 41-43 + +.. code-block:: Python + + H.update(a, b, c) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 44-51 + +.. code-block:: Python + + plt.figure() + plt.suptitle("Slice half way along each dimension") + for axis in range(3): + plt.subplot(1, 3, axis+1) + s = [5 if i == axis else np.s_[:] for i in range(3)] + _ = H[tuple(s)].pcolor(cmap='gray_r') + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_3d_001.png + :alt: Slice half way along each dimension + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_3d_001.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 52-53 + +Generate marginal histograms along an axis + +.. GENERATED FROM PYTHON SOURCE LINES 53-60 + +.. code-block:: Python + + plt.figure() + plt.suptitle("Marginals along each axis") + for axis in range(3): + plt.subplot(1, 3, axis+1) + _ = H.marginalize(axis=axis).plot() + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_3d_002.png + :alt: Marginals along each axis + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_3d_002.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 61-62 + +Take the mean estimate from the histogram + +.. GENERATED FROM PYTHON SOURCE LINES 62-68 + +.. code-block:: Python + + plt.figure() + plt.suptitle("Mean along each axis") + for axis in range(3): + plt.subplot(1, 3, axis+1) + _ = H.mean(axis=axis).pcolor() + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_3d_003.png + :alt: Mean along each axis + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_3d_003.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 69-70 + +Take the median estimate from the histogram + +.. GENERATED FROM PYTHON SOURCE LINES 70-100 + +.. 
code-block:: Python + + plt.figure() + plt.suptitle("Median along each axis") + for axis in range(3): + plt.subplot(1, 3, axis+1) + _ = H.median(axis=axis).pcolor() + + # #%% + # # We can map the credible range to an opacity or transparency + # H.opacity() + # H.transparency() + + H.animate(0, 'test.mp4') + + H.to_vtk('h3d.vtk') + + + + + # Create some histogram bins in x and y + xx, yy = np.meshgrid(mesh.z.centres, mesh.y.centres) + x_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "x_re") + + xx, yy = np.meshgrid(mesh.z.centres, mesh.x.centres) + y_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "y_re") + + xx, yy = np.meshgrid(mesh.y.centres, mesh.x.centres) + z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re") + + mesh = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, y_relative_to=y_re, z_edges=z, z_relative_to=z_re) + + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_3d_004.png + :alt: Median along each axis + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_3d_004.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_3d_005.png + :alt: 3.60 + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_3d_005.png + :class: sphx-glr-multi-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 101-102 + +Instantiate + +.. GENERATED FROM PYTHON SOURCE LINES 102-104 + +.. code-block:: Python + + H = Histogram(mesh=mesh) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 105-106 + +Generate some random numbers + +.. GENERATED FROM PYTHON SOURCE LINES 106-111 + +.. code-block:: Python + + a = np.random.randn(100000) + b = np.random.randn(100000) + c = np.random.randn(100000) + # x = np.asarray([a, b, c]) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 112-113 + +Update the histogram counts + +.. GENERATED FROM PYTHON SOURCE LINES 113-115 + +.. code-block:: Python + + H.update(a, b, c) + + + + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 116-123 + +.. code-block:: Python + + plt.figure() + plt.suptitle("Slice half way along each dimension") + for axis in range(3): + plt.subplot(1, 3, axis+1) + s = [5 if i == axis else np.s_[:] for i in range(3)] + _ = H[tuple(s)].pcolor(cmap='gray_r') + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_3d_006.png + :alt: Slice half way along each dimension + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_3d_006.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 124-125 + +Generate marginal histograms along an axis + +.. GENERATED FROM PYTHON SOURCE LINES 125-132 + +.. code-block:: Python + + plt.figure() + plt.suptitle("Marginals along each axis") + for axis in range(3): + plt.subplot(1, 3, axis+1) + _ = H.marginalize(axis=axis).plot() + + + + + +.. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_3d_007.png + :alt: Marginals along each axis + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_3d_007.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 133-134 + +Take the mean estimate from the histogram + +.. GENERATED FROM PYTHON SOURCE LINES 134-140 + +.. code-block:: Python + + plt.figure() + plt.suptitle("Mean along each axis") + for axis in range(3): + plt.subplot(1, 3, axis+1) + _ = H.mean(axis=axis).pcolor() + + + + +.. 
image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_3d_008.png + :alt: Mean along each axis + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_3d_008.png + :class: sphx-glr-single-img + + + + + +.. GENERATED FROM PYTHON SOURCE LINES 141-142 + +Take the median estimate from the histogram + +.. GENERATED FROM PYTHON SOURCE LINES 142-158 + +.. code-block:: Python + + plt.figure() + plt.suptitle("Median along each axis") + for axis in range(3): + plt.subplot(1, 3, axis+1) + _ = H.median(axis=axis).pcolor() + + # #%% + # # We can map the credible range to an opacity or transparency + # H.opacity() + # H.transparency() + + H.animate(0, 'test.mp4') + + plt.show() + + # H.to_vtk('h3d.vtk') + + + +.. rst-class:: sphx-glr-horizontal + + + * + + .. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_3d_009.png + :alt: Median along each axis + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_3d_009.png + :class: sphx-glr-multi-img + + * + + .. image-sg:: /examples/Statistics/images/sphx_glr_plot_histogram_3d_010.png + :alt: 3.60 + :srcset: /examples/Statistics/images/sphx_glr_plot_histogram_3d_010.png + :class: sphx-glr-multi-img + + + + + + +.. rst-class:: sphx-glr-timing + + **Total running time of the script:** (0 minutes 3.875 seconds) + + +.. _sphx_glr_download_examples_Statistics_plot_histogram_3d.py: + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-example + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download Jupyter notebook: plot_histogram_3d.ipynb ` + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download Python source code: plot_histogram_3d.py ` + + .. container:: sphx-glr-download sphx-glr-download-zip + + :download:`Download zipped: plot_histogram_3d.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/Statistics/readme.rst.txt b/docs/_sources/examples/Statistics/readme.rst.txt new file mode 100644 index 00000000..5f81bd2e --- /dev/null +++ b/docs/_sources/examples/Statistics/readme.rst.txt @@ -0,0 +1,2 @@ +Statistics +========== \ No newline at end of file diff --git a/docs/_sources/examples/Statistics/sg_execution_times.rst.txt b/docs/_sources/examples/Statistics/sg_execution_times.rst.txt new file mode 100644 index 00000000..7beb9fff --- /dev/null +++ b/docs/_sources/examples/Statistics/sg_execution_times.rst.txt @@ -0,0 +1,49 @@ + +:orphan: + +.. _sphx_glr_examples_Statistics_sg_execution_times: + + +Computation times +================= +**00:12.629** total execution time for 5 files **from examples/Statistics**: + +.. container:: + + .. raw:: html + + + + + + + + .. 
list-table:: + :header-rows: 1 + :class: table table-striped sg-datatable + + * - Example + - Time + - Mem (MB) + * - :ref:`sphx_glr_examples_Statistics_plot_histogram_2d.py` (``plot_histogram_2d.py``) + - 00:05.012 + - 0.0 + * - :ref:`sphx_glr_examples_Statistics_plot_histogram_3d.py` (``plot_histogram_3d.py``) + - 00:03.875 + - 0.0 + * - :ref:`sphx_glr_examples_Statistics_plot_StatArray.py` (``plot_StatArray.py``) + - 00:02.767 + - 0.0 + * - :ref:`sphx_glr_examples_Statistics_plot_histogram_1d.py` (``plot_histogram_1d.py``) + - 00:00.972 + - 0.0 + * - :ref:`sphx_glr_examples_Statistics_plot_DataArray.py` (``plot_DataArray.py``) + - 00:00.003 + - 0.0 diff --git a/docs/_sources/examples/index.rst.txt b/docs/_sources/examples/index.rst.txt new file mode 100644 index 00000000..c14511d3 --- /dev/null +++ b/docs/_sources/examples/index.rst.txt @@ -0,0 +1,705 @@ +:orphan: + +######## +Examples +######## + + +.. raw:: html + +
+ +.. thumbnail-parent-div-open + +.. thumbnail-parent-div-close + +.. raw:: html + +
+ +Data +==== + + +.. raw:: html + +
+ +.. thumbnail-parent-div-open + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Data/images/thumb/sphx_glr_plot_pointcloud3d_thumb.png + :alt: + + :ref:`sphx_glr_examples_Data_plot_pointcloud3d.py` + +.. raw:: html + +
3D Point Cloud class
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Data/images/thumb/sphx_glr_plot_frequency_dataset_thumb.png + :alt: + + :ref:`sphx_glr_examples_Data_plot_frequency_dataset.py` + +.. raw:: html + +
Frequency domain dataset
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Data/images/thumb/sphx_glr_plot_skytem_dataset_thumb.png + :alt: + + :ref:`sphx_glr_examples_Data_plot_skytem_dataset.py` + +.. raw:: html + +
Skytem dataset
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Data/images/thumb/sphx_glr_plot_tempest_dataset_thumb.png + :alt: + + :ref:`sphx_glr_examples_Data_plot_tempest_dataset.py` + +.. raw:: html + +
Tempest dataset
+
+ + +.. thumbnail-parent-div-close + +.. raw:: html + +
+ + +.. toctree:: + :hidden: + + /examples/Data/plot_pointcloud3d + /examples/Data/plot_frequency_dataset + /examples/Data/plot_skytem_dataset + /examples/Data/plot_tempest_dataset + +Datapoints +========== + + +.. raw:: html + +
+ +.. thumbnail-parent-div-open + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Datapoints/images/thumb/sphx_glr_plot_resolve_datapoint_thumb.png + :alt: + + :ref:`sphx_glr_examples_Datapoints_plot_resolve_datapoint.py` + +.. raw:: html + +
Frequency domain datapoint
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Datapoints/images/thumb/sphx_glr_plot_skytem_datapoint_thumb.png + :alt: + + :ref:`sphx_glr_examples_Datapoints_plot_skytem_datapoint.py` + +.. raw:: html + +
Skytem Datapoint Class
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Datapoints/images/thumb/sphx_glr_plot_tempest_datapoint_thumb.png + :alt: + + :ref:`sphx_glr_examples_Datapoints_plot_tempest_datapoint.py` + +.. raw:: html + +
Tempest Datapoint Class
+
+ + +.. thumbnail-parent-div-close + +.. raw:: html + +
+ + +.. toctree:: + :hidden: + + /examples/Datapoints/plot_resolve_datapoint + /examples/Datapoints/plot_skytem_datapoint + /examples/Datapoints/plot_tempest_datapoint + +Distributions +============= + + +.. raw:: html + +
+ +.. thumbnail-parent-div-open + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Distributions/images/thumb/sphx_glr_plot_distributions_thumb.png + :alt: + + :ref:`sphx_glr_examples_Distributions_plot_distributions.py` + +.. raw:: html + +
Distribution Class
+
+ + +.. thumbnail-parent-div-close + +.. raw:: html + +
+ + +.. toctree:: + :hidden: + + /examples/Distributions/plot_distributions + +HDF 5 +===== + + +.. raw:: html + +
+ +.. thumbnail-parent-div-open + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/HDF5/images/thumb/sphx_glr_hdf5_thumb.png + :alt: + + :ref:`sphx_glr_examples_HDF5_hdf5.py` + +.. raw:: html + +
Using HDF5 within GeoBIPy
+
+ + +.. thumbnail-parent-div-close + +.. raw:: html + +
+ + +.. toctree:: + :hidden: + + /examples/HDF5/hdf5 + +1D Inference +============ + +There are a couple of ways to run an inference using geobipy, the first is via command line using + +.. code-block:: bash + + geobipy skytem_options.py + +The other is with a python script similar to the examples in this folder. +In both cases, you will need to write an options file (also shown in these examples) + + + +.. raw:: html + +
+ +.. thumbnail-parent-div-open + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Inference_1D/images/thumb/sphx_glr_plot_inference_1d_resolve_thumb.png + :alt: + + :ref:`sphx_glr_examples_Inference_1D_plot_inference_1d_resolve.py` + +.. raw:: html + +
Running GeoBIPy to invert Resolve data
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Inference_1D/images/thumb/sphx_glr_plot_inference_1d_skytem_thumb.png + :alt: + + :ref:`sphx_glr_examples_Inference_1D_plot_inference_1d_skytem.py` + +.. raw:: html + +
Running GeoBIPy to invert Skytem data
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Inference_1D/images/thumb/sphx_glr_plot_inference_1d_tempest_thumb.png + :alt: + + :ref:`sphx_glr_examples_Inference_1D_plot_inference_1d_tempest.py` + +.. raw:: html + +
Running GeoBIPy to invert Tempest data
+
+ + +.. thumbnail-parent-div-close + +.. raw:: html + +
+ + +.. toctree:: + :hidden: + + /examples/Inference_1D/plot_inference_1d_resolve + /examples/Inference_1D/plot_inference_1d_skytem + /examples/Inference_1D/plot_inference_1d_tempest + +2D Inference +============ + + +.. raw:: html + +
+ +.. thumbnail-parent-div-open + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Inference_2D/images/thumb/sphx_glr_plot_inference_2d_resolve_thumb.png + :alt: + + :ref:`sphx_glr_examples_Inference_2D_plot_inference_2d_resolve.py` + +.. raw:: html + +
2D Posterior analysis of Resolve inference
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Inference_2D/images/thumb/sphx_glr_plot_inference_2d_skytem_thumb.png + :alt: + + :ref:`sphx_glr_examples_Inference_2D_plot_inference_2d_skytem.py` + +.. raw:: html + +
2D Posterior analysis of Skytem inference
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Inference_2D/images/thumb/sphx_glr_plot_inference_2d_tempest_thumb.png + :alt: + + :ref:`sphx_glr_examples_Inference_2D_plot_inference_2d_tempest.py` + +.. raw:: html + +
2D Posterior analysis of Tempest inference
+
+ + +.. thumbnail-parent-div-close + +.. raw:: html + +
+ + +.. toctree:: + :hidden: + + /examples/Inference_2D/plot_inference_2d_resolve + /examples/Inference_2D/plot_inference_2d_skytem + /examples/Inference_2D/plot_inference_2d_tempest + +Meshes +====== + + +.. raw:: html + +
+ +.. thumbnail-parent-div-open + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Meshes/images/thumb/sphx_glr_plot_rectilinear_mesh_1d_thumb.png + :alt: + + :ref:`sphx_glr_examples_Meshes_plot_rectilinear_mesh_1d.py` + +.. raw:: html + +
1D Rectilinear Mesh
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Meshes/images/thumb/sphx_glr_plot_rectilinear_mesh_2d_thumb.png + :alt: + + :ref:`sphx_glr_examples_Meshes_plot_rectilinear_mesh_2d.py` + +.. raw:: html + +
2D Rectilinear Mesh
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Meshes/images/thumb/sphx_glr_plot_rectilinear_mesh_3d_thumb.png + :alt: + + :ref:`sphx_glr_examples_Meshes_plot_rectilinear_mesh_3d.py` + +.. raw:: html + +
3D Rectilinear Mesh
+
+ + +.. thumbnail-parent-div-close + +.. raw:: html + +
+ + +.. toctree:: + :hidden: + + /examples/Meshes/plot_rectilinear_mesh_1d + /examples/Meshes/plot_rectilinear_mesh_2d + /examples/Meshes/plot_rectilinear_mesh_3d + +Models +====== + + +.. raw:: html + +
+ +.. thumbnail-parent-div-open + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Models/images/thumb/sphx_glr_plot_model_1d_thumb.png + :alt: + + :ref:`sphx_glr_examples_Models_plot_model_1d.py` + +.. raw:: html + +
1D Model with an infinite halfspace
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Models/images/thumb/sphx_glr_plot_model_2d_thumb.png + :alt: + + :ref:`sphx_glr_examples_Models_plot_model_2d.py` + +.. raw:: html + +
2D Rectilinear Model
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Models/images/thumb/sphx_glr_plot_model_3d_thumb.png + :alt: + + :ref:`sphx_glr_examples_Models_plot_model_3d.py` + +.. raw:: html + +
3D Rectilinear Model
+
+ + +.. thumbnail-parent-div-close + +.. raw:: html + +
+ + +.. toctree:: + :hidden: + + /examples/Models/plot_model_1d + /examples/Models/plot_model_2d + /examples/Models/plot_model_3d + +Statistics +========== + + +.. raw:: html + +
+ +.. thumbnail-parent-div-open + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Statistics/images/thumb/sphx_glr_plot_DataArray_thumb.png + :alt: + + :ref:`sphx_glr_examples_Statistics_plot_DataArray.py` + +.. raw:: html + +
DataArray Class
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Statistics/images/thumb/sphx_glr_plot_histogram_1d_thumb.png + :alt: + + :ref:`sphx_glr_examples_Statistics_plot_histogram_1d.py` + +.. raw:: html + +
Histogram 1D
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Statistics/images/thumb/sphx_glr_plot_histogram_2d_thumb.png + :alt: + + :ref:`sphx_glr_examples_Statistics_plot_histogram_2d.py` + +.. raw:: html + +
Histogram 2D
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Statistics/images/thumb/sphx_glr_plot_histogram_3d_thumb.png + :alt: + + :ref:`sphx_glr_examples_Statistics_plot_histogram_3d.py` + +.. raw:: html + +
Histogram 3D
+
+ + +.. raw:: html + +
+ +.. only:: html + + .. image:: /examples/Statistics/images/thumb/sphx_glr_plot_StatArray_thumb.png + :alt: + + :ref:`sphx_glr_examples_Statistics_plot_StatArray.py` + +.. raw:: html + +
StatArray Class
+
+ + +.. thumbnail-parent-div-close + +.. raw:: html + +
+ + +.. toctree:: + :hidden: + + /examples/Statistics/plot_DataArray + /examples/Statistics/plot_histogram_1d + /examples/Statistics/plot_histogram_2d + /examples/Statistics/plot_histogram_3d + /examples/Statistics/plot_StatArray + + +.. only:: html + + .. container:: sphx-glr-footer sphx-glr-footer-gallery + + .. container:: sphx-glr-download sphx-glr-download-python + + :download:`Download all examples in Python source code: examples_python.zip ` + + .. container:: sphx-glr-download sphx-glr-download-jupyter + + :download:`Download all examples in Jupyter notebooks: examples_jupyter.zip ` + + +.. only:: html + + .. rst-class:: sphx-glr-signature + + `Gallery generated by Sphinx-Gallery `_ diff --git a/docs/_sources/examples/readme.rst.txt b/docs/_sources/examples/readme.rst.txt new file mode 100644 index 00000000..0db3c1d1 --- /dev/null +++ b/docs/_sources/examples/readme.rst.txt @@ -0,0 +1,3 @@ +######## +Examples +######## \ No newline at end of file diff --git a/docs/_sources/examples/sg_execution_times.rst.txt b/docs/_sources/examples/sg_execution_times.rst.txt new file mode 100644 index 00000000..f7a0c03b --- /dev/null +++ b/docs/_sources/examples/sg_execution_times.rst.txt @@ -0,0 +1,37 @@ + +:orphan: + +.. _sphx_glr_examples_sg_execution_times: + + +Computation times +================= +**00:00.000** total execution time for 0 files **from examples**: + +.. container:: + + .. raw:: html + + + + + + + + .. list-table:: + :header-rows: 1 + :class: table table-striped sg-datatable + + * - Example + - Time + - Mem (MB) + * - N/A + - N/A + - N/A diff --git a/docs/_sources/index.rst.txt b/docs/_sources/index.rst.txt index d05d7185..b8d482f4 100644 --- a/docs/_sources/index.rst.txt +++ b/docs/_sources/index.rst.txt @@ -10,4 +10,21 @@ Application outside of these data types is in development. Currently there are two types of data that we have implemented; frequency domain electromagnetic data, and time domain electromagnetic data. The package comes with a frequency domain forward modeller, but it does not come with a time domain forward modeller. +See the section :ref:`Installing_time_domain_forward_modeller` for more information. + +Using GeoBIPy on Yeti +~~~~~~~~~~~~~~~~~~~~~ + +There is no need to install GeoBIPy on Yeti. +Simply type "module load python/geobipy" for the serial version of the code, mainly used for plotting results, +or "module load python/pGeobipy" for a parallel enabled version. + +`Codebase is here! `_ + +.. toctree:: + :maxdepth: 2 + + content/getting_started/getting_started + content/api/api + examples/index diff --git a/docs/_sources/sg_execution_times.rst.txt b/docs/_sources/sg_execution_times.rst.txt new file mode 100644 index 00000000..e35f03ac --- /dev/null +++ b/docs/_sources/sg_execution_times.rst.txt @@ -0,0 +1,112 @@ + +:orphan: + +.. _sphx_glr_sg_execution_times: + + +Computation times +================= +**06:08.049** total execution time for 26 files **from all galleries**: + +.. container:: + + .. raw:: html + + + + + + + + .. 
list-table:: + :header-rows: 1 + :class: table table-striped sg-datatable + + * - Example + - Time + - Mem (MB) + * - :ref:`sphx_glr_examples_Inference_1D_plot_inference_1d_tempest.py` (``examples/Inference_1D/plot_inference_1d_tempest.py``) + - 01:56.531 + - 0.0 + * - :ref:`sphx_glr_examples_Inference_1D_plot_inference_1d_skytem.py` (``examples/Inference_1D/plot_inference_1d_skytem.py``) + - 01:37.169 + - 0.0 + * - :ref:`sphx_glr_examples_Inference_1D_plot_inference_1d_resolve.py` (``examples/Inference_1D/plot_inference_1d_resolve.py``) + - 00:29.449 + - 0.0 + * - :ref:`sphx_glr_examples_Inference_2D_plot_inference_2d_resolve.py` (``examples/Inference_2D/plot_inference_2d_resolve.py``) + - 00:21.524 + - 0.0 + * - :ref:`sphx_glr_examples_Inference_2D_plot_inference_2d_tempest.py` (``examples/Inference_2D/plot_inference_2d_tempest.py``) + - 00:19.999 + - 0.0 + * - :ref:`sphx_glr_examples_Data_plot_pointcloud3d.py` (``examples/Data/plot_pointcloud3d.py``) + - 00:19.063 + - 0.0 + * - :ref:`sphx_glr_examples_Inference_2D_plot_inference_2d_skytem.py` (``examples/Inference_2D/plot_inference_2d_skytem.py``) + - 00:18.965 + - 0.0 + * - :ref:`sphx_glr_examples_Datapoints_plot_resolve_datapoint.py` (``examples/Datapoints/plot_resolve_datapoint.py``) + - 00:07.271 + - 0.0 + * - :ref:`sphx_glr_examples_Statistics_plot_histogram_2d.py` (``examples/Statistics/plot_histogram_2d.py``) + - 00:05.012 + - 0.0 + * - :ref:`sphx_glr_examples_Meshes_plot_rectilinear_mesh_1d.py` (``examples/Meshes/plot_rectilinear_mesh_1d.py``) + - 00:04.585 + - 0.0 + * - :ref:`sphx_glr_examples_Data_plot_tempest_dataset.py` (``examples/Data/plot_tempest_dataset.py``) + - 00:04.125 + - 0.0 + * - :ref:`sphx_glr_examples_Statistics_plot_histogram_3d.py` (``examples/Statistics/plot_histogram_3d.py``) + - 00:03.875 + - 0.0 + * - :ref:`sphx_glr_examples_Data_plot_frequency_dataset.py` (``examples/Data/plot_frequency_dataset.py``) + - 00:03.257 + - 0.0 + * - :ref:`sphx_glr_examples_Models_plot_model_1d.py` (``examples/Models/plot_model_1d.py``) + - 00:03.051 + - 0.0 + * - :ref:`sphx_glr_examples_Statistics_plot_StatArray.py` (``examples/Statistics/plot_StatArray.py``) + - 00:02.767 + - 0.0 + * - :ref:`sphx_glr_examples_Datapoints_plot_skytem_datapoint.py` (``examples/Datapoints/plot_skytem_datapoint.py``) + - 00:02.306 + - 0.0 + * - :ref:`sphx_glr_examples_Data_plot_skytem_dataset.py` (``examples/Data/plot_skytem_dataset.py``) + - 00:02.295 + - 0.0 + * - :ref:`sphx_glr_examples_Meshes_plot_rectilinear_mesh_2d.py` (``examples/Meshes/plot_rectilinear_mesh_2d.py``) + - 00:01.747 + - 0.0 + * - :ref:`sphx_glr_examples_Datapoints_plot_tempest_datapoint.py` (``examples/Datapoints/plot_tempest_datapoint.py``) + - 00:01.653 + - 0.0 + * - :ref:`sphx_glr_examples_Meshes_plot_rectilinear_mesh_3d.py` (``examples/Meshes/plot_rectilinear_mesh_3d.py``) + - 00:01.378 + - 0.0 + * - :ref:`sphx_glr_examples_Statistics_plot_histogram_1d.py` (``examples/Statistics/plot_histogram_1d.py``) + - 00:00.972 + - 0.0 + * - :ref:`sphx_glr_examples_Models_plot_model_3d.py` (``examples/Models/plot_model_3d.py``) + - 00:00.788 + - 0.0 + * - :ref:`sphx_glr_examples_Models_plot_model_2d.py` (``examples/Models/plot_model_2d.py``) + - 00:00.212 + - 0.0 + * - :ref:`sphx_glr_examples_Distributions_plot_distributions.py` (``examples/Distributions/plot_distributions.py``) + - 00:00.050 + - 0.0 + * - :ref:`sphx_glr_examples_Statistics_plot_DataArray.py` (``examples/Statistics/plot_DataArray.py``) + - 00:00.003 + - 0.0 + * - :ref:`sphx_glr_examples_HDF5_hdf5.py` 
(``examples/HDF5/hdf5.py``) + - 00:00.000 + - 0.0 diff --git a/docs/_static/_sphinx_javascript_frameworks_compat.js b/docs/_static/_sphinx_javascript_frameworks_compat.js new file mode 100644 index 00000000..81415803 --- /dev/null +++ b/docs/_static/_sphinx_javascript_frameworks_compat.js @@ -0,0 +1,123 @@ +/* Compatability shim for jQuery and underscores.js. + * + * Copyright Sphinx contributors + * Released under the two clause BSD licence + */ + +/** + * small helper function to urldecode strings + * + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent#Decoding_query_parameters_from_a_URL + */ +jQuery.urldecode = function(x) { + if (!x) { + return x + } + return decodeURIComponent(x.replace(/\+/g, ' ')); +}; + +/** + * small helper function to urlencode strings + */ +jQuery.urlencode = encodeURIComponent; + +/** + * This function returns the parsed url parameters of the + * current request. Multiple values per key are supported, + * it will always return arrays of strings for the value parts. + */ +jQuery.getQueryParameters = function(s) { + if (typeof s === 'undefined') + s = document.location.search; + var parts = s.substr(s.indexOf('?') + 1).split('&'); + var result = {}; + for (var i = 0; i < parts.length; i++) { + var tmp = parts[i].split('=', 2); + var key = jQuery.urldecode(tmp[0]); + var value = jQuery.urldecode(tmp[1]); + if (key in result) + result[key].push(value); + else + result[key] = [value]; + } + return result; +}; + +/** + * highlight a given string on a jquery object by wrapping it in + * span elements with the given class name. + */ +jQuery.fn.highlightText = function(text, className) { + function highlight(node, addItems) { + if (node.nodeType === 3) { + var val = node.nodeValue; + var pos = val.toLowerCase().indexOf(text); + if (pos >= 0 && + !jQuery(node.parentNode).hasClass(className) && + !jQuery(node.parentNode).hasClass("nohighlight")) { + var span; + var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); + if (isInSVG) { + span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); + } else { + span = document.createElement("span"); + span.className = className; + } + span.appendChild(document.createTextNode(val.substr(pos, text.length))); + node.parentNode.insertBefore(span, node.parentNode.insertBefore( + document.createTextNode(val.substr(pos + text.length)), + node.nextSibling)); + node.nodeValue = val.substr(0, pos); + if (isInSVG) { + var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); + var bbox = node.parentElement.getBBox(); + rect.x.baseVal.value = bbox.x; + rect.y.baseVal.value = bbox.y; + rect.width.baseVal.value = bbox.width; + rect.height.baseVal.value = bbox.height; + rect.setAttribute('class', className); + addItems.push({ + "parent": node.parentNode, + "target": rect}); + } + } + } + else if (!jQuery(node).is("button, select, textarea")) { + jQuery.each(node.childNodes, function() { + highlight(this, addItems); + }); + } + } + var addItems = []; + var result = this.each(function() { + highlight(this, addItems); + }); + for (var i = 0; i < addItems.length; ++i) { + jQuery(addItems[i].parent).before(addItems[i].target); + } + return result; +}; + +/* + * backward compatibility for jQuery.browser + * This will be supported until firefox bug is fixed. 
+ */ +if (!jQuery.browser) { + jQuery.uaMatch = function(ua) { + ua = ua.toLowerCase(); + + var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || + /(webkit)[ \/]([\w.]+)/.exec(ua) || + /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || + /(msie) ([\w.]+)/.exec(ua) || + ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || + []; + + return { + browser: match[ 1 ] || "", + version: match[ 2 ] || "0" + }; + }; + jQuery.browser = {}; + jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; +} diff --git a/docs/_static/alabaster.css b/docs/_static/alabaster.css deleted file mode 100644 index e3174bf9..00000000 --- a/docs/_static/alabaster.css +++ /dev/null @@ -1,708 +0,0 @@ -@import url("basic.css"); - -/* -- page layout ----------------------------------------------------------- */ - -body { - font-family: Georgia, serif; - font-size: 17px; - background-color: #fff; - color: #000; - margin: 0; - padding: 0; -} - - -div.document { - width: 940px; - margin: 30px auto 0 auto; -} - -div.documentwrapper { - float: left; - width: 100%; -} - -div.bodywrapper { - margin: 0 0 0 220px; -} - -div.sphinxsidebar { - width: 220px; - font-size: 14px; - line-height: 1.5; -} - -hr { - border: 1px solid #B1B4B6; -} - -div.body { - background-color: #fff; - color: #3E4349; - padding: 0 30px 0 30px; -} - -div.body > .section { - text-align: left; -} - -div.footer { - width: 940px; - margin: 20px auto 30px auto; - font-size: 14px; - color: #888; - text-align: right; -} - -div.footer a { - color: #888; -} - -p.caption { - font-family: inherit; - font-size: inherit; -} - - -div.relations { - display: none; -} - - -div.sphinxsidebar { - max-height: 100%; - overflow-y: auto; -} - -div.sphinxsidebar a { - color: #444; - text-decoration: none; - border-bottom: 1px dotted #999; -} - -div.sphinxsidebar a:hover { - border-bottom: 1px solid #999; -} - -div.sphinxsidebarwrapper { - padding: 18px 10px; -} - -div.sphinxsidebarwrapper p.logo { - padding: 0; - margin: -10px 0 0 0px; - text-align: center; -} - -div.sphinxsidebarwrapper h1.logo { - margin-top: -10px; - text-align: center; - margin-bottom: 5px; - text-align: left; -} - -div.sphinxsidebarwrapper h1.logo-name { - margin-top: 0px; -} - -div.sphinxsidebarwrapper p.blurb { - margin-top: 0; - font-style: normal; -} - -div.sphinxsidebar h3, -div.sphinxsidebar h4 { - font-family: Georgia, serif; - color: #444; - font-size: 24px; - font-weight: normal; - margin: 0 0 5px 0; - padding: 0; -} - -div.sphinxsidebar h4 { - font-size: 20px; -} - -div.sphinxsidebar h3 a { - color: #444; -} - -div.sphinxsidebar p.logo a, -div.sphinxsidebar h3 a, -div.sphinxsidebar p.logo a:hover, -div.sphinxsidebar h3 a:hover { - border: none; -} - -div.sphinxsidebar p { - color: #555; - margin: 10px 0; -} - -div.sphinxsidebar ul { - margin: 10px 0; - padding: 0; - color: #000; -} - -div.sphinxsidebar ul li.toctree-l1 > a { - font-size: 120%; -} - -div.sphinxsidebar ul li.toctree-l2 > a { - font-size: 110%; -} - -div.sphinxsidebar input { - border: 1px solid #CCC; - font-family: Georgia, serif; - font-size: 1em; -} - -div.sphinxsidebar #searchbox input[type="text"] { - width: 160px; -} - -div.sphinxsidebar .search > div { - display: table-cell; -} - -div.sphinxsidebar hr { - border: none; - height: 1px; - color: #AAA; - background: #AAA; - - text-align: left; - margin-left: 0; - width: 50%; -} - -div.sphinxsidebar .badge { - border-bottom: none; -} - -div.sphinxsidebar .badge:hover { - border-bottom: none; -} - -/* To address an issue with donation coming after search */ 
-div.sphinxsidebar h3.donation { - margin-top: 10px; -} - -/* -- body styles ----------------------------------------------------------- */ - -a { - color: #004B6B; - text-decoration: underline; -} - -a:hover { - color: #6D4100; - text-decoration: underline; -} - -div.body h1, -div.body h2, -div.body h3, -div.body h4, -div.body h5, -div.body h6 { - font-family: Georgia, serif; - font-weight: normal; - margin: 30px 0px 10px 0px; - padding: 0; -} - -div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; } -div.body h2 { font-size: 180%; } -div.body h3 { font-size: 150%; } -div.body h4 { font-size: 130%; } -div.body h5 { font-size: 100%; } -div.body h6 { font-size: 100%; } - -a.headerlink { - color: #DDD; - padding: 0 4px; - text-decoration: none; -} - -a.headerlink:hover { - color: #444; - background: #EAEAEA; -} - -div.body p, div.body dd, div.body li { - line-height: 1.4em; -} - -div.admonition { - margin: 20px 0px; - padding: 10px 30px; - background-color: #EEE; - border: 1px solid #CCC; -} - -div.admonition tt.xref, div.admonition code.xref, div.admonition a tt { - background-color: #FBFBFB; - border-bottom: 1px solid #fafafa; -} - -div.admonition p.admonition-title { - font-family: Georgia, serif; - font-weight: normal; - font-size: 24px; - margin: 0 0 10px 0; - padding: 0; - line-height: 1; -} - -div.admonition p.last { - margin-bottom: 0; -} - -div.highlight { - background-color: #fff; -} - -dt:target, .highlight { - background: #FAF3E8; -} - -div.warning { - background-color: #FCC; - border: 1px solid #FAA; -} - -div.danger { - background-color: #FCC; - border: 1px solid #FAA; - -moz-box-shadow: 2px 2px 4px #D52C2C; - -webkit-box-shadow: 2px 2px 4px #D52C2C; - box-shadow: 2px 2px 4px #D52C2C; -} - -div.error { - background-color: #FCC; - border: 1px solid #FAA; - -moz-box-shadow: 2px 2px 4px #D52C2C; - -webkit-box-shadow: 2px 2px 4px #D52C2C; - box-shadow: 2px 2px 4px #D52C2C; -} - -div.caution { - background-color: #FCC; - border: 1px solid #FAA; -} - -div.attention { - background-color: #FCC; - border: 1px solid #FAA; -} - -div.important { - background-color: #EEE; - border: 1px solid #CCC; -} - -div.note { - background-color: #EEE; - border: 1px solid #CCC; -} - -div.tip { - background-color: #EEE; - border: 1px solid #CCC; -} - -div.hint { - background-color: #EEE; - border: 1px solid #CCC; -} - -div.seealso { - background-color: #EEE; - border: 1px solid #CCC; -} - -div.topic { - background-color: #EEE; -} - -p.admonition-title { - display: inline; -} - -p.admonition-title:after { - content: ":"; -} - -pre, tt, code { - font-family: 'Consolas', 'Menlo', 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', monospace; - font-size: 0.9em; -} - -.hll { - background-color: #FFC; - margin: 0 -12px; - padding: 0 12px; - display: block; -} - -img.screenshot { -} - -tt.descname, tt.descclassname, code.descname, code.descclassname { - font-size: 0.95em; -} - -tt.descname, code.descname { - padding-right: 0.08em; -} - -img.screenshot { - -moz-box-shadow: 2px 2px 4px #EEE; - -webkit-box-shadow: 2px 2px 4px #EEE; - box-shadow: 2px 2px 4px #EEE; -} - -table.docutils { - border: 1px solid #888; - -moz-box-shadow: 2px 2px 4px #EEE; - -webkit-box-shadow: 2px 2px 4px #EEE; - box-shadow: 2px 2px 4px #EEE; -} - -table.docutils td, table.docutils th { - border: 1px solid #888; - padding: 0.25em 0.7em; -} - -table.field-list, table.footnote { - border: none; - -moz-box-shadow: none; - -webkit-box-shadow: none; - box-shadow: none; -} - -table.footnote { - margin: 15px 0; - width: 100%; - border: 1px 
solid #EEE; - background: #FDFDFD; - font-size: 0.9em; -} - -table.footnote + table.footnote { - margin-top: -15px; - border-top: none; -} - -table.field-list th { - padding: 0 0.8em 0 0; -} - -table.field-list td { - padding: 0; -} - -table.field-list p { - margin-bottom: 0.8em; -} - -/* Cloned from - * https://github.com/sphinx-doc/sphinx/commit/ef60dbfce09286b20b7385333d63a60321784e68 - */ -.field-name { - -moz-hyphens: manual; - -ms-hyphens: manual; - -webkit-hyphens: manual; - hyphens: manual; -} - -table.footnote td.label { - width: .1px; - padding: 0.3em 0 0.3em 0.5em; -} - -table.footnote td { - padding: 0.3em 0.5em; -} - -dl { - margin-left: 0; - margin-right: 0; - margin-top: 0; - padding: 0; -} - -dl dd { - margin-left: 30px; -} - -blockquote { - margin: 0 0 0 30px; - padding: 0; -} - -ul, ol { - /* Matches the 30px from the narrow-screen "li > ul" selector below */ - margin: 10px 0 10px 30px; - padding: 0; -} - -pre { - background: #EEE; - padding: 7px 30px; - margin: 15px 0px; - line-height: 1.3em; -} - -div.viewcode-block:target { - background: #ffd; -} - -dl pre, blockquote pre, li pre { - margin-left: 0; - padding-left: 30px; -} - -tt, code { - background-color: #ecf0f3; - color: #222; - /* padding: 1px 2px; */ -} - -tt.xref, code.xref, a tt { - background-color: #FBFBFB; - border-bottom: 1px solid #fff; -} - -a.reference { - text-decoration: none; - border-bottom: 1px dotted #004B6B; -} - -/* Don't put an underline on images */ -a.image-reference, a.image-reference:hover { - border-bottom: none; -} - -a.reference:hover { - border-bottom: 1px solid #6D4100; -} - -a.footnote-reference { - text-decoration: none; - font-size: 0.7em; - vertical-align: top; - border-bottom: 1px dotted #004B6B; -} - -a.footnote-reference:hover { - border-bottom: 1px solid #6D4100; -} - -a:hover tt, a:hover code { - background: #EEE; -} - - -@media screen and (max-width: 870px) { - - div.sphinxsidebar { - display: none; - } - - div.document { - width: 100%; - - } - - div.documentwrapper { - margin-left: 0; - margin-top: 0; - margin-right: 0; - margin-bottom: 0; - } - - div.bodywrapper { - margin-top: 0; - margin-right: 0; - margin-bottom: 0; - margin-left: 0; - } - - ul { - margin-left: 0; - } - - li > ul { - /* Matches the 30px from the "ul, ol" selector above */ - margin-left: 30px; - } - - .document { - width: auto; - } - - .footer { - width: auto; - } - - .bodywrapper { - margin: 0; - } - - .footer { - width: auto; - } - - .github { - display: none; - } - - - -} - - - -@media screen and (max-width: 875px) { - - body { - margin: 0; - padding: 20px 30px; - } - - div.documentwrapper { - float: none; - background: #fff; - } - - div.sphinxsidebar { - display: block; - float: none; - width: 102.5%; - margin: 50px -30px -20px -30px; - padding: 10px 20px; - background: #333; - color: #FFF; - } - - div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p, - div.sphinxsidebar h3 a { - color: #fff; - } - - div.sphinxsidebar a { - color: #AAA; - } - - div.sphinxsidebar p.logo { - display: none; - } - - div.document { - width: 100%; - margin: 0; - } - - div.footer { - display: none; - } - - div.bodywrapper { - margin: 0; - } - - div.body { - min-height: 0; - padding: 0; - } - - .rtd_doc_footer { - display: none; - } - - .document { - width: auto; - } - - .footer { - width: auto; - } - - .footer { - width: auto; - } - - .github { - display: none; - } -} - - -/* misc. 
*/ - -.revsys-inline { - display: none!important; -} - -/* Hide ugly table cell borders in ..bibliography:: directive output */ -table.docutils.citation, table.docutils.citation td, table.docutils.citation th { - border: none; - /* Below needed in some edge cases; if not applied, bottom shadows appear */ - -moz-box-shadow: none; - -webkit-box-shadow: none; - box-shadow: none; -} - - -/* relbar */ - -.related { - line-height: 30px; - width: 100%; - font-size: 0.9rem; -} - -.related.top { - border-bottom: 1px solid #EEE; - margin-bottom: 20px; -} - -.related.bottom { - border-top: 1px solid #EEE; -} - -.related ul { - padding: 0; - margin: 0; - list-style: none; -} - -.related li { - display: inline; -} - -nav#rellinks { - float: right; -} - -nav#rellinks li+li:before { - content: "|"; -} - -nav#breadcrumbs li+li:before { - content: "\00BB"; -} - -/* Hide certain items when printing */ -@media print { - div.related { - display: none; - } -} \ No newline at end of file diff --git a/docs/_static/basic.css b/docs/_static/basic.css index e5179b7a..f316efcb 100644 --- a/docs/_static/basic.css +++ b/docs/_static/basic.css @@ -222,7 +222,7 @@ table.modindextable td { /* -- general body styles --------------------------------------------------- */ div.body { - min-width: inherit; + min-width: 360px; max-width: 800px; } diff --git a/docs/_static/binder_badge_logo.svg b/docs/_static/binder_badge_logo.svg new file mode 100644 index 00000000..327f6b63 --- /dev/null +++ b/docs/_static/binder_badge_logo.svg @@ -0,0 +1 @@ + launchlaunchbinderbinder \ No newline at end of file diff --git a/docs/_static/broken_example.png b/docs/_static/broken_example.png new file mode 100644 index 00000000..4fea24e7 Binary files /dev/null and b/docs/_static/broken_example.png differ diff --git a/docs/_static/css/badge_only.css b/docs/_static/css/badge_only.css new file mode 100644 index 00000000..c718cee4 --- /dev/null +++ b/docs/_static/css/badge_only.css @@ -0,0 +1 @@ +.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-style:normal;font-weight:400;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#FontAwesome) format("svg")}.fa:before{font-family:FontAwesome;font-style:normal;font-weight:400;line-height:1}.fa:before,a .fa{text-decoration:inherit}.fa:before,a .fa,li .fa{display:inline-block}li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before,.icon-book:before{content:"\f02d"}.fa-caret-down:before,.icon-caret-down:before{content:"\f0d7"}.fa-caret-up:before,.icon-caret-up:before{content:"\f0d8"}.fa-caret-left:before,.icon-caret-left:before{content:"\f0d9"}.fa-caret-right:before,.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:Lato,proxima-nova,Helvetica Neue,Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions 
.rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60}.rst-versions .rst-current-version:after{clear:both;content:"";display:block}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book,.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#f1c40f;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:grey;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:1px solid #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .fa-book,.rst-versions.rst-badge .icon-book{float:none;line-height:30px}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book,.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge>.rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width:768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} \ No newline at end of file diff --git a/docs/_static/css/fonts/Roboto-Slab-Bold.woff b/docs/_static/css/fonts/Roboto-Slab-Bold.woff new file mode 100644 index 00000000..6cb60000 Binary files /dev/null and b/docs/_static/css/fonts/Roboto-Slab-Bold.woff differ diff --git a/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 b/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 new file mode 100644 index 00000000..7059e231 Binary files /dev/null and b/docs/_static/css/fonts/Roboto-Slab-Bold.woff2 differ diff --git a/docs/_static/css/fonts/Roboto-Slab-Regular.woff b/docs/_static/css/fonts/Roboto-Slab-Regular.woff new file mode 100644 index 00000000..f815f63f Binary files /dev/null and b/docs/_static/css/fonts/Roboto-Slab-Regular.woff differ diff --git a/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 b/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 new file mode 100644 index 00000000..f2c76e5b Binary files /dev/null and b/docs/_static/css/fonts/Roboto-Slab-Regular.woff2 differ diff --git a/docs/_static/css/fonts/fontawesome-webfont.eot b/docs/_static/css/fonts/fontawesome-webfont.eot new file mode 100644 index 00000000..e9f60ca9 Binary files /dev/null and b/docs/_static/css/fonts/fontawesome-webfont.eot differ diff --git a/docs/_static/css/fonts/fontawesome-webfont.svg b/docs/_static/css/fonts/fontawesome-webfont.svg new file mode 100644 index 00000000..855c845e --- /dev/null +++ b/docs/_static/css/fonts/fontawesome-webfont.svg @@ -0,0 +1,2671 @@ + + + + +Created by FontForge 20120731 at Mon Oct 24 17:37:40 2016 + By ,,, +Copyright Dave Gandy 2016. All rights reserved. 
diff --git a/docs/_static/css/fonts/fontawesome-webfont.ttf b/docs/_static/css/fonts/fontawesome-webfont.ttf new file mode 100644 index 00000000..35acda2f Binary files /dev/null and b/docs/_static/css/fonts/fontawesome-webfont.ttf differ diff --git a/docs/_static/css/fonts/fontawesome-webfont.woff b/docs/_static/css/fonts/fontawesome-webfont.woff new file mode 100644 index 00000000..400014a4 Binary files /dev/null and b/docs/_static/css/fonts/fontawesome-webfont.woff differ diff --git a/docs/_static/css/fonts/fontawesome-webfont.woff2 b/docs/_static/css/fonts/fontawesome-webfont.woff2 new file mode 100644 index 00000000..4d13fc60 Binary files /dev/null and b/docs/_static/css/fonts/fontawesome-webfont.woff2 differ diff --git a/docs/_static/css/fonts/lato-bold-italic.woff b/docs/_static/css/fonts/lato-bold-italic.woff new file mode 100644 index 00000000..88ad05b9 Binary files /dev/null and b/docs/_static/css/fonts/lato-bold-italic.woff differ diff --git a/docs/_static/css/fonts/lato-bold-italic.woff2 b/docs/_static/css/fonts/lato-bold-italic.woff2 new file mode 100644 index 00000000..c4e3d804 Binary files /dev/null and b/docs/_static/css/fonts/lato-bold-italic.woff2 differ diff --git a/docs/_static/css/fonts/lato-bold.woff b/docs/_static/css/fonts/lato-bold.woff new file mode 100644 index 00000000..c6dff51f Binary files /dev/null and b/docs/_static/css/fonts/lato-bold.woff differ diff --git a/docs/_static/css/fonts/lato-bold.woff2 b/docs/_static/css/fonts/lato-bold.woff2 new file mode 100644 index 00000000..bb195043 Binary files /dev/null and b/docs/_static/css/fonts/lato-bold.woff2 differ diff --git a/docs/_static/css/fonts/lato-normal-italic.woff b/docs/_static/css/fonts/lato-normal-italic.woff new file mode 100644 index 00000000..76114bc0 Binary files /dev/null and b/docs/_static/css/fonts/lato-normal-italic.woff differ diff --git a/docs/_static/css/fonts/lato-normal-italic.woff2 b/docs/_static/css/fonts/lato-normal-italic.woff2 new file mode 100644 index 00000000..3404f37e Binary files /dev/null and b/docs/_static/css/fonts/lato-normal-italic.woff2 differ diff --git
a/docs/_static/css/fonts/lato-normal.woff b/docs/_static/css/fonts/lato-normal.woff new file mode 100644 index 00000000..ae1307ff Binary files /dev/null and b/docs/_static/css/fonts/lato-normal.woff differ diff --git a/docs/_static/css/fonts/lato-normal.woff2 b/docs/_static/css/fonts/lato-normal.woff2 new file mode 100644 index 00000000..3bf98433 Binary files /dev/null and b/docs/_static/css/fonts/lato-normal.woff2 differ diff --git a/docs/_static/css/theme.css b/docs/_static/css/theme.css new file mode 100644 index 00000000..19a446a0 --- /dev/null +++ b/docs/_static/css/theme.css @@ -0,0 +1,4 @@ +html{box-sizing:border-box}*,:after,:before{box-sizing:inherit}article,aside,details,figcaption,figure,footer,header,hgroup,nav,section{display:block}audio,canvas,video{display:inline-block;*display:inline;*zoom:1}[hidden],audio:not([controls]){display:none}*{-webkit-box-sizing:border-box;-moz-box-sizing:border-box;box-sizing:border-box}html{font-size:100%;-webkit-text-size-adjust:100%;-ms-text-size-adjust:100%}body{margin:0}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:700}blockquote{margin:0}dfn{font-style:italic}ins{background:#ff9;text-decoration:none}ins,mark{color:#000}mark{background:#ff0;font-style:italic;font-weight:700}.rst-content code,.rst-content tt,code,kbd,pre,samp{font-family:monospace,serif;_font-family:courier new,monospace;font-size:1em}pre{white-space:pre}q{quotes:none}q:after,q:before{content:"";content:none}small{font-size:85%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-.5em}sub{bottom:-.25em}dl,ol,ul{margin:0;padding:0;list-style:none;list-style-image:none}li{list-style:none}dd{margin:0}img{border:0;-ms-interpolation-mode:bicubic;vertical-align:middle;max-width:100%}svg:not(:root){overflow:hidden}figure,form{margin:0}label{cursor:pointer}button,input,select,textarea{font-size:100%;margin:0;vertical-align:baseline;*vertical-align:middle}button,input{line-height:normal}button,input[type=button],input[type=reset],input[type=submit]{cursor:pointer;-webkit-appearance:button;*overflow:visible}button[disabled],input[disabled]{cursor:default}input[type=search]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}textarea{resize:vertical}table{border-collapse:collapse;border-spacing:0}td{vertical-align:top}.chromeframe{margin:.2em 0;background:#ccc;color:#000;padding:.2em 0}.ir{display:block;border:0;text-indent:-999em;overflow:hidden;background-color:transparent;background-repeat:no-repeat;text-align:left;direction:ltr;*line-height:0}.ir br{display:none}.hidden{display:none!important;visibility:hidden}.visuallyhidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.visuallyhidden.focusable:active,.visuallyhidden.focusable:focus{clip:auto;height:auto;margin:0;overflow:visible;position:static;width:auto}.invisible{visibility:hidden}.relative{position:relative}big,small{font-size:100%}@media print{body,html,section{background:none!important}*{box-shadow:none!important;text-shadow:none!important;filter:none!important;-ms-filter:none!important}a,a:visited{text-decoration:underline}.ir a:after,a[href^="#"]:after,a[href^="javascript:"]:after{content:""}blockquote,pre{page-break-inside:avoid}thead{display:table-header-group}img,tr{page-break-inside:avoid}img{max-width:100%!important}@page{margin:.5cm}.rst-content .toctree-wrapper>p.caption,h2,h3,p{orphans:3;widows:3}.rst-content 
.toctree-wrapper>p.caption,h2,h3{page-break-after:avoid}}.btn,.fa:before,.icon:before,.rst-content .admonition,.rst-content .admonition-title:before,.rst-content .admonition-todo,.rst-content .attention,.rst-content .caution,.rst-content .code-block-caption .headerlink:before,.rst-content .danger,.rst-content .eqno .headerlink:before,.rst-content .error,.rst-content .hint,.rst-content .important,.rst-content .note,.rst-content .seealso,.rst-content .tip,.rst-content .warning,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-alert,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li button.toctree-expand:before,input[type=color],input[type=date],input[type=datetime-local],input[type=datetime],input[type=email],input[type=month],input[type=number],input[type=password],input[type=search],input[type=tel],input[type=text],input[type=time],input[type=url],input[type=week],select,textarea{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:after,.clearfix:before{display:table;content:""}.clearfix:after{clear:both}/*! 
+ * Font Awesome 4.7.0 by @davegandy - http://fontawesome.io - @fontawesome + * License - http://fontawesome.io/license (Font: SIL OFL 1.1, CSS: MIT License) + */@font-face{font-family:FontAwesome;src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713);src:url(fonts/fontawesome-webfont.eot?674f50d287a8c48dc19ba404d20fe713?#iefix&v=4.7.0) format("embedded-opentype"),url(fonts/fontawesome-webfont.woff2?af7ae505a9eed503f8b8e6982036873e) format("woff2"),url(fonts/fontawesome-webfont.woff?fee66e712a8a08eef5805a46892932ad) format("woff"),url(fonts/fontawesome-webfont.ttf?b06871f281fee6b241d60582ae9369b9) format("truetype"),url(fonts/fontawesome-webfont.svg?912ec66d7572ff821749319396470bde#fontawesomeregular) format("svg");font-weight:400;font-style:normal}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{display:inline-block;font:normal normal normal 14px/1 FontAwesome;font-size:inherit;text-rendering:auto;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale}.fa-lg{font-size:1.33333em;line-height:.75em;vertical-align:-15%}.fa-2x{font-size:2em}.fa-3x{font-size:3em}.fa-4x{font-size:4em}.fa-5x{font-size:5em}.fa-fw{width:1.28571em;text-align:center}.fa-ul{padding-left:0;margin-left:2.14286em;list-style-type:none}.fa-ul>li{position:relative}.fa-li{position:absolute;left:-2.14286em;width:2.14286em;top:.14286em;text-align:center}.fa-li.fa-lg{left:-1.85714em}.fa-border{padding:.2em .25em .15em;border:.08em solid #eee;border-radius:.1em}.fa-pull-left{float:left}.fa-pull-right{float:right}.fa-pull-left.icon,.fa.fa-pull-left,.rst-content .code-block-caption .fa-pull-left.headerlink,.rst-content .eqno .fa-pull-left.headerlink,.rst-content .fa-pull-left.admonition-title,.rst-content code.download span.fa-pull-left:first-child,.rst-content dl dt .fa-pull-left.headerlink,.rst-content h1 .fa-pull-left.headerlink,.rst-content h2 .fa-pull-left.headerlink,.rst-content h3 .fa-pull-left.headerlink,.rst-content h4 .fa-pull-left.headerlink,.rst-content h5 .fa-pull-left.headerlink,.rst-content h6 .fa-pull-left.headerlink,.rst-content p .fa-pull-left.headerlink,.rst-content table>caption .fa-pull-left.headerlink,.rst-content tt.download span.fa-pull-left:first-child,.wy-menu-vertical li.current>a button.fa-pull-left.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-left.toctree-expand,.wy-menu-vertical li button.fa-pull-left.toctree-expand{margin-right:.3em}.fa-pull-right.icon,.fa.fa-pull-right,.rst-content .code-block-caption .fa-pull-right.headerlink,.rst-content .eqno .fa-pull-right.headerlink,.rst-content .fa-pull-right.admonition-title,.rst-content code.download span.fa-pull-right:first-child,.rst-content dl dt .fa-pull-right.headerlink,.rst-content h1 .fa-pull-right.headerlink,.rst-content h2 .fa-pull-right.headerlink,.rst-content h3 .fa-pull-right.headerlink,.rst-content h4 .fa-pull-right.headerlink,.rst-content h5 .fa-pull-right.headerlink,.rst-content h6 
.fa-pull-right.headerlink,.rst-content p .fa-pull-right.headerlink,.rst-content table>caption .fa-pull-right.headerlink,.rst-content tt.download span.fa-pull-right:first-child,.wy-menu-vertical li.current>a button.fa-pull-right.toctree-expand,.wy-menu-vertical li.on a button.fa-pull-right.toctree-expand,.wy-menu-vertical li button.fa-pull-right.toctree-expand{margin-left:.3em}.pull-right{float:right}.pull-left{float:left}.fa.pull-left,.pull-left.icon,.rst-content .code-block-caption .pull-left.headerlink,.rst-content .eqno .pull-left.headerlink,.rst-content .pull-left.admonition-title,.rst-content code.download span.pull-left:first-child,.rst-content dl dt .pull-left.headerlink,.rst-content h1 .pull-left.headerlink,.rst-content h2 .pull-left.headerlink,.rst-content h3 .pull-left.headerlink,.rst-content h4 .pull-left.headerlink,.rst-content h5 .pull-left.headerlink,.rst-content h6 .pull-left.headerlink,.rst-content p .pull-left.headerlink,.rst-content table>caption .pull-left.headerlink,.rst-content tt.download span.pull-left:first-child,.wy-menu-vertical li.current>a button.pull-left.toctree-expand,.wy-menu-vertical li.on a button.pull-left.toctree-expand,.wy-menu-vertical li button.pull-left.toctree-expand{margin-right:.3em}.fa.pull-right,.pull-right.icon,.rst-content .code-block-caption .pull-right.headerlink,.rst-content .eqno .pull-right.headerlink,.rst-content .pull-right.admonition-title,.rst-content code.download span.pull-right:first-child,.rst-content dl dt .pull-right.headerlink,.rst-content h1 .pull-right.headerlink,.rst-content h2 .pull-right.headerlink,.rst-content h3 .pull-right.headerlink,.rst-content h4 .pull-right.headerlink,.rst-content h5 .pull-right.headerlink,.rst-content h6 .pull-right.headerlink,.rst-content p .pull-right.headerlink,.rst-content table>caption .pull-right.headerlink,.rst-content tt.download span.pull-right:first-child,.wy-menu-vertical li.current>a button.pull-right.toctree-expand,.wy-menu-vertical li.on a button.pull-right.toctree-expand,.wy-menu-vertical li button.pull-right.toctree-expand{margin-left:.3em}.fa-spin{-webkit-animation:fa-spin 2s linear infinite;animation:fa-spin 2s linear infinite}.fa-pulse{-webkit-animation:fa-spin 1s steps(8) infinite;animation:fa-spin 1s steps(8) infinite}@-webkit-keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}@keyframes fa-spin{0%{-webkit-transform:rotate(0deg);transform:rotate(0deg)}to{-webkit-transform:rotate(359deg);transform:rotate(359deg)}}.fa-rotate-90{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=1)";-webkit-transform:rotate(90deg);-ms-transform:rotate(90deg);transform:rotate(90deg)}.fa-rotate-180{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2)";-webkit-transform:rotate(180deg);-ms-transform:rotate(180deg);transform:rotate(180deg)}.fa-rotate-270{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=3)";-webkit-transform:rotate(270deg);-ms-transform:rotate(270deg);transform:rotate(270deg)}.fa-flip-horizontal{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=0, mirror=1)";-webkit-transform:scaleX(-1);-ms-transform:scaleX(-1);transform:scaleX(-1)}.fa-flip-vertical{-ms-filter:"progid:DXImageTransform.Microsoft.BasicImage(rotation=2, mirror=1)";-webkit-transform:scaleY(-1);-ms-transform:scaleY(-1);transform:scaleY(-1)}:root .fa-flip-horizontal,:root .fa-flip-vertical,:root .fa-rotate-90,:root .fa-rotate-180,:root 
.fa-rotate-270{filter:none}.fa-stack{position:relative;display:inline-block;width:2em;height:2em;line-height:2em;vertical-align:middle}.fa-stack-1x,.fa-stack-2x{position:absolute;left:0;width:100%;text-align:center}.fa-stack-1x{line-height:inherit}.fa-stack-2x{font-size:2em}.fa-inverse{color:#fff}.fa-glass:before{content:""}.fa-music:before{content:""}.fa-search:before,.icon-search:before{content:""}.fa-envelope-o:before{content:""}.fa-heart:before{content:""}.fa-star:before{content:""}.fa-star-o:before{content:""}.fa-user:before{content:""}.fa-film:before{content:""}.fa-th-large:before{content:""}.fa-th:before{content:""}.fa-th-list:before{content:""}.fa-check:before{content:""}.fa-close:before,.fa-remove:before,.fa-times:before{content:""}.fa-search-plus:before{content:""}.fa-search-minus:before{content:""}.fa-power-off:before{content:""}.fa-signal:before{content:""}.fa-cog:before,.fa-gear:before{content:""}.fa-trash-o:before{content:""}.fa-home:before,.icon-home:before{content:""}.fa-file-o:before{content:""}.fa-clock-o:before{content:""}.fa-road:before{content:""}.fa-download:before,.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{content:""}.fa-arrow-circle-o-down:before{content:""}.fa-arrow-circle-o-up:before{content:""}.fa-inbox:before{content:""}.fa-play-circle-o:before{content:""}.fa-repeat:before,.fa-rotate-right:before{content:""}.fa-refresh:before{content:""}.fa-list-alt:before{content:""}.fa-lock:before{content:""}.fa-flag:before{content:""}.fa-headphones:before{content:""}.fa-volume-off:before{content:""}.fa-volume-down:before{content:""}.fa-volume-up:before{content:""}.fa-qrcode:before{content:""}.fa-barcode:before{content:""}.fa-tag:before{content:""}.fa-tags:before{content:""}.fa-book:before,.icon-book:before{content:""}.fa-bookmark:before{content:""}.fa-print:before{content:""}.fa-camera:before{content:""}.fa-font:before{content:""}.fa-bold:before{content:""}.fa-italic:before{content:""}.fa-text-height:before{content:""}.fa-text-width:before{content:""}.fa-align-left:before{content:""}.fa-align-center:before{content:""}.fa-align-right:before{content:""}.fa-align-justify:before{content:""}.fa-list:before{content:""}.fa-dedent:before,.fa-outdent:before{content:""}.fa-indent:before{content:""}.fa-video-camera:before{content:""}.fa-image:before,.fa-photo:before,.fa-picture-o:before{content:""}.fa-pencil:before{content:""}.fa-map-marker:before{content:""}.fa-adjust:before{content:""}.fa-tint:before{content:""}.fa-edit:before,.fa-pencil-square-o:before{content:""}.fa-share-square-o:before{content:""}.fa-check-square-o:before{content:""}.fa-arrows:before{content:""}.fa-step-backward:before{content:""}.fa-fast-backward:before{content:""}.fa-backward:before{content:""}.fa-play:before{content:""}.fa-pause:before{content:""}.fa-stop:before{content:""}.fa-forward:before{content:""}.fa-fast-forward:before{content:""}.fa-step-forward:before{content:""}.fa-eject:before{content:""}.fa-chevron-left:before{content:""}.fa-chevron-right:before{content:""}.fa-plus-circle:before{content:""}.fa-minus-circle:before{content:""}.fa-times-circle:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before{content:""}.fa-check-circle:before,.wy-inline-validate.wy-inline-validate-success 
.wy-input-context:before{content:""}.fa-question-circle:before{content:""}.fa-info-circle:before{content:""}.fa-crosshairs:before{content:""}.fa-times-circle-o:before{content:""}.fa-check-circle-o:before{content:""}.fa-ban:before{content:""}.fa-arrow-left:before{content:""}.fa-arrow-right:before{content:""}.fa-arrow-up:before{content:""}.fa-arrow-down:before{content:""}.fa-mail-forward:before,.fa-share:before{content:""}.fa-expand:before{content:""}.fa-compress:before{content:""}.fa-plus:before{content:""}.fa-minus:before{content:""}.fa-asterisk:before{content:""}.fa-exclamation-circle:before,.rst-content .admonition-title:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before{content:""}.fa-gift:before{content:""}.fa-leaf:before{content:""}.fa-fire:before,.icon-fire:before{content:""}.fa-eye:before{content:""}.fa-eye-slash:before{content:""}.fa-exclamation-triangle:before,.fa-warning:before{content:""}.fa-plane:before{content:""}.fa-calendar:before{content:""}.fa-random:before{content:""}.fa-comment:before{content:""}.fa-magnet:before{content:""}.fa-chevron-up:before{content:""}.fa-chevron-down:before{content:""}.fa-retweet:before{content:""}.fa-shopping-cart:before{content:""}.fa-folder:before{content:""}.fa-folder-open:before{content:""}.fa-arrows-v:before{content:""}.fa-arrows-h:before{content:""}.fa-bar-chart-o:before,.fa-bar-chart:before{content:""}.fa-twitter-square:before{content:""}.fa-facebook-square:before{content:""}.fa-camera-retro:before{content:""}.fa-key:before{content:""}.fa-cogs:before,.fa-gears:before{content:""}.fa-comments:before{content:""}.fa-thumbs-o-up:before{content:""}.fa-thumbs-o-down:before{content:""}.fa-star-half:before{content:""}.fa-heart-o:before{content:""}.fa-sign-out:before{content:""}.fa-linkedin-square:before{content:""}.fa-thumb-tack:before{content:""}.fa-external-link:before{content:""}.fa-sign-in:before{content:""}.fa-trophy:before{content:""}.fa-github-square:before{content:""}.fa-upload:before{content:""}.fa-lemon-o:before{content:""}.fa-phone:before{content:""}.fa-square-o:before{content:""}.fa-bookmark-o:before{content:""}.fa-phone-square:before{content:""}.fa-twitter:before{content:""}.fa-facebook-f:before,.fa-facebook:before{content:""}.fa-github:before,.icon-github:before{content:""}.fa-unlock:before{content:""}.fa-credit-card:before{content:""}.fa-feed:before,.fa-rss:before{content:""}.fa-hdd-o:before{content:""}.fa-bullhorn:before{content:""}.fa-bell:before{content:""}.fa-certificate:before{content:""}.fa-hand-o-right:before{content:""}.fa-hand-o-left:before{content:""}.fa-hand-o-up:before{content:""}.fa-hand-o-down:before{content:""}.fa-arrow-circle-left:before,.icon-circle-arrow-left:before{content:""}.fa-arrow-circle-right:before,.icon-circle-arrow-right:before{content:""}.fa-arrow-circle-up:before{content:""}.fa-arrow-circle-down:before{content:""}.fa-globe:before{content:""}.fa-wrench:before{content:""}.fa-tasks:before{content:""}.fa-filter:before{content:""}.fa-briefcase:before{content:""}.fa-arrows-alt:before{content:""}.fa-group:before,.fa-users:before{content:""}.fa-chain:before,.fa-link:before,.icon-link:before{content:""}.fa-cloud:before{content:""}.fa-flask:before{content:""}.fa-cut:before,.fa-scissors:before{content:""}.fa-copy:before,.fa-files-o:before{content:""}.fa-paperclip:before{content:""}.fa-floppy-o:before,.fa-save:before{content:""}.fa
-square:before{content:""}.fa-bars:before,.fa-navicon:before,.fa-reorder:before{content:""}.fa-list-ul:before{content:""}.fa-list-ol:before{content:""}.fa-strikethrough:before{content:""}.fa-underline:before{content:""}.fa-table:before{content:""}.fa-magic:before{content:""}.fa-truck:before{content:""}.fa-pinterest:before{content:""}.fa-pinterest-square:before{content:""}.fa-google-plus-square:before{content:""}.fa-google-plus:before{content:""}.fa-money:before{content:""}.fa-caret-down:before,.icon-caret-down:before,.wy-dropdown .caret:before{content:""}.fa-caret-up:before{content:""}.fa-caret-left:before{content:""}.fa-caret-right:before{content:""}.fa-columns:before{content:""}.fa-sort:before,.fa-unsorted:before{content:""}.fa-sort-desc:before,.fa-sort-down:before{content:""}.fa-sort-asc:before,.fa-sort-up:before{content:""}.fa-envelope:before{content:""}.fa-linkedin:before{content:""}.fa-rotate-left:before,.fa-undo:before{content:""}.fa-gavel:before,.fa-legal:before{content:""}.fa-dashboard:before,.fa-tachometer:before{content:""}.fa-comment-o:before{content:""}.fa-comments-o:before{content:""}.fa-bolt:before,.fa-flash:before{content:""}.fa-sitemap:before{content:""}.fa-umbrella:before{content:""}.fa-clipboard:before,.fa-paste:before{content:""}.fa-lightbulb-o:before{content:""}.fa-exchange:before{content:""}.fa-cloud-download:before{content:""}.fa-cloud-upload:before{content:""}.fa-user-md:before{content:""}.fa-stethoscope:before{content:""}.fa-suitcase:before{content:""}.fa-bell-o:before{content:""}.fa-coffee:before{content:""}.fa-cutlery:before{content:""}.fa-file-text-o:before{content:""}.fa-building-o:before{content:""}.fa-hospital-o:before{content:""}.fa-ambulance:before{content:""}.fa-medkit:before{content:""}.fa-fighter-jet:before{content:""}.fa-beer:before{content:""}.fa-h-square:before{content:""}.fa-plus-square:before{content:""}.fa-angle-double-left:before{content:""}.fa-angle-double-right:before{content:""}.fa-angle-double-up:before{content:""}.fa-angle-double-down:before{content:""}.fa-angle-left:before{content:""}.fa-angle-right:before{content:""}.fa-angle-up:before{content:""}.fa-angle-down:before{content:""}.fa-desktop:before{content:""}.fa-laptop:before{content:""}.fa-tablet:before{content:""}.fa-mobile-phone:before,.fa-mobile:before{content:""}.fa-circle-o:before{content:""}.fa-quote-left:before{content:""}.fa-quote-right:before{content:""}.fa-spinner:before{content:""}.fa-circle:before{content:""}.fa-mail-reply:before,.fa-reply:before{content:""}.fa-github-alt:before{content:""}.fa-folder-o:before{content:""}.fa-folder-open-o:before{content:""}.fa-smile-o:before{content:""}.fa-frown-o:before{content:""}.fa-meh-o:before{content:""}.fa-gamepad:before{content:""}.fa-keyboard-o:before{content:""}.fa-flag-o:before{content:""}.fa-flag-checkered:before{content:""}.fa-terminal:before{content:""}.fa-code:before{content:""}.fa-mail-reply-all:before,.fa-reply-all:before{content:""}.fa-star-half-empty:before,.fa-star-half-full:before,.fa-star-half-o:before{content:""}.fa-location-arrow:before{content:""}.fa-crop:before{content:""}.fa-code-fork:before{content:""}.fa-chain-broken:before,.fa-unlink:before{content:""}.fa-question:before{content:""}.fa-info:before{content:""}.fa-exclamation:before{content:""}.fa-superscript:before{content:""}.fa-subscript:before{content:""}.fa-eraser:before{content:""}.fa-puzzle-piece:before{content:""}.fa-microphone:before{content:""}.fa-microphone-slash:
before{content:""}.fa-shield:before{content:""}.fa-calendar-o:before{content:""}.fa-fire-extinguisher:before{content:""}.fa-rocket:before{content:""}.fa-maxcdn:before{content:""}.fa-chevron-circle-left:before{content:""}.fa-chevron-circle-right:before{content:""}.fa-chevron-circle-up:before{content:""}.fa-chevron-circle-down:before{content:""}.fa-html5:before{content:""}.fa-css3:before{content:""}.fa-anchor:before{content:""}.fa-unlock-alt:before{content:""}.fa-bullseye:before{content:""}.fa-ellipsis-h:before{content:""}.fa-ellipsis-v:before{content:""}.fa-rss-square:before{content:""}.fa-play-circle:before{content:""}.fa-ticket:before{content:""}.fa-minus-square:before{content:""}.fa-minus-square-o:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before{content:""}.fa-level-up:before{content:""}.fa-level-down:before{content:""}.fa-check-square:before{content:""}.fa-pencil-square:before{content:""}.fa-external-link-square:before{content:""}.fa-share-square:before{content:""}.fa-compass:before{content:""}.fa-caret-square-o-down:before,.fa-toggle-down:before{content:""}.fa-caret-square-o-up:before,.fa-toggle-up:before{content:""}.fa-caret-square-o-right:before,.fa-toggle-right:before{content:""}.fa-eur:before,.fa-euro:before{content:""}.fa-gbp:before{content:""}.fa-dollar:before,.fa-usd:before{content:""}.fa-inr:before,.fa-rupee:before{content:""}.fa-cny:before,.fa-jpy:before,.fa-rmb:before,.fa-yen:before{content:""}.fa-rouble:before,.fa-rub:before,.fa-ruble:before{content:""}.fa-krw:before,.fa-won:before{content:""}.fa-bitcoin:before,.fa-btc:before{content:""}.fa-file:before{content:""}.fa-file-text:before{content:""}.fa-sort-alpha-asc:before{content:""}.fa-sort-alpha-desc:before{content:""}.fa-sort-amount-asc:before{content:""}.fa-sort-amount-desc:before{content:""}.fa-sort-numeric-asc:before{content:""}.fa-sort-numeric-desc:before{content:""}.fa-thumbs-up:before{content:""}.fa-thumbs-down:before{content:""}.fa-youtube-square:before{content:""}.fa-youtube:before{content:""}.fa-xing:before{content:""}.fa-xing-square:before{content:""}.fa-youtube-play:before{content:""}.fa-dropbox:before{content:""}.fa-stack-overflow:before{content:""}.fa-instagram:before{content:""}.fa-flickr:before{content:""}.fa-adn:before{content:""}.fa-bitbucket:before,.icon-bitbucket:before{content:""}.fa-bitbucket-square:before{content:""}.fa-tumblr:before{content:""}.fa-tumblr-square:before{content:""}.fa-long-arrow-down:before{content:""}.fa-long-arrow-up:before{content:""}.fa-long-arrow-left:before{content:""}.fa-long-arrow-right:before{content:""}.fa-apple:before{content:""}.fa-windows:before{content:""}.fa-android:before{content:""}.fa-linux:before{content:""}.fa-dribbble:before{content:""}.fa-skype:before{content:""}.fa-foursquare:before{content:""}.fa-trello:before{content:""}.fa-female:before{content:""}.fa-male:before{content:""}.fa-gittip:before,.fa-gratipay:before{content:""}.fa-sun-o:before{content:""}.fa-moon-o:before{content:""}.fa-archive:before{content:""}.fa-bug:before{content:""}.fa-vk:before{content:""}.fa-weibo:before{content:""}.fa-renren:before{content:""}.fa-pagelines:before{content:""}.fa-stack-exchange:before{content:""}.fa-arrow-circle-o-right:before{content:""}.fa-arrow-circle-o-left:before{content:""}.fa-caret-square-o-left:before,.fa-toggle-left:before{content:""}.fa-dot-circle-o:before{content:""}.fa-wheelchair:before{content:""}.fa-
vimeo-square:before{content:""}.fa-try:before,.fa-turkish-lira:before{content:""}.fa-plus-square-o:before,.wy-menu-vertical li button.toctree-expand:before{content:""}.fa-space-shuttle:before{content:""}.fa-slack:before{content:""}.fa-envelope-square:before{content:""}.fa-wordpress:before{content:""}.fa-openid:before{content:""}.fa-bank:before,.fa-institution:before,.fa-university:before{content:""}.fa-graduation-cap:before,.fa-mortar-board:before{content:""}.fa-yahoo:before{content:""}.fa-google:before{content:""}.fa-reddit:before{content:""}.fa-reddit-square:before{content:""}.fa-stumbleupon-circle:before{content:""}.fa-stumbleupon:before{content:""}.fa-delicious:before{content:""}.fa-digg:before{content:""}.fa-pied-piper-pp:before{content:""}.fa-pied-piper-alt:before{content:""}.fa-drupal:before{content:""}.fa-joomla:before{content:""}.fa-language:before{content:""}.fa-fax:before{content:""}.fa-building:before{content:""}.fa-child:before{content:""}.fa-paw:before{content:""}.fa-spoon:before{content:""}.fa-cube:before{content:""}.fa-cubes:before{content:""}.fa-behance:before{content:""}.fa-behance-square:before{content:""}.fa-steam:before{content:""}.fa-steam-square:before{content:""}.fa-recycle:before{content:""}.fa-automobile:before,.fa-car:before{content:""}.fa-cab:before,.fa-taxi:before{content:""}.fa-tree:before{content:""}.fa-spotify:before{content:""}.fa-deviantart:before{content:""}.fa-soundcloud:before{content:""}.fa-database:before{content:""}.fa-file-pdf-o:before{content:""}.fa-file-word-o:before{content:""}.fa-file-excel-o:before{content:""}.fa-file-powerpoint-o:before{content:""}.fa-file-image-o:before,.fa-file-photo-o:before,.fa-file-picture-o:before{content:""}.fa-file-archive-o:before,.fa-file-zip-o:before{content:""}.fa-file-audio-o:before,.fa-file-sound-o:before{content:""}.fa-file-movie-o:before,.fa-file-video-o:before{content:""}.fa-file-code-o:before{content:""}.fa-vine:before{content:""}.fa-codepen:before{content:""}.fa-jsfiddle:before{content:""}.fa-life-bouy:before,.fa-life-buoy:before,.fa-life-ring:before,.fa-life-saver:before,.fa-support:before{content:""}.fa-circle-o-notch:before{content:""}.fa-ra:before,.fa-rebel:before,.fa-resistance:before{content:""}.fa-empire:before,.fa-ge:before{content:""}.fa-git-square:before{content:""}.fa-git:before{content:""}.fa-hacker-news:before,.fa-y-combinator-square:before,.fa-yc-square:before{content:""}.fa-tencent-weibo:before{content:""}.fa-qq:before{content:""}.fa-wechat:before,.fa-weixin:before{content:""}.fa-paper-plane:before,.fa-send:before{content:""}.fa-paper-plane-o:before,.fa-send-o:before{content:""}.fa-history:before{content:""}.fa-circle-thin:before{content:""}.fa-header:before{content:""}.fa-paragraph:before{content:""}.fa-sliders:before{content:""}.fa-share-alt:before{content:""}.fa-share-alt-square:before{content:""}.fa-bomb:before{content:""}.fa-futbol-o:before,.fa-soccer-ball-o:before{content:""}.fa-tty:before{content:""}.fa-binoculars:before{content:""}.fa-plug:before{content:""}.fa-slideshare:before{content:""}.fa-twitch:before{content:""}.fa-yelp:before{content:""}.fa-newspaper-o:before{content:""}.fa-wifi:before{content:""}.fa-calculator:before{content:""}.fa-paypal:before{content:""}.fa-google-wallet:before{content:""}.fa-cc-visa:before{content:""}.fa-cc-mastercard:before{content:""}.fa-cc-discover:before{content:""}.fa-cc-amex:before{content:""}.fa-cc-paypal:before{content:""}.fa-cc-stripe:before{content:""}.fa-b
ell-slash:before{content:""}.fa-bell-slash-o:before{content:""}.fa-trash:before{content:""}.fa-copyright:before{content:""}.fa-at:before{content:""}.fa-eyedropper:before{content:""}.fa-paint-brush:before{content:""}.fa-birthday-cake:before{content:""}.fa-area-chart:before{content:""}.fa-pie-chart:before{content:""}.fa-line-chart:before{content:""}.fa-lastfm:before{content:""}.fa-lastfm-square:before{content:""}.fa-toggle-off:before{content:""}.fa-toggle-on:before{content:""}.fa-bicycle:before{content:""}.fa-bus:before{content:""}.fa-ioxhost:before{content:""}.fa-angellist:before{content:""}.fa-cc:before{content:""}.fa-ils:before,.fa-shekel:before,.fa-sheqel:before{content:""}.fa-meanpath:before{content:""}.fa-buysellads:before{content:""}.fa-connectdevelop:before{content:""}.fa-dashcube:before{content:""}.fa-forumbee:before{content:""}.fa-leanpub:before{content:""}.fa-sellsy:before{content:""}.fa-shirtsinbulk:before{content:""}.fa-simplybuilt:before{content:""}.fa-skyatlas:before{content:""}.fa-cart-plus:before{content:""}.fa-cart-arrow-down:before{content:""}.fa-diamond:before{content:""}.fa-ship:before{content:""}.fa-user-secret:before{content:""}.fa-motorcycle:before{content:""}.fa-street-view:before{content:""}.fa-heartbeat:before{content:""}.fa-venus:before{content:""}.fa-mars:before{content:""}.fa-mercury:before{content:""}.fa-intersex:before,.fa-transgender:before{content:""}.fa-transgender-alt:before{content:""}.fa-venus-double:before{content:""}.fa-mars-double:before{content:""}.fa-venus-mars:before{content:""}.fa-mars-stroke:before{content:""}.fa-mars-stroke-v:before{content:""}.fa-mars-stroke-h:before{content:""}.fa-neuter:before{content:""}.fa-genderless:before{content:""}.fa-facebook-official:before{content:""}.fa-pinterest-p:before{content:""}.fa-whatsapp:before{content:""}.fa-server:before{content:""}.fa-user-plus:before{content:""}.fa-user-times:before{content:""}.fa-bed:before,.fa-hotel:before{content:""}.fa-viacoin:before{content:""}.fa-train:before{content:""}.fa-subway:before{content:""}.fa-medium:before{content:""}.fa-y-combinator:before,.fa-yc:before{content:""}.fa-optin-monster:before{content:""}.fa-opencart:before{content:""}.fa-expeditedssl:before{content:""}.fa-battery-4:before,.fa-battery-full:before,.fa-battery:before{content:""}.fa-battery-3:before,.fa-battery-three-quarters:before{content:""}.fa-battery-2:before,.fa-battery-half:before{content:""}.fa-battery-1:before,.fa-battery-quarter:before{content:""}.fa-battery-0:before,.fa-battery-empty:before{content:""}.fa-mouse-pointer:before{content:""}.fa-i-cursor:before{content:""}.fa-object-group:before{content:""}.fa-object-ungroup:before{content:""}.fa-sticky-note:before{content:""}.fa-sticky-note-o:before{content:""}.fa-cc-jcb:before{content:""}.fa-cc-diners-club:before{content:""}.fa-clone:before{content:""}.fa-balance-scale:before{content:""}.fa-hourglass-o:before{content:""}.fa-hourglass-1:before,.fa-hourglass-start:before{content:""}.fa-hourglass-2:before,.fa-hourglass-half:before{content:""}.fa-hourglass-3:before,.fa-hourglass-end:before{content:""}.fa-hourglass:before{content:""}.fa-hand-grab-o:before,.fa-hand-rock-o:before{content:""}.fa-hand-paper-o:before,.fa-hand-stop-o:before{content:""}.fa-hand-scissors-o:before{content:""}.fa-hand-lizard-o:before{content:""}.fa-hand-spock-o:before{content:""}.fa-hand-pointer-o:before{content:""}.fa-hand-peace-o:before{content:""}.fa-trademark:before{content:""}.fa-register
ed:before{content:""}.fa-creative-commons:before{content:""}.fa-gg:before{content:""}.fa-gg-circle:before{content:""}.fa-tripadvisor:before{content:""}.fa-odnoklassniki:before{content:""}.fa-odnoklassniki-square:before{content:""}.fa-get-pocket:before{content:""}.fa-wikipedia-w:before{content:""}.fa-safari:before{content:""}.fa-chrome:before{content:""}.fa-firefox:before{content:""}.fa-opera:before{content:""}.fa-internet-explorer:before{content:""}.fa-television:before,.fa-tv:before{content:""}.fa-contao:before{content:""}.fa-500px:before{content:""}.fa-amazon:before{content:""}.fa-calendar-plus-o:before{content:""}.fa-calendar-minus-o:before{content:""}.fa-calendar-times-o:before{content:""}.fa-calendar-check-o:before{content:""}.fa-industry:before{content:""}.fa-map-pin:before{content:""}.fa-map-signs:before{content:""}.fa-map-o:before{content:""}.fa-map:before{content:""}.fa-commenting:before{content:""}.fa-commenting-o:before{content:""}.fa-houzz:before{content:""}.fa-vimeo:before{content:""}.fa-black-tie:before{content:""}.fa-fonticons:before{content:""}.fa-reddit-alien:before{content:""}.fa-edge:before{content:""}.fa-credit-card-alt:before{content:""}.fa-codiepie:before{content:""}.fa-modx:before{content:""}.fa-fort-awesome:before{content:""}.fa-usb:before{content:""}.fa-product-hunt:before{content:""}.fa-mixcloud:before{content:""}.fa-scribd:before{content:""}.fa-pause-circle:before{content:""}.fa-pause-circle-o:before{content:""}.fa-stop-circle:before{content:""}.fa-stop-circle-o:before{content:""}.fa-shopping-bag:before{content:""}.fa-shopping-basket:before{content:""}.fa-hashtag:before{content:""}.fa-bluetooth:before{content:""}.fa-bluetooth-b:before{content:""}.fa-percent:before{content:""}.fa-gitlab:before,.icon-gitlab:before{content:""}.fa-wpbeginner:before{content:""}.fa-wpforms:before{content:""}.fa-envira:before{content:""}.fa-universal-access:before{content:""}.fa-wheelchair-alt:before{content:""}.fa-question-circle-o:before{content:""}.fa-blind:before{content:""}.fa-audio-description:before{content:""}.fa-volume-control-phone:before{content:""}.fa-braille:before{content:""}.fa-assistive-listening-systems:before{content:""}.fa-american-sign-language-interpreting:before,.fa-asl-interpreting:before{content:""}.fa-deaf:before,.fa-deafness:before,.fa-hard-of-hearing:before{content:""}.fa-glide:before{content:""}.fa-glide-g:before{content:""}.fa-sign-language:before,.fa-signing:before{content:""}.fa-low-vision:before{content:""}.fa-viadeo:before{content:""}.fa-viadeo-square:before{content:""}.fa-snapchat:before{content:""}.fa-snapchat-ghost:before{content:""}.fa-snapchat-square:before{content:""}.fa-pied-piper:before{content:""}.fa-first-order:before{content:""}.fa-yoast:before{content:""}.fa-themeisle:before{content:""}.fa-google-plus-circle:before,.fa-google-plus-official:before{content:""}.fa-fa:before,.fa-font-awesome:before{content:""}.fa-handshake-o:before{content:""}.fa-envelope-open:before{content:""}.fa-envelope-open-o:before{content:""}.fa-linode:before{content:""}.fa-address-book:before{content:""}.fa-address-book-o:before{content:""}.fa-address-card:before,.fa-vcard:before{content:""}.fa-address-card-o:before,.fa-vcard-o:before{content:""}.fa-user-circle:before{content:""}.fa-user-circle-o:before{content:""}.fa-user-o:before{content:""}.fa-id-badge:before{content:""}.fa-drivers-license:before,.fa-id-card:before{content:""}.fa-drivers-license-o:before,.fa-id-card-o:before{c
ontent:""}.fa-quora:before{content:""}.fa-free-code-camp:before{content:""}.fa-telegram:before{content:""}.fa-thermometer-4:before,.fa-thermometer-full:before,.fa-thermometer:before{content:""}.fa-thermometer-3:before,.fa-thermometer-three-quarters:before{content:""}.fa-thermometer-2:before,.fa-thermometer-half:before{content:""}.fa-thermometer-1:before,.fa-thermometer-quarter:before{content:""}.fa-thermometer-0:before,.fa-thermometer-empty:before{content:""}.fa-shower:before{content:""}.fa-bath:before,.fa-bathtub:before,.fa-s15:before{content:""}.fa-podcast:before{content:""}.fa-window-maximize:before{content:""}.fa-window-minimize:before{content:""}.fa-window-restore:before{content:""}.fa-times-rectangle:before,.fa-window-close:before{content:""}.fa-times-rectangle-o:before,.fa-window-close-o:before{content:""}.fa-bandcamp:before{content:""}.fa-grav:before{content:""}.fa-etsy:before{content:""}.fa-imdb:before{content:""}.fa-ravelry:before{content:""}.fa-eercast:before{content:""}.fa-microchip:before{content:""}.fa-snowflake-o:before{content:""}.fa-superpowers:before{content:""}.fa-wpexplorer:before{content:""}.fa-meetup:before{content:""}.sr-only{position:absolute;width:1px;height:1px;padding:0;margin:-1px;overflow:hidden;clip:rect(0,0,0,0);border:0}.sr-only-focusable:active,.sr-only-focusable:focus{position:static;width:auto;height:auto;margin:0;overflow:visible;clip:auto}.fa,.icon,.rst-content .admonition-title,.rst-content .code-block-caption .headerlink,.rst-content .eqno .headerlink,.rst-content code.download span:first-child,.rst-content dl dt .headerlink,.rst-content h1 .headerlink,.rst-content h2 .headerlink,.rst-content h3 .headerlink,.rst-content h4 .headerlink,.rst-content h5 .headerlink,.rst-content h6 .headerlink,.rst-content p.caption .headerlink,.rst-content p .headerlink,.rst-content table>caption .headerlink,.rst-content tt.download span:first-child,.wy-dropdown .caret,.wy-inline-validate.wy-inline-validate-danger .wy-input-context,.wy-inline-validate.wy-inline-validate-info .wy-input-context,.wy-inline-validate.wy-inline-validate-success .wy-input-context,.wy-inline-validate.wy-inline-validate-warning .wy-input-context,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li button.toctree-expand{font-family:inherit}.fa:before,.icon:before,.rst-content .admonition-title:before,.rst-content .code-block-caption .headerlink:before,.rst-content .eqno .headerlink:before,.rst-content code.download span:first-child:before,.rst-content dl dt .headerlink:before,.rst-content h1 .headerlink:before,.rst-content h2 .headerlink:before,.rst-content h3 .headerlink:before,.rst-content h4 .headerlink:before,.rst-content h5 .headerlink:before,.rst-content h6 .headerlink:before,.rst-content p.caption .headerlink:before,.rst-content p .headerlink:before,.rst-content table>caption .headerlink:before,.rst-content tt.download span:first-child:before,.wy-dropdown .caret:before,.wy-inline-validate.wy-inline-validate-danger .wy-input-context:before,.wy-inline-validate.wy-inline-validate-info .wy-input-context:before,.wy-inline-validate.wy-inline-validate-success .wy-input-context:before,.wy-inline-validate.wy-inline-validate-warning .wy-input-context:before,.wy-menu-vertical li.current>a button.toctree-expand:before,.wy-menu-vertical li.on a button.toctree-expand:before,.wy-menu-vertical li 
button.toctree-expand:before{font-family:FontAwesome;display:inline-block;font-style:normal;font-weight:400;line-height:1;text-decoration:inherit}.rst-content .code-block-caption a .headerlink,.rst-content .eqno a .headerlink,.rst-content a .admonition-title,.rst-content code.download a span:first-child,.rst-content dl dt a .headerlink,.rst-content h1 a .headerlink,.rst-content h2 a .headerlink,.rst-content h3 a .headerlink,.rst-content h4 a .headerlink,.rst-content h5 a .headerlink,.rst-content h6 a .headerlink,.rst-content p.caption a .headerlink,.rst-content p a .headerlink,.rst-content table>caption a .headerlink,.rst-content tt.download a span:first-child,.wy-menu-vertical li.current>a button.toctree-expand,.wy-menu-vertical li.on a button.toctree-expand,.wy-menu-vertical li a button.toctree-expand,a .fa,a .icon,a .rst-content .admonition-title,a .rst-content .code-block-caption .headerlink,a .rst-content .eqno .headerlink,a .rst-content code.download span:first-child,a .rst-content dl dt .headerlink,a .rst-content h1 .headerlink,a .rst-content h2 .headerlink,a .rst-content h3 .headerlink,a .rst-content h4 .headerlink,a .rst-content h5 .headerlink,a .rst-content h6 .headerlink,a .rst-content p.caption .headerlink,a .rst-content p .headerlink,a .rst-content table>caption .headerlink,a .rst-content tt.download span:first-child,a .wy-menu-vertical li button.toctree-expand{display:inline-block;text-decoration:inherit}.btn .fa,.btn .icon,.btn .rst-content .admonition-title,.btn .rst-content .code-block-caption .headerlink,.btn .rst-content .eqno .headerlink,.btn .rst-content code.download span:first-child,.btn .rst-content dl dt .headerlink,.btn .rst-content h1 .headerlink,.btn .rst-content h2 .headerlink,.btn .rst-content h3 .headerlink,.btn .rst-content h4 .headerlink,.btn .rst-content h5 .headerlink,.btn .rst-content h6 .headerlink,.btn .rst-content p .headerlink,.btn .rst-content table>caption .headerlink,.btn .rst-content tt.download span:first-child,.btn .wy-menu-vertical li.current>a button.toctree-expand,.btn .wy-menu-vertical li.on a button.toctree-expand,.btn .wy-menu-vertical li button.toctree-expand,.nav .fa,.nav .icon,.nav .rst-content .admonition-title,.nav .rst-content .code-block-caption .headerlink,.nav .rst-content .eqno .headerlink,.nav .rst-content code.download span:first-child,.nav .rst-content dl dt .headerlink,.nav .rst-content h1 .headerlink,.nav .rst-content h2 .headerlink,.nav .rst-content h3 .headerlink,.nav .rst-content h4 .headerlink,.nav .rst-content h5 .headerlink,.nav .rst-content h6 .headerlink,.nav .rst-content p .headerlink,.nav .rst-content table>caption .headerlink,.nav .rst-content tt.download span:first-child,.nav .wy-menu-vertical li.current>a button.toctree-expand,.nav .wy-menu-vertical li.on a button.toctree-expand,.nav .wy-menu-vertical li button.toctree-expand,.rst-content .btn .admonition-title,.rst-content .code-block-caption .btn .headerlink,.rst-content .code-block-caption .nav .headerlink,.rst-content .eqno .btn .headerlink,.rst-content .eqno .nav .headerlink,.rst-content .nav .admonition-title,.rst-content code.download .btn span:first-child,.rst-content code.download .nav span:first-child,.rst-content dl dt .btn .headerlink,.rst-content dl dt .nav .headerlink,.rst-content h1 .btn .headerlink,.rst-content h1 .nav .headerlink,.rst-content h2 .btn .headerlink,.rst-content h2 .nav .headerlink,.rst-content h3 .btn .headerlink,.rst-content h3 .nav .headerlink,.rst-content h4 .btn .headerlink,.rst-content h4 .nav .headerlink,.rst-content h5 .btn 
.footnote-reference>span.fn-bracket{display:none}.rst-content .hlist{width:100%}.rst-content dl dt span.classifier:before{content:" : "}.rst-content dl dt span.classifier-delimiter{display:none!important}html.writer-html4 .rst-content table.docutils.citation,html.writer-html4 .rst-content table.docutils.footnote{background:none;border:none}html.writer-html4 .rst-content table.docutils.citation td,html.writer-html4 .rst-content table.docutils.citation tr,html.writer-html4 .rst-content table.docutils.footnote td,html.writer-html4 .rst-content table.docutils.footnote tr{border:none;background-color:transparent!important;white-space:normal}html.writer-html4 .rst-content table.docutils.citation td.label,html.writer-html4 .rst-content table.docutils.footnote td.label{padding-left:0;padding-right:0;vertical-align:top}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{display:grid;grid-template-columns:auto minmax(80%,95%)}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{display:inline-grid;grid-template-columns:max-content auto}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{display:grid;grid-template-columns:auto auto minmax(.65rem,auto) minmax(40%,95%)}html.writer-html5 .rst-content aside.citation>span.label,html.writer-html5 .rst-content aside.footnote>span.label,html.writer-html5 .rst-content div.citation>span.label{grid-column-start:1;grid-column-end:2}html.writer-html5 .rst-content aside.citation>span.backrefs,html.writer-html5 .rst-content aside.footnote>span.backrefs,html.writer-html5 .rst-content div.citation>span.backrefs{grid-column-start:2;grid-column-end:3;grid-row-start:1;grid-row-end:3}html.writer-html5 .rst-content aside.citation>p,html.writer-html5 .rst-content aside.footnote>p,html.writer-html5 .rst-content div.citation>p{grid-column-start:4;grid-column-end:5}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.field-list,html.writer-html5 .rst-content dl.footnote{margin-bottom:24px}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dt{padding-left:1rem}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.field-list>dd,html.writer-html5 .rst-content dl.field-list>dt,html.writer-html5 .rst-content dl.footnote>dd,html.writer-html5 .rst-content dl.footnote>dt{margin-bottom:0}html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{font-size:.9rem}html.writer-html5 .rst-content dl.citation>dt,html.writer-html5 .rst-content dl.footnote>dt{margin:0 .5rem .5rem 0;line-height:1.2rem;word-break:break-all;font-weight:400}html.writer-html5 .rst-content dl.citation>dt>span.brackets:before,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:before{content:"["}html.writer-html5 .rst-content dl.citation>dt>span.brackets:after,html.writer-html5 .rst-content dl.footnote>dt>span.brackets:after{content:"]"}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a,html.writer-html5 
.rst-content dl.footnote>dt>span.fn-backref>a{word-break:keep-all}html.writer-html5 .rst-content dl.citation>dt>span.fn-backref>a:not(:first-child):before,html.writer-html5 .rst-content dl.footnote>dt>span.fn-backref>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content dl.citation>dd,html.writer-html5 .rst-content dl.footnote>dd{margin:0 0 .5rem;line-height:1.2rem}html.writer-html5 .rst-content dl.citation>dd p,html.writer-html5 .rst-content dl.footnote>dd p{font-size:.9rem}html.writer-html5 .rst-content aside.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content div.citation{padding-left:1rem;padding-right:1rem;font-size:.9rem;line-height:1.2rem}html.writer-html5 .rst-content aside.citation p,html.writer-html5 .rst-content aside.footnote p,html.writer-html5 .rst-content div.citation p{font-size:.9rem;line-height:1.2rem;margin-bottom:12px}html.writer-html5 .rst-content aside.citation span.backrefs,html.writer-html5 .rst-content aside.footnote span.backrefs,html.writer-html5 .rst-content div.citation span.backrefs{text-align:left;font-style:italic;margin-left:.65rem;word-break:break-word;word-spacing:-.1rem;max-width:5rem}html.writer-html5 .rst-content aside.citation span.backrefs>a,html.writer-html5 .rst-content aside.footnote span.backrefs>a,html.writer-html5 .rst-content div.citation span.backrefs>a{word-break:keep-all}html.writer-html5 .rst-content aside.citation span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content aside.footnote span.backrefs>a:not(:first-child):before,html.writer-html5 .rst-content div.citation span.backrefs>a:not(:first-child):before{content:" "}html.writer-html5 .rst-content aside.citation span.label,html.writer-html5 .rst-content aside.footnote span.label,html.writer-html5 .rst-content div.citation span.label{line-height:1.2rem}html.writer-html5 .rst-content aside.citation-list,html.writer-html5 .rst-content aside.footnote-list,html.writer-html5 .rst-content div.citation-list{margin-bottom:24px}html.writer-html5 .rst-content dl.option-list kbd{font-size:.9rem}.rst-content table.docutils.footnote,html.writer-html4 .rst-content table.docutils.citation,html.writer-html5 .rst-content aside.footnote,html.writer-html5 .rst-content aside.footnote-list aside.footnote,html.writer-html5 .rst-content div.citation-list>div.citation,html.writer-html5 .rst-content dl.citation,html.writer-html5 .rst-content dl.footnote{color:grey}.rst-content table.docutils.footnote code,.rst-content table.docutils.footnote tt,html.writer-html4 .rst-content table.docutils.citation code,html.writer-html4 .rst-content table.docutils.citation tt,html.writer-html5 .rst-content aside.footnote-list aside.footnote code,html.writer-html5 .rst-content aside.footnote-list aside.footnote tt,html.writer-html5 .rst-content aside.footnote code,html.writer-html5 .rst-content aside.footnote tt,html.writer-html5 .rst-content div.citation-list>div.citation code,html.writer-html5 .rst-content div.citation-list>div.citation tt,html.writer-html5 .rst-content dl.citation code,html.writer-html5 .rst-content dl.citation tt,html.writer-html5 .rst-content dl.footnote code,html.writer-html5 .rst-content dl.footnote tt{color:#555}.rst-content .wy-table-responsive.citation,.rst-content .wy-table-responsive.footnote{margin-bottom:0}.rst-content .wy-table-responsive.citation+:not(.citation),.rst-content .wy-table-responsive.footnote+:not(.footnote){margin-top:24px}.rst-content .wy-table-responsive.citation:last-child,.rst-content 
.wy-table-responsive.footnote:last-child{margin-bottom:24px}.rst-content table.docutils th{border-color:#e1e4e5}html.writer-html5 .rst-content table.docutils th{border:1px solid #e1e4e5}html.writer-html5 .rst-content table.docutils td>p,html.writer-html5 .rst-content table.docutils th>p{line-height:1rem;margin-bottom:0;font-size:.9rem}.rst-content table.docutils td .last,.rst-content table.docutils td .last>:last-child{margin-bottom:0}.rst-content table.field-list,.rst-content table.field-list td{border:none}.rst-content table.field-list td p{line-height:inherit}.rst-content table.field-list td>strong{display:inline-block}.rst-content table.field-list .field-name{padding-right:10px;text-align:left;white-space:nowrap}.rst-content table.field-list .field-body{text-align:left}.rst-content code,.rst-content tt{color:#000;font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;padding:2px 5px}.rst-content code big,.rst-content code em,.rst-content tt big,.rst-content tt em{font-size:100%!important;line-height:normal}.rst-content code.literal,.rst-content tt.literal{color:#e74c3c;white-space:normal}.rst-content code.xref,.rst-content tt.xref,a .rst-content code,a .rst-content tt{font-weight:700;color:#404040;overflow-wrap:normal}.rst-content kbd,.rst-content pre,.rst-content samp{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace}.rst-content a code,.rst-content a tt{color:#2980b9}.rst-content dl{margin-bottom:24px}.rst-content dl dt{font-weight:700;margin-bottom:12px}.rst-content dl ol,.rst-content dl p,.rst-content dl table,.rst-content dl ul{margin-bottom:12px}.rst-content dl dd{margin:0 0 12px 24px;line-height:24px}.rst-content dl dd>ol:last-child,.rst-content dl dd>p:last-child,.rst-content dl dd>table:last-child,.rst-content dl dd>ul:last-child{margin-bottom:0}html.writer-html4 .rst-content dl:not(.docutils),html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple){margin-bottom:24px}html.writer-html4 .rst-content dl:not(.docutils)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{display:table;margin:6px 0;font-size:90%;line-height:normal;background:#e7f2fa;color:#2980b9;border-top:3px solid #6ab0de;padding:6px;position:relative}html.writer-html4 .rst-content dl:not(.docutils)>dt:before,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:before{color:#6ab0de}html.writer-html4 .rst-content dl:not(.docutils)>dt .headerlink,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt{margin-bottom:6px;border:none;border-left:3px solid #ccc;background:#f0f0f0;color:#555}html.writer-html4 .rst-content dl:not(.docutils) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink,html.writer-html5 .rst-content 
dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) dl:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt .headerlink{color:#404040;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils)>dt:first-child,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple)>dt:first-child{margin-top:0}html.writer-html4 .rst-content dl:not(.docutils) code.descclassname,html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descclassname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{background-color:transparent;border:none;padding:0;font-size:100%!important}html.writer-html4 .rst-content dl:not(.docutils) code.descname,html.writer-html4 .rst-content dl:not(.docutils) tt.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) code.descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) tt.descname{font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .optional,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .optional{display:inline-block;padding:0 4px;color:#000;font-weight:700}html.writer-html4 .rst-content dl:not(.docutils) .property,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .property{display:inline-block;padding-right:8px;max-width:100%}html.writer-html4 .rst-content dl:not(.docutils) .k,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .k{font-style:italic}html.writer-html4 .rst-content dl:not(.docutils) .descclassname,html.writer-html4 .rst-content dl:not(.docutils) .descname,html.writer-html4 .rst-content dl:not(.docutils) .sig-name,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descclassname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .descname,html.writer-html5 .rst-content dl[class]:not(.option-list):not(.field-list):not(.footnote):not(.citation):not(.glossary):not(.simple) .sig-name{font-family:SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,Courier,monospace;color:#000}.rst-content .viewcode-back,.rst-content .viewcode-link{display:inline-block;color:#27ae60;font-size:80%;padding-left:24px}.rst-content .viewcode-back{display:block;float:right}.rst-content p.rubric{margin-bottom:12px;font-weight:700}.rst-content 
code.download,.rst-content tt.download{background:inherit;padding:inherit;font-weight:400;font-family:inherit;font-size:inherit;color:inherit;border:inherit;white-space:inherit}.rst-content code.download span:first-child,.rst-content tt.download span:first-child{-webkit-font-smoothing:subpixel-antialiased}.rst-content code.download span:first-child:before,.rst-content tt.download span:first-child:before{margin-right:4px}.rst-content .guilabel,.rst-content .menuselection{font-size:80%;font-weight:700;border-radius:4px;padding:2.4px 6px;margin:auto 2px}.rst-content .guilabel,.rst-content .menuselection{border:1px solid #7fbbe3;background:#e7f2fa}.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>.kbd,.rst-content :not(dl.option-list)>:not(dt):not(kbd):not(.kbd)>kbd{color:inherit;font-size:80%;background-color:#fff;border:1px solid #a6a6a6;border-radius:4px;box-shadow:0 2px grey;padding:2.4px 6px;margin:auto 0}.rst-content .versionmodified{font-style:italic}@media screen and (max-width:480px){.rst-content .sidebar{width:100%}}span[id*=MathJax-Span]{color:#404040}.math{text-align:center}@font-face{font-family:Lato;src:url(fonts/lato-normal.woff2?bd03a2cc277bbbc338d464e679fe9942) format("woff2"),url(fonts/lato-normal.woff?27bd77b9162d388cb8d4c4217c7c5e2a) format("woff");font-weight:400;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold.woff2?cccb897485813c7c256901dbca54ecf2) format("woff2"),url(fonts/lato-bold.woff?d878b6c29b10beca227e9eef4246111b) format("woff");font-weight:700;font-style:normal;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-bold-italic.woff2?0b6bb6725576b072c5d0b02ecdd1900d) format("woff2"),url(fonts/lato-bold-italic.woff?9c7e4e9eb485b4a121c760e61bc3707c) format("woff");font-weight:700;font-style:italic;font-display:block}@font-face{font-family:Lato;src:url(fonts/lato-normal-italic.woff2?4eb103b4d12be57cb1d040ed5e162e9d) format("woff2"),url(fonts/lato-normal-italic.woff?f28f2d6482446544ef1ea1ccc6dd5892) format("woff");font-weight:400;font-style:italic;font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:400;src:url(fonts/Roboto-Slab-Regular.woff2?7abf5b8d04d26a2cafea937019bca958) format("woff2"),url(fonts/Roboto-Slab-Regular.woff?c1be9284088d487c5e3ff0a10a92e58c) format("woff");font-display:block}@font-face{font-family:Roboto Slab;font-style:normal;font-weight:700;src:url(fonts/Roboto-Slab-Bold.woff2?9984f4a9bda09be08e83f2506954adbe) format("woff2"),url(fonts/Roboto-Slab-Bold.woff?bed5564a116b05148e3b3bea6fb1162a) format("woff");font-display:block} \ No newline at end of file diff --git a/docs/_static/custom.css b/docs/_static/custom.css deleted file mode 100644 index 2a924f1d..00000000 --- a/docs/_static/custom.css +++ /dev/null @@ -1 +0,0 @@ -/* This file intentionally left blank. */ diff --git a/docs/_static/graphviz.css b/docs/_static/graphviz.css new file mode 100644 index 00000000..027576e3 --- /dev/null +++ b/docs/_static/graphviz.css @@ -0,0 +1,19 @@ +/* + * graphviz.css + * ~~~~~~~~~~~~ + * + * Sphinx stylesheet -- graphviz extension. + * + * :copyright: Copyright 2007-2024 by the Sphinx team, see AUTHORS. + * :license: BSD, see LICENSE for details. + * + */ + +img.graphviz { + border: 0; + max-width: 100%; +} + +object.graphviz { + max-width: 100%; +} diff --git a/docs/_static/jquery.js b/docs/_static/jquery.js new file mode 100644 index 00000000..c4c6022f --- /dev/null +++ b/docs/_static/jquery.js @@ -0,0 +1,2 @@ +/*! 
jQuery v3.6.0 | (c) OpenJS Foundation and other contributors | jquery.org/license */ +!function(e,t){"use strict";"object"==typeof module&&"object"==typeof module.exports?module.exports=e.document?t(e,!0):function(e){if(!e.document)throw new Error("jQuery requires a window with a document");return t(e)}:t(e)}("undefined"!=typeof window?window:this,function(C,e){"use strict";var t=[],r=Object.getPrototypeOf,s=t.slice,g=t.flat?function(e){return t.flat.call(e)}:function(e){return t.concat.apply([],e)},u=t.push,i=t.indexOf,n={},o=n.toString,v=n.hasOwnProperty,a=v.toString,l=a.call(Object),y={},m=function(e){return"function"==typeof e&&"number"!=typeof e.nodeType&&"function"!=typeof e.item},x=function(e){return null!=e&&e===e.window},E=C.document,c={type:!0,src:!0,nonce:!0,noModule:!0};function b(e,t,n){var r,i,o=(n=n||E).createElement("script");if(o.text=e,t)for(r in c)(i=t[r]||t.getAttribute&&t.getAttribute(r))&&o.setAttribute(r,i);n.head.appendChild(o).parentNode.removeChild(o)}function w(e){return null==e?e+"":"object"==typeof e||"function"==typeof e?n[o.call(e)]||"object":typeof e}var f="3.6.0",S=function(e,t){return new S.fn.init(e,t)};function p(e){var t=!!e&&"length"in e&&e.length,n=w(e);return!m(e)&&!x(e)&&("array"===n||0===t||"number"==typeof t&&0+~]|"+M+")"+M+"*"),U=new RegExp(M+"|>"),X=new RegExp(F),V=new RegExp("^"+I+"$"),G={ID:new RegExp("^#("+I+")"),CLASS:new RegExp("^\\.("+I+")"),TAG:new RegExp("^("+I+"|[*])"),ATTR:new RegExp("^"+W),PSEUDO:new RegExp("^"+F),CHILD:new RegExp("^:(only|first|last|nth|nth-last)-(child|of-type)(?:\\("+M+"*(even|odd|(([+-]|)(\\d*)n|)"+M+"*(?:([+-]|)"+M+"*(\\d+)|))"+M+"*\\)|)","i"),bool:new RegExp("^(?:"+R+")$","i"),needsContext:new RegExp("^"+M+"*[>+~]|:(even|odd|eq|gt|lt|nth|first|last)(?:\\("+M+"*((?:-\\d)?\\d*)"+M+"*\\)|)(?=[^-]|$)","i")},Y=/HTML$/i,Q=/^(?:input|select|textarea|button)$/i,J=/^h\d$/i,K=/^[^{]+\{\s*\[native \w/,Z=/^(?:#([\w-]+)|(\w+)|\.([\w-]+))$/,ee=/[+~]/,te=new RegExp("\\\\[\\da-fA-F]{1,6}"+M+"?|\\\\([^\\r\\n\\f])","g"),ne=function(e,t){var n="0x"+e.slice(1)-65536;return t||(n<0?String.fromCharCode(n+65536):String.fromCharCode(n>>10|55296,1023&n|56320))},re=/([\0-\x1f\x7f]|^-?\d)|^-$|[^\0-\x1f\x7f-\uFFFF\w-]/g,ie=function(e,t){return t?"\0"===e?"\ufffd":e.slice(0,-1)+"\\"+e.charCodeAt(e.length-1).toString(16)+" ":"\\"+e},oe=function(){T()},ae=be(function(e){return!0===e.disabled&&"fieldset"===e.nodeName.toLowerCase()},{dir:"parentNode",next:"legend"});try{H.apply(t=O.call(p.childNodes),p.childNodes),t[p.childNodes.length].nodeType}catch(e){H={apply:t.length?function(e,t){L.apply(e,O.call(t))}:function(e,t){var n=e.length,r=0;while(e[n++]=t[r++]);e.length=n-1}}}function se(t,e,n,r){var i,o,a,s,u,l,c,f=e&&e.ownerDocument,p=e?e.nodeType:9;if(n=n||[],"string"!=typeof t||!t||1!==p&&9!==p&&11!==p)return n;if(!r&&(T(e),e=e||C,E)){if(11!==p&&(u=Z.exec(t)))if(i=u[1]){if(9===p){if(!(a=e.getElementById(i)))return n;if(a.id===i)return n.push(a),n}else if(f&&(a=f.getElementById(i))&&y(e,a)&&a.id===i)return n.push(a),n}else{if(u[2])return H.apply(n,e.getElementsByTagName(t)),n;if((i=u[3])&&d.getElementsByClassName&&e.getElementsByClassName)return H.apply(n,e.getElementsByClassName(i)),n}if(d.qsa&&!N[t+" "]&&(!v||!v.test(t))&&(1!==p||"object"!==e.nodeName.toLowerCase())){if(c=t,f=e,1===p&&(U.test(t)||z.test(t))){(f=ee.test(t)&&ye(e.parentNode)||e)===e&&d.scope||((s=e.getAttribute("id"))?s=s.replace(re,ie):e.setAttribute("id",s=S)),o=(l=h(t)).length;while(o--)l[o]=(s?"#"+s:":scope")+" "+xe(l[o]);c=l.join(",")}try{return 
H.apply(n,f.querySelectorAll(c)),n}catch(e){N(t,!0)}finally{s===S&&e.removeAttribute("id")}}}return g(t.replace($,"$1"),e,n,r)}function ue(){var r=[];return function e(t,n){return r.push(t+" ")>b.cacheLength&&delete e[r.shift()],e[t+" "]=n}}function le(e){return e[S]=!0,e}function ce(e){var t=C.createElement("fieldset");try{return!!e(t)}catch(e){return!1}finally{t.parentNode&&t.parentNode.removeChild(t),t=null}}function fe(e,t){var n=e.split("|"),r=n.length;while(r--)b.attrHandle[n[r]]=t}function pe(e,t){var n=t&&e,r=n&&1===e.nodeType&&1===t.nodeType&&e.sourceIndex-t.sourceIndex;if(r)return r;if(n)while(n=n.nextSibling)if(n===t)return-1;return e?1:-1}function de(t){return function(e){return"input"===e.nodeName.toLowerCase()&&e.type===t}}function he(n){return function(e){var t=e.nodeName.toLowerCase();return("input"===t||"button"===t)&&e.type===n}}function ge(t){return function(e){return"form"in e?e.parentNode&&!1===e.disabled?"label"in e?"label"in e.parentNode?e.parentNode.disabled===t:e.disabled===t:e.isDisabled===t||e.isDisabled!==!t&&ae(e)===t:e.disabled===t:"label"in e&&e.disabled===t}}function ve(a){return le(function(o){return o=+o,le(function(e,t){var n,r=a([],e.length,o),i=r.length;while(i--)e[n=r[i]]&&(e[n]=!(t[n]=e[n]))})})}function ye(e){return e&&"undefined"!=typeof e.getElementsByTagName&&e}for(e in d=se.support={},i=se.isXML=function(e){var t=e&&e.namespaceURI,n=e&&(e.ownerDocument||e).documentElement;return!Y.test(t||n&&n.nodeName||"HTML")},T=se.setDocument=function(e){var t,n,r=e?e.ownerDocument||e:p;return r!=C&&9===r.nodeType&&r.documentElement&&(a=(C=r).documentElement,E=!i(C),p!=C&&(n=C.defaultView)&&n.top!==n&&(n.addEventListener?n.addEventListener("unload",oe,!1):n.attachEvent&&n.attachEvent("onunload",oe)),d.scope=ce(function(e){return a.appendChild(e).appendChild(C.createElement("div")),"undefined"!=typeof e.querySelectorAll&&!e.querySelectorAll(":scope fieldset div").length}),d.attributes=ce(function(e){return e.className="i",!e.getAttribute("className")}),d.getElementsByTagName=ce(function(e){return e.appendChild(C.createComment("")),!e.getElementsByTagName("*").length}),d.getElementsByClassName=K.test(C.getElementsByClassName),d.getById=ce(function(e){return a.appendChild(e).id=S,!C.getElementsByName||!C.getElementsByName(S).length}),d.getById?(b.filter.ID=function(e){var t=e.replace(te,ne);return function(e){return e.getAttribute("id")===t}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n=t.getElementById(e);return n?[n]:[]}}):(b.filter.ID=function(e){var n=e.replace(te,ne);return function(e){var t="undefined"!=typeof e.getAttributeNode&&e.getAttributeNode("id");return t&&t.value===n}},b.find.ID=function(e,t){if("undefined"!=typeof t.getElementById&&E){var n,r,i,o=t.getElementById(e);if(o){if((n=o.getAttributeNode("id"))&&n.value===e)return[o];i=t.getElementsByName(e),r=0;while(o=i[r++])if((n=o.getAttributeNode("id"))&&n.value===e)return[o]}return[]}}),b.find.TAG=d.getElementsByTagName?function(e,t){return"undefined"!=typeof t.getElementsByTagName?t.getElementsByTagName(e):d.qsa?t.querySelectorAll(e):void 0}:function(e,t){var n,r=[],i=0,o=t.getElementsByTagName(e);if("*"===e){while(n=o[i++])1===n.nodeType&&r.push(n);return r}return o},b.find.CLASS=d.getElementsByClassName&&function(e,t){if("undefined"!=typeof t.getElementsByClassName&&E)return t.getElementsByClassName(e)},s=[],v=[],(d.qsa=K.test(C.querySelectorAll))&&(ce(function(e){var 
t;a.appendChild(e).innerHTML="",e.querySelectorAll("[msallowcapture^='']").length&&v.push("[*^$]="+M+"*(?:''|\"\")"),e.querySelectorAll("[selected]").length||v.push("\\["+M+"*(?:value|"+R+")"),e.querySelectorAll("[id~="+S+"-]").length||v.push("~="),(t=C.createElement("input")).setAttribute("name",""),e.appendChild(t),e.querySelectorAll("[name='']").length||v.push("\\["+M+"*name"+M+"*="+M+"*(?:''|\"\")"),e.querySelectorAll(":checked").length||v.push(":checked"),e.querySelectorAll("a#"+S+"+*").length||v.push(".#.+[+~]"),e.querySelectorAll("\\\f"),v.push("[\\r\\n\\f]")}),ce(function(e){e.innerHTML="";var t=C.createElement("input");t.setAttribute("type","hidden"),e.appendChild(t).setAttribute("name","D"),e.querySelectorAll("[name=d]").length&&v.push("name"+M+"*[*^$|!~]?="),2!==e.querySelectorAll(":enabled").length&&v.push(":enabled",":disabled"),a.appendChild(e).disabled=!0,2!==e.querySelectorAll(":disabled").length&&v.push(":enabled",":disabled"),e.querySelectorAll("*,:x"),v.push(",.*:")})),(d.matchesSelector=K.test(c=a.matches||a.webkitMatchesSelector||a.mozMatchesSelector||a.oMatchesSelector||a.msMatchesSelector))&&ce(function(e){d.disconnectedMatch=c.call(e,"*"),c.call(e,"[s!='']:x"),s.push("!=",F)}),v=v.length&&new RegExp(v.join("|")),s=s.length&&new RegExp(s.join("|")),t=K.test(a.compareDocumentPosition),y=t||K.test(a.contains)?function(e,t){var n=9===e.nodeType?e.documentElement:e,r=t&&t.parentNode;return e===r||!(!r||1!==r.nodeType||!(n.contains?n.contains(r):e.compareDocumentPosition&&16&e.compareDocumentPosition(r)))}:function(e,t){if(t)while(t=t.parentNode)if(t===e)return!0;return!1},j=t?function(e,t){if(e===t)return l=!0,0;var n=!e.compareDocumentPosition-!t.compareDocumentPosition;return n||(1&(n=(e.ownerDocument||e)==(t.ownerDocument||t)?e.compareDocumentPosition(t):1)||!d.sortDetached&&t.compareDocumentPosition(e)===n?e==C||e.ownerDocument==p&&y(p,e)?-1:t==C||t.ownerDocument==p&&y(p,t)?1:u?P(u,e)-P(u,t):0:4&n?-1:1)}:function(e,t){if(e===t)return l=!0,0;var n,r=0,i=e.parentNode,o=t.parentNode,a=[e],s=[t];if(!i||!o)return e==C?-1:t==C?1:i?-1:o?1:u?P(u,e)-P(u,t):0;if(i===o)return pe(e,t);n=e;while(n=n.parentNode)a.unshift(n);n=t;while(n=n.parentNode)s.unshift(n);while(a[r]===s[r])r++;return r?pe(a[r],s[r]):a[r]==p?-1:s[r]==p?1:0}),C},se.matches=function(e,t){return se(e,null,null,t)},se.matchesSelector=function(e,t){if(T(e),d.matchesSelector&&E&&!N[t+" "]&&(!s||!s.test(t))&&(!v||!v.test(t)))try{var n=c.call(e,t);if(n||d.disconnectedMatch||e.document&&11!==e.document.nodeType)return n}catch(e){N(t,!0)}return 0":{dir:"parentNode",first:!0}," ":{dir:"parentNode"},"+":{dir:"previousSibling",first:!0},"~":{dir:"previousSibling"}},preFilter:{ATTR:function(e){return e[1]=e[1].replace(te,ne),e[3]=(e[3]||e[4]||e[5]||"").replace(te,ne),"~="===e[2]&&(e[3]=" "+e[3]+" "),e.slice(0,4)},CHILD:function(e){return e[1]=e[1].toLowerCase(),"nth"===e[1].slice(0,3)?(e[3]||se.error(e[0]),e[4]=+(e[4]?e[5]+(e[6]||1):2*("even"===e[3]||"odd"===e[3])),e[5]=+(e[7]+e[8]||"odd"===e[3])):e[3]&&se.error(e[0]),e},PSEUDO:function(e){var t,n=!e[6]&&e[2];return G.CHILD.test(e[0])?null:(e[3]?e[2]=e[4]||e[5]||"":n&&X.test(n)&&(t=h(n,!0))&&(t=n.indexOf(")",n.length-t)-n.length)&&(e[0]=e[0].slice(0,t),e[2]=n.slice(0,t)),e.slice(0,3))}},filter:{TAG:function(e){var t=e.replace(te,ne).toLowerCase();return"*"===e?function(){return!0}:function(e){return e.nodeName&&e.nodeName.toLowerCase()===t}},CLASS:function(e){var t=m[e+" "];return t||(t=new RegExp("(^|"+M+")"+e+"("+M+"|$)"))&&m(e,function(e){return t.test("string"==typeof 
e.className&&e.className||"undefined"!=typeof e.getAttribute&&e.getAttribute("class")||"")})},ATTR:function(n,r,i){return function(e){var t=se.attr(e,n);return null==t?"!="===r:!r||(t+="","="===r?t===i:"!="===r?t!==i:"^="===r?i&&0===t.indexOf(i):"*="===r?i&&-1:\x20\t\r\n\f]*)[\x20\t\r\n\f]*\/?>(?:<\/\1>|)$/i;function j(e,n,r){return m(n)?S.grep(e,function(e,t){return!!n.call(e,t,e)!==r}):n.nodeType?S.grep(e,function(e){return e===n!==r}):"string"!=typeof n?S.grep(e,function(e){return-1)[^>]*|#([\w-]+))$/;(S.fn.init=function(e,t,n){var r,i;if(!e)return this;if(n=n||D,"string"==typeof e){if(!(r="<"===e[0]&&">"===e[e.length-1]&&3<=e.length?[null,e,null]:q.exec(e))||!r[1]&&t)return!t||t.jquery?(t||n).find(e):this.constructor(t).find(e);if(r[1]){if(t=t instanceof S?t[0]:t,S.merge(this,S.parseHTML(r[1],t&&t.nodeType?t.ownerDocument||t:E,!0)),N.test(r[1])&&S.isPlainObject(t))for(r in t)m(this[r])?this[r](t[r]):this.attr(r,t[r]);return this}return(i=E.getElementById(r[2]))&&(this[0]=i,this.length=1),this}return e.nodeType?(this[0]=e,this.length=1,this):m(e)?void 0!==n.ready?n.ready(e):e(S):S.makeArray(e,this)}).prototype=S.fn,D=S(E);var L=/^(?:parents|prev(?:Until|All))/,H={children:!0,contents:!0,next:!0,prev:!0};function O(e,t){while((e=e[t])&&1!==e.nodeType);return e}S.fn.extend({has:function(e){var t=S(e,this),n=t.length;return this.filter(function(){for(var e=0;e\x20\t\r\n\f]*)/i,he=/^$|^module$|\/(?:java|ecma)script/i;ce=E.createDocumentFragment().appendChild(E.createElement("div")),(fe=E.createElement("input")).setAttribute("type","radio"),fe.setAttribute("checked","checked"),fe.setAttribute("name","t"),ce.appendChild(fe),y.checkClone=ce.cloneNode(!0).cloneNode(!0).lastChild.checked,ce.innerHTML="",y.noCloneChecked=!!ce.cloneNode(!0).lastChild.defaultValue,ce.innerHTML="",y.option=!!ce.lastChild;var ge={thead:[1,"","
"],col:[2,"","
"],tr:[2,"","
"],td:[3,"","
"],_default:[0,"",""]};function ve(e,t){var n;return n="undefined"!=typeof e.getElementsByTagName?e.getElementsByTagName(t||"*"):"undefined"!=typeof e.querySelectorAll?e.querySelectorAll(t||"*"):[],void 0===t||t&&A(e,t)?S.merge([e],n):n}function ye(e,t){for(var n=0,r=e.length;n",""]);var me=/<|&#?\w+;/;function xe(e,t,n,r,i){for(var o,a,s,u,l,c,f=t.createDocumentFragment(),p=[],d=0,h=e.length;d\s*$/g;function je(e,t){return A(e,"table")&&A(11!==t.nodeType?t:t.firstChild,"tr")&&S(e).children("tbody")[0]||e}function De(e){return e.type=(null!==e.getAttribute("type"))+"/"+e.type,e}function qe(e){return"true/"===(e.type||"").slice(0,5)?e.type=e.type.slice(5):e.removeAttribute("type"),e}function Le(e,t){var n,r,i,o,a,s;if(1===t.nodeType){if(Y.hasData(e)&&(s=Y.get(e).events))for(i in Y.remove(t,"handle events"),s)for(n=0,r=s[i].length;n").attr(n.scriptAttrs||{}).prop({charset:n.scriptCharset,src:n.url}).on("load error",i=function(e){r.remove(),i=null,e&&t("error"===e.type?404:200,e.type)}),E.head.appendChild(r[0])},abort:function(){i&&i()}}});var _t,zt=[],Ut=/(=)\?(?=&|$)|\?\?/;S.ajaxSetup({jsonp:"callback",jsonpCallback:function(){var e=zt.pop()||S.expando+"_"+wt.guid++;return this[e]=!0,e}}),S.ajaxPrefilter("json jsonp",function(e,t,n){var r,i,o,a=!1!==e.jsonp&&(Ut.test(e.url)?"url":"string"==typeof e.data&&0===(e.contentType||"").indexOf("application/x-www-form-urlencoded")&&Ut.test(e.data)&&"data");if(a||"jsonp"===e.dataTypes[0])return r=e.jsonpCallback=m(e.jsonpCallback)?e.jsonpCallback():e.jsonpCallback,a?e[a]=e[a].replace(Ut,"$1"+r):!1!==e.jsonp&&(e.url+=(Tt.test(e.url)?"&":"?")+e.jsonp+"="+r),e.converters["script json"]=function(){return o||S.error(r+" was not called"),o[0]},e.dataTypes[0]="json",i=C[r],C[r]=function(){o=arguments},n.always(function(){void 0===i?S(C).removeProp(r):C[r]=i,e[r]&&(e.jsonpCallback=t.jsonpCallback,zt.push(r)),o&&m(i)&&i(o[0]),o=i=void 0}),"script"}),y.createHTMLDocument=((_t=E.implementation.createHTMLDocument("").body).innerHTML="
",2===_t.childNodes.length),S.parseHTML=function(e,t,n){return"string"!=typeof e?[]:("boolean"==typeof t&&(n=t,t=!1),t||(y.createHTMLDocument?((r=(t=E.implementation.createHTMLDocument("")).createElement("base")).href=E.location.href,t.head.appendChild(r)):t=E),o=!n&&[],(i=N.exec(e))?[t.createElement(i[1])]:(i=xe([e],t,o),o&&o.length&&S(o).remove(),S.merge([],i.childNodes)));var r,i,o},S.fn.load=function(e,t,n){var r,i,o,a=this,s=e.indexOf(" ");return-1").append(S.parseHTML(e)).find(r):e)}).always(n&&function(e,t){a.each(function(){n.apply(this,o||[e.responseText,t,e])})}),this},S.expr.pseudos.animated=function(t){return S.grep(S.timers,function(e){return t===e.elem}).length},S.offset={setOffset:function(e,t,n){var r,i,o,a,s,u,l=S.css(e,"position"),c=S(e),f={};"static"===l&&(e.style.position="relative"),s=c.offset(),o=S.css(e,"top"),u=S.css(e,"left"),("absolute"===l||"fixed"===l)&&-1<(o+u).indexOf("auto")?(a=(r=c.position()).top,i=r.left):(a=parseFloat(o)||0,i=parseFloat(u)||0),m(t)&&(t=t.call(e,n,S.extend({},s))),null!=t.top&&(f.top=t.top-s.top+a),null!=t.left&&(f.left=t.left-s.left+i),"using"in t?t.using.call(e,f):c.css(f)}},S.fn.extend({offset:function(t){if(arguments.length)return void 0===t?this:this.each(function(e){S.offset.setOffset(this,t,e)});var e,n,r=this[0];return r?r.getClientRects().length?(e=r.getBoundingClientRect(),n=r.ownerDocument.defaultView,{top:e.top+n.pageYOffset,left:e.left+n.pageXOffset}):{top:0,left:0}:void 0},position:function(){if(this[0]){var e,t,n,r=this[0],i={top:0,left:0};if("fixed"===S.css(r,"position"))t=r.getBoundingClientRect();else{t=this.offset(),n=r.ownerDocument,e=r.offsetParent||n.documentElement;while(e&&(e===n.body||e===n.documentElement)&&"static"===S.css(e,"position"))e=e.parentNode;e&&e!==r&&1===e.nodeType&&((i=S(e).offset()).top+=S.css(e,"borderTopWidth",!0),i.left+=S.css(e,"borderLeftWidth",!0))}return{top:t.top-i.top-S.css(r,"marginTop",!0),left:t.left-i.left-S.css(r,"marginLeft",!0)}}},offsetParent:function(){return this.map(function(){var e=this.offsetParent;while(e&&"static"===S.css(e,"position"))e=e.offsetParent;return e||re})}}),S.each({scrollLeft:"pageXOffset",scrollTop:"pageYOffset"},function(t,i){var o="pageYOffset"===i;S.fn[t]=function(e){return $(this,function(e,t,n){var r;if(x(e)?r=e:9===e.nodeType&&(r=e.defaultView),void 0===n)return r?r[i]:e[t];r?r.scrollTo(o?r.pageXOffset:n,o?n:r.pageYOffset):e[t]=n},t,e,arguments.length)}}),S.each(["top","left"],function(e,n){S.cssHooks[n]=Fe(y.pixelPosition,function(e,t){if(t)return t=We(e,n),Pe.test(t)?S(e).position()[n]+"px":t})}),S.each({Height:"height",Width:"width"},function(a,s){S.each({padding:"inner"+a,content:s,"":"outer"+a},function(r,o){S.fn[o]=function(e,t){var n=arguments.length&&(r||"boolean"!=typeof e),i=r||(!0===e||!0===t?"margin":"border");return $(this,function(e,t,n){var r;return x(e)?0===o.indexOf("outer")?e["inner"+a]:e.document.documentElement["client"+a]:9===e.nodeType?(r=e.documentElement,Math.max(e.body["scroll"+a],r["scroll"+a],e.body["offset"+a],r["offset"+a],r["client"+a])):void 0===n?S.css(e,t,i):S.style(e,t,n,i)},s,n?e:void 0,n)}})}),S.each(["ajaxStart","ajaxStop","ajaxComplete","ajaxError","ajaxSuccess","ajaxSend"],function(e,t){S.fn[t]=function(e){return this.on(t,e)}}),S.fn.extend({bind:function(e,t,n){return this.on(e,null,t,n)},unbind:function(e,t){return this.off(e,null,t)},delegate:function(e,t,n,r){return this.on(t,e,n,r)},undelegate:function(e,t,n){return 1===arguments.length?this.off(e,"**"):this.off(t,e||"**",n)},hover:function(e,t){return 
this.mouseenter(e).mouseleave(t||e)}}),S.each("blur focus focusin focusout resize scroll click dblclick mousedown mouseup mousemove mouseover mouseout mouseenter mouseleave change select submit keydown keypress keyup contextmenu".split(" "),function(e,n){S.fn[n]=function(e,t){return 0",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=y.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=y.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),y.elements=c+" "+a,j(b)}function f(a){var b=x[a[v]];return b||(b={},w++,a[v]=w,x[w]=b),b}function g(a,c,d){if(c||(c=b),q)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():u.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||t.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),q)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return y.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(y,b.frag)}function j(a){a||(a=b);var d=f(a);return!y.shivCSS||p||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),q||i(a,d),a}function k(a){for(var b,c=a.getElementsByTagName("*"),e=c.length,f=RegExp("^(?:"+d().join("|")+")$","i"),g=[];e--;)b=c[e],f.test(b.nodeName)&&g.push(b.applyElement(l(b)));return g}function l(a){for(var b,c=a.attributes,d=c.length,e=a.ownerDocument.createElement(A+":"+a.nodeName);d--;)b=c[d],b.specified&&e.setAttribute(b.nodeName,b.nodeValue);return e.style.cssText=a.style.cssText,e}function m(a){for(var b,c=a.split("{"),e=c.length,f=RegExp("(^|[\\s,>+~])("+d().join("|")+")(?=[[\\s,>+~#.:]|$)","gi"),g="$1"+A+"\\:$2";e--;)b=c[e]=c[e].split("}"),b[b.length-1]=b[b.length-1].replace(f,g),c[e]=b.join("}");return c.join("{")}function n(a){for(var b=a.length;b--;)a[b].removeNode()}function o(a){function b(){clearTimeout(g._removeSheetTimer),d&&d.removeNode(!0),d=null}var d,e,g=f(a),h=a.namespaces,i=a.parentWindow;return!B||a.printShived?a:("undefined"==typeof h[A]&&h.add(A),i.attachEvent("onbeforeprint",function(){b();for(var f,g,h,i=a.styleSheets,j=[],l=i.length,n=Array(l);l--;)n[l]=i[l];for(;h=n.pop();)if(!h.disabled&&z.test(h.media)){try{f=h.imports,g=f.length}catch(o){g=0}for(l=0;g>l;l++)n.push(f[l]);try{j.push(h.cssText)}catch(o){}}j=m(j.reverse().join("")),e=k(a),d=c(a,j)}),i.attachEvent("onafterprint",function(){n(e),clearTimeout(g._removeSheetTimer),g._removeSheetTimer=setTimeout(b,500)}),a.printShived=!0,a)}var p,q,r="3.7.3",s=a.html5||{},t=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,u=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,v="_html5shiv",w=0,x={};!function(){try{var a=b.createElement("a");a.innerHTML="",p="hidden"in a,q=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof 
a.createElement}()}catch(c){p=!0,q=!0}}();var y={elements:s.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time video",version:r,shivCSS:s.shivCSS!==!1,supportsUnknownElements:q,shivMethods:s.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=y,j(b);var z=/^$|\b(?:all|print)\b/,A="html5shiv",B=!q&&function(){var c=b.documentElement;return!("undefined"==typeof b.namespaces||"undefined"==typeof b.parentWindow||"undefined"==typeof c.applyElement||"undefined"==typeof c.removeNode||"undefined"==typeof a.attachEvent)}();y.type+=" print",y.shivPrint=o,o(b),"object"==typeof module&&module.exports&&(module.exports=y)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/docs/_static/js/html5shiv.min.js b/docs/_static/js/html5shiv.min.js new file mode 100644 index 00000000..cd1c674f --- /dev/null +++ b/docs/_static/js/html5shiv.min.js @@ -0,0 +1,4 @@ +/** +* @preserve HTML5 Shiv 3.7.3 | @afarkas @jdalton @jon_neal @rem | MIT/GPL2 Licensed +*/ +!function(a,b){function c(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function d(){var a=t.elements;return"string"==typeof a?a.split(" "):a}function e(a,b){var c=t.elements;"string"!=typeof c&&(c=c.join(" ")),"string"!=typeof a&&(a=a.join(" ")),t.elements=c+" "+a,j(b)}function f(a){var b=s[a[q]];return b||(b={},r++,a[q]=r,s[r]=b),b}function g(a,c,d){if(c||(c=b),l)return c.createElement(a);d||(d=f(c));var e;return e=d.cache[a]?d.cache[a].cloneNode():p.test(a)?(d.cache[a]=d.createElem(a)).cloneNode():d.createElem(a),!e.canHaveChildren||o.test(a)||e.tagUrn?e:d.frag.appendChild(e)}function h(a,c){if(a||(a=b),l)return a.createDocumentFragment();c=c||f(a);for(var e=c.frag.cloneNode(),g=0,h=d(),i=h.length;i>g;g++)e.createElement(h[g]);return e}function i(a,b){b.cache||(b.cache={},b.createElem=a.createElement,b.createFrag=a.createDocumentFragment,b.frag=b.createFrag()),a.createElement=function(c){return t.shivMethods?g(c,a,b):b.createElem(c)},a.createDocumentFragment=Function("h,f","return function(){var n=f.cloneNode(),c=n.createElement;h.shivMethods&&("+d().join().replace(/[\w\-:]+/g,function(a){return b.createElem(a),b.frag.createElement(a),'c("'+a+'")'})+");return n}")(t,b.frag)}function j(a){a||(a=b);var d=f(a);return!t.shivCSS||k||d.hasCSS||(d.hasCSS=!!c(a,"article,aside,dialog,figcaption,figure,footer,header,hgroup,main,nav,section{display:block}mark{background:#FF0;color:#000}template{display:none}")),l||i(a,d),a}var k,l,m="3.7.3-pre",n=a.html5||{},o=/^<|^(?:button|map|select|textarea|object|iframe|option|optgroup)$/i,p=/^(?:a|b|code|div|fieldset|h1|h2|h3|h4|h5|h6|i|label|li|ol|p|q|span|strong|style|table|tbody|td|th|tr|ul)$/i,q="_html5shiv",r=0,s={};!function(){try{var a=b.createElement("a");a.innerHTML="",k="hidden"in a,l=1==a.childNodes.length||function(){b.createElement("a");var a=b.createDocumentFragment();return"undefined"==typeof a.cloneNode||"undefined"==typeof a.createDocumentFragment||"undefined"==typeof a.createElement}()}catch(c){k=!0,l=!0}}();var t={elements:n.elements||"abbr article aside audio bdi canvas data datalist details dialog figcaption figure footer header hgroup main mark meter nav output picture progress section summary template time 
video",version:m,shivCSS:n.shivCSS!==!1,supportsUnknownElements:l,shivMethods:n.shivMethods!==!1,type:"default",shivDocument:j,createElement:g,createDocumentFragment:h,addElements:e};a.html5=t,j(b),"object"==typeof module&&module.exports&&(module.exports=t)}("undefined"!=typeof window?window:this,document); \ No newline at end of file diff --git a/docs/_static/js/theme.js b/docs/_static/js/theme.js new file mode 100644 index 00000000..1fddb6ee --- /dev/null +++ b/docs/_static/js/theme.js @@ -0,0 +1 @@ +!function(n){var e={};function t(i){if(e[i])return e[i].exports;var o=e[i]={i:i,l:!1,exports:{}};return n[i].call(o.exports,o,o.exports,t),o.l=!0,o.exports}t.m=n,t.c=e,t.d=function(n,e,i){t.o(n,e)||Object.defineProperty(n,e,{enumerable:!0,get:i})},t.r=function(n){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(n,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(n,"__esModule",{value:!0})},t.t=function(n,e){if(1&e&&(n=t(n)),8&e)return n;if(4&e&&"object"==typeof n&&n&&n.__esModule)return n;var i=Object.create(null);if(t.r(i),Object.defineProperty(i,"default",{enumerable:!0,value:n}),2&e&&"string"!=typeof n)for(var o in n)t.d(i,o,function(e){return n[e]}.bind(null,o));return i},t.n=function(n){var e=n&&n.__esModule?function(){return n.default}:function(){return n};return t.d(e,"a",e),e},t.o=function(n,e){return Object.prototype.hasOwnProperty.call(n,e)},t.p="",t(t.s=0)}([function(n,e,t){t(1),n.exports=t(3)},function(n,e,t){(function(){var e="undefined"!=typeof window?window.jQuery:t(2);n.exports.ThemeNav={navBar:null,win:null,winScroll:!1,winResize:!1,linkScroll:!1,winPosition:0,winHeight:null,docHeight:null,isRunning:!1,enable:function(n){var t=this;void 0===n&&(n=!0),t.isRunning||(t.isRunning=!0,e((function(e){t.init(e),t.reset(),t.win.on("hashchange",t.reset),n&&t.win.on("scroll",(function(){t.linkScroll||t.winScroll||(t.winScroll=!0,requestAnimationFrame((function(){t.onScroll()})))})),t.win.on("resize",(function(){t.winResize||(t.winResize=!0,requestAnimationFrame((function(){t.onResize()})))})),t.onResize()})))},enableSticky:function(){this.enable(!0)},init:function(n){n(document);var e=this;this.navBar=n("div.wy-side-scroll:first"),this.win=n(window),n(document).on("click","[data-toggle='wy-nav-top']",(function(){n("[data-toggle='wy-nav-shift']").toggleClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift")})).on("click",".wy-menu-vertical .current ul li a",(function(){var t=n(this);n("[data-toggle='wy-nav-shift']").removeClass("shift"),n("[data-toggle='rst-versions']").toggleClass("shift"),e.toggleCurrent(t),e.hashChange()})).on("click","[data-toggle='rst-current-version']",(function(){n("[data-toggle='rst-versions']").toggleClass("shift-up")})),n("table.docutils:not(.field-list,.footnote,.citation)").wrap("
"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each((function(){var t=n(this);expand=n(''),expand.on("click",(function(n){return e.toggleCurrent(t),n.stopPropagation(),!1})),t.prepend(expand)}))},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),t=e.find('[href="'+n+'"]');if(0===t.length){var i=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(t=e.find('[href="#'+i.attr("id")+'"]')).length&&(t=e.find('[href="#"]'))}if(t.length>0){$(".wy-menu-vertical .current").removeClass("current").attr("aria-expanded","false"),t.addClass("current").attr("aria-expanded","true"),t.closest("li.toctree-l1").parent().addClass("current").attr("aria-expanded","true");for(let n=1;n<=10;n++)t.closest("li.toctree-l"+n).addClass("current").attr("aria-expanded","true");t[0].scrollIntoView()}}catch(n){console.log("Error expanding nav for anchor",n)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,t=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(t),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",(function(){this.linkScroll=!1}))},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current").attr("aria-expanded","false"),e.siblings().find("li.current").removeClass("current").attr("aria-expanded","false");var t=e.find("> ul li");t.length&&(t.removeClass("current").attr("aria-expanded","false"),e.toggleClass("current").attr("aria-expanded",(function(n,e){return"true"==e?"false":"true"})))}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:n.exports.ThemeNav,StickyNav:n.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],t=0;t + +launchlaunchlitelite \ No newline at end of file diff --git a/docs/_static/no_image.png b/docs/_static/no_image.png new file mode 100644 index 00000000..8c2d48d5 Binary files /dev/null and b/docs/_static/no_image.png differ diff --git a/docs/_static/pygments.css b/docs/_static/pygments.css index 04a41742..0d49244e 100644 --- a/docs/_static/pygments.css +++ b/docs/_static/pygments.css @@ -4,81 +4,72 @@ span.linenos { color: inherit; background-color: transparent; padding-left: 5px; td.linenos .special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } span.linenos.special { color: #000000; background-color: #ffffc0; padding-left: 5px; padding-right: 5px; } .highlight .hll { background-color: #ffffcc } -.highlight { background: #f8f8f8; } -.highlight .c { color: #8f5902; font-style: italic } /* Comment */ -.highlight .err { color: #a40000; border: 1px solid #ef2929 } /* Error */ -.highlight .g { color: #000000 } /* Generic */ -.highlight .k { color: #004461; font-weight: bold } /* Keyword */ -.highlight .l { color: #000000 } /* Literal */ -.highlight .n { color: #000000 } /* Name */ -.highlight .o { color: #582800 } /* Operator */ -.highlight .x { color: #000000 } /* Other */ -.highlight .p { color: #000000; font-weight: bold } /* Punctuation */ -.highlight .ch { color: #8f5902; font-style: italic } /* Comment.Hashbang */ -.highlight .cm { color: #8f5902; font-style: italic } /* Comment.Multiline */ -.highlight .cp { color: #8f5902 } /* Comment.Preproc */ -.highlight .cpf { color: #8f5902; font-style: italic } /* Comment.PreprocFile */ -.highlight .c1 { color: #8f5902; font-style: italic } /* 
Comment.Single */ -.highlight .cs { color: #8f5902; font-style: italic } /* Comment.Special */ -.highlight .gd { color: #a40000 } /* Generic.Deleted */ -.highlight .ge { color: #000000; font-style: italic } /* Generic.Emph */ -.highlight .ges { color: #000000 } /* Generic.EmphStrong */ -.highlight .gr { color: #ef2929 } /* Generic.Error */ +.highlight { background: #eeffcc; } +.highlight .c { color: #408090; font-style: italic } /* Comment */ +.highlight .err { border: 1px solid #FF0000 } /* Error */ +.highlight .k { color: #007020; font-weight: bold } /* Keyword */ +.highlight .o { color: #666666 } /* Operator */ +.highlight .ch { color: #408090; font-style: italic } /* Comment.Hashbang */ +.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ +.highlight .cp { color: #007020 } /* Comment.Preproc */ +.highlight .cpf { color: #408090; font-style: italic } /* Comment.PreprocFile */ +.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ +.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ +.highlight .gd { color: #A00000 } /* Generic.Deleted */ +.highlight .ge { font-style: italic } /* Generic.Emph */ +.highlight .ges { font-weight: bold; font-style: italic } /* Generic.EmphStrong */ +.highlight .gr { color: #FF0000 } /* Generic.Error */ .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ .highlight .gi { color: #00A000 } /* Generic.Inserted */ -.highlight .go { color: #888888 } /* Generic.Output */ -.highlight .gp { color: #745334 } /* Generic.Prompt */ -.highlight .gs { color: #000000; font-weight: bold } /* Generic.Strong */ +.highlight .go { color: #333333 } /* Generic.Output */ +.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ +.highlight .gs { font-weight: bold } /* Generic.Strong */ .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ -.highlight .gt { color: #a40000; font-weight: bold } /* Generic.Traceback */ -.highlight .kc { color: #004461; font-weight: bold } /* Keyword.Constant */ -.highlight .kd { color: #004461; font-weight: bold } /* Keyword.Declaration */ -.highlight .kn { color: #004461; font-weight: bold } /* Keyword.Namespace */ -.highlight .kp { color: #004461; font-weight: bold } /* Keyword.Pseudo */ -.highlight .kr { color: #004461; font-weight: bold } /* Keyword.Reserved */ -.highlight .kt { color: #004461; font-weight: bold } /* Keyword.Type */ -.highlight .ld { color: #000000 } /* Literal.Date */ -.highlight .m { color: #990000 } /* Literal.Number */ -.highlight .s { color: #4e9a06 } /* Literal.String */ -.highlight .na { color: #c4a000 } /* Name.Attribute */ -.highlight .nb { color: #004461 } /* Name.Builtin */ -.highlight .nc { color: #000000 } /* Name.Class */ -.highlight .no { color: #000000 } /* Name.Constant */ -.highlight .nd { color: #888888 } /* Name.Decorator */ -.highlight .ni { color: #ce5c00 } /* Name.Entity */ -.highlight .ne { color: #cc0000; font-weight: bold } /* Name.Exception */ -.highlight .nf { color: #000000 } /* Name.Function */ -.highlight .nl { color: #f57900 } /* Name.Label */ -.highlight .nn { color: #000000 } /* Name.Namespace */ -.highlight .nx { color: #000000 } /* Name.Other */ -.highlight .py { color: #000000 } /* Name.Property */ -.highlight .nt { color: #004461; font-weight: bold } /* Name.Tag */ -.highlight .nv { color: #000000 } /* Name.Variable */ -.highlight .ow { color: #004461; font-weight: bold } /* Operator.Word */ -.highlight .pm { color: #000000; font-weight: bold } /* 
Punctuation.Marker */ -.highlight .w { color: #f8f8f8 } /* Text.Whitespace */ -.highlight .mb { color: #990000 } /* Literal.Number.Bin */ -.highlight .mf { color: #990000 } /* Literal.Number.Float */ -.highlight .mh { color: #990000 } /* Literal.Number.Hex */ -.highlight .mi { color: #990000 } /* Literal.Number.Integer */ -.highlight .mo { color: #990000 } /* Literal.Number.Oct */ -.highlight .sa { color: #4e9a06 } /* Literal.String.Affix */ -.highlight .sb { color: #4e9a06 } /* Literal.String.Backtick */ -.highlight .sc { color: #4e9a06 } /* Literal.String.Char */ -.highlight .dl { color: #4e9a06 } /* Literal.String.Delimiter */ -.highlight .sd { color: #8f5902; font-style: italic } /* Literal.String.Doc */ -.highlight .s2 { color: #4e9a06 } /* Literal.String.Double */ -.highlight .se { color: #4e9a06 } /* Literal.String.Escape */ -.highlight .sh { color: #4e9a06 } /* Literal.String.Heredoc */ -.highlight .si { color: #4e9a06 } /* Literal.String.Interpol */ -.highlight .sx { color: #4e9a06 } /* Literal.String.Other */ -.highlight .sr { color: #4e9a06 } /* Literal.String.Regex */ -.highlight .s1 { color: #4e9a06 } /* Literal.String.Single */ -.highlight .ss { color: #4e9a06 } /* Literal.String.Symbol */ -.highlight .bp { color: #3465a4 } /* Name.Builtin.Pseudo */ -.highlight .fm { color: #000000 } /* Name.Function.Magic */ -.highlight .vc { color: #000000 } /* Name.Variable.Class */ -.highlight .vg { color: #000000 } /* Name.Variable.Global */ -.highlight .vi { color: #000000 } /* Name.Variable.Instance */ -.highlight .vm { color: #000000 } /* Name.Variable.Magic */ -.highlight .il { color: #990000 } /* Literal.Number.Integer.Long */ \ No newline at end of file +.highlight .gt { color: #0044DD } /* Generic.Traceback */ +.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ +.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ +.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ +.highlight .kp { color: #007020 } /* Keyword.Pseudo */ +.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ +.highlight .kt { color: #902000 } /* Keyword.Type */ +.highlight .m { color: #208050 } /* Literal.Number */ +.highlight .s { color: #4070a0 } /* Literal.String */ +.highlight .na { color: #4070a0 } /* Name.Attribute */ +.highlight .nb { color: #007020 } /* Name.Builtin */ +.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ +.highlight .no { color: #60add5 } /* Name.Constant */ +.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ +.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ +.highlight .ne { color: #007020 } /* Name.Exception */ +.highlight .nf { color: #06287e } /* Name.Function */ +.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ +.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ +.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ +.highlight .nv { color: #bb60d5 } /* Name.Variable */ +.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ +.highlight .w { color: #bbbbbb } /* Text.Whitespace */ +.highlight .mb { color: #208050 } /* Literal.Number.Bin */ +.highlight .mf { color: #208050 } /* Literal.Number.Float */ +.highlight .mh { color: #208050 } /* Literal.Number.Hex */ +.highlight .mi { color: #208050 } /* Literal.Number.Integer */ +.highlight .mo { color: #208050 } /* Literal.Number.Oct */ +.highlight .sa { color: #4070a0 } /* Literal.String.Affix */ +.highlight 
.sb { color: #4070a0 } /* Literal.String.Backtick */ +.highlight .sc { color: #4070a0 } /* Literal.String.Char */ +.highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */ +.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ +.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ +.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ +.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ +.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ +.highlight .sx { color: #c65d09 } /* Literal.String.Other */ +.highlight .sr { color: #235388 } /* Literal.String.Regex */ +.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ +.highlight .ss { color: #517918 } /* Literal.String.Symbol */ +.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ +.highlight .fm { color: #06287e } /* Name.Function.Magic */ +.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ +.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ +.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ +.highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */ +.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/docs/_static/sg_gallery-binder.css b/docs/_static/sg_gallery-binder.css new file mode 100644 index 00000000..420005d2 --- /dev/null +++ b/docs/_static/sg_gallery-binder.css @@ -0,0 +1,11 @@ +/* CSS for binder integration */ + +div.binder-badge { + margin: 1em auto; + vertical-align: middle; +} + +div.lite-badge { + margin: 1em auto; + vertical-align: middle; +} diff --git a/docs/_static/sg_gallery-dataframe.css b/docs/_static/sg_gallery-dataframe.css new file mode 100644 index 00000000..fac74c43 --- /dev/null +++ b/docs/_static/sg_gallery-dataframe.css @@ -0,0 +1,47 @@ +/* Pandas dataframe css */ +/* Taken from: https://github.com/spatialaudio/nbsphinx/blob/fb3ba670fc1ba5f54d4c487573dbc1b4ecf7e9ff/src/nbsphinx.py#L587-L619 */ +html[data-theme="light"] { + --sg-text-color: #000; + --sg-tr-odd-color: #f5f5f5; + --sg-tr-hover-color: rgba(66, 165, 245, 0.2); +} +html[data-theme="dark"] { + --sg-text-color: #fff; + --sg-tr-odd-color: #373737; + --sg-tr-hover-color: rgba(30, 81, 122, 0.2); +} + +table.dataframe { + border: none !important; + border-collapse: collapse; + border-spacing: 0; + border-color: transparent; + color: var(--sg-text-color); + font-size: 12px; + table-layout: fixed; + width: auto; +} +table.dataframe thead { + border-bottom: 1px solid var(--sg-text-color); + vertical-align: bottom; +} +table.dataframe tr, +table.dataframe th, +table.dataframe td { + text-align: right; + vertical-align: middle; + padding: 0.5em 0.5em; + line-height: normal; + white-space: normal; + max-width: none; + border: none; +} +table.dataframe th { + font-weight: bold; +} +table.dataframe tbody tr:nth-child(odd) { + background: var(--sg-tr-odd-color); +} +table.dataframe tbody tr:hover { + background: var(--sg-tr-hover-color); +} diff --git a/docs/_static/sg_gallery-rendered-html.css b/docs/_static/sg_gallery-rendered-html.css new file mode 100644 index 00000000..93dc2ffb --- /dev/null +++ b/docs/_static/sg_gallery-rendered-html.css @@ -0,0 +1,224 @@ +/* Adapted from notebook/static/style/style.min.css */ +html[data-theme="light"] { + --sg-text-color: #000; + --sg-background-color: #ffffff; + --sg-code-background-color: #eff0f1; + --sg-tr-hover-color: rgba(66, 165, 245, 0.2); + --sg-tr-odd-color: #f5f5f5; +} +html[data-theme="dark"] { + 
--sg-text-color: #fff; + --sg-background-color: #121212; + --sg-code-background-color: #2f2f30; + --sg-tr-hover-color: rgba(66, 165, 245, 0.2); + --sg-tr-odd-color: #1f1f1f; +} + +.rendered_html { + color: var(--sg-text-color); + /* any extras will just be numbers: */ +} +.rendered_html em { + font-style: italic; +} +.rendered_html strong { + font-weight: bold; +} +.rendered_html u { + text-decoration: underline; +} +.rendered_html :link { + text-decoration: underline; +} +.rendered_html :visited { + text-decoration: underline; +} +.rendered_html h1 { + font-size: 185.7%; + margin: 1.08em 0 0 0; + font-weight: bold; + line-height: 1.0; +} +.rendered_html h2 { + font-size: 157.1%; + margin: 1.27em 0 0 0; + font-weight: bold; + line-height: 1.0; +} +.rendered_html h3 { + font-size: 128.6%; + margin: 1.55em 0 0 0; + font-weight: bold; + line-height: 1.0; +} +.rendered_html h4 { + font-size: 100%; + margin: 2em 0 0 0; + font-weight: bold; + line-height: 1.0; +} +.rendered_html h5 { + font-size: 100%; + margin: 2em 0 0 0; + font-weight: bold; + line-height: 1.0; + font-style: italic; +} +.rendered_html h6 { + font-size: 100%; + margin: 2em 0 0 0; + font-weight: bold; + line-height: 1.0; + font-style: italic; +} +.rendered_html h1:first-child { + margin-top: 0.538em; +} +.rendered_html h2:first-child { + margin-top: 0.636em; +} +.rendered_html h3:first-child { + margin-top: 0.777em; +} +.rendered_html h4:first-child { + margin-top: 1em; +} +.rendered_html h5:first-child { + margin-top: 1em; +} +.rendered_html h6:first-child { + margin-top: 1em; +} +.rendered_html ul:not(.list-inline), +.rendered_html ol:not(.list-inline) { + padding-left: 2em; +} +.rendered_html ul { + list-style: disc; +} +.rendered_html ul ul { + list-style: square; + margin-top: 0; +} +.rendered_html ul ul ul { + list-style: circle; +} +.rendered_html ol { + list-style: decimal; +} +.rendered_html ol ol { + list-style: upper-alpha; + margin-top: 0; +} +.rendered_html ol ol ol { + list-style: lower-alpha; +} +.rendered_html ol ol ol ol { + list-style: lower-roman; +} +.rendered_html ol ol ol ol ol { + list-style: decimal; +} +.rendered_html * + ul { + margin-top: 1em; +} +.rendered_html * + ol { + margin-top: 1em; +} +.rendered_html hr { + color: var(--sg-text-color); + background-color: var(--sg-text-color); +} +.rendered_html pre { + margin: 1em 2em; + padding: 0px; + background-color: var(--sg-background-color); +} +.rendered_html code { + background-color: var(--sg-code-background-color); +} +.rendered_html p code { + padding: 1px 5px; +} +.rendered_html pre code { + background-color: var(--sg-background-color); +} +.rendered_html pre, +.rendered_html code { + border: 0; + color: var(--sg-text-color); + font-size: 100%; +} +.rendered_html blockquote { + margin: 1em 2em; +} +.rendered_html table { + margin-left: auto; + margin-right: auto; + border: none; + border-collapse: collapse; + border-spacing: 0; + color: var(--sg-text-color); + font-size: 12px; + table-layout: fixed; +} +.rendered_html thead { + border-bottom: 1px solid var(--sg-text-color); + vertical-align: bottom; +} +.rendered_html tr, +.rendered_html th, +.rendered_html td { + text-align: right; + vertical-align: middle; + padding: 0.5em 0.5em; + line-height: normal; + white-space: normal; + max-width: none; + border: none; +} +.rendered_html th { + font-weight: bold; +} +.rendered_html tbody tr:nth-child(odd) { + background: var(--sg-tr-odd-color); +} +.rendered_html tbody tr:hover { + color: var(--sg-text-color); + background: var(--sg-tr-hover-color); +} 
+.rendered_html * + table { + margin-top: 1em; +} +.rendered_html p { + text-align: left; +} +.rendered_html * + p { + margin-top: 1em; +} +.rendered_html img { + display: block; + margin-left: auto; + margin-right: auto; +} +.rendered_html * + img { + margin-top: 1em; +} +.rendered_html img, +.rendered_html svg { + max-width: 100%; + height: auto; +} +.rendered_html img.unconfined, +.rendered_html svg.unconfined { + max-width: none; +} +.rendered_html .alert { + margin-bottom: initial; +} +.rendered_html * + .alert { + margin-top: 1em; +} +[dir="rtl"] .rendered_html p { + text-align: right; +} diff --git a/docs/_static/sg_gallery.css b/docs/_static/sg_gallery.css new file mode 100644 index 00000000..9bcd33c8 --- /dev/null +++ b/docs/_static/sg_gallery.css @@ -0,0 +1,367 @@ +/* +Sphinx-Gallery has compatible CSS to fix default sphinx themes +Tested for Sphinx 1.3.1 for all themes: default, alabaster, sphinxdoc, +scrolls, agogo, traditional, nature, haiku, pyramid +Tested for Read the Docs theme 0.1.7 */ + +/* Define light colors */ +:root, html[data-theme="light"], body[data-theme="light"]{ + --sg-tooltip-foreground: black; + --sg-tooltip-background: rgba(250, 250, 250, 0.9); + --sg-tooltip-border: #ccc transparent; + --sg-thumb-box-shadow-color: #6c757d40; + --sg-thumb-hover-border: #0069d9; + --sg-script-out: #888; + --sg-script-pre: #fafae2; + --sg-pytb-foreground: #000; + --sg-pytb-background: #ffe4e4; + --sg-pytb-border-color: #f66; + --sg-download-a-background-color: #ffc; + --sg-download-a-background-image: linear-gradient(to bottom, #ffc, #d5d57e); + --sg-download-a-border-color: 1px solid #c2c22d; + --sg-download-a-color: #000; + --sg-download-a-hover-background-color: #d5d57e; + --sg-download-a-hover-box-shadow-1: rgba(255, 255, 255, 0.1); + --sg-download-a-hover-box-shadow-2: rgba(0, 0, 0, 0.25); +} +@media(prefers-color-scheme: light) { + :root[data-theme="auto"], html[data-theme="auto"], body[data-theme="auto"] { + --sg-tooltip-foreground: black; + --sg-tooltip-background: rgba(250, 250, 250, 0.9); + --sg-tooltip-border: #ccc transparent; + --sg-thumb-box-shadow-color: #6c757d40; + --sg-thumb-hover-border: #0069d9; + --sg-script-out: #888; + --sg-script-pre: #fafae2; + --sg-pytb-foreground: #000; + --sg-pytb-background: #ffe4e4; + --sg-pytb-border-color: #f66; + --sg-download-a-background-color: #ffc; + --sg-download-a-background-image: linear-gradient(to bottom, #ffc, #d5d57e); + --sg-download-a-border-color: 1px solid #c2c22d; + --sg-download-a-color: #000; + --sg-download-a-hover-background-color: #d5d57e; + --sg-download-a-hover-box-shadow-1: rgba(255, 255, 255, 0.1); + --sg-download-a-hover-box-shadow-2: rgba(0, 0, 0, 0.25); + } +} + +html[data-theme="dark"], body[data-theme="dark"] { + --sg-tooltip-foreground: white; + --sg-tooltip-background: rgba(10, 10, 10, 0.9); + --sg-tooltip-border: #333 transparent; + --sg-thumb-box-shadow-color: #79848d40; + --sg-thumb-hover-border: #003975; + --sg-script-out: rgb(179, 179, 179); + --sg-script-pre: #2e2e22; + --sg-pytb-foreground: #fff; + --sg-pytb-background: #1b1717; + --sg-pytb-border-color: #622; + --sg-download-a-background-color: #443; + --sg-download-a-background-image: linear-gradient(to bottom, #443, #221); + --sg-download-a-border-color: 1px solid #3a3a0d; + --sg-download-a-color: #fff; + --sg-download-a-hover-background-color: #616135; + --sg-download-a-hover-box-shadow-1: rgba(0, 0, 0, 0.1); + --sg-download-a-hover-box-shadow-2: rgba(255, 255, 255, 0.25); +} +@media(prefers-color-scheme: dark){ + 
html[data-theme="auto"], body[data-theme="auto"] { + --sg-tooltip-foreground: white; + --sg-tooltip-background: rgba(10, 10, 10, 0.9); + --sg-tooltip-border: #333 transparent; + --sg-thumb-box-shadow-color: #79848d40; + --sg-thumb-hover-border: #003975; + --sg-script-out: rgb(179, 179, 179); + --sg-script-pre: #2e2e22; + --sg-pytb-foreground: #fff; + --sg-pytb-background: #1b1717; + --sg-pytb-border-color: #622; + --sg-download-a-background-color: #443; + --sg-download-a-background-image: linear-gradient(to bottom, #443, #221); + --sg-download-a-border-color: 1px solid #3a3a0d; + --sg-download-a-color: #fff; + --sg-download-a-hover-background-color: #616135; + --sg-download-a-hover-box-shadow-1: rgba(0, 0, 0, 0.1); + --sg-download-a-hover-box-shadow-2: rgba(255, 255, 255, 0.25); + } +} + +.sphx-glr-thumbnails { + width: 100%; + margin: 0px 0px 20px 0px; + + /* align thumbnails on a grid */ + justify-content: space-between; + display: grid; + /* each grid column should be at least 160px (this will determine + the actual number of columns) and then take as much of the + remaining width as possible */ + grid-template-columns: repeat(auto-fill, minmax(160px, 1fr)); + gap: 15px; +} +.sphx-glr-thumbnails .toctree-wrapper { + /* hide empty toctree divs added to the DOM + by sphinx even though the toctree is hidden + (they would fill grid places with empty divs) */ + display: none; +} +.sphx-glr-thumbcontainer { + background: transparent; + -moz-border-radius: 5px; + -webkit-border-radius: 5px; + border-radius: 5px; + box-shadow: 0 0 10px var(--sg-thumb-box-shadow-color); + + /* useful to absolutely position link in div */ + position: relative; + + /* thumbnail width should include padding and borders + and take all available space */ + box-sizing: border-box; + width: 100%; + padding: 10px; + border: 1px solid transparent; + + /* align content in thumbnail */ + display: flex; + flex-direction: column; + align-items: center; + gap: 7px; +} +.sphx-glr-thumbcontainer p { + position: absolute; + top: 0; + left: 0; +} +.sphx-glr-thumbcontainer p, +.sphx-glr-thumbcontainer p a { + /* link should cover the whole thumbnail div */ + width: 100%; + height: 100%; +} +.sphx-glr-thumbcontainer p a span { + /* text within link should be masked + (we are just interested in the href) */ + display: none; +} +.sphx-glr-thumbcontainer:hover { + border: 1px solid; + border-color: var(--sg-thumb-hover-border); + cursor: pointer; +} +.sphx-glr-thumbcontainer a.internal { + bottom: 0; + display: block; + left: 0; + box-sizing: border-box; + padding: 150px 10px 0; + position: absolute; + right: 0; + top: 0; +} +/* Next one is to avoid Sphinx traditional theme to cover all the +thumbnail with its default link Background color */ +.sphx-glr-thumbcontainer a.internal:hover { + background-color: transparent; +} + +.sphx-glr-thumbcontainer p { + margin: 0 0 0.1em 0; +} +.sphx-glr-thumbcontainer .figure { + margin: 10px; + width: 160px; +} +.sphx-glr-thumbcontainer img { + display: inline; + max-height: 112px; + max-width: 160px; +} + +.sphx-glr-thumbcontainer[tooltip]::before { + content: ""; + position: absolute; + pointer-events: none; + top: 0; + left: 0; + width: 100%; + height: 100%; + z-index: 97; + background-color: var(--sg-tooltip-background); + backdrop-filter: blur(3px); + opacity: 0; + transition: opacity 0.3s; +} + +.sphx-glr-thumbcontainer[tooltip]:hover::before { + opacity: 1; +} + +.sphx-glr-thumbcontainer[tooltip]:hover::after { + -webkit-border-radius: 4px; + -moz-border-radius: 4px; + border-radius: 4px; + 
color: var(--sg-tooltip-foreground); + content: attr(tooltip); + padding: 10px 10px 5px; + z-index: 98; + width: 100%; + max-height: 100%; + position: absolute; + pointer-events: none; + top: 0; + box-sizing: border-box; + overflow: hidden; + display: -webkit-box; + -webkit-box-orient: vertical; + -webkit-line-clamp: 6; +} + +.sphx-glr-script-out { + color: var(--sg-script-out); + display: flex; + gap: 0.5em; +} +.sphx-glr-script-out::before { + content: "Out:"; + /* These numbers come from the pre style in the pydata sphinx theme. This + * turns out to match perfectly on the rtd theme, but be a bit too low for + * the pydata sphinx theme. As I could not find a dimension to use that was + * scaled the same way, I just picked one option that worked pretty close for + * both. */ + line-height: 1.4; + padding-top: 10px; +} +.sphx-glr-script-out .highlight { + background-color: transparent; + /* These options make the div expand... */ + flex-grow: 1; + /* ... but also keep it from overflowing its flex container. */ + overflow: auto; +} +.sphx-glr-script-out .highlight pre { + background-color: var(--sg-script-pre); + border: 0; + max-height: 30em; + overflow: auto; + padding-left: 1ex; + /* This margin is necessary in the pydata sphinx theme because pre has a box + * shadow which would be clipped by the overflow:auto in the parent div + * above. */ + margin: 2px; + word-break: break-word; +} +.sphx-glr-script-out + p { + margin-top: 1.8em; +} +blockquote.sphx-glr-script-out { + margin-left: 0pt; +} +.sphx-glr-script-out.highlight-pytb .highlight pre { + color: var(--sg-pytb-foreground); + background-color: var(--sg-pytb-background); + border: 1px solid var(--sg-pytb-border-color); + margin-top: 10px; + padding: 7px; +} + +div.sphx-glr-footer { + text-align: center; +} + +div.sphx-glr-download { + margin: 1em auto; + vertical-align: middle; +} + +div.sphx-glr-download a { + background-color: var(--sg-download-a-background-color); + background-image: var(--sg-download-a-background-image); + border-radius: 4px; + border: 1px solid var(--sg-download-a-border-color); + color: var(--sg-download-a-color); + display: inline-block; + font-weight: bold; + padding: 1ex; + text-align: center; +} + +div.sphx-glr-download code.download { + display: inline-block; + white-space: normal; + word-break: normal; + overflow-wrap: break-word; + /* border and background are given by the enclosing 'a' */ + border: none; + background: none; +} + +div.sphx-glr-download a:hover { + box-shadow: inset 0 1px 0 var(--sg-download-a-hover-box-shadow-1), 0 1px 5px var(--sg-download-a-hover-box-shadow-2); + text-decoration: none; + background-image: none; + background-color: var(--sg-download-a-hover-background-color); +} + +div.sphx-glr-sidebar-item img { + max-height: 20px; +} + +.sphx-glr-example-title:target::before { + display: block; + content: ""; + margin-top: -50px; + height: 50px; + visibility: hidden; +} + +ul.sphx-glr-horizontal { + list-style: none; + padding: 0; +} +ul.sphx-glr-horizontal li { + display: inline; +} +ul.sphx-glr-horizontal img { + height: auto !important; +} + +.sphx-glr-single-img { + margin: auto; + display: block; + max-width: 100%; +} + +.sphx-glr-multi-img { + max-width: 42%; + height: auto; +} + +div.sphx-glr-animation { + margin: auto; + display: block; + max-width: 100%; +} +div.sphx-glr-animation .animation { + display: block; +} + +p.sphx-glr-signature a.reference.external { + -moz-border-radius: 5px; + -webkit-border-radius: 5px; + border-radius: 5px; + padding: 3px; + font-size: 75%; + 
text-align: right; + margin-left: auto; + display: table; +} + +.sphx-glr-clear { + clear: both; +} + +a.sphx-glr-backref-instance { + text-decoration: none; +} diff --git a/docs/content/api/api.html b/docs/content/api/api.html new file mode 100644 index 00000000..9161d5e3 --- /dev/null +++ b/docs/content/api/api.html @@ -0,0 +1,132 @@ + + + + + + + API — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

API

+

The source code is split into two types of files: those that contain a set of Python functions, and those that contain classes.

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/base/HDF.html b/docs/content/api/base/HDF.html new file mode 100644 index 00000000..70e17868 --- /dev/null +++ b/docs/content/api/base/HDF.html @@ -0,0 +1,287 @@ + + + + + + + Heirarchical Data Format (HDF) — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Hierarchical Data Format (HDF)

+
+
+geobipy.src.base.HDF.hdfRead.find(filename, tag)
+

Find the locations of all groups with ‘tag’ in their path.

+
+
Parameters:
+
    +
  • filename (str) – HDF5 file name

  • +
  • tag (str) – Sub string that appears in the group name.

  • +
+
+
Returns:
+

out – List of paths into the HDF5 file.

+
+
Return type:
+

list

+
+
+
+ +
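A minimal sketch of calling find, following the module path shown in the signature above; the file name 'results.h5' and the tag 'data' are hypothetical placeholders, not files shipped with GeoBIPy:
>>> from geobipy.src.base.HDF import hdfRead
>>> # Paths of every group whose name contains the substring 'data'
>>> paths = hdfRead.find('results.h5', 'data')
>>> print(paths)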
+
+geobipy.src.base.HDF.hdfRead.readKeyFromFile(h5obj, fName, groupName, key, index=None, **kwargs)
+

Reads in the keys from a file

+

Iterates over group names and keys and reads them from a HDF5 file

+
+
Parameters:
+
    +
  • h5obj (h5py._hl.files.File or h5py._hl.group.Group) – An opened hdf5 handle or a h5py group object

  • +
  • fName (str) – The path and/or file name to the file that was opened

  • +
  • groupName (str or list of str) – The group(s) path within the hdf5 file to read from. i.e. ‘/group1/group1a’

  • +
  • key (str or list of str) – The key(s) in the group to read

  • +
  • index (slice, optional) – Specifies the index’th entry of the data to return. If the group was created using a createHDF procedure in parallel with the nRepeats option, index specifies the index’th entry from which to read the data.

  • +
  • **kwargs (optional) – Any other parameters are optional, but may be necessary if an object’s .fromHDF() procedure requires extra arguments. Refer to the object you wish to read in to determine whether extra arguments are necessary.

  • +
+
+
Returns:
+

out – Returns the read in entries as a list if there are multiple or as a single object if there is only one.

+
+
Return type:
+

object or list

+
+
+
+ +
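A hedged sketch of reading one key from an already opened file; the file name, group path, and key below are placeholders chosen only for illustration:
>>> import h5py
>>> from geobipy.src.base.HDF import hdfRead
>>> with h5py.File('results.h5', 'r') as f:  # hypothetical file
>>>     item = hdfRead.readKeyFromFile(f, 'results.h5', '/', 'histogram')  # hypothetical key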
+
+geobipy.src.base.HDF.hdfRead.readKeyFromFiles(fNames, groupName, key, index=None, **kwargs)
+

Reads in the keys from multiple files

+

Iterates over filenames, group names, and keys and reads them from a HDF5 file

+
+
Parameters:
+
    +
  • fNames (str or list of str) – The path(s) and/or file name(s)

  • +
  • groupName (str or list of str) – The group(s) path within the hdf5 file(s) to read from. i.e. ‘/group1/group1a’

  • +
  • key (str or list of str) – The key(s) in the group to read

  • +
  • index (slice, optional) – Specifies the index’th entry of the data to return. If the group was created using a createHDF procedure in parallel with the nRepeats option, index specifies the index’th entry from which to read the data.

  • +
  • **kwargs (optional) – Any other parameters are optional, but may be necessary if an object’s .fromHDF() procedure requires extra arguments. Refer to the object you wish to read in to determine whether extra arguments are necessary.

  • +
+
+
Returns:
+

out – Returns the read in entries as a list if there are multiple or as a single object if there is only one.

+
+
Return type:
+

object or list

+
+
+
+ +
+
+geobipy.src.base.HDF.hdfRead.read_all(fName)
+

Reads all the entries written to a HDF file

+

Iterates through the highest set of keys in the hdf5 file, and reads each one to a list. If each entry has an attached .readHdf procedure, that will be used to read in an object (Those objects imported at the top of this file can be successfully read in using this attached procedure.) If an entry is a numpy array, that will be the return type. This function will read in the entire file! Use this with caution if you are using large files.

+
+
Parameters:
+

fName (str) – A path and/or file name.

+
+
Returns:
+

out – A list of the read in items from the hdf5 file.

+
+
Return type:
+

list

+
+
+
+ +
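As the caution above notes, read_all loads every entry in the file, so it is best kept for small files. A minimal sketch with a hypothetical file name:
>>> from geobipy.src.base.HDF import hdfRead
>>> items = hdfRead.read_all('results.h5')  # hypothetical file; returns a list of every top-level entry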
+
+geobipy.src.base.HDF.hdfRead.read_groups_with_tag(filename, tag, index=None, **kwargs)
+

Reads all groups with ‘tag’ in their path into memory.

+
+
Parameters:
+
    +
  • filename (str) – HDF5 file name

  • +
  • tag (str) – Sub string that appears in the group name.

  • +
+
+
Returns:
+

out – List of geobipy classes.

+
+
Return type:
+

list

+
+
+
+ +
+
+geobipy.src.base.HDF.hdfRead.read_item(h5obj, index=None, **kwargs)
+

Read an object from a HDF file

+

This function provides a flexible way to read in either a numpy hdf5 entry, or an object in this package. The objects in this package may have an attached .createHdf and writeHdf procedure. If so, this function will read in those objects and return that object. If the entry is instead a numpy array, a numpy array will be returned.

+
+
Parameters:
+
    +
  • hObj (h5py._hl.dataset.Dataset or h5py._hl.group.Group) – A h5py object from which to read entries.

  • +
  • index (slice, optional) – Specifies the index’th entry of the data to return. If the group was created using a createHDF procedure in parallel with the nRepeats option, index specifies the index’th entry from which to read the data.

  • +
  • **kwargs (optional) – Any other parameters are optional, but may be necessary if an object’s .fromHDF() procedure requires extra arguments. Refer to the object you wish to read in to determine whether extra arguments are necessary.

  • +
+
+
Returns:
+

out – An object that has a .fromHdf() procedure or a numpy array of the returned variable.

+
+
Return type:
+

object or numpy.ndarray

+
+
+
+ +
+
+geobipy.src.base.HDF.hdfWrite.write_nd(arr, h5obj, myName, index=None)
+

Writes a numpy array to a preallocated dataset in a h5py group object

+
+
Parameters:
+
    +
  • h5obj (h5py._hl.files.File or h5py._hl.group.Group) – A HDF file or group object to write the contents to. The dataset must have already been allocated in the file.

  • +
  • myName (str) – The name of the h5py dataset key inside the h5py object. e.g. ‘/group1/group1a/dataset’

  • +
  • index (slice, optional) – Specifies the index’th entry of the data to write to. If the group was created using a createHDF procedure in parallel with the nRepeats option, index specifies the index’th entry to which to write the data.

  • +
+
+
+
+ +
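A sketch of writing to a preallocated dataset. The scratch file name and dataset path are assumptions for illustration; the dataset is preallocated with h5py before write_nd fills it:
>>> import h5py
>>> import numpy as np
>>> from geobipy.src.base.HDF import hdfWrite
>>> arr = np.arange(12.0).reshape(3, 4)
>>> with h5py.File('scratch.h5', 'w') as f:  # hypothetical file
>>>     f.create_dataset('group1/dataset', shape=arr.shape, dtype=arr.dtype)  # preallocate first
>>>     hdfWrite.write_nd(arr, f, 'group1/dataset')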
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/base/MPI.html b/docs/content/api/base/MPI.html new file mode 100644 index 00000000..d4071d45 --- /dev/null +++ b/docs/content/api/base/MPI.html @@ -0,0 +1,580 @@ + + + + + + + MPI wrapper functions — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

MPI wrapper functions

+

Module containing custom MPI functions

+
+
+geobipy.src.base.MPI.Bcast(self, world, root=0, dtype=None, ndim=None, shape=None)
+

Broadcast a string or a numpy array

+

Broadcast a string or a numpy array from a root rank to all ranks in an MPI communicator. Must be called collectively. +In order to call this function collectively, the variable ‘self’ must be instantiated on every rank. See the example section for more details.

+
+
Parameters:
+
    +
  • self (str or numpy.ndarray) – A string or numpy array to broadcast from root.

  • +
  • world (mpi4py.MPI.Comm) – MPI parallel communicator.

  • +
  • root (int, optional) – The MPI rank to broadcast from. Default is 0.

  • +
+
+
Returns:
+

out – The broadcast object on every rank.

+
+
Return type:
+

same type as self

+
+
Raises:
+

TypeError – If self is a list, tell the user to use the specific Bcast_list function. While it has less code and seems like it might be faster, MPI actually pickles the list, broadcasts that binary stream, and unpickles on the other side. For a large number of lists, this can take a long time. This way, the user is made aware of the time benefits of using numpy arrays.

+
+
+

Examples

+

Given a numpy array instantiated on the master rank 0, in order to broadcast it, I must also instantiate a variable with the same name on all other ranks.

+
>>> import numpy as np
+>>> from mpi4py import MPI
+>>> from geobipy import StatArray
+>>> from geobipy.src.base import MPI as myMPI
+>>> world = MPI.COMM_WORLD
+>>> if world.rank == 0:
+>>>     x = StatArray(np.arange(10))
+>>> # Instantiate on all other ranks before broadcasting
+>>> else:
+>>>     x=None
+>>> y = myMPI.Bcast(x, world)
+>>>
+>>> # A string example
+>>> if (world.rank == 0):
+>>>     s = 'some string'  # This may have been read in through an input file for production code
+>>> else:
+>>>     s = ''
+>>> s = myMPI.Bcast(s,world)
+
+
+
+ +
+
+geobipy.src.base.MPI.Bcast_1int(self, world, root=0)
+

Broadcast a single integer

+

In order to broadcast scalar values using the faster numpy approach, the value must be cast into a 1D ndarray. Must be called collectively.

+
+
Parameters:
+
    +
  • self (int) – The integer to broadcast.

  • +
  • world (mpi4py.MPI.Comm) – MPI parallel communicator.

  • +
  • root (int, optional) – The MPI rank to broadcast from. Default is 0.

  • +
+
+
Returns:
+

out – The broadcast integer.

+
+
Return type:
+

int

+
+
+

Examples

+

Given an integer instantiated on the master rank 0, in order to broadcast it, I must also instantiate a variable with the same name on all other ranks.

+
>>> import numpy as np
+>>> from mpi4py import MPI
+>>> from geobipy.src.base import MPI as myMPI
+>>> world = MPI.COMM_WORLD
+>>> if world.rank == 0:
+>>>     i = 5
+>>> # Instantiate on all other ranks before broadcasting
+>>> else:
+>>>     i=None
+>>> i = myMPI.Bcast_1int(i, world)
+
+
+
+ +
+
+geobipy.src.base.MPI.Bcast_list(self, world, root=0)
+

Broadcast a list by pickling, sending, and unpickling. This is slower than using numpy arrays and uppercase (Bcast) mpi4py routines. Must be called collectively.

+
+
Parameters:
+
    +
  • self (list) – A list to broadcast.

  • +
  • world (mpi4py.MPI.Comm) – MPI parallel communicator.

  • +
  • root (int, optional) – The MPI rank to broadcast from. Default is 0.

  • +
+
+
Returns:
+

out – The broadcast list on every MPI rank.

+
+
Return type:
+

list

+
+
+
+ +
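A minimal collective sketch mirroring the Bcast example above; the list contents are arbitrary:
>>> from mpi4py import MPI
>>> from geobipy.src.base import MPI as myMPI
>>> world = MPI.COMM_WORLD
>>> stuff = ['a', 'b', 'c'] if world.rank == 0 else None
>>> stuff = myMPI.Bcast_list(stuff, world)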
+
+geobipy.src.base.MPI.Irecv(source, world, dtype=None, ndim=None, shape=None)
+

Irecv a numpy array. Auto determines data type and shape. Must be accompanied by Isend on the source rank.

+
+ +
+
+geobipy.src.base.MPI.IrecvFromLeft(world, wrap=True)
+

Irecv an array from the rank left of world.rank.

+
+ +
+
+geobipy.src.base.MPI.IrecvFromRight(world, wrap=True)
+

IRecv an array from the rank right of world.rank.

+
+ +
+
+geobipy.src.base.MPI.Irecv_1int(source, world)
+

Recv a single integer. Must be accompanied by Isend_1int on the source rank.

+
+
Parameters:
+
    +
  • self (int) – Integer to Recv

  • +
  • source (int) – Receive from this rank.

  • +
  • world (mpi4py.MPI.Comm) – MPI parallel communicator.

  • +
+
+
Returns:
+

out – The received integer.

+
+
Return type:
+

int

+
+
+
+ +
+
+geobipy.src.base.MPI.Isend(self, dest, world, dtype=None, ndim=None, shape=None)
+

Isend a numpy array. Auto determines data type and shape. Must be accompanied by Irecv on the dest rank.

+
+ +
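A hedged point-to-point sketch pairing Isend with Irecv on two ranks. It assumes at least two ranks are running and, as the wording here suggests, that Irecv hands back the received array once it arrives; this is a sketch, not a definitive usage pattern:
>>> import numpy as np
>>> from mpi4py import MPI
>>> from geobipy.src.base import MPI as myMPI
>>> world = MPI.COMM_WORLD
>>> if world.rank == 0:
>>>     myMPI.Isend(np.arange(10.0), dest=1, world=world)
>>> elif world.rank == 1:
>>>     x = myMPI.Irecv(source=0, world=world)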
+
+geobipy.src.base.MPI.IsendToLeft(self, world, wrap=True)
+

ISend an array to the rank left of world.rank.

+
+ +
+
+geobipy.src.base.MPI.IsendToRight(self, world, wrap=True)
+

ISend an array to the rank right of world.rank.

+
+ +
+
+geobipy.src.base.MPI.Isend_1int(self, dest, world)
+

Send a single integer. Must be accompanied by Irecv_1int on the dest rank.

+
+
Parameters:
+
    +
  • self (int) – The integer to Send.

  • +
  • dest (int) – Rank to receive

  • +
  • world (mpi4py.MPI.Comm) – MPI parallel communicator.

  • +
+
+
Returns:
+

out – The sent integer.

+
+
Return type:
+

int

+
+
+
+ +
+
+geobipy.src.base.MPI.Scatterv(self, starts, chunks, world, axis=0, root=0)
+

ScatterV an array to all ranks in an MPI communicator.

+

Each rank gets a chunk defined by a starting index and chunk size. The ‘starts’ and ‘chunks’ must be available on every MPI rank. Must be called collectively. See the example for more details.

+
+
Parameters:
+
    +
  • self (numpy.ndarray) – A numpy array to broadcast from root.

  • +
  • starts (array of ints) – 1D array of ints with size equal to the number of MPI ranks. Each element gives the starting index for a chunk to be sent to that core. e.g. starts[0] is the starting index for rank = 0.

  • +
  • chunks (array of ints) – 1D array of ints with size equal to the number of MPI ranks. Each element gives the size of a chunk to be sent to that core. e.g. chunks[0] is the chunk size for rank = 0.

  • +
  • world (mpi4py.MPI.Comm) – MPI parallel communicator.

  • +
  • axis (int, optional) – Axis along which to Scatterv to the ranks if self is a 2D numpy array. Default is 0

  • +
  • root (int, optional) – The MPI rank to broadcast from. Default is 0.

  • +
+
+
Returns:
+

out – A chunk of self on each MPI rank with size chunk[world.rank].

+
+
Return type:
+

numpy.ndarray

+
+
+

Examples

+
>>> import numpy as np
+>>> from mpi4py import MPI
+>>> from geobipy.src.base import MPI as myMPI
+>>> world = MPI.COMM_WORLD
+>>> # Globally define a size N
+>>> N = 1000
+>>> # On each rank, compute the starting indices and chunk size for the given world.
+>>> starts, chunks = myMPI.loadBalance1D_shrinkingArrays(N, world.size)
+>>> # Create an array on the master rank
+>>> if (world.rank == 0):
+>>>     x = np.arange(N)
+>>> else:
+>>>     x = None
+>>> # Scatter the array x among ranks.
+>>> myChunk = myMPI.Scatterv(x, starts, chunks, world, root=0)
+
+
+
+ +
+
+geobipy.src.base.MPI.Scatterv_list(self, starts, chunks, world, root=0)
+

Scatterv a list by pickling, sending, receiving, and unpickling. This is slower than using numpy arrays and uppercase (Scatterv) mpi4py routines. Must be called collectively.

+
+
Parameters:
+
    +
  • self (list) – A list to scatterv.

  • +
  • starts (array of ints) – 1D array of ints with size equal to the number of MPI ranks. Each element gives the starting index for a chunk to be sent to that core. e.g. starts[0] is the starting index for rank = 0.

  • +
  • chunks (array of ints) – 1D array of ints with size equal to the number of MPI ranks. Each element gives the size of a chunk to be sent to that core. e.g. chunks[0] is the chunk size for rank = 0.

  • +
  • world (mpi4py.MPI.Comm) – MPI parallel communicator.

  • +
  • root (int, optional) – The MPI rank to broadcast from. Default is 0.

  • +
+
+
Returns:
+

out – A chunk of self on each MPI rank with size chunk[world.rank].

+
+
Return type:
+

list

+
+
+
+ +
+
+geobipy.src.base.MPI.Scatterv_numpy(self, starts, chunks, dtype, world, axis=0, root=0)
+

ScatterV a numpy array to all ranks in an MPI communicator.

+

Each rank gets a chunk defined by a starting index and chunk size. The ‘starts’ and ‘chunks’ must be available on every MPI rank. Must be called collectively. See the example for more details.

+
+
Parameters:
+
    +
  • self (numpy.ndarray) – A numpy array to broadcast from root.

  • +
  • starts (array of ints) – 1D array of ints with size equal to the number of MPI ranks. Each element gives the starting index for a chunk to be sent to that core. e.g. starts[0] is the starting index for rank = 0.

  • +
  • chunks (array of ints) – 1D array of ints with size equal to the number of MPI ranks. Each element gives the size of a chunk to be sent to that core. e.g. chunks[0] is the chunk size for rank = 0.

  • +
  • dtype (type) – The type of the numpy array being scattered. Must exist on all ranks.

  • +
  • world (mpi4py.MPI.Comm) – MPI parallel communicator.

  • +
  • axis (int, optional) – Axis along which to Scatterv to the ranks if self is a 2D numpy array. Default is 0

  • +
  • root (int, optional) – The MPI rank to broadcast from. Default is 0.

  • +
+
+
Returns:
+

out – A chunk of self on each MPI rank with size chunk[world.rank].

+
+
Return type:
+

numpy.ndarray

+
+
+
+ +
+
+geobipy.src.base.MPI.banner(world, aStr=None, end='\n', rank=0)
+

Prints a string with separators above and below

+
+
Parameters:
+
    +
  • world (mpi4py.MPI.Comm) – MPI parallel communicator.

  • +
  • aStr (str) – A string to print.

  • +
  • end (str) – string appended after the last value, default is a newline.

  • +
  • rank (int) – The rank to print from, default is the master rank, 0.

  • +
+
+
+
+ +
+
+geobipy.src.base.MPI.bcastType(self, world, root=0)
+

Gets the type of an object and broadcasts it to every rank in an MPI communicator.

+

Adaptively broadcasts the type of an object. Must be called collectively.

+
+
Parameters:
+
    +
  • self (object) – For numpy arrays and numpy scalars, a numpy data type will be broadcast. +For arbitrary objects, the attached __class__.__name__ will be broadcast. +For lists, the data type will be list

  • +
  • world (mpi4py.MPI.Comm) – MPI parallel communicator.

  • +
  • root (int, optional) – The MPI rank to broadcast from. Default is 0.

  • +
+
+
Returns:
+

out – The data type broadcast to every rank including the rank broadcast from.

+
+
Return type:
+

object

+
+
+
+ +
+
+geobipy.src.base.MPI.helloWorld(world)
+

Print hello from every rank in an MPI communicator

+
+
Parameters:
+

world (mpi4py.MPI.Comm) – MPI parallel communicator.

+
+
+
+ +
+
+geobipy.src.base.MPI.loadBalance1D_shrinkingArrays(N, nChunks)
+

Splits the length of an array into a number of chunks. Load balances the chunks in a shrinking arrays fashion.

+

Given a length N, split N up into nChunks and return the starting index and size of each chunk. +After being split equally among the chunks, the remainder is split so that the first remainder +chunks get +1 in size. e.g. N=10, nChunks=3 would return starts=[0,4,7] chunks=[4,3,3]

+
+
Parameters:
+
    +
  • N (int) – A size to split into chunks.

  • +
  • nChunks (int) – The number of chunks to split N into.

  • +
+
+
Returns:
+

    +
  • starts (ndarray of ints) – The starting indices of each chunk.

  • +
  • chunks (ndarray of ints) – The size of each chunk.

  • +
+

+
+
+
+ +
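The worked case from the description above, written out as a call:
>>> from geobipy.src.base import MPI as myMPI
>>> starts, chunks = myMPI.loadBalance1D_shrinkingArrays(10, 3)
>>> starts   # [0, 4, 7]
>>> chunks   # [4, 3, 3]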
+
+geobipy.src.base.MPI.loadBalance3D_shrinkingArrays(shape, nChunks)
+

Splits three dimensions among nChunks.

+

The number of chunks honours the relative difference in the values of shape. e.g. if shape is [600, 600, 300], then the number of chunks will be larger for the +first two dimensions, and less for the third. +Once the chunks are obtained, the start indices and chunk sizes for each dimension are returned.

+
+
Parameters:
+
    +
  • shape (array_like) – A 3D shape to split.

  • +
  • nChunks (int) – The number of chunks to split shape into.

  • +
+
+
Returns:
+

    +
  • starts (ndarray of ints) – The starting indices of each chunk.

  • +
  • chunks (ndarray of ints) – The size of each chunk.

  • +
+

+
+
+
+ +
+
+geobipy.src.base.MPI.ordered_print(world, this, title=None)
+

Prints numbers from each rank in order of rank

+

This routine will print an item from each rank in order of rank. +This routine is SLOW due to lots of communication, but is useful for illustration purposes, or debugging. +Do not use this in production code! The title is used in a banner

+
+
Parameters:
+
    +
  • world (mpi4py.MPI.Comm) – MPI parallel communicator.

  • +
  • this (array_like) – Variable to print, must exist on every rank in the communicator.

  • +
  • title (str, optional) – Creates a banner to separate output with a clear indication of what is being written.

  • +
+
+
+
+ +
+
+geobipy.src.base.MPI.print(aStr='', end='\n', **kwargs)
+

Prints the str to sys.stdout and flushes the buffer so that printing is immediate

+
+
Parameters:
+
    +
  • aStr (str) – A string to print.

  • +
  • end (str) – string appended after the last value, default is a newline.

  • +
+
+
+
+ +
+
+geobipy.src.base.MPI.rankPrint(world, aStr='', end='\n', rank=0)
+

Prints only from the specified MPI rank

+
+
Parameters:
+
    +
  • world (mpi4py.MPI.Comm) – MPI parallel communicator.

  • +
  • aStr (str) – A string to print.

  • +
  • end (str) – string appended after the last value, default is a newline.

  • +
  • rank (int) – The rank to print from, default is the master rank, 0.

  • +
+
+
+
+ +
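A small sketch combining helloWorld with rankPrint; the printed message is arbitrary:
>>> from mpi4py import MPI
>>> from geobipy.src.base import MPI as myMPI
>>> world = MPI.COMM_WORLD
>>> myMPI.helloWorld(world)                   # one line from every rank
>>> myMPI.rankPrint(world, 'Setup complete')  # printed only from rank 0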
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/base/base.html b/docs/content/api/base/base.html new file mode 100644 index 00000000..7e2b0130 --- /dev/null +++ b/docs/content/api/base/base.html @@ -0,0 +1,143 @@ + + + + + + + Core routines needed for GeoBIPy — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Core routines needed for GeoBIPy

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/base/fileIO.html b/docs/content/api/base/fileIO.html new file mode 100644 index 00000000..139b33b9 --- /dev/null +++ b/docs/content/api/base/fileIO.html @@ -0,0 +1,425 @@ + + + + + + + fileIO — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

fileIO

+

@fileIO +Module with custom file handling operations

+
+
+geobipy.src.base.fileIO.bytes2readable(nBytes)
+

Convert bytes to KB, MB, …, PB

+
+
Parameters:
+

nBytes (float) – The number of bytes

+
+
Returns:
+

out – The number of KB, MB, etc.

+
+
Return type:
+

str

+
+
+
+ +
+
+geobipy.src.base.fileIO.deleteFile(fname)
+

Deletes a file if it exists

+
+
Parameters:
+

fName (str) – A path and/or file name

+
+
+
+ +
+
+geobipy.src.base.fileIO.dirExists(dirPath)
+

Check if a directory exists on disk

+
+
Parameters:
+

dirPath (str) – A directory path

+
+
Returns:
+

out – Whether the directory path exists

+
+
Return type:
+

bool

+
+
+
+ +
+
+geobipy.src.base.fileIO.fileExists(fname)
+

Check if a single file exists on disk

+
+
Parameters:
+

fname (str) – A file name

+
+
Returns:
+

out – Whether the file exists or not

+
+
Return type:
+

bool

+
+
+
+ +
+
+geobipy.src.base.fileIO.filesExist(fNames)
+

Check if all files in fNames exist on disk

+
+
Parameters:
+

fNames (list of str)

+
+
Returns:
+

out – Whether all files exist or not

+
+
Return type:
+

bool

+
+
+
+ +
+
+geobipy.src.base.fileIO.getFileExtension(fname)
+

Gets the extension of the filename

+
+
Parameters:
+

fname (str) – A path and/or file name

+
+
Returns:
+

out – The extension of the filename

+
+
Return type:
+

str

+
+
+
+ +
+
+geobipy.src.base.fileIO.getFileSize(fName)
+

Get the size of a file on disk

+
+
Parameters:
+

fName (str) – A path and/or file name

+
+
Returns:
+

out – The file size in KB, MB, etc.

+
+
Return type:
+

str

+
+
+
+ +
+
+geobipy.src.base.fileIO.getNcolumns(fName, nHeaders=0)
+

Gets the number of columns in a file using the line after nHeaders

+
+
Parameters:
+
    +
  • fName (str) – A path and/or file name

  • +
  • nHeaders (int, optional) – Number of header lines to skip before obtaining the number of columns

  • +
+
+
Returns:
+

out – The number of columns

+
+
Return type:
+

int

+
+
+
+ +
+
+geobipy.src.base.fileIO.getNlines(fname, nHeaders=0)
+

Gets the number of lines in a file after taking into account the number of header lines

+
+
Parameters:
+
    +
  • fName (str) – A path and/or file name

  • +
  • nHeaders (int, optional) – Subtract the number of header lines from the total number of lines in the file

  • +
+
+
Returns:
+

out – Number of lines without the headers

+
+
Return type:
+

int, optional

+
+
+
+ +
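A sketch tying a few of these helpers together; 'data.csv' and its single header line are assumptions made only for illustration:
>>> from geobipy.src.base import fileIO
>>> if fileIO.fileExists('data.csv'):              # hypothetical file
>>>     nLines = fileIO.getNlines('data.csv', nHeaders=1)
>>>     nCols = fileIO.getNcolumns('data.csv', nHeaders=1)
>>>     print(fileIO.getFileSize('data.csv'))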
+
+geobipy.src.base.fileIO.get_column_name(filename)
+

Read header names from a line in a file

+
+
Parameters:
+
    +
  • fName (str) – A path and/or file name.

  • +
  • i (int or list of ints, optional) – The indices of the entries to read from the string. By default, all entries are read in.

  • +
  • nHeaders (int, optional) – The number of header lines to skip in the file.

  • +
+
+
Returns:
+

out – A list of the parsed header entries.

+
+
Return type:
+

list of str

+
+
+
+ +
+
+geobipy.src.base.fileIO.get_real_numbers_from_line(line, indices=None, delimiters=',')
+

Reads strictly the numbers from a string

+
+
Parameters:
+
    +
  • line (str) – A string, or a line from a file.

  • +
  • i (int or list of ints, optional) – The indices of the entries to read from the string. By default, all entries are read in.

  • +
  • delimiters (str) – Splits the line based on these delimiters.

  • +
+
+
Returns:
+

out – The values read in from the string.

+
+
Return type:
+

numpy.ndarray

+
+
+
+ +
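A one-line sketch with a made-up comma-delimited string:
>>> from geobipy.src.base import fileIO
>>> fileIO.get_real_numbers_from_line('10.0, 20.5, 30.0', delimiters=',')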
+
+geobipy.src.base.fileIO.int2str(i, N)
+

Converts an integer to a string with leading zeros in order to maintain the correct order in the file system

+
+
Parameters:
+
    +
  • i (int) – The integer to convert to a string.

  • +
  • N (int) – The maximum number of digits you wish to have in the integer. e.g. int2str(3,4)=’0003’.

  • +
+
+
Returns:
+

out – The integer padded with zeroes on the front.

+
+
Return type:
+

str

+
+
+
+ +
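The docstring's own example, written as a call:
>>> from geobipy.src.base import fileIO
>>> fileIO.int2str(3, 4)
'0003'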
+
+geobipy.src.base.fileIO.isFileExtension(fname, ext)
+

Check that the filename is of the given extension

+
+
Parameters:
+
    +
  • fname (str) – A path and/or file name

  • +
  • ext (str) – The extension you want to check the file name against

  • +
+
+
Returns:
+

out – Whether the extension of fname matches ext

+
+
Return type:
+

bool

+
+
+
+ +
+
+geobipy.src.base.fileIO.parseString(this, delimiters=',')
+

Parse a string into its entries

+
+
Parameters:
+
    +
  • this (str) – The string to parse.

  • +
  • delimiters (str) – Patterns to split against, e.g. ‘,’ splits at every comma.

  • +
+
+
Returns:
+

out – A list of the parsed entries

+
+
Return type:
+

list of str

+
+
+
+ +
+
+geobipy.src.base.fileIO.wccount(filename)
+

Count the number of lines in a file using a wc system call

+
+
Parameters:
+

fName (str) – A path and/or file name

+
+
Returns:
+

out – The number of lines in the file

+
+
Return type:
+

int

+
+
+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/base/interpolation.html b/docs/content/api/base/interpolation.html new file mode 100644 index 00000000..09e3e2aa --- /dev/null +++ b/docs/content/api/base/interpolation.html @@ -0,0 +1,134 @@ + + + + + + + Interpolation — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Interpolation

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/base/plotting.html b/docs/content/api/base/plotting.html new file mode 100644 index 00000000..72f22827 --- /dev/null +++ b/docs/content/api/base/plotting.html @@ -0,0 +1,642 @@ + + + + + + + plotting — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

plotting

+
+
+geobipy.src.base.plotting.bar(values, edges, line=None, **kwargs)
+

Plot a bar chart.

+
+
Parameters:
+
    +
  • values (array_like or StatArray) – Bar values

  • +
  • edges (array_like or StatArray) – edges of the bars

  • +
+
+
Returns:
+

matplotlib .Axes

+
+
Return type:
+

ax

+
+
+
+

See also

+
+
matplotlib.pyplot.hist

For additional keyword arguments you may use.

+
+
+
+
+ +
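A minimal sketch; the counts and edges below are arbitrary numbers, with one more edge than there are values:
>>> import numpy as np
>>> import matplotlib.pyplot as plt
>>> from geobipy.src.base import plotting
>>> counts = np.asarray([1.0, 4.0, 2.0])
>>> edges = np.asarray([0.0, 1.0, 2.0, 3.0])
>>> ax = plotting.bar(counts, edges)
>>> plt.show()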
+
+geobipy.src.base.plotting.clabel(cb, label, length=20, wrap=False, **kwargs)
+

Create a colourbar label with default fontsizes

+
+
Parameters:
+
    +
  • cb (matplotlib.colorbar.Colorbar) – A colourbar to label

  • +
  • label (str) – The colourbar label

  • +
+
+
+
+ +
+
+geobipy.src.base.plotting.generate_subplots(n, ax=None)
+

Generates subplots depending on what's given

+
+
Parameters:
+
    +
  • n (int) – number of subplots

  • +
  • ax (variable, optional) – List of subplots. +gridspec.GridSpec or gridspec.Subplotspec +list of gridspec.Subplotspec +Defaults to None.

  • +
+
+
+
+ +
+
+geobipy.src.base.plotting.hillshade(arr, azimuth=30, altitude=30)
+

Create hillshade from a numpy array containing elevation data.

+

Taken from https://github.com/royalosyin/Work-with-DEM-data-using-Python-from-Simple-to-Complicated/blob/master/ex07-Hillshade%20from%20a%20Digital%20Elevation%20Model%20(DEM).ipynb

+
+
Parameters:
+
    +
  • arr (numpy array of shape (rows, columns)) – Numpy array containing elevation values to be used to create the hillshade.

  • +
  • azimuth (float (default=30)) – The desired azimuth for the hillshade.

  • +
  • altitude (float (default=30)) – The desired sun angle altitude for the hillshade.

  • +
+
+
Returns:
+

A numpy array containing hillshade values.

+
+
Return type:
+

numpy array

+
+
+
+ +
+
+geobipy.src.base.plotting.hlines(*args, **kwargs)
+

Plot y against x

+

If x and y are StatArrays, the axes are automatically labelled.

+
+
Parameters:
+
    +
  • x (array_like or StatArray) – The abscissa

  • +
  • y (array_like or StatArray) – The ordinate, can be up to 2 dimensions.

  • +
  • log ('e' or float, optional) – Take the log of the colour to a base. ‘e’ if log = ‘e’, and a number e.g. log = 10. +Values in c that are <= 0 are masked.

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’.

  • +
  • flipX (bool, optional) – Flip the X axis

  • +
  • flipY (bool, optional) – Flip the Y axis

  • +
  • labels (bool, optional) – Plot the labels? Default is True.

  • +
+
+
Returns:
+

matplotlib.Axes

+
+
Return type:
+

ax

+
+
+
+

See also

+
+
matplotlib.pyplot.plot

For additional keyword arguments you may use.

+
+
+
+
+ +
+
+geobipy.src.base.plotting.make_colourmap(seq, cname)
+

Generate a Linear Segmented colourmap

+

Generates a colourmap from the sequence given and registers the colourmap with matplotlib.

+
+
Parameters:
+
    +
  • seq (array of hex colours.) – e.g. [‘#000000’,’#00fcfd’,…]

  • +
  • cname (str) – Name of the colourmap.

  • +
+
+
Returns:
+

matplotlib.colors.LinearSegmentedColormap.

+
+
Return type:
+

out

+
+
+
+ +
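A sketch using the hex-sequence style shown above; the colours and the registered name 'my_cmap' are placeholders:
>>> from geobipy.src.base import plotting
>>> cmap = plotting.make_colourmap(['#000000', '#00fcfd', '#ffffff'], 'my_cmap')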
+
+geobipy.src.base.plotting.pause(interval)
+

Custom pause command to override matplotlib.pyplot.pause +which keeps the figure on top of all others when using interactive mode.

+
+
Parameters:
+

interval (float) – Pause for interval seconds.

+
+
+
+ +
+
+geobipy.src.base.plotting.pcolor(values, x=None, y=None, **kwargs)
+

Create a pseudocolour plot of a 2D array; this actually uses pcolormesh for speed.

+

Create a colour plot of a 2D array. +If the arrays x, y, and values are geobipy.StatArray classes, the axes can be automatically labelled. +Can take any other matplotlib arguments and keyword arguments e.g. cmap etc.

+
+
Parameters:
+
    +
  • values (array_like or StatArray) – A 2D array of colour values.

  • +
  • x (1D array_like or StatArray, optional) – Horizontal coordinates of the values edges.

  • +
  • y (1D array_like or StatArray, optional) – Vertical coordinates of the values edges.

  • +
  • alpha (scalar or array_like, optional) – If alpha is scalar, behaves like standard matplotlib alpha and opacity is applied to entire plot +If array_like, each pixel is given an individual alpha value.

  • +
  • log ('e' or float, optional) – Take the log of the colour to a base. ‘e’ if log = ‘e’, and a number e.g. log = 10. +Values in c that are <= 0 are masked.

  • +
  • equalize (bool, optional) – Equalize the histogram of the colourmap so that all colours have an equal amount.

  • +
  • nbins (int, optional) – Number of bins to use for histogram equalization.

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’

  • +
  • flipX (bool, optional) – Flip the X axis

  • +
  • flipY (bool, optional) – Flip the Y axis

  • +
  • grid (bool, optional) – Plot the grid

  • +
  • noColorbar (bool, optional) – Turn off the colour bar, useful if multiple plotting routines are used on the same figure.

  • +
  • reciprocateX (bool, optional) – Take the reciprocal of the X axis before other transforms

  • +
  • reciprocateY (bool, optional) – Take the reciprocal of the Y axis before other transforms

  • +
  • trim (bool, optional) – Set the x and y limits to the first and last non zero values along each axis.

  • +
  • classes (dict, optional) – A dictionary containing three entries. +classes[‘id’] : array_like of same shape as self containing the class id of each element in self. +classes[‘cmaps’] : list of matplotlib colourmaps. The number of colourmaps should equal the number of classes. +classes[‘labels’] : list of str. The length should equal the number of classes. +If classes is provided, alpha is ignored if provided.

  • +
+
+
Returns:
+

matplotlib .Axes

+
+
Return type:
+

ax

+
+
+
+

See also

+
+
matplotlib.pyplot.pcolormesh

For additional keyword arguments you may use.

+
+
+
+
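A hedged sketch of pcolor with StatArray edges so the axes pick up labels automatically; the names and shapes below are assumptions that simply follow the edge convention described above (values of shape (ny, nx), with nx+1 x edges and ny+1 y edges):
>>> import numpy as np
>>> import matplotlib.pyplot as plt
>>> from geobipy import StatArray
>>> from geobipy.src.base import plotting
>>> x = StatArray(np.linspace(0.0, 1.0, 11), 'Easting')
>>> y = StatArray(np.linspace(0.0, 2.0, 21), 'Depth')
>>> values = StatArray(np.random.randn(20, 10), 'Amplitude')
>>> ax = plotting.pcolor(values, x=x, y=y, cmap='gray_r')
>>> plt.show()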
+ +
+
+geobipy.src.base.plotting.pcolor_1D(values, y=None, **kwargs)
+

Create a pseudocolour plot of an array; this actually uses pcolormesh for speed.

+

Create a colour plot of an array. +If the arrays x, y, and values are geobipy.StatArray classes, the axes can be automatically labelled. +Can take any other matplotlib arguments and keyword arguments e.g. cmap etc.

+
+
Parameters:
+
    +
  • values (array_like or StatArray) – An array of colour values.

  • +
  • x (1D array_like or StatArray) – Horizontal coordinates of the values edges.

  • +
  • y (1D array_like or StatArray, optional) – Vertical coordinates of the values edges.

  • +
  • log ('e' or float, optional) – Take the log of the colour to a base. ‘e’ if log = ‘e’, and a number e.g. log = 10. +Values in c that are <= 0 are masked.

  • +
  • equalize (bool, optional) – Equalize the histogram of the colourmap so that all colours have an equal amount.

  • +
  • nbins (int, optional) – Number of bins to use for histogram equalization.

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’

  • +
  • flipY (bool, optional) – Flip the Y axis

  • +
  • clabel (str, optional) – colourbar label

  • +
  • grid (bool, optional) – Show the grid lines

  • +
  • transpose (bool, optional) – Transpose the image

  • +
  • noColorbar (bool, optional) – Turn off the colour bar, useful if multiple plotting routines are used on the same figure.

  • +
+
+
Returns:
+

matplotlib .Axes

+
+
Return type:
+

ax

+
+
+
+

See also

+
+
matplotlib.pyplot.pcolormesh

For additional keyword arguments you may use.

+
+
+
+
+ +
+
+geobipy.src.base.plotting.pcolor_as_bar(X, Y, values, **kwargs)
+

Create a pseudocolour plot of a 2D array; this actually uses pcolormesh for speed.

+

Create a colour plot of a 2D array. +If the arrays x, y, and values are geobipy.StatArray classes, the axes can be automatically labelled. +Can take any other matplotlib arguments and keyword arguments e.g. cmap etc.

+
+
Parameters:
+
    +
  • values (array_like or StatArray) – A 2D array of colour values.

  • +
  • X (1D array_like or StatArray, optional) – Horizontal coordinates of the values edges.

  • +
  • Y (1D array_like or StatArray, optional) – Vertical coordinates of the values edges.

  • +
  • alpha (scalar or array_like, optional) – If alpha is scalar, behaves like standard matplotlib alpha and opacity is applied to entire plot +If array_like, each pixel is given an individual alpha value.

  • +
  • alphaColour ('trans' or length 3 array) – If ‘trans’, low alpha values are mapped to transparency +If 3 array, each entry is the RGB value of a colour to map to, e.g. white = [1, 1, 1].

  • +
  • log ('e' or float, optional) – Take the log of the colour to a base. ‘e’ if log = ‘e’, and a number e.g. log = 10. +Values in c that are <= 0 are masked.

  • +
  • equalize (bool, optional) – Equalize the histogram of the colourmap so that all colours have an equal amount.

  • +
  • nbins (int, optional) – Number of bins to use for histogram equalization.

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’

  • +
  • flipX (bool, optional) – Flip the X axis

  • +
  • flipY (bool, optional) – Flip the Y axis

  • +
  • grid (bool, optional) – Plot the grid

  • +
  • noColorbar (bool, optional) – Turn off the colour bar, useful if multiple plotting routines are used on the same figure.

  • +
  • trim (array_like, optional) – Set the x and y limits to the first and last locations that don’t equal the values in trim.

  • +
  • classes (dict, optional) – A dictionary containing three entries. +classes[‘id’] : array_like of same shape as self containing the class id of each element in self. +classes[‘cmaps’] : list of matplotlib colourmaps. The number of colourmaps should equal the number of classes. +classes[‘labels’] : list of str. The length should equal the number of classes. +If classes is provided, alpha is ignored.

  • +
+
+
Returns:
+

matplotlib.Axes

+
+
Return type:
+

ax

+
+
+
+

See also

+
+
matplotlib.pyplot.pcolormesh

For additional keyword arguments you may use.

+
+
+
+
+ +
+
+geobipy.src.base.plotting.plot(x, y, **kwargs)
+

Plot y against x

+

If x and y are StatArrays, the axes are automatically labelled.

+
+
Parameters:
+
    +
  • x (array_like or StatArray) – The abscissa

  • +
  • y (array_like or StatArray) – The ordinate, can be up to 2 dimensions.

  • +
  • log ('e' or float, optional) – Take the log of the colour to a base. ‘e’ if log = ‘e’, and a number e.g. log = 10. +Values in c that are <= 0 are masked.

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’.

  • +
  • flipX (bool, optional) – Flip the X axis

  • +
  • flipY (bool, optional) – Flip the Y axis

  • +
  • labels (bool, optional) – Plot the labels? Default is True.

  • +
+
+
Returns:
+

matplotlib.Axes

+
+
Return type:
+

ax

+
+
+
+

See also

+
+
matplotlib.pyplot.plot

For additional keyword arguments you may use.

+
+
+
+
+ +
+
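A minimal sketch of plot, using named StatArrays so the axes label themselves; the data and names below are hypothetical:

    import numpy as np
    import matplotlib.pyplot as plt
    from geobipy import StatArray
    from geobipy.src.base import plotting

    time = StatArray(np.linspace(0.0, 1.0, 200), 'Time', 's')
    signal = StatArray(np.sin(2.0 * np.pi * 5.0 * time), 'Amplitude')

    plt.figure()
    ax = plotting.plot(time, signal, linewidth=1.0)  # axes labelled from the StatArray names
    plt.show()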
+geobipy.src.base.plotting.pretty(ax)
+

Make a plot with nice axes.

+

Removes fluff from the axes.

+
+
Parameters:
+

ax (matplotlib.Axes) – An Axes instance, for example ax = plt.subplot(111) or ax = plt.gca()

+
+
+
+ +
+
+geobipy.src.base.plotting.scatter2D(x, c, y=None, i=None, *args, **kwargs)
+

Create a 2D scatter plot.

+

Create a 2D scatter plot, if the y values are not given, the colours are used instead. +If the arrays x, y, and c are geobipy.StatArray classes, the axes can be automatically labelled. +Can take any other matplotlib arguments and keyword arguments e.g. markersize etc.

+
+
Parameters:
+
    +
  • x (1D array_like or StatArray) – Horizontal locations of the points to plot

  • +
  • c (1D array_like or StatArray) – Colour values of the points

  • +
  • y (1D array_like or StatArray, optional) – Vertical locations of the points to plot, if y = None, then y = c.

  • +
  • i (sequence of ints or numpy.slice, optional) – Plot a subset of x, y, c, using the indices in i.

  • +
  • log ('e' or float, optional) – Take the log of the colour to base ‘e’ if log = ‘e’, and a number e.g. log = 10. +Values in c that are <= 0 are masked.

  • +
  • equalize (bool, optional) – Equalize the histogram of the colourmap so that all colours have an equal amount.

  • +
  • nbins (int, optional) – Number of bins to use for histogram equalization.

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’

  • +
  • flipX (bool, optional) – Flip the X axis

  • +
  • flipY (bool, optional) – Flip the Y axis

  • +
  • noColorbar (bool, optional) – Turn off the colour bar, useful if multiple plotting routines are used on the same figure.

  • +
+
+
Returns:
+

matplotlib.Axes

+
+
Return type:
+

ax

+
+
+
+

See also

+
+
matplotlib.pyplot.scatter

For additional keyword arguments you may use.

+
+
+
+
+ +
+
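A minimal sketch of scatter2D with hypothetical data; extra keyword arguments such as s and cmap are passed through to matplotlib:

    import numpy as np
    import matplotlib.pyplot as plt
    from geobipy import StatArray
    from geobipy.src.base import plotting

    x = StatArray(np.random.randn(200), 'Easting', 'm')
    y = StatArray(np.random.randn(200), 'Northing', 'm')
    c = StatArray(np.random.rand(200), 'Conductivity', 'S/m')

    plt.figure()
    ax = plotting.scatter2D(x, c, y=y, s=10, cmap='viridis')  # colour the points by c
    plt.show()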
+geobipy.src.base.plotting.setAlphaPerPcolormeshPixel(pcmesh, alphaArray)
+

Set the opacity of each pixel in a pcolormesh

+
+
Parameters:
+
    +
  • pcmesh (matplotlib.collections.QuadMesh) – pcolormesh object

  • +
  • alphaArray (array_like) – Values per pixel each between 0 and 1.

  • +
+
+
+
+ +
+
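A minimal sketch of setAlphaPerPcolormeshPixel, assuming alphaArray supplies one value per pixel of the QuadMesh; the exact expected shape should be checked against the implementation:

    import numpy as np
    import matplotlib.pyplot as plt
    from geobipy.src.base import plotting

    Z = np.random.randn(10, 12)
    alpha = np.random.rand(10, 12)   # assumed: one opacity value per pixel, each in [0, 1]

    pm = plt.pcolormesh(Z, cmap='viridis')   # pm is a matplotlib QuadMesh
    plotting.setAlphaPerPcolormeshPixel(pm, alpha)
    plt.show()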
+geobipy.src.base.plotting.sizeLegend(values, intervals=None, **kwargs)
+

Add a legend to a plot if the point sizes have been specified.

+

If values is an StatArray, labels are generated automatically.

+
+
Parameters:
+
    +
  • values (array_like or StatArray) – The array that was used as the size (s=) in a scatter function.

  • +
  • intervals (array_like, optional) – The legend will have items at each value in intervals.

  • +
  • **kwargs (dict) – kwargs are applied to plt.legend.

  • +
+
+
+
+ +
+
+geobipy.src.base.plotting.stackplot2D(x, y, labels=[], colors=['#000000', '#FFFF00', '#1CE6FF', '#FF34FF', '#FF4A46', '#008941', '#006FA6', '#A30059', '#FFDBE5', '#7A4900', '#0000A6', '#63FFAC', '#B79762', '#004D43', '#8FB0FF', '#997D87', '#5A0007', '#809693', '#FEFFE6', '#1B4400', '#4FC601', '#3B5DFF', '#4A3B53', '#FF2F80', '#61615A', '#BA0900', '#6B7900', '#00C2A0', '#FFAA92', '#FF90C9', '#B903AA', '#D16100', '#DDEFFF', '#000035', '#7B4F4B', '#A1C299', '#300018', '#0AA6D8', '#013349', '#00846F', '#372101', '#FFB500', '#C2FFED', '#A079BF', '#CC0744', '#C0B9B2', '#C2FF99', '#001E09', '#00489C', '#6F0062', '#0CBD66', '#EEC3FF', '#456D75', '#B77B68', '#7A87A1', '#788D66', '#885578', '#FAD09F', '#FF8A9A', '#D157A0', '#BEC459', '#456648', '#0086ED', '#886F4C', '#34362D', '#B4A8BD', '#00A6AA', '#452C2C', '#636375', '#A3C8C9', '#FF913F', '#938A81', '#575329', '#00FECF', '#B05B6F', '#8CD0FF', '#3B9700', '#04F757', '#C8A1A1', '#1E6E00', '#7900D7', '#A77500', '#6367A9', '#A05837', '#6B002C', '#772600', '#D790FF', '#9B9700', '#549E79', '#FFF69F', '#201625', '#72418F', '#BC23FF', '#99ADC0', '#3A2465', '#922329', '#5B4534', '#FDE8DC', '#404E55', '#0089A3', '#CB7E98', '#A4E804', '#324E72', '#6A3A4C'], **kwargs)
+

Plot a 2D array with column elements stacked on top of each other.

+
+
Parameters:
+
    +
  • x (array_like or StatArray) – The abscissa.

  • +
  • y (array_like or StatArray, 2D) – The cumulative sum along the columns is taken and stacked on top of each other.

  • +
  • labels (list of str, optional) – The labels to assign to each column.

  • +
  • colors (matplotlib.colors.LinearSegmentedColormap or list of colours) – The colour used for each column.

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’.

  • +
+
+
Returns:
+

matplotlib.Axes

+
+
Return type:
+

ax

+
+
+
+

See also

+
+
matplotlib.pyplot.stackplot

For additional keyword arguments you may use.

+
+
+
+
+ +
+
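A minimal sketch of stackplot2D with three hypothetical columns; the orientation assumed for y (one row per x value, one column per stacked series) should be checked against the implementation:

    import numpy as np
    import matplotlib.pyplot as plt
    from geobipy.src.base import plotting

    x = np.linspace(0.0, 10.0, 50)
    y = np.abs(np.random.randn(50, 3))   # three hypothetical columns to stack

    plt.figure()
    ax = plotting.stackplot2D(x, y, labels=['sand', 'silt', 'clay'])
    plt.show()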
+geobipy.src.base.plotting.step(x, y, **kwargs)
+

Plots y against x as a piecewise constant (step like) function.

+
+
Parameters:
+
    +
  • x (array_like)

  • +
  • y (array_like)

  • +
  • flipY (bool, optional) – Flip the y axis

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’

  • +
  • yscale (str, optional) – Scale the y axis? e.g. yscale = ‘linear’ or ‘log’

  • +
  • flipX (bool, optional) – Flip the X axis

  • +
  • noLabels (bool, optional) – Do not plot the labels

  • +
+
+
+
+ +
+
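A minimal sketch of step with hypothetical data:

    import numpy as np
    import matplotlib.pyplot as plt
    from geobipy.src.base import plotting

    x = np.linspace(0.0, 10.0, 20)
    y = np.random.rand(20)

    plt.figure()
    plotting.step(x, y, xscale='linear')   # piecewise-constant plot of y against x
    plt.show()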
+geobipy.src.base.plotting.vlines(*args, **kwargs)
+

Plot y against x

+

If x and y are StatArrays, the axes are automatically labelled.

+
+
Parameters:
+
    +
  • x (array_like or StatArray) – The abscissa

  • +
  • y (array_like or StatArray) – The ordinate, can be up to 2 dimensions.

  • +
  • log ('e' or float, optional) – Take the log of the colour to a base. ‘e’ if log = ‘e’, and a number e.g. log = 10. +Values in c that are <= 0 are masked.

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’.

  • +
  • flipX (bool, optional) – Flip the X axis

  • +
  • flipY (bool, optional) – Flip the Y axis

  • +
  • labels (bool, optional) – Plot the labels? Default is True.

  • +
+
+
Returns:
+

matplotlib.Axes

+
+
Return type:
+

ax

+
+
+
+

See also

+
+
matplotlib.pyplot.plot

For additional keyword arguments you may use.

+
+
+
+
+ +
+ + +
+
+ +
+
+
+

utilities

+
+
+geobipy.src.base.utilities.Ax(A, x)
+

Custom matrix vector multiplication for different representations of the matrix.

+
+
Parameters:
+
    +
  • A (float or ndarray of floats) – A scalar, 1D array, or 2D array. +If A is scalar, assume it represents a diagonal matrix with constant value. +If A is 1D, assume it represents a diagonal matrix and do an element wise multiply. +If A is 2D, take the dot product.

  • +
  • x (numpy.ndarray) – The 1D vector to multiply A with.

  • +
+
+
Returns:
+

out – Resultant matrix vector multiplication.

+
+
Return type:
+

ndarray of floats

+
+
+
+ +
+
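The three representations of A can be illustrated with a small sketch; the import path follows the module name above, and the expected results follow the documented semantics:

    import numpy as np
    from geobipy.src.base import utilities

    x = np.array([1.0, 2.0, 3.0])

    utilities.Ax(2.0, x)                          # scalar A: constant diagonal, -> [2., 4., 6.]
    utilities.Ax(np.array([1.0, 2.0, 3.0]), x)    # 1D A: diagonal, element-wise, -> [1., 4., 9.]
    utilities.Ax(np.diag([1.0, 2.0, 3.0]), x)     # 2D A: full dot product, -> [1., 4., 9.]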
+geobipy.src.base.utilities.Det(A, N=1.0)
+

Custom function to compute the determinant of a matrix.

+
+
Parameters:
+
    +
  • A (float or ndarray of floats) – If A is 2D: Use numpy.linalg.det to obtain the determinant. Uses LU factorization. +If A is 1D: Take the cumulative product of the numbers, assumes A represents a diagonal matrix. +If A is scalar: Take the number to power N, assumes A represents a diagonal matrix with constant value.

  • +
  • N (int, optional) – If A is a scalar, N is the number of elements in the constant valued diagonal.

  • +
+
+
Returns:
+

out – The determinant of the matrix.

+
+
Return type:
+

float

+
+
+
+ +
+
+geobipy.src.base.utilities.Inv(A)
+

Custom matrix inversion up to 2 dimensions.

+
+
Parameters:
+

A (float or ndarray of floats) – A scalar, 1D array, or 2D array. +If A is scalar, assume it represents a diagonal matrix with constant value and take the reciprocal. +If A is 1D, assume it is the diagonal of a matrix: take reciprocal of entries. +If A is 2D, invert using linalg.

+
+
Returns:
+

out – The inversion of A.

+
+
Return type:
+

float or ndarray of floats

+
+
+
+ +
+
+geobipy.src.base.utilities.LogDet(A, N=1.0)
+

Custom function to get the natural logarithm of the determinant.

+
+
Parameters:
+
    +
  • A (float or numpy.ndarray of floats) – If A is 2D: Use numpy.linalg to obtain the determinant. Uses LU factorization. +If A is 1D: Take the cumulative product of the numbers, assumes A represents a diagonal matrix. +If A is scalar: Take the number to power N, assumes A represents a diagonal matrix with constant value.

  • +
  • N (int, optional) – If A is a scalar, N is the number of elements in the constant valued diagonal.

  • +
+
+
Returns:
+

out – The logged determinant of the matrix.

+
+
Return type:
+

float

+
+
+
+ +
+
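Det, Inv and LogDet (documented above) accept the same scalar, 1D, or 2D representations of A; a small sketch whose expected values follow the documented semantics:

    import numpy as np
    from geobipy.src.base import utilities

    d = np.array([2.0, 4.0, 8.0])      # diagonal representation of a 3x3 matrix

    utilities.Det(d)                   # product of the diagonal: 64.0
    utilities.LogDet(d)                # natural log of the determinant: ln(64.0)
    utilities.Inv(d)                   # reciprocal of each diagonal entry: [0.5, 0.25, 0.125]
    utilities.Det(2.0, N=3)            # constant diagonal of length 3: 2.0**3 = 8.0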
+geobipy.src.base.utilities.cosSin1(x, y, a, p)
+

Simple function for creating tests.

+
+ +
+
+geobipy.src.base.utilities.expReal(this)
+

Custom exponential of a number that allows a large negative exponent; overflow truncates without a warning message.

+
+
Parameters:
+

this (float) – Real number to take exponential to.

+
+
Returns:
+

out – exp(this).

+
+
Return type:
+

float

+
+
+
+ +
+
+geobipy.src.base.utilities.findFirstLastNotValue(this, values, invalid_val=-1)
+

Find the indices to the first and last non zero values along each axis

+
+
Parameters:
+

this (array_like) – An array of numbers

+
+
Returns:
+

out – Indices of the first and last non zero values along each axis.

+
+
Return type:
+

array_like

+
+
+
+ +
+
+geobipy.src.base.utilities.findFirstNonZeros(this, axis, invalid_val=-1)
+

Find the indices to the first non zero values

+
+
Parameters:
+
    +
  • this (array_like) – An array of numbers

  • +
  • axis (int) – Axis along which to find first non zeros

  • +
  • invalid_val (int) – If all values along that axis are zero, use this value

  • +
+
+
Returns:
+

out – Indices of the first non zero values.

+
+
Return type:
+

ints

+
+
+
+ +
+
+geobipy.src.base.utilities.findLastNonZeros(this, axis, invalid_val=-1)
+

Find the indices to the last non zero values

+
+
Parameters:
+
    +
  • this (array_like) – An array of numbers

  • +
  • axis (int) – Axis along which to find last non zeros

  • +
  • invalid_val (int) – If all values along that axis are zero, use this value

  • +
+
+
Returns:
+

out – Indices of the last non zero values.

+
+
Return type:
+

ints

+
+
+
+ +
+
+geobipy.src.base.utilities.findNans(this)
+

Find the indices to NaN values.

+
+
Parameters:
+

this (array_like) – An array of numbers.

+
+
Returns:
+

out – Integer array to locations of nans.

+
+
Return type:
+

array_like

+
+
+
+ +
+
+geobipy.src.base.utilities.findNotNans(this)
+

Find the indices to non-NaN values.

+
+
Parameters:
+

this (array_like) – An array of numbers.

+
+
Returns:
+

out – Integer array to locations of non nans.

+
+
Return type:
+

array_like

+
+
+
+ +
+
+geobipy.src.base.utilities.getName(self, default='')
+

Tries to obtain an attached name to a variable.

+

If the variable is an object with a getName() procedure, that function will take precedence. +If the variable does not have that procedure, a variable called name will be sought. +If this fails, the specified default will be returned.

+
+
Parameters:
+

self (any type) – Any type of variable.

+
+
Returns:
+

out – A string containing the variable’s name or the default.

+
+
Return type:
+

str

+
+
+
+ +
+
+geobipy.src.base.utilities.getNameUnits(self, defaultName='', defaultUnits='')
+

Tries to obtain any attached name and units to a variable. Any units are surrounded by round brackets.

+
+
Parameters:
+

self (any type) – Any type of variable.

+
+
Returns:
+

out – A string containing the variable’s name and units or the defaults.

+
+
Return type:
+

str

+
+
+
+ +
+
+geobipy.src.base.utilities.getUnits(self, default='')
+

Tries to obtain an attached units to a variable.

+

If the variable is an object with a getUnits() procedure, that function will take precedence. +If the variable does not have that procedure, a variable called units will be sought. +If this fails, the specified default will be returned.

+
+
Parameters:
+

self (any type) – Any type of variable.

+
+
Returns:
+

out – A string containing the variable’s units or the default.

+
+
Return type:
+

str

+
+
+
+ +
+
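A small sketch of getName, getUnits and getNameUnits, assuming StatArray accepts a name and units positionally; the expected strings follow the documented behaviour:

    import numpy as np
    from geobipy import StatArray
    from geobipy.src.base import utilities

    x = StatArray(np.zeros(5), 'Depth', 'm')

    utilities.getName(x)        # 'Depth'
    utilities.getUnits(x)       # 'm'
    utilities.getNameUnits(x)   # 'Depth (m)'
    utilities.getName(np.zeros(5), default='unnamed')   # plain ndarray: falls back to the default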
+geobipy.src.base.utilities.histogramEqualize(values, nBins=256)
+

Equalize the histogram of the values so that all colours have an equal amount

+
+
Parameters:
+
    +
  • values (array_like) – Values to be equalized.

  • +
  • nBins (int) – Number of bins to use.

  • +
+
+
Returns:
+

    +
  • res (array_like) – Equalized values

  • +
  • cdf (array_like) – Cumulative Density Function.

  • +
+

+
+
+
+ +
+
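A minimal sketch of histogramEqualize showing the two returned arrays:

    import numpy as np
    from geobipy.src.base import utilities

    values = np.random.randn(10000)
    res, cdf = utilities.histogramEqualize(values, nBins=256)   # equalized values and their CDF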
+geobipy.src.base.utilities.interleave(a, b)
+

Interleave two arrays together like zip

+
+
Parameters:
+
    +
  • a (array_like) – Interleave in [0::2]

  • +
  • b (array_like) – Interleave in [1::2]

  • +
+
+
Returns:
+

out – Interleaved arrays

+
+
Return type:
+

array_like

+
+
+
+ +
+
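For example, interleaving two short arrays (a fills the even indices, b the odd indices, as documented):

    import numpy as np
    from geobipy.src.base import utilities

    a = np.array([1, 3, 5])
    b = np.array([2, 4, 6])
    utilities.interleave(a, b)   # -> [1, 2, 3, 4, 5, 6]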
+geobipy.src.base.utilities.isInt(this)
+

Check whether an entry is a subtype of an int

+
+
Parameters:
+

this (variable) – Variable to check whether an int or not

+
+
Returns:
+

out – Is or is not an int

+
+
Return type:
+

bool

+
+
+
+ +
+
+geobipy.src.base.utilities.isIntorSlice(this)
+

Check whether an entry is a subtype of an int or a slice

+
+
Parameters:
+

this (variable) – Variable to check whether an int/slice or not

+
+
Returns:
+

out – Is or is not an int/slice

+
+
Return type:
+

bool

+
+
+
+ +
+
+geobipy.src.base.utilities.isNumpy(x)
+

Test that the variable is a compatible numpy type with built-ins like .ndim

+
+
Parameters:
+

x (anything) – A variable to check

+
+
Returns:
+

out – Whether the variable is a compatible numpy type

+
+
Return type:
+

bool

+
+
+
+ +
+
+geobipy.src.base.utilities.mergeComplex(this)
+

Merge a 1D array containing a vertical concatenation of N real then N imaginary components into an N/2 complex 1D array.

+
+
Parameters:
+

this (numpy.ndarray of float64) – 1D array containing the vertical concatenation of real then imaginary values.

+
+
Returns:
+

out – The combined real and imaginary components into a complex 1D array.

+
+
Return type:
+

numpy.ndarray of complex128

+
+
+
+ +
+
+geobipy.src.base.utilities.rosenbrock(x, y, a, b)
+

Generates values from the Rosenbrock function.

+
+ +
+
+geobipy.src.base.utilities.smooth(x, a)
+

Smooth x with an LTI Gaussian filter, using forwards and backwards passes.

+
+
Parameters:
+
    +
  • x (array_like) – signal to process

  • +
  • a (scalar between 0.0 and 1.0) – Weight

  • +
+
+
Returns:
+

out – Smoothed signal

+
+
Return type:
+

array_like

+
+
+
+ +
+
+geobipy.src.base.utilities.splitComplex(this)
+

Splits a vector of complex numbers into a vertical concatenation of the real and imaginary components.

+
+
Parameters:
+

this (numpy.ndarray of complex128) – 1D array of complex numbers.

+
+
Returns:
+

out – Vertically concatenated real then imaginary components of this.

+
+
Return type:
+

numpy.ndarray of float64

+
+
+
+ +
+
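splitComplex and mergeComplex (documented above) are inverses of one another; a small round-trip sketch whose expected ordering follows the documented real-then-imaginary layout:

    import numpy as np
    from geobipy.src.base import utilities

    z = np.array([1.0 + 2.0j, 3.0 - 4.0j], dtype=np.complex128)

    s = utilities.splitComplex(z)    # real parts then imaginary parts: [1., 3., 2., -4.]
    z2 = utilities.mergeComplex(s)   # back to the complex array
    np.allclose(z, z2)               # True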
+geobipy.src.base.utilities.str_to_raw(s)
+

Helper function for LaTeX

+
+
Parameters:
+

s (str) – String with special latex commands.

+
+
Returns:
+

out – String with latex special characters.

+
+
Return type:
+

str

+
+
+
+ +
+
+geobipy.src.base.utilities.tanh(this)
+

Custom hyperbolic tangent that returns correct values on overflow.

+
+ +
+
+geobipy.src.base.utilities.trim_by_percentile(values, percent)
+

Trim an array by a given percentile from either end

+
+
Parameters:
+
    +
  • values (array_like) – Values to trim

  • +
  • percent (float) – Percent from 0.0 to 100.0

  • +
+
+
Returns:
+

out – Trimmed values

+
+
Return type:
+

array_like

+
+
+
+ +
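A minimal sketch of trim_by_percentile; whether the extreme values are clipped or removed should be checked against the implementation:

    import numpy as np
    from geobipy.src.base import utilities

    values = np.random.randn(1000)
    trimmed = utilities.trim_by_percentile(values, 95.0)   # trim the extreme 5% tails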
+ + +
+
+ +
+
+
+
Classes used in GeoBIPy

StatArray

+
+ + +
+
+ +
+
+
+

Core classes

+ +
+ + +
+
+ +
+
+
+

Core object class

+


+
+
+class geobipy.src.classes.core.myObject.myObject
+
+
+getsizeof()
+

Get the size of the object in memory with nice output

+
+ +
+
+toHdf(h5obj, name, withPosterior=False)
+

Create and write to HDF.

+
+
Parameters:
+
    +
  • h5obj (h5py._hl.files.File or h5py._hl.group.Group) – A HDF file or group object to write the contents to.

  • +
  • myName (str) – The name of the group to write the StatArray to.

  • +
+
+
+
+ +
+ +
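Objects derived from myObject can be written to HDF5 this way. A minimal sketch using a StatArray, assuming it inherits toHdf and accepts a name and units positionally; the file name is hypothetical:

    import h5py
    import numpy as np
    from geobipy import StatArray

    x = StatArray(np.random.randn(10), 'Variable', 'units')
    with h5py.File('example.h5', 'w') as f:
        x.toHdf(f, 'x')   # creates the group 'x' and writes the contents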
+ + +
+
+ +
+
+
+

Data classes

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/data/datapoint/EmDataPoint.html b/docs/content/api/classes/data/datapoint/EmDataPoint.html new file mode 100644 index 00000000..29e494d1 --- /dev/null +++ b/docs/content/api/classes/data/datapoint/EmDataPoint.html @@ -0,0 +1,230 @@ + + + + + + + EmDataPoint — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

EmDataPoint

Inheritance diagram: ABC → myObject → Point → DataPoint → EmDataPoint
+
+class geobipy.src.classes.data.datapoint.EmDataPoint.EmDataPoint(x=0.0, y=0.0, z=0.0, elevation=None, components=None, channels_per_system=None, data=None, std=None, predictedData=None, channel_names=None, lineNumber=0.0, fiducial=0.0, **kwargs)
+

Abstract EmDataPoint Class

+

This is an abstract base class for TdemDataPoint and FdemDataPoint classes

+ +
+
+property active
+

Gets the indices to the observed data values that are not NaN

+
+
Returns:
+

out – Indices into the observed data that are not NaN

+
+
Return type:
+

array of ints

+
+
+
+ +
+
+find_best_halfspace(minConductivity=0.0001, maxConductivity=10000.0, nSamples=100)
+

Computes the best value of a half space that fits the data.

+

Carries out a brute force search of the halfspace conductivity that best fits the data. +The profile of data misfit vs halfspace conductivity is not quadratic, so a bisection will not work.

+
+
Parameters:
+
    +
  • minConductivity (float, optional) – The minimum conductivity to search over

  • +
  • maxConductivity (float, optional) – The maximum conductivity to search over

  • +
  • nSamples (int, optional) – The number of values between the min and max

  • +
+
+
Returns:
+

out – The best fitting log10 conductivity for the half space

+
+
Return type:
+

float64

+
+
+
+ +
+
+plot_halfspace_responses(minConductivity=-4.0, maxConductivity=2.0, nSamples=100, **kwargs)
+

Plots the responses of different half space models.

+
+
Parameters:
+
    +
  • minConductivity (float, optional) – The minimum log10 conductivity to search over

  • +
  • maxConductivity (float, optional) – The maximum log10 conductivity to search over

  • +
  • nInc (int, optional) – The number of increments between the min and max

  • +
+
+
+
+ +
+
+property predictedData
+

The predicted data.

+
+ +
+
+update_posteriors()
+

Update any attached posteriors

+
+ +
+ +
+ + +
+
+ +
+
+
+

FdemDataPoint

Inheritance diagram: ABC → myObject → Point → DataPoint → EmDataPoint → FdemDataPoint

Module describing a frequency domain EM data point that contains a single measurement.

+
+
+class geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint(x=0.0, y=0.0, z=0.0, elevation=0.0, data=None, std=None, predictedData=None, system=None, lineNumber=0.0, fiducial=0.0)
+

Class defines a Frequency domain electromagnetic data point.

+

Contains an easting, northing, height, elevation, observed and predicted data, and uncertainty estimates for the data.

+

FdemDataPoint(x, y, z, elevation, data, std, system, lineNumber, fiducial)

+
+
Parameters:
+
    +
  • x (float) – Easting co-ordinate of the data point

  • +
  • y (float) – Northing co-ordinate of the data point

  • +
  • z (float) – Height above ground of the data point

  • +
  • elevation (float, optional) – Elevation from sea level of the data point

  • +
  • data (geobipy.StatArray or array_like, optional) – Data values to assign the data of length 2*number of frequencies. +* If None, initialized with zeros.

  • +
  • std (geobipy.StatArray or array_like, optional) – Estimated uncertainty standard deviation of the data of length 2*number of frequencies. +* If None, initialized with ones if data is None, else 0.1*data values.

  • +
  • system (str or geobipy.FdemSystem, optional) – Describes the acquisition system with loop orientation and frequencies. +* If str should be the path to a system file to read in. +* If geobipy.FdemSystem, will be deepcopied.

  • +
  • lineNumber (float, optional) – The line number associated with the datapoint

  • +
  • fiducial (float, optional) – The fiducial associated with the datapoint

  • +
+
+
+
+
+calibrate(Predicted=True)
+

Apply calibration factors to the data point

+
+ +
+
+createHdf(parent, name, withPosterior=True, add_axis=None, fillvalue=None)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+forward(mod)
+

Forward model the data from the given model

+
+ +
+
+frequencies(system=0)
+

Return the frequencies in an StatArray

+
+ +
+
+classmethod fromHdf(grp, index=None, **kwargs)
+

Reads the object from a HDF group

+
+ +
+
+getFrequency(channel, system=0)
+

Return the measurement frequency of the channel

+
+
Parameters:
+
    +
  • channel (int) – Channel number

  • +
  • system (int, optional) – System number

  • +
+
+
Returns:
+

out – The measurement frequency of the channel

+
+
Return type:
+

float

+
+
+
+ +
+
+getMeasurementType(channel, system=0)
+

Returns the measurement type of the channel

+
+
Parameters:
+
    +
  • channel (int) – Channel number

  • +
  • system (int, optional) – System number

  • +
+
+
Returns:
+

out – Either “In-Phase “ or “Quadrature “

+
+
Return type:
+

str

+
+
+
+ +
+
+plot(title='Frequency Domain EM Data', system=0, with_error_bars=True, **kwargs)
+

Plot the Inphase and Quadrature Data

+
+
Parameters:
+
    +
  • title (str) – Title of the plot

  • +
  • system (int) – If multiple system are present, select which one

  • +
  • with_error_bars (bool) – Plot vertical lines representing 1 standard deviation

  • +
+
+
+
+

See also

+
+
matplotlib.pyplot.errorbar

For more keyword arguments

+
+
+
+
+
Returns:
+

out – Figure axis

+
+
Return type:
+

matplotlib.pyplot.ax

+
+
+
+ +
+
+plot_predicted(title='Frequency Domain EM Data', system=0, **kwargs)
+

Plot the predicted Inphase and Quadrature Data

+
+
Parameters:
+
    +
  • title (str) – Title of the plot

  • +
  • system (int) – If multiple system are present, select which one

  • +
+
+
+
+

See also

+
+
matplotlib.pyplot.semilogx

For more keyword arguments

+
+
+
+
+
Returns:
+

out – Figure axis

+
+
Return type:
+

matplotlib.pyplot.ax

+
+
+
+ +
+
+sensitivity(mod, **kwargs)
+

Compute the sensitivity matrix for the given model

+
+ +
+
+updateSensitivity(model)
+

Compute an updated sensitivity matrix based on the one already contained in the FdemDataPoint object

+
+ +
+
+update_posteriors()
+

Update any attached posteriors

+
+ +
+ +
+ + +
+
+ +
+
+
+

TdemDataPoint

Inheritance diagram: ABC → myObject → Point → DataPoint → EmDataPoint → TdemDataPoint
+
+class geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint(x=0.0, y=0.0, z=0.0, elevation=0.0, primary_field=None, secondary_field=None, relative_error=None, additive_error=None, std=None, predicted_primary_field=None, predicted_secondary_field=None, system=None, transmitter_loop=None, receiver_loop=None, lineNumber=0.0, fiducial=0.0)
+

Initialize a Time domain EMData Point

+

TdemDataPoint(x, y, z, elevation, data, std, system, transmitter_loop, receiver_loop, lineNumber, fiducial)

+
+
Parameters:
+
    +
  • x (float64) – The easting co-ordinate of the data point

  • +
  • y (float64) – The northing co-ordinate of the data point

  • +
  • z (float64) – The height of the data point above ground

  • +
  • elevation (float64, optional) – The elevation of the data point, default is 0.0

  • +
  • data (list of arrays, optional) – A list of 1D arrays, where each array contains the data in each system. +The arrays are vertically concatenated inside the TdemDataPoint object

  • +
  • std (list of arrays, optional) – A list of 1D arrays, where each array contains the errors in each system. +The arrays are vertically concatenated inside the TdemDataPoint object

  • +
  • system (TdemSystem, optional) – Time domain system class

  • +
  • transmitter_loop (EmLoop, optional) – Transmitter loop class

  • +
  • receiver_loop (EmLoop, optional) – Receiver loop class

  • +
  • lineNumber (float, optional) – The line number associated with the datapoint

  • +
  • fiducial (float, optional) – The fiducial associated with the datapoint

  • +
+
+
Returns:
+

out – A time domain EM sounding

+
+
Return type:
+

TdemDataPoint

+
+
+

Notes

+

The data argument is a set of lists with length equal to the number of systems. +These data are unpacked and vertically concatenated in this class. +The parameter self._data will have length equal to the sum of the number of time gates in each system. +The same is true for the errors, and the predicted data vector.

+
+
+property addressof
+

Print a summary of the EMdataPoint

+
+ +
+
+createHdf(parent, name, withPosterior=True, add_axis=None, fillvalue=None)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+dualMoment()
+

Returns True if the number of systems is > 1

+
+ +
+
+forward(model)
+

Forward model the data from the given model

+
+ +
+
+classmethod fromHdf(grp, **kwargs)
+

Reads the object from a HDF group

+
+ +
+
+property iplotActive
+

Get the active data indices per system. Used for plotting.

+
+ +
+
+off_time(system=0)
+

Return the window times in an StatArray

+
+ +
+
+perturb()
+

Propose a new EM data point given the specified attached proposal distributions

+
+
Parameters:
+
    +
  • newHeight (bool) – Propose a new observation height.

  • +
  • newRelativeError (bool) – Propose a new relative error.

  • +
  • newAdditiveError (bool) – Propose a new additive error.

  • +
  • newCalibration (bool) – Propose new calibration parameters.

  • +
+
+
Returns:
+

out – The proposed data point

+
+
Return type:
+

subclass of EmDataPoint

+
+
+

Notes

+

For each boolean, the associated proposal must have been set.

+
+
Raises:
+

TypeError – If a proposal has not been set on a requested parameter

+
+
+
+ +
+
+plot(title='Time Domain EM Data', with_error_bars=True, **kwargs)
+

Plot the Inphase and Quadrature Data for an EM measurement

+
+ +
+
+property predictedData
+

The predicted data.

+
+ +
+
+property probability
+

Evaluate the probability for the EM data point given the specified attached priors

+
+
Parameters:
+
    +
  • rEerr (bool) – Include the relative error when evaluating the prior

  • +
  • aEerr (bool) – Include the additive error when evaluating the prior

  • +
  • height (bool) – Include the elevation when evaluating the prior

  • +
  • calibration (bool) – Include the calibration parameters when evaluating the prior

  • +
  • verbose (bool) – Return the components of the probability, i.e. the individually evaluated priors

  • +
+
+
Returns:
+

out – The evaluation of the probability using all assigned priors

+
+
Return type:
+

float64

+
+
+

Notes

+

For each boolean, the associated prior must have been set.

+
+
Raises:
+

TypeError – If a prior has not been set on a requested parameter

+
+
+
+ +
+
+read(dataFileName)
+

Read in a time domain data point from a file.

+
+
Parameters:
+

dataFileName (str or list of str) – File names of the data point. Multiple can be given for multiple moments at the same location.

+
+
Returns:
+

out – Time domain data point

+
+
Return type:
+

geobipy.TdemDataPoint

+
+
+
+ +
+
+sensitivity(model, ix=None, model_changed=False)
+

Compute the sensitivity matrix for the given model

+
+ +
+
+set_posteriors(log=10)
+

Set the posteriors based on the attached priors

+
+
Parameters:
+

log

+
+
+
+ +
+
+set_proposals(relative_error_proposal=None, additive_error_proposal=None, **kwargs)
+

Set the proposals on the datapoint’s perturbable parameters

+
+
Parameters:
+
    +
  • heightProposal (geobipy.baseDistribution, optional) – The proposal to attach to the height. Must be univariate

  • +
  • relativeErrorProposal (geobipy.baseDistribution, optional) – The proposal to attach to the relative error. +If the datapoint has only one system, relativeErrorProposal is univariate. +If there are more than one system, relativeErrorProposal is multivariate.

  • +
  • additiveErrorProposal (geobipy.baseDistribution, optional) – The proposal to attach to the relative error. +If the datapoint has only one system, additiveErrorProposal is univariate. +If there are more than one system, additiveErrorProposal is multivariate.

  • +
+
+
+
+ +
+
+property std
+

Updates the data errors

+

Assumes a t^-0.5 behaviour e.g. logarithmic gate averaging +V0 is assumed to be ln(Error @ 1ms)

+
+
Parameters:
+
    +
  • relativeErr (list of scalars or list of array_like) – A fraction percentage that is multiplied by the observed data. The list should have length equal to the number of systems. The entries in each item can be scalar or array_like.

  • +
  • additiveErr (list of scalars or list of array_like) – An absolute value of additive error. The list should have length equal to the number of systems. The entries in each item can be scalar or array_like.

  • +
+
+
Raises:
+
    +
  • TypeError – If relativeErr or additiveErr is not a list

  • +
  • TypeError – If the length of relativeErr or additiveErr is not equal to the number of systems

  • +
  • TypeError – If any item in the relativeErr or additiveErr lists is not a scalar or array_like of length equal to the number of time channels

  • +
  • ValueError – If any relative or additive errors are <= 0.0

  • +
+
+
+
+ +
+
+property summary
+

Print a summary of the EMdataPoint

+
+ +
+
+update_posteriors()
+

Update any attached posteriors

+
+ +
+
+writeHdf(parent, name, withPosterior=True, index=None)
+

Write the StatArray to an HDF object +parent: Upper hdf file or group +myName: object hdf name. Assumes createHdf has already been called +create: optionally create the data set as well before writing

+
+ +
+ +
+ + +
+
+ +
+
+
+

Tempest_datapoint

Inheritance diagram: ABC → myObject → Point → DataPoint → EmDataPoint → TdemDataPoint → Tempest_datapoint
+
+class geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint(*args, additive_error_multiplier=None, **kwargs)
+

Initialize a Tempest Time domain data point

+

TdemDataPoint(x, y, z, elevation, data, std, system, transmitter_loop, receiver_loop, lineNumber, fiducial)

+
+
Parameters:
+
    +
  • x (float64) – The easting co-ordinate of the data point

  • +
  • y (float64) – The northing co-ordinate of the data point

  • +
  • z (float64) – The height of the data point above ground

  • +
  • elevation (float64, optional) – The elevation of the data point, default is 0.0

  • +
  • data (list of arrays, optional) – A list of 1D arrays, where each array contains the data in each system. +The arrays are vertically concatenated inside the TdemDataPoint object

  • +
  • std (list of arrays, optional) – A list of 1D arrays, where each array contains the errors in each system. +The arrays are vertically concatenated inside the TdemDataPoint object

  • +
  • system (TdemSystem, optional) – Time domain system class

  • +
  • transmitter_loop (EmLoop, optional) – Transmitter loop class

  • +
  • receiver_loop (EmLoop, optional) – Receiver loop class

  • +
  • lineNumber (float, optional) – The line number associated with the datapoint

  • +
  • fiducial (float, optional) – The fiducial associated with the datapoint

  • +
+
+
Returns:
+

out – A time domain EM sounding

+
+
Return type:
+

TdemDataPoint

+
+
+

Notes

+

The data argument is a set of lists with length equal to the number of systems. +These data are unpacked and vertically concatenated in this class. +The parameter self._data will have length equal to the sum of the number of time gates in each system. +The same is true for the errors, and the predicted data vector.

+
+
+createHdf(parent, name, withPosterior=True, add_axis=None, fillvalue=None)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+classmethod fromHdf(grp, **kwargs)
+

Reads the object from a HDF group

+
+ +
+
+perturb()
+

Propose a new EM data point given the specified attached proposal distributions

+
+
Parameters:
+
    +
  • newHeight (bool) – Propose a new observation height.

  • +
  • newRelativeError (bool) – Propose a new relative error.

  • +
  • newAdditiveError (bool) – Propose a new additive error.

  • +
  • newCalibration (bool) – Propose new calibration parameters.

  • +
+
+
Returns:
+

out – The proposed data point

+
+
Return type:
+

subclass of EmDataPoint

+
+
+

Notes

+

For each boolean, the associated proposal must have been set.

+
+
Raises:
+

TypeError – If a proposal has not been set on a requested parameter

+
+
+
+ +
+
+plot(**kwargs)
+

Plot the Inphase and Quadrature Data for an EM measurement

+
+ +
+
+property predictedData
+

The predicted data.

+
+ +
+
+property probability
+

Evaluate the probability for the EM data point given the specified attached priors

+
+
Parameters:
+
    +
  • rEerr (bool) – Include the relative error when evaluating the prior

  • +
  • aEerr (bool) – Include the additive error when evaluating the prior

  • +
  • height (bool) – Include the elevation when evaluating the prior

  • +
  • calibration (bool) – Include the calibration parameters when evaluating the prior

  • +
  • verbose (bool) – Return the components of the probability, i.e. the individually evaluated priors

  • +
+
+
Returns:
+

out – The evaluation of the probability using all assigned priors

+
+
Return type:
+

float64

+
+
+

Notes

+

For each boolean, the associated prior must have been set.

+
+
Raises:
+

TypeError – If a prior has not been set on a requested parameter

+
+
+
+ +
+
+set_additive_error_posterior(log=None)
+
+ +
+
+set_posteriors(log=None)
+

Set the posteriors based on the attached priors

+
+
Parameters:
+

log

+
+
+
+ +
+
+set_proposals(relative_error_proposal=None, additive_error_proposal=None, **kwargs)
+

Set the proposals on the datapoint’s perturbable parameters

+
+
Parameters:
+
    +
  • heightProposal (geobipy.baseDistribution, optional) – The proposal to attach to the height. Must be univariate

  • +
  • relativeErrorProposal (geobipy.baseDistribution, optional) – The proposal to attach to the relative error. +If the datapoint has only one system, relativeErrorProposal is univariate. +If there are more than one system, relativeErrorProposal is multivariate.

  • +
  • additiveErrorProposal (geobipy.baseDistribution, optional) – The proposal to attach to the relative error. +If the datapoint has only one system, additiveErrorProposal is univariate. +If there are more than one system, additiveErrorProposal is multivariate.

  • +
+
+
+
+ +
+
+set_relative_error_posterior()
+
+ +
+
+property std
+

Updates the data errors

+

Assumes a t^-0.5 behaviour e.g. logarithmic gate averaging +V0 is assumed to be ln(Error @ 1ms)

+
+
Parameters:
+
    +
  • relativeErr (list of scalars or list of array_like) – A fraction percentage that is multiplied by the observed data. The list should have length equal to the number of systems. The entries in each item can be scalar or array_like.

  • +
  • additiveErr (list of scalars or list of array_like) – An absolute value of additive error. The list should have length equal to the number of systems. The entries in each item can be scalar or array_like.

  • +
+
+
Raises:
+
    +
  • TypeError – If relativeErr or additiveErr is not a list

  • +
  • TypeError – If the length of relativeErr or additiveErr is not equal to the number of systems

  • +
  • TypeError – If any item in the relativeErr or additiveErr lists is not a scalar or array_like of length equal to the number of time channels

  • +
  • ValueError – If any relative or additive errors are <= 0.0

  • +
+
+
+
+ +
+
+writeHdf(parent, name, withPosterior=True, index=None)
+

Write the StatArray to an HDF object +parent: Upper hdf file or group +myName: object hdf name. Assumes createHdf has already been called +create: optionally create the data set as well before writing

+
+ +
+ +
+ + +
+
+ +
+
+
+

DataPoint

Inheritance diagram: ABC → myObject → Point → DataPoint
+
+class geobipy.src.classes.data.datapoint.DataPoint.DataPoint(x=0.0, y=0.0, z=0.0, elevation=None, data=None, std=None, predictedData=None, units=None, channel_names=None, lineNumber=0.0, fiducial=0.0, **kwargs)
+

Class defines a data point.

+

Contains an easting, northing, height, elevation, observed and predicted data, and uncertainty estimates for the data.

+

DataPoint(x, y, z, elevation, nChannels, data, std, units)

+
+
Parameters:
+
    +
  • nChannelsPerSystem (int or array_like) – Number of data channels in the data +* If int, a single acquisition system is assumed. +* If array_like, each entry is the number of channels for each system.

  • +
  • x (float) – Easting co-ordinate of the data point

  • +
  • y (float) – Northing co-ordinate of the data point

  • +
  • z (float) – Height above ground of the data point

  • +
  • elevation (float, optional) – Elevation from sea level of the data point

  • +
  • data (geobipy.StatArray or array_like, optional) – Data values to assign the data of length sum(nChannelsPerSystem). +* If None, initialized with zeros.

  • +
  • std (geobipy.StatArray or array_like, optional) – Estimated uncertainty standard deviation of the data of length sum(nChannelsPerSystem). +* If None, initialized with ones if data is None, else 0.1*data values.

  • +
  • predictedData (geobipy.StatArray or array_like, optional) – Predicted data values to assign the data of length sum(nChannelsPerSystem). +* If None, initialized with zeros.

  • +
  • units (str, optional) – Units of the data. Default is “ppm”.

  • +
  • channel_names (list of str, optional) – Names of each channel of length sum(nChannelsPerSystem)

  • +
+
+
+
+
+active
+

Gets the indices to the observed data values that are not NaN

+
+
Returns:
+

out – Indices into the observed data that are not NaN

+
+
Return type:
+

array of ints

+
+
+
+ +
+
+property addressof
+

Print a summary of the EMdataPoint

+
+ +
+
+createHdf(parent, myName, withPosterior=True, add_axis=None, fillvalue=None)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+data_misfit()
+

Compute the \(L_{2}\) norm squared misfit between the observed and predicted data

+
+\[\| \mathbf{W}_{d} (\mathbf{d}^{obs}-\mathbf{d}^{pre})\|_{2}^{2},\]
+

where \(\mathbf{W}_{d}\) are the reciprocal data errors.

+
+
Parameters:
+

squared (bool) – Return the squared misfit.

+
+
Returns:
+

out – The misfit value.

+
+
Return type:
+

float64

+
+
+
+ +
+
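The quantity defined above can be illustrated with a plain numpy sketch; this illustrates the formula only, not the class method itself, and the values are hypothetical:

    import numpy as np

    d_obs = np.array([1.0, 2.0, 3.0])   # observed data
    d_pre = np.array([1.1, 1.9, 3.2])   # predicted data
    std = np.array([0.1, 0.1, 0.2])     # data errors; W_d holds their reciprocals

    misfit = np.sum(((d_obs - d_pre) / std)**2)   # ||W_d (d_obs - d_pre)||_2^2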
+property deltaD
+

Get the difference between the predicted and observed data,

+
+\[\delta \mathbf{d} = \mathbf{d}^{pre} - \mathbf{d}^{obs}.\]
+
+
Returns:
+

out – The residual between the active observed and predicted data +with size equal to the number of active channels.

+
+
Return type:
+

StatArray

+
+
+
+ +
+
+classmethod fromHdf(grp, index=None, **kwargs)
+

Reads the object from a HDF group

+
+ +
+
+likelihood(log)
+

Compute the likelihood of the current predicted data given the observed data and assigned errors

+
+
Returns:
+

out – Likelihood of the data point

+
+
Return type:
+

float64

+
+
+
+ +
+
+perturb()
+

Propose a new EM data point given the specified attached proposal distributions

+
+
Parameters:
+
    +
  • newHeight (bool) – Propose a new observation height.

  • +
  • newRelativeError (bool) – Propose a new relative error.

  • +
  • newAdditiveError (bool) – Propose a new additive error.

  • +
  • newCalibration (bool) – Propose new calibration parameters.

  • +
+
+
Returns:
+

out – The proposed data point

+
+
Return type:
+

subclass of EmDataPoint

+
+
+

Notes

+

For each boolean, the associated proposal must have been set.

+
+
Raises:
+

TypeError – If a proposal has not been set on a requested parameter

+
+
+
+ +
+
+property predictedData
+

The predicted data.

+
+ +
+
+property probability
+

Evaluate the probability for the EM data point given the specified attached priors

+
+
Parameters:
+
    +
  • rEerr (bool) – Include the relative error when evaluating the prior

  • +
  • aEerr (bool) – Include the additive error when evaluating the prior

  • +
  • height (bool) – Include the elevation when evaluating the prior

  • +
  • calibration (bool) – Include the calibration parameters when evaluating the prior

  • +
  • verbose (bool) – Return the components of the probability, i.e. the individually evaluated priors

  • +
+
+
Returns:
+

out – The evaluation of the probability using all assigned priors

+
+
Return type:
+

float64

+
+
+

Notes

+

For each boolean, the associated prior must have been set.

+
+
Raises:
+

TypeError – If a prior has not been set on a requested parameter

+
+
+
+ +
+
+set_additive_error_posterior(log=10)
+
+ +
+
+set_posteriors(log=10)
+

Set the posteriors based on the attached priors

+
+
Parameters:
+

log

+
+
+
+ +
+
+set_proposals(relative_error_proposal=None, additive_error_proposal=None, **kwargs)
+

Set the proposals on the datapoint’s perturbable parameters

+
+
Parameters:
+
    +
  • heightProposal (geobipy.baseDistribution, optional) – The proposal to attach to the height. Must be univariate

  • +
  • relativeErrorProposal (geobipy.baseDistribution, optional) – The proposal to attach to the relative error. +If the datapoint has only one system, relativeErrorProposal is univariate. +If there is more than one system, relativeErrorProposal is multivariate.

  • +
  • additiveErrorProposal (geobipy.baseDistribution, optional) – The proposal to attach to the additive error. +If the datapoint has only one system, additiveErrorProposal is univariate. +If there is more than one system, additiveErrorProposal is multivariate.

  • +
+
+
+
+ +
+
+set_relative_error_posterior()
+
+ +
+
+property std
+

Compute the data errors.

+
+ +
+
+property summary
+

Print a summary of the EMdataPoint

+
+ +
+
+writeHdf(parent, name, withPosterior=True, index=None)
+

Write the StatArray to an HDF object +parent: Upper hdf file or group +myName: object hdf name. Assumes createHdf has already been called +create: optionally create the data set as well before writing

+
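The HDF routines follow the same create/write/read pattern used throughout GeoBIPy. A sketch of a round trip, assuming dp is an existing data point and using a hypothetical file name:

>>> import h5py
>>> with h5py.File('datapoint.h5', 'w') as f:
...     dp.createHdf(f, 'dp')   # create the group and its metadata
...     dp.writeHdf(f, 'dp')    # write the values into the pre-created group
>>> with h5py.File('datapoint.h5', 'r') as f:
...     dp2 = type(dp).fromHdf(f['dp'])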
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/data/datapoint/datapointrst.html b/docs/content/api/classes/data/datapoint/datapointrst.html new file mode 100644 index 00000000..fbb4438a --- /dev/null +++ b/docs/content/api/classes/data/datapoint/datapointrst.html @@ -0,0 +1,149 @@ + + + + + + + Datapoint classes — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Datapoint classes

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/data/dataset/Data.html b/docs/content/api/classes/data/dataset/Data.html new file mode 100644 index 00000000..6424ee2c --- /dev/null +++ b/docs/content/api/classes/data/dataset/Data.html @@ -0,0 +1,606 @@ + + + + + + + Data — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

Data

+Inheritance diagram: ABC -> myObject -> Point -> Data

@Data_Class +Module describing a Data Set where values are associated with an xyz co-ordinate

+
+
+class geobipy.src.classes.data.dataset.Data.Data(components=None, channels_per_system=1, x=None, y=None, z=None, elevation=None, data=None, std=None, predictedData=None, fiducial=None, lineNumber=None, units=None, channel_names=None, **kwargs)
+

Class defining a set of Data.

+

Data(channels_per_system, x, y, z, data, std, predictedData, dataUnits, channel_names)

+
+
Parameters:
+
    +
  • nPoints (int) – Number of points in the data.

  • +
  • channels_per_system (int or array_like) – Number of data channels in the data +* If int, a single acquisition system is assumed. +* If array_like, each item describes the number of channels per acquisition system.

  • +
  • x (geobipy.StatArray or array_like, optional) – The x co-ordinates. Default is zeros of size nPoints.

  • +
  • y (geobipy.StatArray or array_like, optional) – The y co-ordinates. Default is zeros of size nPoints.

  • +
  • z (geobipy.StatArrayor array_like, optional) – The z co-ordinates. Default is zeros of size nPoints.

  • +
  • data (geobipy.StatArrayor array_like, optional) – The values of the data. +* If None, zeroes are assigned

  • +
  • std (geobipy.StatArrayor array_like, optional) – The uncertainty estimates of the data. +* If None, ones are assigned if data is None, else 0.1*data

  • +
  • predictedData (geobipy.StatArrayor array_like, optional) – The predicted data. +* If None, zeros are assigned.

  • +
  • dataUnits (str) – Units of the data.

  • +
  • channel_names (list of str, optional) – Names of each channel of length sum(channels_per_system)

  • +
+
+
Returns:
+

out – Data class

+
+
Return type:
+

Data

+
+
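A minimal sketch of instantiating a Data set from plain NumPy arrays using the keyword arguments listed above. The sizes and values are illustrative, and it is assumed that Data is exposed at the geobipy package level:

>>> import numpy as np
>>> from geobipy import Data
>>> nPoints, nChannels = 100, 3
>>> D = Data(channels_per_system=nChannels,
...          x=np.random.randn(nPoints),
...          y=np.random.randn(nPoints),
...          z=np.zeros(nPoints),
...          data=np.random.randn(nPoints, nChannels),
...          units='ppm')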
+
+
+Bcast(world, root=0)
+

Broadcast a Data object using MPI

+
+
Parameters:
+
    +
  • world (mpi4py.MPI.COMM_WORLD) – MPI communicator

  • +
  • root (int, optional) – The MPI rank to broadcast from. Default is 0.

  • +
+
+
Returns:
+

out – Data broadcast to each core in the communicator

+
+
Return type:
+

geobipy.Data

+
+
+
+ +
+
+Scatterv(starts, chunks, world, root=0)
+

Scatterv a Data object using MPI

+
+
Parameters:
+
    +
  • starts (array of ints) – 1D array of ints with size equal to the number of MPI ranks. Each element gives the starting index for a chunk to be sent to that core. e.g. starts[0] is the starting index for rank = 0.

  • +
  • chunks (array of ints) – 1D array of ints with size equal to the number of MPI ranks. Each element gives the size of a chunk to be sent to that core. e.g. chunks[0] is the chunk size for rank = 0.

  • +
  • world (mpi4py.MPI.Comm) – The MPI communicator over which to Scatterv.

  • +
  • root (int, optional) – The MPI rank to broadcast from. Default is 0.

  • +
+
+
Returns:
+

out – The Data distributed amongst ranks.

+
+
Return type:
+

geobipy.Data

+
+
+
+ +
+
+property active
+

Logical array whether the channel is active or not.

+

An inactive channel is one where channel values are NaN for all points.

+
+
Returns:
+

out – Indices of non-NaN columns.

+
+
Return type:
+

bools

+
+
+
+ +
+
+addToVTK(vtk, prop=['data', 'predicted', 'std'], system=None)
+

Adds a member to a VTK handle.

+
+
Parameters:
+
    +
  • vtk (pyvtk.VtkData) – vtk handle returned from self.vtkStructure()

  • +
  • prop (str or list of str, optional) – List of the member to add to a VTK handle, either “data”, “predicted”, or “std”.

  • +
  • system (int, optional) – The system for which to add the data

  • +
+
+
+
+ +
+
+property additive_error
+

The data.

+
+ +
+
+append(other)
+

Append pointclouds together

+
+
Parameters:
+

other (geobipy.PointCloud3D) – 3D pointcloud

+
+
+
+ +
+
+channel_index(channel, system)
+

Gets the index of the specified channel

+
+
Parameters:
+
    +
  • channel (int) – Index of the channel to return +* If system is None, 0 <= channel < self.nChannels else 0 <= channel < self.nChannelsPerSystem[system]

  • +
  • system (int, optional) – The system to obtain the channel from.

  • +
+
+
Returns:
+

out – The index of the channel

+
+
Return type:
+

int

+
+
+
+ +
+
+createHdf(parent, myName, withPosterior=True, fillvalue=None)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+property data
+

The data.

+
+ +
+
+data_misfit(squared=False)
+

Compute the \(L_{2}\) norm squared misfit between the observed and predicted data

+
+\[\| \mathbf{W}_{d} (\mathbf{d}^{obs}-\mathbf{d}^{pre})\|_{2}^{2},\]
+

where \(\mathbf{W}_{d}\) are the reciprocal data errors.

+
+
Parameters:
+

squared (bool) – Return the squared misfit.

+
+
Returns:
+

out – The misfit value.

+
+
Return type:
+

float64

+
+
+
+ +
+
+datapoint(i)
+

Get the ith data point from the data set

+
+
Parameters:
+

i (int) – The data point to get

+
+
Returns:
+

out – The data point

+
+
Return type:
+

geobipy.DataPoint

+
+
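For example, a single sounding can be pulled out of a data set and treated as a stand-alone data point (a sketch, assuming D is an existing Data instance):

>>> dp = D.datapoint(0)          # the first data point
>>> print(dp.data_misfit())      # misfit against its current predicted data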
+
+ +
+
+property deltaD
+

Get the difference between the predicted and observed data,

+
+\[\delta \mathbf{d} = \mathbf{d}^{pre} - \mathbf{d}^{obs}.\]
+
+
Returns:
+

out – The residual between the active observed and predicted data.

+
+
Return type:
+

StatArray

+
+
+
+ +
+
+classmethod fromHdf(grp, **kwargs)
+

Reads the object from a HDF group

+
+ +
+
+line(line)
+

Get the data from the given line number

+
+ +
+
+mapData(channel, system=None, *args, **kwargs)
+

Interpolate the data channel between the x, y co-ordinates.

+
+
Parameters:
+
    +
  • channel (int) – Index of the channel to return +* If system is None, 0 <= channel < self.nChannels else 0 <= channel < self.nChannelsPerSystem[system]

  • +
  • system (int, optional) – The system to obtain the channel from.

  • +
+
+
+
+ +
+
+mapPredictedData(channel, system=None, *args, **kwargs)
+

Interpolate the predicted data channel between the x, y co-ordinates.

+
+
Parameters:
+
    +
  • channel (int) – Index of the channel to return +* If system is None, 0 <= channel < self.nChannels else 0 <= channel < self.nChannelsPerSystem[system]

  • +
  • system (int, optional) – The system to obtain the channel from.

  • +
+
+
+
+ +
+
+mapStd(channel, system=None, *args, **kwargs)
+

Interpolate the standard deviation channel between the x, y co-ordinates.

+
+
Parameters:
+
    +
  • channel (int) – Index of the channel to return +* If system is None, 0 <= channel < self.nChannels else 0 <= channel < self.nChannelsPerSystem[system]

  • +
  • system (int, optional) – The system to obtain the channel from.

  • +
+
+
+
+ +
+
+nPointsPerLine()
+

Gets the number of points in each line.

+
+
Returns:
+

out – Number of points in each line

+
+
Return type:
+

ints

+
+
+
+ +
+
+plot_data(x='index', channels=None, system=None, **kwargs)
+

Plots the specified channels as a line plot.

+

Plots the channels along a specified co-ordinate e.g. ‘x’. A legend is auto generated.

+
+
Parameters:
+
    +
  • xAxis (str) – If xAxis is ‘index’, returns numpy.arange(self.nPoints) +If xAxis is ‘x’, returns self.x +If xAxis is ‘y’, returns self.y +If xAxis is ‘z’, returns self.z +If xAxis is ‘r2d’, returns cumulative distance along the line in 2D using x and y. +If xAxis is ‘r3d’, returns cumulative distance along the line in 3D using x, y, and z.

  • +
  • channels (ints, optional) – Indices of the channels to plot. All are plotted if None +* If system is None, 0 <= channel < self.nChannels else 0 <= channel < self.nChannelsPerSystem[system]

  • +
  • values (arraylike, optional) – Specifies values to plot against the chosen axis. Takes precedence over channels.

  • +
  • system (int, optional) – The system to obtain the channel from.

  • +
  • legend (bool) – Attach a legend to the plot. Default is True.

  • +
+
+
Returns:
+

    +
  • ax (matplotlib.axes) – Plot axes handle

  • +
  • legend (matplotlib.legend.Legend) – The attached legend.

  • +
+

+
+
+
+

See also

+
+
geobipy.plotting.plot

For additional keyword arguments

+
+
+
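A sketch of typical usage, assuming D is a Data instance with at least two channels:

>>> import matplotlib.pyplot as plt
>>> plt.figure()
>>> # Plot channels 0 and 1 against the x co-ordinate; the axes handle and legend are returned
>>> ax, legend = D.plot_data(x='x', channels=[0, 1])
>>> plt.show()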
+
+ +
+
+plot_predicted(xAxis='index', channels=None, system=None, **kwargs)
+

Plots the specified predicted data channels as a line plot.

+

Plots the channels along a specified co-ordinate e.g. ‘x’. A legend is auto generated.

+
+
Parameters:
+
    +
  • xAxis (str) – If xAxis is ‘index’, returns numpy.arange(self.nPoints) +If xAxis is ‘x’, returns self.x +If xAxis is ‘y’, returns self.y +If xAxis is ‘z’, returns self.z +If xAxis is ‘r2d’, returns cumulative distance along the line in 2D using x and y. +If xAxis is ‘r3d’, returns cumulative distance along the line in 3D using x, y, and z.

  • +
  • channels (ints, optional) – Indices of the channels to plot. All are plotted if None +* If system is None, 0 <= channel < self.nChannels else 0 <= channel < self.nChannelsPerSystem[system]

  • +
  • system (int, optional) – The system to obtain the channel from.

  • +
  • noLegend (bool) – Do not attach a legend to the plot. Default is False, a legend is attached.

  • +
+
+
Returns:
+

    +
  • ax (matplotlib.axes) – Plot axes handle

  • +
  • legend (matplotlib.legend.Legend) – The attached legend.

  • +
+

+
+
+
+

See also

+
+
geobipy.plotting.plot

For additional keyword arguments

+
+
+
+
+ +
+
+property predictedData
+

The predicted data.

+
+ +
+
+classmethod read_csv(data_filename, **kwargs)
+

Reads the data and system parameters from file

+
+
Parameters:
+
    +
  • dataFilename (str or list of str) – Time domain data file names

  • +
  • systemFilename (str or list of str) – Time domain system file names

  • +
+
+
+

Notes

+

File Format

+

The data columns are read in according to the column names in the first line. +The header line should contain at least the following column names. +Extra columns may exist, but will be ignored. In this description, +the column name or its alternatives are given followed by what the name represents. +Optional columns are also described.

+

Required columns

+
+
line

Line number for the data point

+
+
id or fid

Id number of the data point, these must be unique

+
+
x or northing or n

Northing co-ordinate of the data point

+
+
y or easting or e

Easting co-ordinate of the data point

+
+
z or dtm or dem_elev or dem_np or topo

Elevation of the ground at the data point

+
+
alt or laser or bheight

Altitude of the transmitter coil

+
+
Off[0] to Off[nWindows-1] (with the number and brackets)

The measurements for each time specified in the accompanying system file under Receiver Window Times

+
+
+

Optional columns

+

If any loop orientation columns are omitted the loop is assumed to be horizontal.

+
+
TxPitch

Pitch of the transmitter loop

+
+
TxRoll

Roll of the transmitter loop

+
+
TxYaw

Yaw of the transmitter loop

+
+
RxPitch

Pitch of the receiver loop

+
+
RxRoll

Roll of the receiver loop

+
+
RxYaw

Yaw of the receiver loop

+
+
OffErr[0] to ErrOff[nWindows-1]

Error estimates for the data

+
+
+
+

See also

+

INFORMATION

+
+
+ +
+
+property relative_error
+

The data.

+
+ +
+
+property std
+

The data.

+
+ +
+
+property summary
+

Display a summary of the Data

+
+ +
+
+writeHdf(parent, name, withPosterior=True)
+

Write the StatArray to an HDF object +parent: Upper hdf file or group +myName: object hdf name. Assumes createHdf has already been called +create: optionally create the data set as well before writing

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/data/dataset/FdemData.html b/docs/content/api/classes/data/dataset/FdemData.html new file mode 100644 index 00000000..09731445 --- /dev/null +++ b/docs/content/api/classes/data/dataset/FdemData.html @@ -0,0 +1,478 @@ + + + + + + + FdemData — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

FdemData

+Inheritance diagram: ABC -> myObject -> Point -> Data -> FdemData

@FdemData_Class +Module describing an EMData Set where channels are associated with an xyz co-ordinate

+
+
+class geobipy.src.classes.data.dataset.FdemData.FdemData(system=None, **kwargs)
+

Class extension to geobipy.Data defining a Fourier domain electromagnetic data set

+

FdemData(nPoints, nFrequencies, system)

+
+
Parameters:
+
    +
  • nPoints (int, optional) – Number of observations in the data set

  • +
  • nFrequencies (int, optional) – Number of measurement frequencies

  • +
  • system (str or geobipy.FdemSystem, optional) –

      +
    • If str: Must be a file name from which to read FD system information.

    • +
    • If FdemSystem: A deepcopy is made.

    • +
    +

  • +
+
+
Returns:
+

out – Contains x, y, z, elevation, and data values for a frequency domain dataset.

+
+
Return type:
+

FdemData

+
+
+

Notes

+

FdemData.read() requires a data filename and a system class or system filename to be specified. +The data file is structured using columns with the first line containing header information. +The header should contain the following entries +Line [ID or FID] [X or N or northing] [Y or E or easting] [Z or DTM or dem_elev] [Alt or Laser or bheight] [I Q] … [I Q] +Do not include brackets [] +[I Q] are the in-phase and quadrature values for each measurement frequency.

+

If a system filename is given, it too is structured using columns with the first line containing header information +Each subsequent row contains the information for each measurement frequency

+

freq tor tmom tx ty tz ror rmom rx ry rz +378 z 1 0 0 0 z 1 7.93 0 0 +1776 z 1 0 0 0 z 1 7.91 0 0 +…

+

where tor and ror are the orientations of the transmitter/receiver loops [x or z]. +tmom and rmom are the moments of the loops. +t/rx,y,z are the loop offsets from the observation locations in the data file.

+
+
+Bcast(world, root=0)
+

Broadcast the FdemData using MPI

+
+
Parameters:
+

world (mpi4py.MPI.COMM_WORLD) – MPI communicator

+
+
Returns:
+

out – A copy of the data on each core

+
+
Return type:
+

geobipy.FdemData

+
+
+

Examples

+
>>> from mpi4py import MPI
+>>> from geobipy import FdemData
+
+
+
>>> world = MPI.COMM_WORLD
+
+
+
>>> rank = world.rank
+
+
+
>>> if (rank == 0): # Only the master reads in the data
+>>>     D = FdemData()
+>>>     D.read(dataFile, systemFile)
+>>> else:
+>>>     D = FdemData() # Must instantiate an empty object to Bcast
+
+
+
>>> D2 = D.Bcast(world)
+
+
+
+ +
+
+Scatterv(starts, chunks, world, root=0)
+

Distributes the FdemData between all cores using MPI

+
+
Parameters:
+
    +
  • starts (array of ints) – 1D array of ints with size equal to the number of MPI ranks. Each element gives the starting index for a chunk to be sent to that core. e.g. starts[0] is the starting index for rank = 0.

  • +
  • chunks (array of ints) – 1D array of ints with size equal to the number of MPI ranks. Each element gives the size of a chunk to be sent to that core. e.g. chunks[0] is the chunk size for rank = 0.

  • +
  • world (mpi4py.MPI.COMM_WORLD) – The MPI communicator

  • +
+
+
Returns:
+

out – The data distributed amongst cores

+
+
Return type:
+

geobipy.FdemData

+
+
+

Examples

+
>>> from mpi4py import MPI
+>>> from geobipy import FdemData
+>>> import numpy as np
+
+
+
>>> world = MPI.COMM_WORLD
+
+
+
>>> rank = world.rank
+
+
+
>>> if (rank == 0): # Only the master reads in the data
+>>>     D = FdemData()
+>>>     D.read(dataFile, systemFile)
+>>> else:
+>>>     D = FdemData() # Must instantiate an empty object to Bcast
+
+
+
>>> # In this example, assume there are 10 data and 4 cores
+>>> start = asarray([0, 2, 4, 6])
+>>> chunks = asarray([2, 2, 2, 4])
+
+
+
>>> D2 = D.Scatterv(start, chunks, world)
+
+
+
+ +
+
+append(other)
+

Append pointclouds together

+
+
Parameters:
+

other (geobipy.PointCloud3D) – 3D pointcloud

+
+
+
+ +
+
+createHdf(parent, myName, withPosterior=True, fillvalue=None)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+datapoint(index=None, fiducial=None)
+

Get the ith data point from the data set

+
+
Parameters:
+
    +
  • index (int, optional) – Index of the data point to get.

  • +
  • fiducial (float, optional) – Fiducial of the data point to get.

  • +
+
+
Returns:
+

out – The data point.

+
+
Return type:
+

geobipy.FdemDataPoint

+
+
Raises:
+

Exception – If neither an index or fiducial are given.

+
+
+
+ +
+
+fileInformation()
+

Description of the data file.

+
+ +
+
+classmethod fromHdf(grp, **kwargs)
+

Reads the object from a HDF group

+
+ +
+
+getFrequency(channel, system=0)
+

Return the measurement frequency of the channel

+
+
Parameters:
+
    +
  • channel (int) – Channel number

  • +
  • system (int, optional) – System number

  • +
+
+
Returns:
+

out – The measurement frequency of the channel

+
+
Return type:
+

float

+
+
+
+ +
+
+getMeasurementType(channel, system=0)
+

Returns the measurement type of the channel

+
+
Parameters:
+
    +
  • channel (int) – Channel number

  • +
  • system (int, optional) – System number

  • +
+
+
Returns:
+

out – Either “In-Phase “ or “Quadrature “

+
+
Return type:
+

str

+
+
+
+ +
+
+property nActiveData
+

Number of active data per data point.

+

For each data point, counts the number of channels that are NOT nan.

+
+
Returns:
+

out – Number of active data

+
+
Return type:
+

int

+
+
+
+ +
+
+plotLine(line, system=0, x='index', **kwargs)
+

Plot the specified line

+
+ +
+
+plot_data(x='index', channels=None, **kwargs)
+

Plots the specified channels as a line plot.

+

Plots the channels along a specified co-ordinate e.g. ‘x’. A legend is auto generated.

+
+
Parameters:
+
    +
  • xAxis (str) – If xAxis is ‘index’, returns numpy.arange(self.nPoints) +If xAxis is ‘x’, returns self.x +If xAxis is ‘y’, returns self.y +If xAxis is ‘z’, returns self.z +If xAxis is ‘r2d’, returns cumulative distance along the line in 2D using x and y. +If xAxis is ‘r3d’, returns cumulative distance along the line in 3D using x, y, and z.

  • +
  • channels (ints, optional) – The indices of the channels to plot. All are plotted if channels is None.

  • +
  • legend (bool) – Attach a legend to the plot. Default is True.

  • +
+
+
Returns:
+

    +
  • ax (matplotlib.axes) – Plot axes handle

  • +
  • legend (matplotlib.legend.Legend) – The attached legend.

  • +
+

+
+
+
+

See also

+
+
geobipy.plotting.plot

For additional keyword arguments

+
+
+
+
+ +
+
+readAarhusFile(dataFilename)
+

Read in frequency domain data from an Aarhus workbench file.

+
+
Parameters:
+

dataFilename (str) – The data file.

+
+
+
+ +
+
+classmethod read_csv(dataFilename, system)
+

Read in both the Fdem data and FDEM system files

+

The data file is structured using columns with the first line containing header information. +The header should contain the following entries +Line [ID or FID] [X or N or northing] [Y or E or easting] [Z or DTM or dem_elev] [Alt or Laser or bheight] [I Q] … [I Q] +Do not include brackets [] +[I Q] are the in-phase and quadrature values for each measurement frequency.

+

If a system filename is given, it too is structured using columns with the first line containing header information +Each subsequent row contains the information for each measurement frequency

+

freq tor tmom tx ty tz ror rmom rx ry rz +378 z 1 0 0 0 z 1 7.93 0 0 +1776 z 1 0 0 0 z 1 7.91 0 0 +…

+

where tor and ror are the orientations of the transmitter/receiver loops [x or z]. +tmom and rmom are the moments of the loops. +t/rx,y,z are the loop offsets from the observation locations in the data file.

+
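A sketch of reading a frequency domain data set, using hypothetical file names laid out as described above:

>>> from geobipy import FdemData
>>> # The system argument may be a file name or an FdemSystem instance
>>> FD = FdemData.read_csv('fdem_data.csv', 'fdem_system.stm')
>>> print(FD.nPoints)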
+ +
+
+single
+

alias of FdemDataPoint

+
+ +
+
+property std
+

The data.

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/data/dataset/TdemData.html b/docs/content/api/classes/data/dataset/TdemData.html new file mode 100644 index 00000000..f3cda679 --- /dev/null +++ b/docs/content/api/classes/data/dataset/TdemData.html @@ -0,0 +1,457 @@ + + + + + + + TdemData — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

TdemData

+Inheritance diagram: ABC -> myObject -> Point -> Data -> TdemData
+
+class geobipy.src.classes.data.dataset.TdemData.TdemData(system=None, **kwargs)
+

Time domain electromagnetic data set

+

A time domain data set with easting, northing, height, and elevation values. Each sounding in the data set can be given a receiver and transmitter loop.

+

TdemData(nPoints=1, nTimes=[1], nSystems=1)

+
+
Parameters:
+
    +
  • nPoints (int, optional) – Number of soundings in the data file

  • +
  • nTimes (array of ints, optional) – Array of size nSystemsx1 containing the number of time gates in each system

  • +
  • nSystem (int, optional) – Number of measurement systems

  • +
+
+
Returns:
+

out – Time domain data set

+
+
Return type:
+

TdemData

+
+
+
+

See also

+
+
read()

For information on file format

+
+
+
+
+
+Bcast(world, root=0, system=None)
+

Broadcast the TdemData using MPI

+
+ +
+
+Scatterv(starts, chunks, world, root=0, system=None)
+

Scatterv the TdemData using MPI

+
+ +
+
+append(other)
+

Append pointclouds together

+
+
Parameters:
+

other (geobipy.PointCloud3D) – 3D pointcloud

+
+
+
+ +
+
+createHdf(parent, myName, withPosterior=True, fillvalue=None)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+property data
+

The data.

+
+ +
+
+datapoint(index=None, fiducial=None)
+

Get the ith data point from the data set

+
+
Parameters:
+
    +
  • index (int, optional) – Index of the data point to get.

  • +
  • fiducial (float, optional) – Fiducial of the data point to get.

  • +
+
+
Returns:
+

out – The data point.

+
+
Return type:
+

geobipy.FdemDataPoint

+
+
Raises:
+

Exception – If neither an index or fiducial are given.

+
+
+
+ +
+
+estimateAdditiveError()
+

Uses the late times after 1ms to estimate the additive errors and error bounds in the data.

+
+ +
+
+static fileInformation()
+

Description of PointCloud3D file.

+
+
Returns:
+

out – File description.

+
+
Return type:
+

str

+
+
+
+ +
+
+classmethod fromHdf(grp, **kwargs)
+

Reads the object from a HDF group

+
+ +
+
+mapChannel(channel, system=0, *args, **kwargs)
+

Create a map of the specified data channel

+
+ +
+
+property nPoints
+

Get the number of points

+
+ +
+
+off_time(system=0)
+

Obtain the times from the system file

+
+ +
+
+pcolor(component=0, system=0, yAxis='index', **kwargs)
+

Plot the data in the given system as a 2D array

+
+ +
+
+plot_data(*args, **kwargs)
+

Plots the data

+
+
Parameters:
+
    +
  • system (int) – System to plot

  • +
  • channels (sequence of ints) – Channels to plot

  • +
+
+
+
+ +
+
+plot_predicted(*args, **kwargs)
+

Plots the specified predicted data channels as a line plot.

+

Plots the channels along a specified co-ordinate e.g. ‘x’. A legend is auto generated.

+
+
Parameters:
+
    +
  • xAxis (str) – If xAxis is ‘index’, returns numpy.arange(self.nPoints) +If xAxis is ‘x’, returns self.x +If xAxis is ‘y’, returns self.y +If xAxis is ‘z’, returns self.z +If xAxis is ‘r2d’, returns cumulative distance along the line in 2D using x and y. +If xAxis is ‘r3d’, returns cumulative distance along the line in 3D using x, y, and z.

  • +
  • channels (ints, optional) – Indices of the channels to plot. All are plotted if None +* If system is None, 0 <= channel < self.nChannels else 0 <= channel < self.nChannelsPerSystem[system]

  • +
  • system (int, optional) – The system to obtain the channel from.

  • +
  • noLegend (bool) – Do not attach a legend to the plot. Default is False, a legend is attached.

  • +
+
+
Returns:
+

    +
  • ax (matplotlib.axes) – Plot axes handle

  • +
  • legend (matplotlib.legend.Legend) – The attached legend.

  • +
+

+
+
+
+

See also

+
+
geobipy.plotting.plot

For additional keyword arguments

+
+
+
+
+ +
+
+property predicted_primary_field
+

The data.

+
+ +
+
+property predicted_secondary_field
+

The data.

+
+ +
+
+property primary_field
+

The data.

+
+ +
+
+classmethod read_csv(data_filename, system)
+

Reads the data and system parameters from file

+
+
Parameters:
+
    +
  • dataFilename (str or list of str) – Time domain data file names

  • +
  • systemFilename (str or list of str) – Time domain system file names

  • +
+
+
+

Notes

+

File Format

+

The data columns are read in according to the column names in the first line. +The header line should contain at least the following column names. +Extra columns may exist, but will be ignored. +In this description, the column name or its alternatives are given followed by what the name represents. +Optional columns are also described.

+

Required columns

+
+
line

Line number for the data point

+
+
id or fid

Id number of the data point, these must be unique

+
+
x or northing or n

Northing co-ordinate of the data point

+
+
y or easting or e

Easting co-ordinate of the data point

+
+
z or dtm or dem_elev or dem_np or topo

Elevation of the ground at the data point

+
+
alt or laser or bheight

Altitude of the transmitter coil

+
+
Off[0] to Off[nWindows-1] (with the number and brackets)

The measurements for each time specified in the accompanying system file under Receiver Window Times

+
+
+

Optional columns

+

If any loop orientation columns are omitted the loop is assumed to be horizontal.

+
+
TxPitch

Pitch of the transmitter loop

+
+
TxRoll

Roll of the transmitter loop

+
+
TxYaw

Yaw of the transmitter loop

+
+
RxPitch

Pitch of the receiver loop

+
+
RxRoll

Roll of the receiver loop

+
+
RxYaw

Yaw of the receiver loop

+
+
OffErr[0] to ErrOff[nWindows-1]

Error estimates for the data

+
+
+
+

See also

+

INFORMATION

+
+
+ +
+
+property secondary_field
+

The data.

+
+ +
+
+single
+

alias of TdemDataPoint

+
+ +
+
+property std
+

The data.

+
+ +
+
+property summary
+

Display a summary of the Data

+
+ +
+
+writeHdf(parent, name, withPosterior=True)
+

Write the StatArray to an HDF object +parent: Upper hdf file or group +myName: object hdf name. Assumes createHdf has already been called +create: optionally create the data set as well before writing

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/data/dataset/TempestData.html b/docs/content/api/classes/data/dataset/TempestData.html new file mode 100644 index 00000000..8856eb91 --- /dev/null +++ b/docs/content/api/classes/data/dataset/TempestData.html @@ -0,0 +1,375 @@ + + + + + + + TempestData — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

TempestData

+Inheritance diagram: ABC -> myObject -> Point -> Data -> TdemData -> TempestData
+
+class geobipy.src.classes.data.dataset.TempestData.TempestData(*args, **kwargs)
+

Time domain electromagnetic data set

+

A time domain data set with easting, northing, height, and elevation values. Each sounding in the data set can be given a receiver and transmitter loop.

+

TdemData(nPoints=1, nTimes=[1], nSystems=1)

+
+
Parameters:
+
    +
  • nPoints (int, optional) – Number of soundings in the data file

  • +
  • nTimes (array of ints, optional) – Array of size nSystemsx1 containing the number of time gates in each system

  • +
  • nSystem (int, optional) – Number of measurement systems

  • +
+
+
Returns:
+

out – Time domain data set

+
+
Return type:
+

TdemData

+
+
+
+

See also

+
+
read()

For information on file format

+
+
+
+
+
+property additive_error
+

The data.

+
+ +
+
+createHdf(parent, myName, withPosterior=True, fillvalue=None)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+classmethod fromHdf(grp, **kwargs)
+

Reads the object from a HDF group

+
+ +
+
+plot_data(system=0, channels=None, x='index', **kwargs)
+

Plots the data

+
+
Parameters:
+
    +
  • system (int) – System to plot

  • +
  • channels (sequence of ints) – Channels to plot

  • +
+
+
+
+ +
+
+plot_predicted(system=0, channels=None, xAxis='index', **kwargs)
+

Plots the data

+
+
Parameters:
+
    +
  • system (int) – System to plot

  • +
  • channels (sequence of ints) – Channels to plot

  • +
+
+
+
+ +
+
+classmethod read_csv(data_filename, system_filename)
+

Reads the data and system parameters from file

+
+
Parameters:
+
    +
  • dataFilename (str or list of str) – Time domain data file names

  • +
  • systemFilename (str or list of str) – Time domain system file names

  • +
+
+
+

Notes

+

File Format

+

The data columns are read in according to the column names in the first line. +The header line should contain at least the following column names. +Extra columns may exist, but will be ignored. In this description, +the column name or its alternatives are given followed by what the name represents. +Optional columns are also described.

+

Required columns

+
+
line

Line number for the data point

+
+
id or fid

Id number of the data point, these must be unique

+
+
x or northing or n

Northing co-ordinate of the data point

+
+
y or easting or e

Easting co-ordinate of the data point

+
+
z or dtm or dem_elev or dem_np or topo

Elevation of the ground at the data point

+
+
alt or laser or bheight

Altitude of the transmitter coil

+
+
Off[0] to Off[nWindows-1] (with the number and brackets)

The measurements for each time specified in the accompanying system file under Receiver Window Times

+
+
+

Optional columns

+

If any loop orientation columns are omitted the loop is assumed to be horizontal.

+
+
TxPitch

Pitch of the transmitter loop

+
+
TxRoll

Roll of the transmitter loop

+
+
TxYaw

Yaw of the transmitter loop

+
+
RxPitch

Pitch of the receiver loop

+
+
RxRoll

Roll of the receiver loop

+
+
RxYaw

Yaw of the receiver loop

+
+
OffErr[0] to ErrOff[nWindows-1]

Error estimates for the data

+
+
+
+

See also

+

INFORMATION

+
+
+ +
+
+classmethod read_netcdf(dataFilename, systemFilename, indices=None)
+

Reads the data and system parameters from file

+
+
Parameters:
+
    +
  • dataFilename (str or list of str) – Time domain data file names

  • +
  • systemFilename (str or list of str) – Time domain system file names

  • +
+
+
+

Notes

+

File Format +The data columns are read in according to the column names in the first line. +The header line should contain at least the following column names. +Extra columns may exist, but will be ignored. In this description, the column name or its +alternatives are given followed by what the name represents. Optional columns are also described.

+

Required columns

+
+
line

Line number for the data point

+
+
id or fid

Id number of the data point, these must be unique

+
+
x or northing or n

Northing co-ordinate of the data point

+
+
y or easting or e

Easting co-ordinate of the data point

+
+
z or dtm or dem_elev or dem_np or topo

Elevation of the ground at the data point

+
+
alt or laser or bheight

Altitude of the transmitter coil

+
+
Off[0] to Off[nWindows-1] (with the number and brackets)

The measurements for each time specified in the accompanying system file under Receiver Window Times

+
+
+

Optional columns +If any loop orientation columns are omitted the loop is assumed to be horizontal.

+
+
TxPitch

Pitch of the transmitter loop

+
+
TxRoll

Roll of the transmitter loop

+
+
TxYaw

Yaw of the transmitter loop

+
+
RxPitch

Pitch of the receiver loop

+
+
RxRoll

Roll of the receiver loop

+
+
RxYaw

Yaw of the receiver loop

+
+
OffErr[0] to ErrOff[nWindows-1]

Error estimates for the data

+
+
+
+

See also

+

INFORMATION

+
+
+ +
+
+property relative_error
+

The data.

+
+ +
+
+single
+

alias of Tempest_datapoint

+
+ +
+
+writeHdf(parent, name, withPosterior=True)
+

Write the StatArray to an HDF object +parent: Upper hdf file or group +myName: object hdf name. Assumes createHdf has already been called +create: optionally create the data set as well before writing

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/data/dataset/dataset.html b/docs/content/api/classes/data/dataset/dataset.html new file mode 100644 index 00000000..b4d808e6 --- /dev/null +++ b/docs/content/api/classes/data/dataset/dataset.html @@ -0,0 +1,149 @@ + + + + + + + Dataset classes — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Dataset classes

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/mesh/RectilinearMesh1D.html b/docs/content/api/classes/mesh/RectilinearMesh1D.html new file mode 100644 index 00000000..024e8fcf --- /dev/null +++ b/docs/content/api/classes/mesh/RectilinearMesh1D.html @@ -0,0 +1,581 @@ + + + + + + + RectilinearMesh1D — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

RectilinearMesh1D

+Inheritance diagram: ABC -> myObject -> Mesh -> RectilinearMesh1D

@RectilinearMesh1D_Class +Module describing a 1D Rectilinear Mesh class

+
+
+class geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D(centres=None, edges=None, widths=None, log=None, relative_to=None, dimension=0)
+

Class defining a 1D rectilinear mesh with cell centres and edges.

+

Contains a simple 1D mesh with cell edges, widths, and centre locations.

+

RectilinearMesh1D(centres, edges, edgesMin, edgesMax)

+
+
Parameters:
+
    +
  • centres (geobipy.DataArray, optional) – The locations of the centre of each cell. Only centres, edges, or widths can be given.

  • +
  • edges (geobipy.DataArray, optional) – The locations of the edges of each cell, including the outermost edges. Only centres, edges, or widths can be given.

  • +
  • widths (geobipy.DataArray, optional) – The widths of the cells.

  • +
  • log ('e' or float, optional) – Entries are given in linear space, but internally cells are logged. +Plotting is in log space.

  • +
  • relative_to (float, optional) – If a float is given, updates will be relative to this value.

  • +
+
+
Returns:
+

out – The 1D mesh.

+
+
Return type:
+

RectilinearMesh1D

+
+
Raises:
+
    +
  • Exception – If both centres and edges are given.

  • +
  • TypeError – centres must be a geobipy.DataArray.

  • +
  • TypeError – edges must be a geobipy.DataArray.

  • +
+
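A minimal sketch of building a 1D mesh from cell edges, mirroring the 2D construction shown elsewhere in the documentation (the depth values are illustrative):

>>> import numpy as np
>>> from geobipy import StatArray, RectilinearMesh1D
>>> edges = StatArray(np.linspace(0.0, 10.0, 11), 'Depth', 'm')
>>> mesh = RectilinearMesh1D(edges=edges)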
+
+
+
+cellIndex(values, clip=False, trim=False, **kwargs)
+

Get the index to the cell that each value in values falls in.

+
+
Parameters:
+
    +
  • values (array_like) – The values to find the cell indices for

  • +
  • clip (bool) – A negative index which would normally wrap will clip to 0 and self.bins.size instead.

  • +
  • trim (bool) – Do not include out of axis indices. Negates clip, since they won't be included in the output.

  • +
+
+
Returns:
+

out – The cell indices

+
+
Return type:
+

array_like

+
+
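For example, assuming mesh is a RectilinearMesh1D such as the one built above:

>>> # Find which cell each value falls into; clip keeps out-of-bounds values in the end cells
>>> indices = mesh.cellIndex([0.5, 3.2, 9.9], clip=True)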
+
+ +
+
+createHdf(parent, name, withPosterior=True, add_axis=None, fillvalue=None, upcast=True)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+delete_edge(i, values=None)
+

Delete an edge from the mesh

+
+
Parameters:
+

i (int) – The edge to remove.

+
+
Returns:
+

out – Mesh with edge removed.

+
+
Return type:
+

RectilinearMesh1D

+
+
+
+ +
+
+classmethod fromHdf(grp, index=None, skip_posterior=False)
+

Reads in the object from a HDF file

+
+ +
+
+gradient(values)
+

Compute the gradient

+

Parameter gradient \(\nabla_{z}\sigma\) at the ith layer is computed via

+
+(1)\[\nabla_{z}^{i}\sigma = \frac{\sigma_{i+1} - \sigma_{i}}{h_{i} - h_{min}}\]
+

where \(\sigma_{i+1}\) and \(\sigma_{i}\) are the log-parameters on either side of an interface, \(h_{i}\) is the log-thickness of the ith layer, and \(h_{min}\) is the minimum log thickness defined by

+
+(2)\[h_{min} = \frac{z_{max} - z_{min}}{2 k_{max}}\]
+

where \(k_{max}\) is a maximum number of layers, set to be far greater than the expected final solution.

+
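A worked sketch of equations (1) and (2) in plain NumPy, assuming sigma holds the log-parameters and h the log-thicknesses of each layer. The numbers are illustrative and this is not the library implementation:

>>> import numpy as np
>>> sigma = np.log(np.asarray([0.01, 0.05, 0.02]))   # log-parameters per layer
>>> h = np.log(np.asarray([10.0, 20.0, 30.0]))       # log-thicknesses per layer
>>> z_min, z_max, k_max = np.log(1.0), np.log(100.0), 30
>>> h_min = (z_max - z_min) / (2.0 * k_max)                  # equation (2)
>>> gradient = (sigma[1:] - sigma[:-1]) / (h[:-1] - h_min)   # equation (1)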
+ +
+
+hdfName()
+

Reproducibility procedure

+
+ +
+
+in_bounds(values)
+

Return whether values are inside the cell edges

+
+
Parameters:
+

values (array_like) – Check if these are inside left <= values < right.

+
+
Returns:
+

out – Are the values inside.

+
+
Return type:
+

bools

+
+
+
+ +
+
+insert_edge(value, values=None)
+

Insert a new edge.

+
+
Parameters:
+

value (numpy.float64) – Location at which to insert a new edge

+
+
Returns:
+

out – Mesh with inserted edge.

+
+
Return type:
+

geobipy.RectilinearMesh1D

+
+
+
+ +
+
+map_to_pdf(distribution, pdf, log=False, axis=0)
+

Creates a Hitmap from the model given the variance of each layer.

+

For each depth, creates a normal distribution with a mean equal to the interpolated parameter +at that depth and variance specified with variance.

+
+
Parameters:
+
    +
  • variance (array_like) – The variance of each layer

  • +
  • Hitmap (geobipy.Hitmap) – Hitmap to convert the model to. +Must be instantiated before calling so that the model can be interpolated correctly

  • +
+
+
+
+ +
+
+mask_cells(distance, values=None)
+

Mask cells by a distance.

+

If the edges of the cell are further than distance away, extra cells are inserted such that +the cell’s new edges are at distance away from the centre.

+
+
Parameters:
+
    +
  • distance (float) – Distance to mask

  • +
  • values (array_like, optional) – If given, values will be remapped to the masked mesh.

  • +
+
+
Returns:
+

    +
  • out (RectilinearMesh1D) – Masked mesh

  • +
  • indices (ints) – Location of the original centres in the expanded mesh

  • +
  • out_values (array_like, optional) – If values is given, values will be remapped to the masked mesh.

  • +
+

+
+
+
+ +
+
+pad(size)
+

Copies the properties of a mesh including all priors or proposals, but pads memory to the given size

+
+
Parameters:
+

size (int) – Create memory up to this size.

+
+
Returns:
+

out – Padded mesh

+
+
Return type:
+

RectilinearMesh1D

+
+
+
+ +
+
+pcolor(values, **kwargs)
+

Create a pseudocolour plot.

+

Can take any other matplotlib arguments and keyword arguments e.g. cmap etc.

+
+
Parameters:
+
    +
  • values (array_like) – The value of each cell.

  • +
  • alpha (scalar or array_like, optional) – If alpha is scalar, behaves like standard matplotlib alpha and opacity is applied to entire plot +If array_like, each pixel is given an individual alpha value.

  • +
  • log ('e' or float, optional) – Take the log of the colour to a base. ‘e’ if log = ‘e’, and a number e.g. log = 10. +Values in c that are <= 0 are masked.

  • +
  • equalize (bool, optional) – Equalize the histogram of the colourmap so that all colours have an equal amount.

  • +
  • nbins (int, optional) – Number of bins to use for histogram equalization.

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’

  • +
  • yscale (str, optional) – Scale the y axis? e.g. yscale = ‘linear’ or ‘log’.

  • +
  • flipX (bool, optional) – Flip the X axis

  • +
  • flipY (bool, optional) – Flip the Y axis

  • +
  • grid (bool, optional) – Plot the grid

  • +
  • noColorbar (bool, optional) – Turn off the colour bar, useful if multiple plotting plotting routines are used on the same figure.

  • +
  • trim (bool, optional) – Set the x and y limits to the first and last non zero values along each axis.

  • +
+
+
+
+

See also

+
+
geobipy.plotting.pcolor

For non matplotlib keywords.

+
+
matplotlib.pyplot.pcolormesh

For additional keyword arguments you may use.

+
+
+
+
+ +
+
+perturb(values=None)
+

Perturb the mesh

+

Generates a new mesh by perturbing the current mesh based on four probabilities. +The probabilities correspond to +* Birth, the insertion of a new interface +* Death, the deletion of an interface +* Change, change one of the existing interfaces +* No change, do nothing and return the original

+

The methods set_priors and set_proposals must be used before calling self.perturb.

+

If an interface is created, or an interface perturbed, any resulting cell width must be greater than the minimum width \(h_{min}\). +If the new cell width test fails, the birth or perturbation tries again. +If the cycle fails after 10 tries, the entire process begins again so that a death or no change is possible, thus preventing never-ending cycles.

+
+
Returns:
+

out – The perturbed mesh

+
+
Return type:
+

RectilinearMesh1D

+
+
+
+

See also

+
+
RectilinearMesh1D.set_priors

Must be used before calling self.perturb

+
+
RectilinearMesh1D.setProposals

Must be used before calling self.perturb

+
+
+
+
+ +
+
+piecewise_constant_interpolate(values, other, bound=False, axis=0)
+

Interpolate values of the cells to another RectilinearMesh in a piecewise constant manner.

+
+
Parameters:
+
    +
  • values (geobipy.StatArray) – The values to interpolate. Has size self.nCells

  • +
  • mesh (geobipy.RectilinearMeshND for N = 1, 2, 3.) – A mesh to interpolate to. +If 2D, axis must be given to specify which axis to interpolate against.

  • +
  • bound (bool, optional) – Interpolated values above the top of the model are nan.

  • +
  • axis (int, optional) – Axis to interpolate the value to.

  • +
+
+
Returns:
+

out – The interpolated values at the cell centres of the other mesh.

+
+
Return type:
+

array

+
+
+
+ +
+
+plot(values, **kwargs)
+

Plots values using the mesh as a line

+
+
Parameters:
+
    +
  • reciprocateX (bool, optional) – Take the reciprocal of the x axis

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’

  • +
  • yscale (str, optional) – Scale the y axis? e.g. yscale = ‘linear’ or ‘log’

  • +
  • flipX (bool, optional) – Flip the X axis

  • +
  • flipY (bool, optional) – Flip the Y axis

  • +
  • noLabels (bool, optional) – Do not plot the labels

  • +
+
+
+
+ +
+
+plot_grid(**kwargs)
+

Plot the grid lines of the mesh.

+
+

See also

+
+
geobipy.StatArray.pcolor

For additional plotting arguments

+
+
+
+
+ +
+
+property probability
+

Evaluate the prior probability for the mesh.

+

The following equation describes the components of the prior that correspond to the Model1D,

+
+\[p(k | I)p(\boldsymbol{e}| k, I),\]
+

where \(k, I, \boldsymbol{e}\) are the number of cells, prior information, edge location respectively.

+

The multiplication here can be turned into a summation by taking the log of the components.

+
+
Parameters:
+

components (bool, optional) – Return all components used in the final probability as well as the final probability

+
+
Returns:
+

    +
  • probability (numpy.float64) – The probability

  • +
  • components (array_like, optional) – Return the components of the probability, i.e. the individually evaluated priors, as a second return argument if components=True on input.

  • +
+

+
+
+
+ +
+
+property range
+

Get the difference between end edges.

+
+ +
+
+set_priors(n_cells_prior=None, edges_prior=None, **kwargs)
+

Setup the priors of the mesh.

+

By default the following priors are set unless explicitly specified.

+

Prior on the number of cells

+

Uninformative prior using a uniform distribution.

+
+(3)\[p(k | I) = +\begin{cases} +\frac{1}{k_{max} - 1} & \quad 1 \leq k \leq k_{max} \newline +0 & \quad otherwise +\end{cases}.\]
+

Prior on the cell edges

+

We use order statistics for the prior on cell edges.

+
+(4)\[p(\boldsymbol{e} | k, I) = \frac{(k -1)!}{\prod_{i=0}^{k-1} \Delta e_{i}},\]
+

where the numerator describes the number of ways that \((k - 1)\) interfaces can be ordered and +\(\Delta e_{i} = (e_{max} - e_{min}) - 2 i h_{min}\) describes the interval that is available to place an edge when there are already i edges in the model

+
+
Parameters:
+
    +
  • min_edge (float64) – Minimum edge possible

  • +
  • max_edge (float64) – Maximum edge possible

  • +
  • max_cells (int) – Maximum number of cells allowable

  • +
  • min_width (float64, optional) – Minimum width of any layer. If min_width = None, min_width is computed from min_edge, max_edge, and max_cells (recommended).

  • +
  • prng (numpy.random.RandomState(), optional) – Random number generator, if none is given, will use numpy’s global generator.

  • +
  • n_cells_prior (geobipy.Distribution, optional) – Distribution describing the prior on the number of cells. Overrides the default.

  • +
  • edge_prior (geobipy.Distribution, optional) – Distribution describing the prior on the cell edges. Overrides the default.

  • +
+
+
+
+

See also

+
+
RectilinearMesh1D.perturb

For a description of the perturbation cycle.

+
+
+
+
+ +
+
+set_proposals(probabilities, **kwargs)
+

Set up the proposal distribution.

+
+
Parameters:
+
    +
  • probabilities (array_like) – Probability of birth, death, perturb, and no change for the model +e.g. probabilities = [0.5, 0.25, 0.15, 0.1]

  • +
  • parameterProposal (geobipy.Distribution) – The proposal distribution for the parameter.

  • +
  • prng (numpy.random.RandomState(), optional) – Random number generator, if none is given, will use numpy’s global generator.

  • +
+
+
+
+

See also

+
+
geobipy.Model1D.perturb

For a description of the perturbation cycle.

+
+
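A sketch of the full perturbation cycle described above, assuming mesh is a RectilinearMesh1D and that the keyword names follow the parameter lists shown here:

>>> import numpy as np
>>> prng = np.random.RandomState()
>>> # Priors on the number of cells and on the edge locations are built from these bounds
>>> mesh.set_priors(min_edge=1.0, max_edge=150.0, max_cells=30, prng=prng)
>>> # Probabilities of birth, death, perturb, and no change
>>> mesh.set_proposals(probabilities=[0.25, 0.25, 0.25, 0.25], prng=prng)
>>> perturbed = mesh.perturb()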
+
+
+ +
+
+property summary
+

Summary of self

+
+ +
+
+unperturb()
+

After a mesh has had its structure perturbed, remap back to its previous state. Used for the reversible jump McMC step.

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/mesh/RectilinearMesh2D.html b/docs/content/api/classes/mesh/RectilinearMesh2D.html new file mode 100644 index 00000000..b9c92c1f --- /dev/null +++ b/docs/content/api/classes/mesh/RectilinearMesh2D.html @@ -0,0 +1,591 @@ + + + + + + + RectilinearMesh2D — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

RectilinearMesh2D

+[Inheritance diagram: ABC → myObject → Mesh → RectilinearMesh2D]

@RectilinearMesh2D_Class +Module describing a 2D Rectilinear Mesh class with x and y axes specified

+
+
+class geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D(x=None, y=None, **kwargs)
+

Class defining a 2D rectilinear mesh with cell centres and edges.

+

Contains a simple 2D mesh with cell edges, widths, and centre locations. There are two ways of instantiating the RectilinearMesh2D. The first is by specifying the x and y cell centres or edges. In this case, the abscissa is the standard x axis, and y is the ordinate. The z co-ordinates are None. The second is by specifying the x, y, and z cell centres or edges. In this case, the mesh is a 2D plane with the ordinate parallel to z, and the “horizontal” locations have co-ordinates (x, y). This allows you to, for example, create a vertical 2D mesh that is not parallel to either the x or y axis, like a typical line of data. If x, y, and z are specified, plots can be made against distance, which is calculated cumulatively between points.

+

RectilinearMesh2D([x_centres or x_edges], [y_centres or y_edges], [z_centres or z_edges])

+
+
Parameters:
+
    +
  • x (geobipy.RectilinearMesh1D, optional) – text

  • +
  • y (float, optional) – text

  • +
  • z (geobipy.RectilinearMesh1D, optional) – text

  • +
  • relative_to (geobipy.RectilinearMesh1D, optional) – text

  • +
  • x_centres (geobipy.StatArray, optional) – The locations of the centre of each cell in the “x” direction. Only x_centres or x_edges can be given.

  • +
  • x_edges (geobipy.StatArray, optional) – The locations of the edges of each cell, including the outermost edges, in the “x” direction. Only x_centres or x_edges can be given.

  • +
  • y_centres (geobipy.StatArray, optional) – The locations of the centre of each cell in the “y” direction. Only y_centres or y_edges can be given.

  • +
  • y_edges (geobipy.StatArray, optional) – The locations of the edges of each cell, including the outermost edges, in the “y” direction. Only y_centres or y_edges can be given.

  • +
  • z_centres (geobipy.StatArray, optional) – The locations of the centre of each cell in the “z” direction. Only z_centres or z_edges can be given.

  • +
  • z_edges (geobipy.StatArray, optional) – The locations of the edges of each cell, including the outermost edges, in the “z” direction. Only z_centres or z_edges can be given.

  • +
  • [x (float, optional) – See geobipy.RectilinearMesh1D for edgesMin description.

  • +
  • y – See geobipy.RectilinearMesh1D for edgesMin description.

  • +
  • z]edgesMin (float, optional) – See geobipy.RectilinearMesh1D for edgesMin description.

  • +
  • [x – See geobipy.RectilinearMesh1D for edgesMax description.

  • +
  • y – See geobipy.RectilinearMesh1D for edgesMax description.

  • +
  • z]edgesMax (float, optional) – See geobipy.RectilinearMesh1D for edgesMax description.

  • +
  • [x – See geobipy.RectilinearMesh1D for log description.

  • +
  • y – See geobipy.RectilinearMesh1D for log description.

  • +
  • z]log ('e' or float, optional) – See geobipy.RectilinearMesh1D for log description.

  • +
  • [x – See geobipy.RectilinearMesh1D for relative_to description.

  • +
  • y – See geobipy.RectilinearMesh1D for relative_to description.

  • +
  • z]relative_to (float, optional) – See geobipy.RectilinearMesh1D for relative_to description.

  • +
+
+
Returns:
+

out – The 2D mesh.

+
+
Return type:
+

RectilinearMesh2D

+
+
+
+
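A minimal construction sketch using the x_edges and y_edges keywords described above:

>>> import numpy as np
>>> from geobipy import StatArray, RectilinearMesh2D
>>> x = StatArray(np.linspace(0.0, 100.0, 101), 'Easting', 'm')
>>> y = StatArray(np.linspace(0.0, 30.0, 31), 'Depth', 'm')
>>> mesh = RectilinearMesh2D(x_edges=x, y_edges=y)  # 100 x 30 cells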
+cellIndex(values, axis, clip=False, trim=False)
+

Return the cell indices of values along axis.

+
+
Parameters:
+
    +
  • values (scalar or array_like) – Locations to obtain the cell index for

  • +
  • axis (int) – Axis along which to obtain indices

  • +
  • clip (bool) – A negative index which would normally wrap will clip to 0 instead.

  • +
  • trim (bool) – Do not include out-of-bounds indices. Negates clip, since they won't be included in the output.

  • +
+
+
Returns:
+

out – indices for the locations along the axis

+
+
Return type:
+

ints

+
+
+
+ +
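Continuing the construction sketch above; here axis 0 is assumed to index the x dimension:

>>> # Which cells contain these locations along the chosen axis?
>>> mesh.cellIndex(np.r_[0.5, 50.0, 99.5], axis=0, clip=True)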
+
+cellIndices(x, y=None, clip=False, trim=False)
+

Return the cell indices in x and z for two floats.

+
+
Parameters:
+
    +
  • x (scalar or array_like) – x location

  • +
  • y (scalar or array_like) – y location (or z location if instantiated with 3 co-ordinates)

  • +
  • clip (bool) – A negative index which would normally wrap will clip to 0 instead.

  • +
  • trim (bool) – Do not include out-of-bounds indices. Negates clip, since they won't be included in the output.

  • +
+
+
Returns:
+

out – indices for the locations along [axis0, axis1]

+
+
Return type:
+

ints

+
+
+
+ +
+
+centres(axis=0)
+

Ravelled cell centres

+
+
Returns:
+

out – ravelled cell centre locations.

+
+
Return type:
+

array_like

+
+
+
+ +
+
+createHdf(parent, name, withPosterior=True, add_axis=None, fillvalue=None, upcast=True)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+property distance
+

The distance along the top of the mesh using the x and y co-ordinates.

+
+ +
+
+edges(axis)
+

Gets the cell edges in the given dimension

+
+ +
+
+hasSameSize(other)
+

Determines if the meshes have the same dimension sizes

+
+ +
+
+in_bounds(x, y)
+

Return whether values are inside the cell edges

+
+
Parameters:
+

values (array_like) – Check if these are inside left <= values < right.

+
+
Returns:
+

out – Are the values inside.

+
+
Return type:
+

bools

+
+
+
+ +
+
+intervalStatistic(arr, intervals, axis=0, statistic='mean')
+

Compute a statistic of the array between the intervals given along dimension dim.

+
+
Parameters:
+
    +
  • arr (array_like) – 2D array to take the mean over the given intervals

  • +
  • intervals (array_like) – A new set of mesh edges. The mean is computed between each two edges in the array.

  • +
  • axis (int, optional) – Which axis to take the mean

  • +
  • statistic (string or callable, optional) –

    +
    The statistic to compute (default is ‘mean’).

    The following statistics are available:

    +
    +
    +
      +
    • ’mean’ : compute the mean of values for points within each bin. +Empty bins will be represented by NaN.

    • +
    • ’median’ : compute the median of values for points within each +bin. Empty bins will be represented by NaN.

    • +
    • ’count’ : compute the count of points within each bin. This is +identical to an unweighted histogram. values array is not +referenced.

    • +
    • ’sum’ : compute the sum of values for points within each bin. +This is identical to a weighted histogram.

    • +
    • ’min’ : compute the minimum of values for points within each bin. +Empty bins will be represented by NaN.

    • +
    • ’max’ : compute the maximum of values for point within each bin. +Empty bins will be represented by NaN.

    • +
    • function : a user-defined function which takes a 1D array of +values, and outputs a single numerical statistic. This function +will be called on the values in each bin. Empty bins will be +represented by function([]), or NaN if this returns an error.

    • +
    +

  • +
+
+
+
+

See also

+
+
scipy.stats.binned_statistic

for more information

+
+
+
+
+ +
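A hedged sketch, continuing from the construction example above, with a values array whose shape follows the (y.nCells, x.nCells) convention used elsewhere on this page; the axis mapping is an assumption:

>>> values = np.random.randn(30, 100)  # shape (y.nCells, x.nCells)
>>> # Average the array into three coarser depth intervals
>>> coarse = mesh.intervalStatistic(values,
...                                 intervals=[0.0, 10.0, 20.0, 30.0],
...                                 axis=1, statistic='mean')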
+
+mask_cells(axis=None, x_distance=None, y_distance=None, values=None)
+

Mask cells by a distance.

+

If the edges of the cell are further than distance away, extra cells are inserted such that +the cell’s new edges are at distance away from the centre.

+
+
Parameters:
+
    +
  • xAxis (array_like) – Alternative axis to use for masking. Must have size self.x.nEdges

  • +
  • x_distance (float, optional) – Mask along the x axis using this distance. +Defaults to None.

  • +
  • y_distance (float, optional) – Mask along the y axis using this distance. +Defaults to None.

  • +
  • values (array_like, optional.) – If given, values will be remapped to the masked mesh. +Has shape (y.nCells, x.nCells)

  • +
+
+
Returns:
+

    +
  • out (RectilinearMesh2D) – Masked mesh

  • +
  • x_indices (ints, optional) – Location of the original centres in the expanded mesh along the x axis.

  • +
  • y_indices (ints, optional) – Location of the original centres in the expanded mesh along the y axis.

  • +
  • out_values (array_like, optional) – If values is given, values will be remapped to the masked mesh.

  • +
+

+
+
+
+ +
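A sketch of the call, continuing from the values array above; the exact unpacking of the returned tuple follows the Returns entries listed and should be checked against the installed version:

>>> result = mesh.mask_cells(x_distance=5.0, values=values)
>>> masked_mesh = result[0]  # masked mesh; index maps and remapped values follow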
+
+property nCells
+

The number of cells in the mesh.

+
+
Returns:
+

out – Number of cells

+
+
Return type:
+

int

+
+
+
+ +
+
+property nNodes
+

The number of nodes in the mesh.

+
+
Returns:
+

out – Number of nodes

+
+
Return type:
+

int

+
+
+
+ +
+
+property nodes
+

Ravelled cell nodes

+
+
Returns:
+

out – ravelled cell node locations.

+
+
Return type:
+

array_like

+
+
+
+ +
+
+pcolor(values, axis=None, yAxis='absolute', **kwargs)
+

Create a pseudocolour plot of a 2D array using the mesh.

+
+
Parameters:
+
    +
  • values (array_like or StatArray) – A 2D array of colour values.

  • +
  • xAxis (str) – If xAxis is ‘x’, the horizontal xAxis uses self.x +If xAxis is ‘y’, the horizontal xAxis uses self.y +If xAxis is ‘r’, the horizontal xAxis uses cumulative distance along the line

  • +
  • zAxis (str) – If zAxis is ‘absolute’ the vertical axis is the relative_to plus z. +If zAxis is ‘relative’ the vertical axis is z.

  • +
  • alpha (scalar or array_like, optional) – If alpha is scalar, behaves like standard matplotlib alpha and opacity is applied to entire plot +If array_like, each pixel is given an individual alpha value.

  • +
  • log ('e' or float, optional) – Take the log of the colour to a base. ‘e’ if log = ‘e’, and a number e.g. log = 10. +Values in c that are <= 0 are masked.

  • +
  • equalize (bool, optional) – Equalize the histogram of the colourmap so that all colours have an equal amount.

  • +
  • nbins (int, optional) – Number of bins to use for histogram equalization.

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’

  • +
  • yscale (str, optional) – Scale the y axis? e.g. yscale = ‘linear’ or ‘log’.

  • +
  • flipX (bool, optional) – Flip the X axis

  • +
  • flipY (bool, optional) – Flip the Y axis

  • +
  • grid (bool, optional) – Plot the grid

  • +
  • noColorbar (bool, optional) – Turn off the colour bar, useful if multiple plotting routines are used on the same figure.

  • +
  • trim (bool, optional) – Set the x and y limits to the first and last non zero values along each axis.

  • +
+
+
Returns:
+

matplotlib .Axes

+
+
Return type:
+

ax

+
+
+
+

See also

+
+
matplotlib.pyplot.pcolormesh

For additional keyword arguments you may use.

+
+
+
+
+ +
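Continuing the sketch above; extra keywords such as cmap are forwarded to matplotlib.pyplot.pcolormesh:

>>> import matplotlib.pyplot as plt
>>> plt.figure()
>>> _ = mesh.pcolor(values, grid=True, flipY=True, cmap='viridis')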
+
+plot_grid(**kwargs)
+

Plot the mesh grid lines.

+
+
Parameters:
+

xAxis (str) – If xAxis is ‘x’, the horizontal axis uses self.x +If xAxis is ‘y’, the horizontal axis uses self.y +If xAxis is ‘r’, the horizontal axis uses sqrt(self.x^2 + self.y^2)

+
+
+
+ +
+
+plot_relative_to(axis=0, **kwargs)
+

Plot the relative_to of the mesh as a line.

+
+ +
+
+ravelIndices(ixy, order='C')
+

Return a global index into a 1D array given the two cell indices in x and z.

+
+
Parameters:
+

ixy (tuple of array_like) – A tuple of integer arrays, one array for each dimension.

+
+
Returns:
+

out – Global index.

+
+
Return type:
+

int

+
+
+
+ +
+
+property shape
+

The dimensions of the mesh

+
+
Returns:
+

out – Array of integers

+
+
Return type:
+

array_like

+
+
+
+ +
+
+property summary
+

Display a summary of the 3D Point Cloud

+
+ +
+
+unravelIndex(indices, order='C')
+

Return a global index into a 1D array given the two cell indices in x and z.

+
+
Parameters:
+

indices (array_like) – An integer array whose elements are indices into the flattened +version of an array.

+
+
Returns:
+

unraveled_coords – Each array in the tuple has the same shape as the self.shape.

+
+
Return type:
+

tuple of ndarray

+
+
+
+ +
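Continuing the sketch above, converting between 2D cell indices and flat indices:

>>> flat = mesh.ravelIndices((np.r_[2, 5], np.r_[10, 40]))  # one index array per dimension
>>> mesh.unravelIndex(flat)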
+
+writeHdf(parent, name, withPosterior=True, index=None)
+

Write the StatArray to an HDF object +parent: Upper hdf file or group +myName: object hdf name. Assumes createHdf has already been called +create: optionally create the data set as well before writing

+
+ +
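A hedged HDF round-trip sketch using the createHdf and writeHdf signatures above; reading back with a fromHdf classmethod is an assumption here, mirroring the stitched and 3D mesh classes documented below:

>>> import h5py
>>> with h5py.File('mesh2d.h5', 'w') as f:
...     mesh.createHdf(f, 'mesh')   # create the group and metadata
...     mesh.writeHdf(f, 'mesh')    # write the values
>>> with h5py.File('mesh2d.h5', 'r') as f:
...     mesh_in = RectilinearMesh2D.fromHdf(f['mesh'])  # assumed classmethod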
+
+property x_centres
+

Creates an array suitable for plt.pcolormesh for the abscissa.

+
+
Parameters:
+

xAxis (str) – If xAxis is ‘x’, the horizontal xAxis uses self.x +If xAxis is ‘y’, the horizontal xAxis uses self.y +If xAxis is ‘r’, the horizontal xAxis uses cumulative distance along the line.

+
+
+
+ +
+
+property x_edges
+

Creates an array suitable for plt.pcolormesh for the ordinate

+
+ +
+
+property y_centres
+

Creates an array suitable for plt.pcolormesh for the abscissa.

+
+
Parameters:
+

xAxis (str) – If xAxis is ‘x’, the horizontal xAxis uses self.x +If xAxis is ‘y’, the horizontal xAxis uses self.y +If xAxis is ‘r’, the horizontal xAxis uses cumulative distance along the line.

+
+
+
+ +
+
+property y_edges
+

Creates an array suitable for plt.pcolormesh for the ordinate

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/mesh/RectilinearMesh2D_stitched.html b/docs/content/api/classes/mesh/RectilinearMesh2D_stitched.html new file mode 100644 index 00000000..b3a43446 --- /dev/null +++ b/docs/content/api/classes/mesh/RectilinearMesh2D_stitched.html @@ -0,0 +1,262 @@ + + + + + + + RectilinearMesh2D_stitched — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

RectilinearMesh2D_stitched

+[Inheritance diagram: ABC → myObject → Mesh → RectilinearMesh2D → RectilinearMesh2D_stitched]

@RectilinearMesh2D_Class +Module describing a 2D Rectilinear Mesh class with x and y axes specified

+
+
+class geobipy.src.classes.mesh.RectilinearMesh2D_stitched.RectilinearMesh2D_stitched(max_cells, x=None, relative_to=None, nCells=None, **kwargs)
+

Class defining stitched 1D rectilinear meshes.

+
+
+createHdf(parent, name, withPosterior=True, add_axis=None, fillvalue=None, upcast=False)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+classmethod fromHdf(grp, index=None, skip_posterior=False)
+

Reads in the object from a HDF file

+
+ +
+
+property nCells
+

The number of cells in the mesh.

+
+
Returns:
+

out – Number of cells

+
+
Return type:
+

int

+
+
+
+ +
+
+pcolor(values, **kwargs)
+

Create a pseudocolour plot of a 2D array using the mesh.

+
+
Parameters:
+
    +
  • values (array_like or StatArray) – A 2D array of colour values.

  • +
  • xAxis (str) – If xAxis is ‘x’, the horizontal xAxis uses self.x +If xAxis is ‘y’, the horizontal xAxis uses self.y +If xAxis is ‘r’, the horizontal xAxis uses cumulative distance along the line

  • +
  • zAxis (str) – If zAxis is ‘absolute’ the vertical axis is the relative_to plus z. +If zAxis is ‘relative’ the vertical axis is z.

  • +
  • alpha (scalar or array_like, optional) – If alpha is scalar, behaves like standard matplotlib alpha and opacity is applied to entire plot +If array_like, each pixel is given an individual alpha value.

  • +
  • log ('e' or float, optional) – Take the log of the colour to a base. ‘e’ if log = ‘e’, and a number e.g. log = 10. +Values in c that are <= 0 are masked.

  • +
  • equalize (bool, optional) – Equalize the histogram of the colourmap so that all colours have an equal amount.

  • +
  • nbins (int, optional) – Number of bins to use for histogram equalization.

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’

  • +
  • yscale (str, optional) – Scale the y axis? e.g. yscale = ‘linear’ or ‘log’.

  • +
  • flipX (bool, optional) – Flip the X axis

  • +
  • flipY (bool, optional) – Flip the Y axis

  • +
  • grid (bool, optional) – Plot the grid

  • +
  • noColorbar (bool, optional) – Turn off the colour bar, useful if multiple plotting routines are used on the same figure.

  • +
  • trim (bool, optional) – Set the x and y limits to the first and last non zero values along each axis.

  • +
+
+
Returns:
+

matplotlib .Axes

+
+
Return type:
+

ax

+
+
+
+

See also

+
+
matplotlib.pyplot.pcolormesh

For additional keyword arguments you may use.

+
+
+
+
+ +
+
+property shape
+

The dimensions of the mesh

+
+
Returns:
+

out – Array of integers

+
+
Return type:
+

array_like

+
+
+
+ +
+
+property summary
+

Display a summary of the 3D Point Cloud

+
+ +
+
+property y_edges
+

Creates an array suitable for plt.pcolormesh for the ordinate

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/mesh/RectilinearMesh3D.html b/docs/content/api/classes/mesh/RectilinearMesh3D.html new file mode 100644 index 00000000..7a0209d7 --- /dev/null +++ b/docs/content/api/classes/mesh/RectilinearMesh3D.html @@ -0,0 +1,437 @@ + + + + + + + RectilinearMesh3D — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

RectilinearMesh3D

+[Inheritance diagram: ABC → myObject → Mesh → RectilinearMesh2D → RectilinearMesh3D]

@RectilinearMesh2D_Class +Module describing a 2D Rectilinear Mesh class with x and y axes specified

+
+
+class geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D(x=None, y=None, z=None, **kwargs)
+

Class defining a 3D rectilinear mesh with cell centres and edges.

+

Contains a simple mesh with cell edges, widths, and centre locations. There are two ways of instantiating the RectilinearMesh2D. The first is by specifying the x and y cell centres or edges. In this case, the abscissa is the standard x axis, and y is the ordinate. The z co-ordinates are None. The second is by specifying the x, y, and z cell centres or edges. In this case, the mesh is a 2D plane with the ordinate parallel to z, and the “horizontal” locations have co-ordinates (x, y). This allows you to, for example, create a vertical 2D mesh that is not parallel to either the x or y axis, like a typical line of data. If x, y, and z are specified, plots can be made against distance, which is calculated cumulatively between points.

+

RectilinearMesh2D([x_centres or x_edges], [y_centres or y_edges], [z_centres or z_edges])

+
+
Parameters:
+
    +
  • x_centres (geobipy.StatArray, optional) – The locations of the centre of each cell in the “x” direction. Only x_centres or x_edges can be given.

  • +
  • x_edges (geobipy.StatArray, optional) – The locations of the edges of each cell, including the outermost edges, in the “x” direction. Only x_centres or x_edges can be given.

  • +
  • y_centres (geobipy.StatArray, optional) – The locations of the centre of each cell in the “y” direction. Only y_centres or y_edges can be given.

  • +
  • y_edges (geobipy.StatArray, optional) – The locations of the edges of each cell, including the outermost edges, in the “y” direction. Only y_centres or y_edges can be given.

  • +
  • z_centres (geobipy.StatArray, optional) – The locations of the centre of each cell in the “z” direction. Only z_centres or z_edges can be given.

  • +
  • z_edges (geobipy.StatArray, optional) – The locations of the edges of each cell, including the outermost edges, in the “z” direction. Only z_centres or z_edges can be given.

  • +
  • relative_toCentres (geobipy.StatArray, optional) – The relative_to of each point at the x, y locations. Only relative_toCentres or relative_toEdges can be given, not both. +Has shape (y.nCells, x.nCells).

  • +
  • relative_toEdges (geobipy.StatArray, optional) – The relative_to of each point at the x, y locations of the edges of each cell, including the outermost edges. Only relative_toCentres or relative_toEdges can be given, not both. +Has shape (y.nEdges, x.nEdges).

  • +
  • [x (float, optional) – See geobipy.RectilinearMesh1D for log description.

  • +
  • y (float, optional) – See geobipy.RectilinearMesh1D for log description.

  • +
  • z]log ('e' or float, optional) – See geobipy.RectilinearMesh1D for log description.

  • +
  • [x – See geobipy.RectilinearMesh1D for relative_to description.

  • +
  • y – See geobipy.RectilinearMesh1D for relative_to description.

  • +
  • z]relative_to (float, optional) – See geobipy.RectilinearMesh1D for relative_to description.

  • +
+
+
Returns:
+

out – The 2D mesh.

+
+
Return type:
+

RectilinearMesh2D

+
+
+
+
+cellIndices(x, y, z, clip=False, trim=False)
+

Return the cell indices in x and z for two floats.

+
+
Parameters:
+
    +
  • x (scalar or array_like) – x location

  • +
  • y (scalar or array_like) – y location (or z location if instantiated with 3 co-ordinates)

  • +
  • clip (bool) – A negative index which would normally wrap will clip to 0 instead.

  • +
  • trim (bool) – Do not include out-of-bounds indices. Negates clip, since they won't be included in the output.

  • +
+
+
Returns:
+

out – indices for the locations along [axis0, axis1]

+
+
Return type:
+

ints

+
+
+
+ +
+
+centres(axis)
+

Ravelled cell centres

+
+
Returns:
+

out – ravelled cell centre locations.

+
+
Return type:
+

array_like

+
+
+
+ +
+
+createHdf(parent, name, withPosterior=True, add_axis=None, fillvalue=None, upcast=None)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+edges(axis)
+

Gets the cell edges in the given dimension

+
+ +
+
+classmethod fromHdf(grp, index=None, skip_posterior=False)
+

Reads in the object from a HDF file

+
+ +
+
+property nCells
+

The number of cells in the mesh.

+
+
Returns:
+

out – Number of cells

+
+
Return type:
+

int

+
+
+
+ +
+
+property nNodes
+

The number of nodes in the mesh.

+
+
Returns:
+

out – Number of nodes

+
+
Return type:
+

int

+
+
+
+ +
+
+plot_grid()
+

Plot the mesh grid lines.

+
+
Parameters:
+

xAxis (str) – If xAxis is ‘x’, the horizontal axis uses self.x +If xAxis is ‘y’, the horizontal axis uses self.y +If xAxis is ‘r’, the horizontal axis uses sqrt(self.x^2 + self.y^2)

+
+
+
+ +
+
+pyvista_mesh(**kwargs)
+

Creates a pyvista plotting object linked to VTK.

+

Use mesh.plot(show_edges=True, show_grid=True) to plot the mesh.

+
+ +
+
+ravelIndices(indices, order='C')
+

Return a global index into a 1D array given the two cell indices in x and z.

+
+
Parameters:
+

indices (array_like) – A tuple of integer arrays, one array for each dimension.

+
+
Returns:
+

out – Global index.

+
+
Return type:
+

int

+
+
+
+ +
+
+property shape
+

The dimensions of the mesh

+
+
Returns:
+

out – Array of integers

+
+
Return type:
+

array_like

+
+
+
+ +
+
+property summary
+

Display a summary of the 3D Point Cloud

+
+ +
+
+unravelIndex(index, order='C')
+

Return local indices given a global one.

+
+
Parameters:
+

indices (array_like) – An integer array whose elements are indices into the flattened +version of an array.

+
+
Returns:
+

unraveled_coords – Each array in the tuple has the same shape as the self.shape.

+
+
Return type:
+

tuple of ndarray

+
+
+
+ +
+
+writeHdf(parent, name, withPosterior=True, index=None)
+

Write the StatArray to an HDF object +parent: Upper hdf file or group +myName: object hdf name. Assumes createHdf has already been called +create: optionally create the data set as well before writing

+
+ +
+
+xRange()
+

Get the range of x

+
+
Returns:
+

out – The range of x

+
+
Return type:
+

numpy.float64

+
+
+
+ +
+
+property x_centres
+

Creates an array suitable for plt.pcolormesh for the abscissa.

+
+
Parameters:
+

xAxis (str) – If xAxis is ‘x’, the horizontal xAxis uses self.x +If xAxis is ‘y’, the horizontal xAxis uses self.y +If xAxis is ‘r’, the horizontal xAxis uses cumulative distance along the line.

+
+
+
+ +
+
+property x_edges
+

Creates an array suitable for plt.pcolormesh for the ordinate

+
+ +
+
+property y_centres
+

Creates an array suitable for plt.pcolormesh for the abscissa.

+
+
Parameters:
+

xAxis (str) – If xAxis is ‘x’, the horizontal xAxis uses self.x +If xAxis is ‘y’, the horizontal xAxis uses self.y +If xAxis is ‘r’, the horizontal xAxis uses cumulative distance along the line.

+
+
+
+ +
+
+property y_edges
+

Creates an array suitable for plt.pcolormesh for the ordinate

+
+ +
+
+zRange()
+

Get the range of z

+
+
Returns:
+

out – The range of z

+
+
Return type:
+

numpy.float64

+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/mesh/mesh.html b/docs/content/api/classes/mesh/mesh.html new file mode 100644 index 00000000..c1098aba --- /dev/null +++ b/docs/content/api/classes/mesh/mesh.html @@ -0,0 +1,150 @@ + + + + + + + Mesh classes — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+ + +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/model/Model_.html b/docs/content/api/classes/model/Model_.html new file mode 100644 index 00000000..f706d0ca --- /dev/null +++ b/docs/content/api/classes/model/Model_.html @@ -0,0 +1,438 @@ + + + + + + + Model — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

Model

+[Inheritance diagram: ABC → myObject → Model]

@Model_Class +Module describing a Model

+
+
+class geobipy.src.classes.model.Model.Model(mesh=None, values=None)
+

Generic model class with an attached mesh.

+
+
+classmethod fromHdf(grp, index=None, skip_posterior=False)
+

Reads in the object from a HDF file

+
+ +
+
+property gradient
+

Compute the gradient

+

Parameter gradient \(\nabla_{z}\sigma\) at the ith layer is computed via

+
+ +
+
+gradient_probability(log=True)
+

Evaluate the prior for the gradient of the parameter with depth

+
+
Parameters:
+

hmin (float64) – The minimum thickness of any layer.

+
+
Returns:
+

out – The probability given the prior on the gradient of the parameters with depth.

+
+
Return type:
+

numpy.float64

+
+
+
+ +
+
+local_inverse_hessian(observation=None)
+

Generate a localized Hessian matrix using +a dataPoint and the current realization of the Model1D.

+
+
Parameters:
+

observation (geobipy.DataPoint, geobipy.Dataset, optional) – The observed data to use when computing the local estimate of the variance.

+
+
Returns:
+

out – Hessian matrix

+
+
Return type:
+

array_like

+
+
+
+ +
+
+local_precision(observation=None)
+

Generate a localized inverse Hessian matrix using a dataPoint and the current realization of the Model1D.

+
+
Parameters:
+

datapoint (geobipy.DataPoint, optional) – The data point to use when computing the local estimate of the variance. +If None, only the prior derivative is used.

+
+
Returns:
+

out – Inverse Hessian matrix

+
+
Return type:
+

array_like

+
+
+
+ +
+
+local_variance(observation=None)
+

Generate a localized inverse Hessian matrix using a dataPoint and the current realization of the Model1D.

+
+
Parameters:
+

datapoint (geobipy.DataPoint, optional) – The data point to use when computing the local estimate of the variance. +If None, only the prior derivative is used.

+
+
Returns:
+

out – Inverse Hessian matrix

+
+
Return type:
+

array_like

+
+
+
+ +
+
+pad(shape)
+

Copies the properties of a model including all priors or proposals, but pads memory to the given size

+
+
Parameters:
+

size (int, tuple) – Create memory up to this size.

+
+
Returns:
+

out – Padded model

+
+
Return type:
+

geobipy.Model1D

+
+
+
+ +
+
+pcolor(**kwargs)
+

Plot like an image

+
+
Parameters:
+
    +
  • alpha (scalar or array_like, optional) – If alpha is scalar, behaves like standard matplotlib alpha and opacity is applied to entire plot +If array_like, each pixel is given an individual alpha value.

  • +
  • log ('e' or float, optional) – Take the log of the colour to a base. ‘e’ if log = ‘e’, and a number e.g. log = 10. +Values in c that are <= 0 are masked.

  • +
  • equalize (bool, optional) – Equalize the histogram of the colourmap so that all colours have an equal amount.

  • +
  • nbins (int, optional) – Number of bins to use for histogram equalization.

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’

  • +
  • yscale (str, optional) – Scale the y axis? e.g. yscale = ‘linear’ or ‘log’.

  • +
  • flipX (bool, optional) – Flip the X axis

  • +
  • flipY (bool, optional) – Flip the Y axis

  • +
  • grid (bool, optional) – Plot the grid

  • +
  • noColorbar (bool, optional) – Turn off the colour bar, useful if multiple plotting routines are used on the same figure.

  • +
  • trim (bool, optional) – Set the x and y limits to the first and last non zero values along each axis.

  • +
+
+
+
+ +
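A minimal sketch of building and plotting a model, assuming Model, StatArray, and RectilinearMesh2D are exposed at the top-level geobipy namespace:

>>> import numpy as np
>>> import matplotlib.pyplot as plt
>>> from geobipy import Model, StatArray, RectilinearMesh2D
>>> mesh = RectilinearMesh2D(x_edges=StatArray(np.arange(101.0), 'x'),
...                          y_edges=StatArray(np.arange(31.0), 'y'))
>>> model = Model(mesh=mesh, values=StatArray(np.random.randn(30, 100), 'Conductivity'))
>>> plt.figure()
>>> _ = model.pcolor(grid=True)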
+
+perturb(*args, **kwargs)
+

Perturb a model’s structure and parameter values.

+

Uses a stochastic Newton approach if a datapoint is provided. Otherwise, uses the existing proposal distribution attached to self.par to generate new values.

+
+
Parameters:
+

observation (geobipy.DataPoint, optional) – The datapoint to use to perturb using a stochastic Newton approach.

+
+
Returns:
+

    +
  • remappedModel (geobipy.Model) – The current model remapped onto the perturbed dimension.

  • +
  • perturbedModel (geobipy.Model) – The model with perturbed structure and parameter values.

  • +
+

+
+
+
+ +
+
+probability(solve_value, solve_gradient)
+

Evaluate the prior probability for the 1D Model.

+
+
Parameters:
+
    +
  • sPar (bool) – Evaluate the prior on the parameters in the final probability

  • +
  • sGradient (bool) – Evaluate the prior on the parameter gradient in the final probability

  • +
  • components (bool, optional) – Return all components used in the final probability as well as the final probability

  • +
+
+
Returns:
+

    +
  • probability (numpy.float64) – The probability

  • +
  • components (array_like, optional) – The components of the probability, i.e. the individually evaluated priors, returned as a second argument if components=True on input.

  • +
+

+
+
+
+ +
+
+proposal_probabilities(remapped_model, observation=None, structure_only=False, alpha=1.0)
+

Return the forward and reverse proposal probabilities for the model

+

Returns the denominator and numerator for the model’s components of the proposal ratio.

+
+(1)\[q(k, \boldsymbol{z} | \boldsymbol{m}^{'})\]
+

and

+
+(2)\[q(k^{'}, \boldsymbol{z}^{'} | \boldsymbol{m})\]
+

Each component is dependent on the event that was chosen during perturbation.

+
+
Parameters:
+
    +
  • remappedModel (geobipy.Model1D) – The current model, remapped onto the dimension of self.

  • +
  • observation (geobipy.DataPoint) – The perturbed datapoint that was used to generate self.

  • +
+
+
Returns:
+

    +
  • forward (float) – The forward proposal probability

  • +
  • reverse (float) – The reverse proposal probability

  • +
+

+
+
+
+ +
+
+set_priors(values_prior=None, gradient_prior=None, **kwargs)
+

Setup the priors of a 1D model.

+
+
Parameters:
+
    +
  • halfSpaceValue (float) – Value of the parameter for the halfspace.

  • +
  • min_edge (float64) – Minimum depth possible for the model

  • +
  • max_edge (float64) – Maximum depth possible for the model

  • +
  • max_cells (int) – Maximum number of layers allowable in the model

  • +
  • parameterPrior (bool) – Sets a prior on the parameter values

  • +
  • gradientPrior (bool) – Sets a prior on the gradient of the parameter values

  • +
  • parameterLimits (array_like, optional) – Length 2 array with the bounds on the parameter values to impose.

  • +
  • min_width (float64, optional) – Minimum thickness of any layer. If min_width = None, min_width is computed from min_edge, max_edge, and max_cells (recommended).

  • +
  • factor (float, optional) – Tuning parameter used in the std of the parameter prior.

  • +
  • prng (numpy.random.RandomState(), optional) – Random number generator, if none is given, will use numpy’s global generator.

  • +
+
+
+
+

See also

+
+
geobipy.Model1D.perturb

For a description of the perturbation cycle.

+
+
+
+
+ +
+
+set_proposals(proposal=None, **kwargs)
+

Setup the proposals of a 1D model.

+
+
Parameters:
+
    +
  • halfSpaceValue (float) – Value of the parameter for the halfspace.

  • +
  • probabilities (array_like) – Probability of birth, death, perturb, and no change for the model +e.g. pWheel = [0.5, 0.25, 0.15, 0.1]

  • +
  • parameterProposal (geobipy.Distribution) – The proposal distribution for the parameter.

  • +
  • prng (numpy.random.RandomState(), optional) – Random number generator, if none is given, will use numpy’s global generator.

  • +
+
+
+
+

See also

+
+
geobipy.Model1D.perturb

For a description of the perturbation cycle.

+
+
+
+
+ +
+
+property summary
+

Summary of self

+
+ +
+
+update_parameter_posterior(axis=0)
+

Imposes a model’s parameters with depth onto a 2D Hitmap.

+

The cells that the parameter-depth profile passes through are each incremented by 1.

+
+
Parameters:
+

Hitmap (geobipy.Hitmap) – The hitmap to add to

+
+
+
+ +
+
+update_posteriors(ratio=0.5)
+

Update any attached posterior distributions.

+
+
Parameters:
+

minimumRatio (float) – Only update the depth posterior if the layer parameter ratio +is greater than this number.

+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/model/model.html b/docs/content/api/classes/model/model.html new file mode 100644 index 00000000..f7fa1030 --- /dev/null +++ b/docs/content/api/classes/model/model.html @@ -0,0 +1,144 @@ + + + + + + + Model classes — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Model classes

+
+ +
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/pointcloud/Point.html b/docs/content/api/classes/pointcloud/Point.html new file mode 100644 index 00000000..06408348 --- /dev/null +++ b/docs/content/api/classes/pointcloud/Point.html @@ -0,0 +1,627 @@ + + + + + + + Point — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

Point

+[Inheritance diagram: ABC → myObject → Point]
+
+class geobipy.src.classes.pointcloud.Point.Point(x=None, y=None, z=None, elevation=None, **kwargs)
+

3D Point Cloud with x,y,z co-ordinates

+

Point(N, x, y, z)

+
+
Parameters:
+
    +
  • N (int) – Number of points

  • +
  • x (array_like or geobipy.StatArray, optional) – The x co-ordinates. Default is zeros of size N

  • +
  • y (array_like or geobipy.StatArray, optional) – The y co-ordinates. Default is zeros of size N

  • +
  • z (array_like or geobipy.StatArray, optional) – The z co-ordinates. Default is zeros of size N

  • +
  • units (str, optional) – The units of the co-ordinates. Default is “m”

  • +
+
+
Returns:
+

out – The 3D point cloud

+
+
Return type:
+

geobipy.PointCloud3D

+
+
+
+
+Bcast(world, root=0)
+

Broadcast a PointCloud3D using MPI

+
+
Parameters:
+
    +
  • world (mpi4py.MPI.COMM_WORLD) – MPI communicator

  • +
  • root (int, optional) – The MPI rank to broadcast from. Default is 0.

  • +
+
+
Returns:
+

out – PointCloud3D broadcast to each rank

+
+
Return type:
+

geobipy.PointCloud3D

+
+
+
+ +
+
+Scatterv(starts, chunks, world, root=0)
+

ScatterV a PointCloud3D using MPI

+
+
Parameters:
+
    +
  • myStart (sequence of ints) – Indices into self that define the starting locations of the chunks to be sent to each rank.

  • +
  • myChunk (sequence of ints) – The size of each chunk that each rank will receive.

  • +
  • world (mpi4py.MPI.Comm) – The MPI communicator over which to Scatterv.

  • +
  • root (int, optional) – The MPI rank to broadcast from. Default is 0.

  • +
+
+
Returns:
+

out – The PointCloud3D distributed amongst ranks.

+
+
Return type:
+

geobipy.PointCloud3D

+
+
+
+ +
+
+append(other)
+

Append pointclouds together

+
+
Parameters:
+

other (geobipy.PointCloud3D) – 3D pointcloud

+
+
+
+ +
+
+axis(axis='x')
+

Obtain the axis against which to plot values.

+
+
Parameters:
+

axis (str) – If axis is ‘index’, returns numpy.arange(self.nPoints) +If axis is ‘x’, returns self.x +If axis is ‘y’, returns self.y +If axis is ‘z’, returns self.z +If axis is ‘r2d’, returns cumulative distance along the line in 2D using x and y. +If axis is ‘r3d’, returns cumulative distance along the line in 3D using x, y, and z.

+
+
Returns:
+

out – The requested axis.

+
+
Return type:
+

array_like

+
+
+
+ +
+
+block_indices(dx=None, dy=None, x_grid=None, y_grid=None)
+

Returns the indices of the points lying in blocks across the domain.

+

Performed before a block median filter by extracting the point locations within blocks across the domain. The idea is taken from pygmt; however, extracting the indices directly was quite a bit faster.

+
+
Parameters:
+
    +
  • dx (float) – Grid spacing in x.

  • +
  • dy (float) – Grid spacing in y.

  • +
+
+
Returns:
+

ints

+
+
Return type:
+

Index into self whose points are the median location within blocks across the domain.

+
+
+
+ +
+
+block_median(dx=None, dy=None, x_grid=None, y_grid=None, values=None)
+

Median point within juxtaposed blocks across the domain.

+
+
Parameters:
+
    +
  • dx (float) – Increment in x.

  • +
  • dy (float) – Increment in y.

  • +
  • values (array_like, optional) – Used to compute the median in each block. +Defaults to None.

  • +
+
+
Returns:
+

geobipy.PointCloud3D

+
+
Return type:
+

Contains one point in each block.

+
+
+
+ +
+
+block_median_indices(dx=None, dy=None, x_grid=None, y_grid=None, values=None)
+

Index to the median point within juxtaposed blocks across the domain.

+
+
Parameters:
+
    +
  • dx (float) – Increment in x.

  • +
  • dy (float) – Increment in y.

  • +
  • values (array_like, optional) – Used to compute the median in each block. +Defaults to None.

  • +
+
+
Returns:
+

ints

+
+
Return type:
+

Index of the median point in each block.

+
+
+
+ +
+
+property bounds
+

Gets the bounding box of the data set

+
+ +
+
+centred_grid_nodes(bounds, spacing)
+

Generates grid nodes centred over bounds

+
+
Parameters:
+
    +
  • bounds (array_like) – bounds of the dimension

  • +
  • spacing (float) – distance between nodes

  • +
+
+
+
+ +
+
+createHdf(parent, name, withPosterior=True, add_axis=None, fillvalue=None)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+fileInformation()
+

Description of PointCloud3D file.

+
+
Returns:
+

out – File description.

+
+
Return type:
+

str

+
+
+
+ +
+
+classmethod fromHdf(grp, index=None, **kwargs)
+

Reads the object from a HDF group

+
+ +
+
+interpolate(dx=None, dy=None, mesh=None, values=None, method='mc', mask=False, clip=True, i=None, block=False, **kwargs)
+

Interpolate values to a grid.

+

The grid is automatically generated such that it is centred over the point cloud.

+
+
Parameters:
+
    +
  • dx (float) – Grid spacing in x.

  • +
  • dy (float) – Grid spacing in y.

  • +
  • values (array_like, optional) – Values to interpolate. Must have size self.nPoints. +Defaults to None.

  • +
  • method (str, optional) –

      +
    • ‘ct’ uses Clough Tocher interpolation. Default

    • +
    • ’mc’ uses Minimum curvature and requires pygmt to be installed.

    • +
    +

  • +
  • mask (float, optional) – Cells of distance mask away from points are NaN. +Defaults to False.

  • +
  • clip (bool, optional) – Clip any overshot grid values to the min/max of values. +Defaults to True.

  • +
  • i (ints, optional) – Use only the i locations during interpolation. +Defaults to None.

  • +
  • block (bool, optional) – Perform a block median filter before interpolation. Inherently smooths the final grid, but alleviates aliasing. Defaults to False.

  • +
+
+
Returns:
+

geobipy.Model

+
+
Return type:
+

Interpolated values.

+
+
+
+ +
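A minimal sketch, assuming Point and StatArray are importable from the top-level geobipy namespace; the 'ct' (Clough Tocher) method is used so that pygmt is not required:

>>> import numpy as np
>>> from geobipy import Point, StatArray
>>> px = StatArray(np.random.rand(500) * 100.0, 'Easting', 'm')
>>> py = StatArray(np.random.rand(500) * 100.0, 'Northing', 'm')
>>> pz = StatArray(np.random.randn(500), 'Elevation', 'm')
>>> points = Point(x=px, y=py, z=pz)
>>> # Interpolate the elevations onto a 5 m grid centred over the points
>>> surface = points.interpolate(dx=5.0, dy=5.0, values=pz, method='ct', block=True)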
+
+map(dx, dy, i=None, **kwargs)
+

Create a map of a parameter

+
+ +
+
+move(dx, dy, dz)
+

Move the point by [dx,dy,dz]

+
+ +
+
+property nPoints
+

Get the number of points

+
+ +
+
+nearest(x, k=1, eps=0, p=2, radius=inf)
+

Obtain the k nearest neighbours

+
+

See also

+

See

+
+
+ +
+
+perturb()
+

Propose a new point given the attached proposal distributions

+
+ +
+
+plot(values, x='index', **kwargs)
+

Line plot of values against a co-ordinate.

+
+
Parameters:
+
    +
  • values (array_like) – Values to plot against a co-ordinate

  • +
  • xAxis (str) – If xAxis is ‘index’, returns numpy.arange(self.nPoints) +If xAxis is ‘x’, returns self.x +If xAxis is ‘y’, returns self.y +If xAxis is ‘z’, returns self.z +If xAxis is ‘r2d’, returns cumulative distance along the line in 2D using x and y. +If xAxis is ‘r3d’, returns cumulative distance along the line in 3D using x, y, and z.

  • +
+
+
Returns:
+

ax – Plot axes handle

+
+
Return type:
+

matplotlib.axes

+
+
+
+

See also

+
+
geobipy.plotting.plot

For additional keyword arguments

+
+
+
+
+ +
+
+property probability
+

Evaluate the probability for the EM data point given the specified attached priors

+
+
Parameters:
+
    +
  • rEerr (bool) – Include the relative error when evaluating the prior

  • +
  • aEerr (bool) – Include the additive error when evaluating the prior

  • +
  • height (bool) – Include the elevation when evaluating the prior

  • +
  • calibration (bool) – Include the calibration parameters when evaluating the prior

  • +
  • verbose (bool) – Return the components of the probability, i.e. the individually evaluated priors

  • +
+
+
Returns:
+

out – The evaluation of the probability using all assigned priors

+
+
Return type:
+

float64

+
+
+

Notes

+

For each boolean, the associated prior must have been set.

+
+
Raises:
+

TypeError – If a prior has not been set on a requested parameter

+
+
+
+ +
+
+classmethod read_csv(filename, **kwargs)
+

Reads x, y, z co-ordinates from an ASCII CSV file.

+
+
Parameters:
+

filename (str) – Path to the file to read from.

+
+
+
+ +
+
+scatter2D(**kwargs)
+

Create a 2D scatter plot using the x, y coordinates of the point cloud.

+

Can take any other matplotlib arguments and keyword arguments e.g. markersize etc.

+
+
Parameters:
+
    +
  • c (1D array_like or StatArray, optional) – Colour values of the points, default is the height of the points

  • +
  • i (sequence of ints, optional) – Plot a subset of x, y, c, using the indices in i.

  • +
+
+
+
+

See also

+
+
geobipy.plotting.Scatter2D

For additional keyword arguments you may use.

+
+
+
+
+ +
+
+set_kdtree(ndim)
+

Creates a k-d tree of the point co-ordinates

+
+
Parameters:
+

nDims (int) – Either 2 or 3 to exclude or include the vertical co-ordinate

+
+
+
+ +
+
+set_x_posterior()
+
+ +
+
+set_y_posterior()
+
+ +
+
+set_z_posterior()
+
+ +
+
+property summary
+

Summary of self

+
+ +
+
+toVTK(fileName, pointData=None, format='binary')
+

Save the PointCloud3D to a VTK file.

+
+
Parameters:
+
    +
  • fileName (str) – Filename to save to.

  • +
  • pointData (geobipy.StatArray or list of geobipy.StatArray, optional) – Data at each point in the point cloud. Each entry is saved as a separate +vtk attribute.

  • +
  • format (str, optional) – “ascii” or “binary” format. Ascii is readable, binary is not but results in smaller files.

  • +
+
+
Raises:
+
    +
  • TypeError – If pointData is not a geobipy.StatArray or list of them.

  • +
  • ValueError – If any pointData entry does not have size equal to the number of points.

  • +
  • ValueError – If any StatArray does not have a name or units. This is needed for the vtk attribute.

  • +
+
+
+
+ +
+
+vtkStructure()
+

Generates a vtk mesh structure that can be used in a vtk file.

+
+
Returns:
+

out – Vtk data structure

+
+
Return type:
+

pyvtk.VtkData

+
+
+
+ +
+
+writeHdf(parent, name, withPosterior=True, index=None)
+

Write the StatArray to an HDF object +parent: Upper hdf file or group +myName: object hdf name. Assumes createHdf has already been called +create: optionally create the data set as well before writing

+
+ +
+
+x_axis(xAxis='x')
+

Obtain the xAxis against which to plot values.

+
+
Parameters:
+

xAxis (str) – If xAxis is ‘index’, returns numpy.arange(self.nPoints) +If xAxis is ‘x’, returns self.x +If xAxis is ‘y’, returns self.y +If xAxis is ‘z’, returns self.z +If xAxis is ‘r2d’, returns cumulative distance along the line in 2D using x and y. +If xAxis is ‘r3d’, returns cumulative distance along the line in 3D using x, y, and z.

+
+
Returns:
+

out – The requested xAxis.

+
+
Return type:
+

array_like

+
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/pointcloud/pointcloud.html b/docs/content/api/classes/pointcloud/pointcloud.html new file mode 100644 index 00000000..6abaf4ea --- /dev/null +++ b/docs/content/api/classes/pointcloud/pointcloud.html @@ -0,0 +1,143 @@ + + + + + + + Pointcloud classes — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Pointcloud classes

+
+ +
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/statistics/Distribution.html b/docs/content/api/classes/statistics/Distribution.html new file mode 100644 index 00000000..134ab0f2 --- /dev/null +++ b/docs/content/api/classes/statistics/Distribution.html @@ -0,0 +1,185 @@ + + + + + + + Distribution Wrapper — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Distribution Wrapper

+

@Distribution_Class +Module describing statistical distributions

+
+
+geobipy.src.classes.statistics.Distribution.Distribution(distributionType, *args, **kwargs)
+

Instantiate a statistical distribution

+
+
Parameters:
+

distributionType (str or subclass of baseDistribution) – If distributionType is str, choose between {Normal, MvNormal, Uniform, Gamma, Order, Categorical} +if distributionType is subclass of baseDistribution, a copy is made

+
+
Returns:
+

out – Subclass of baseDistribution

+
+
Return type:
+

The distribution requested

+
+
+

Example

+
>>> from geobipy import Distribution
+>>> import numpy as np
+>>> import matplotlib.pyplot as plt
+>>> D = Distribution('Normal', 0.0, 1.0)
+>>> x = np.linspace(-5.0,5.0,100)
+>>> y = D.probability(x)
+>>> plt.figure()
+>>> plt.plot(x,y)
+>>> plt.show()
+>>> # To create a Distribution using a specific pseudo random number generator
+>>> prng = np.random.RandomState()
+>>> D = Distribution('Normal', 0.0, 1.0, prng=prng)
+
+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/statistics/GammaDistribution.html b/docs/content/api/classes/statistics/GammaDistribution.html new file mode 100644 index 00000000..f0ee53dd --- /dev/null +++ b/docs/content/api/classes/statistics/GammaDistribution.html @@ -0,0 +1,166 @@ + + + + + + + Gamma Distribution — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Gamma Distribution

+[Inheritance diagram: Gamma]

@GammaDistribution +Module defining a gamma distribution with statistical procedures

+
+
+class geobipy.src.classes.statistics.GammaDistribution.Gamma(*args)
+

Class defining a gamma distribution

+
+
+pdf(x)
+

Set the PDF for a gamma distribution

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/statistics/Histogram.html b/docs/content/api/classes/statistics/Histogram.html new file mode 100644 index 00000000..55b2e74c --- /dev/null +++ b/docs/content/api/classes/statistics/Histogram.html @@ -0,0 +1,430 @@ + + + + + + + Histogram — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

Histogram

+[Inheritance diagram: ABC → myObject → Model → Histogram]
+
+class geobipy.src.classes.statistics.Histogram.Histogram(mesh=None, values=None)
+
+
+credible_intervals(percent=90.0, axis=0)
+

Gets the median and the credible intervals for the specified axis.

+
+
Parameters:
+
    +
  • percent (float) – Confidence percentage.

  • +
  • log ('e' or float, optional) – Take the log of the credible intervals to a base. ‘e’ if log = ‘e’, or a number e.g. log = 10.

  • +
  • axis (int) – Along which axis to obtain the interval locations.

  • +
+
+
Returns:
+

    +
  • med (array_like) – Contains the medians along the specified axis. Has size equal to arr.shape[axis].

  • +
  • low (array_like) – Contains the lower interval along the specified axis. Has size equal to arr.shape[axis].

  • +
  • high (array_like) – Contains the upper interval along the specified axis. Has size equal to arr.shape[axis].

  • +
+

+
+
+
+ +
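A minimal usage sketch follows. It assumes Histogram, StatArray and RectilinearMesh2D are importable from the top-level geobipy package, that the mesh accepts x_edges/y_edges keywords, and that update takes one array of samples per dimension, as in the package's gallery examples; adapt the names if your install differs.

import numpy as np
from geobipy import StatArray, RectilinearMesh2D, Histogram

# Bin edges for a two-dimensional histogram
x_edges = StatArray(np.linspace(-3.0, 3.0, 51), 'Variable 1')
y_edges = StatArray(np.linspace(-3.0, 3.0, 61), 'Variable 2')
H = Histogram(mesh=RectilinearMesh2D(x_edges=x_edges, y_edges=y_edges))

# Accumulate counts from random samples
H.update(np.random.randn(100000), np.random.randn(100000))

# Median and credible bounds along axis 0, matching the Returns entries above
med, low, high = H.credible_intervals(percent=90.0, axis=0)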
+
+credible_range(percent=90.0, log=None, axis=0)
+

Get the range of credibility along the given axis

+
+
Parameters:
+
    +
  • percent (float) – Percent of the credible intervals

  • +
  • log ('e' or float, optional) – If None: The range is the difference in linear space of the credible intervals +If ‘e’ or float: The range is the difference in log space, or ratio in linear space.

  • +
  • axis (int) – Axis along which to get the marginal histogram.

  • +
+
+
+
+ +
+
+fit_mixture_to_pdf_1d(mixture, **kwargs)
+

Find peaks in the histogram along an axis.

+
+
Parameters:
+
    +
  • intervals (array_like, optional) – Accumulate the histogram between these intervals before finding peaks

  • +
  • axis (int, optional) – Axis along which to find peaks.

  • +
+
+
+
+ +
+
+classmethod fromHdf(grp, index=None)
+

Reads in the object from a HDF file

+
+ +
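A round-trip reading sketch; the file and group names are hypothetical, and it assumes the histogram was written earlier with the inherited HDF5 writing methods.

import h5py
from geobipy import Histogram

with h5py.File('histogram.h5', 'r') as f:        # hypothetical file written earlier
    H_restored = Histogram.fromHdf(f['histogram'])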
+
+marginalize(axis=0)
+

Get the marginal histogram along an axis

+
+
Parameters:
+
    +
  • intervals (array_like) – Array of size 2 containing lower and upper limits between which to count.

  • +
  • log ('e' or float, optional) – Entries are given in linear space, but internally bins and values are logged. +Plotting is in log space.

  • +
  • axis (int) – Axis along which to get the marginal histogram.

  • +
+
+
Returns:
+

out

+
+
Return type:
+

geobipy.Histogram1D

+
+
+
+ +
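Continuing the sketch above, marginalizing collapses one axis to a one-dimensional histogram, which can then be plotted with its own plot method (an assumption based on the Return type above).

h0 = H.marginalize(axis=0)   # marginal histogram along axis 0
h1 = H.marginalize(axis=1)
h0.plot()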
+
+mean(axis=0)
+

Gets the mean along the given axis.

+

This is not the true mean of the original samples. It is the best estimated mean using the binned counts multiplied by the axis bin centres.

+
+
Parameters:
+
    +
  • log ('e' or float, optional.) – Take the log of the mean to base “log”

  • +
  • axis (int) – Axis to take the mean along.

  • +
+
+
Returns:
+

out – The means along the axis.

+
+
Return type:
+

geobipy.DataArray

+
+
+
+ +
+
+median(log=None, axis=0)
+

Gets the median for the specified axis.

+
+
Parameters:
+
    +
  • log ('e' or float, optional) – Take the log of the median to a base. ‘e’ if log = ‘e’, or a number e.g. log = 10.

  • +
  • axis (int) – Along which axis to obtain the median.

  • +
+
+
Returns:
+

out – The medians along the specified axis. Has size equal to arr.shape[axis].

+
+
Return type:
+

array_like

+
+
+
+ +
+
+mode(log=None, axis=0)
+

Gets the mode for the specified axis.

+
+
Parameters:
+
    +
  • log ('e' or float, optional) – Take the log of the mode to a base. ‘e’ if log = ‘e’, or a number e.g. log = 10.

  • +
  • axis (int) – Along which axis to obtain the mode.

  • +
+
+
Returns:
+

out – The modes along the specified axis. Has size equal to arr.shape[axis].

+
+
Return type:
+

array_like

+
+
+
+ +
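Continuing the sketch, the binned point estimates can be compared directly; each call returns one estimate per cell along the chosen axis.

binned_mean = H.mean(axis=0)
binned_median = H.median(axis=0)
binned_mode = H.mode(axis=0)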
+
+opacity(percent=95.0, log=None, axis=0)
+

Return an opacity between 0 and 1 based on the difference between credible intervals of the hitmap.

+

Higher credible ranges map to lower opacity.

+
+
Parameters:
+
    +
  • percent (float, optional.) – Confidence percentage.

  • +
  • log ('e' or float, optional.) – If None: Take the difference in credible intervals. +Else: Take the ratio of the credible intervals.

  • +
  • axis (int, optional.) – Along which axis to obtain the interval locations.

  • +
+
+
Returns:
+

out – Opacity along the axis.

+
+
Return type:
+

array_like

+
+
+
+ +
+
+opacity_level(percent=95.0, log=None, axis=0)
+

Get the index along axis 1 from the bottom up that corresponds to the percent opacity

+
+ +
+
+pcolor(**kwargs)
+

Plot like an image

+
+
Parameters:
+
    +
  • alpha (scalar or array_like, optional) – If alpha is scalar, behaves like standard matplotlib alpha and opacity is applied to entire plot +If array_like, each pixel is given an individual alpha value.

  • +
  • log ('e' or float, optional) – Take the log of the colour to a base. ‘e’ if log = ‘e’, and a number e.g. log = 10. +Values in c that are <= 0 are masked.

  • +
  • equalize (bool, optional) – Equalize the histogram of the colourmap so that all colours have an equal amount.

  • +
  • nbins (int, optional) – Number of bins to use for histogram equalization.

  • +
  • xscale (str, optional) – Scale the x axis? e.g. xscale = ‘linear’ or ‘log’

  • +
  • yscale (str, optional) – Scale the y axis? e.g. yscale = ‘linear’ or ‘log’.

  • +
  • flipX (bool, optional) – Flip the X axis

  • +
  • flipY (bool, optional) – Flip the Y axis

  • +
  • grid (bool, optional) – Plot the grid

  • +
  • noColorbar (bool, optional) – Turn off the colour bar, useful if multiple plotting routines are used on the same figure.

  • +
  • trim (bool, optional) – Set the x and y limits to the first and last non zero values along each axis.

  • +
+
+
+
+ +
+
+percentile(percent, log=None, reciprocate=False, axis=0)
+

Gets the median and the credible intervals for the specified axis.

+
+
Parameters:
+
    +
  • percent (float) – Confidence percentage.

  • +
  • log ('e' or float, optional) – Take the log of the credible intervals to a base. ‘e’ if log = ‘e’, or a number e.g. log = 10.

  • +
  • axis (int) – Along which axis to obtain the interval locations.

  • +
+
+
Returns:
+

    +
  • med (array_like) – Contains the medians along the specified axis. Has size equal to arr.shape[axis].

  • +
  • low (array_like) – Contains the lower interval along the specified axis. Has size equal to arr.shape[axis].

  • +
  • high (array_like) – Contains the upper interval along the specified axis. Has size equal to arr.shape[axis].

  • +
+

+
+
+
+ +
+
+plot(overlay=None, **kwargs)
+

Plots the histogram

+
+ +
+
+sample(n_samples, log=None)
+

Generates samples from the histogram.

+

A uniform distribution is used for each bin to generate samples. +The number of samples generated per bin is scaled by the count for that bin using the requested number of samples.

+
+
Parameters:
+

n_samples (int) – Number of samples to generate.

+
+
Returns:
+

out – The samples.

+
+
Return type:
+

geobipy.DataArray

+
+
+
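Continuing the sketch, sample draws approximate realizations back out of the binned counts.

draws = H.sample(n_samples=1000)   # roughly reproduces the distribution captured by the counts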
+ +
+
+transparency(percent=95.0, log=None, axis=0, **kwargs)
+

Return a transparency value between 0 and 1 based on the difference between credible intervals of the hitmap.

+

Higher ranges in credibility are mapped to more transparency.

+
+
Parameters:
+
    +
  • percent (float) – Confidence percentage.

  • +
  • log ('e' or float, optional.) – If None: Take the difference in credible intervals. +Else: Take the ratio of the credible intervals.

  • +
  • axis (int) – Along which axis to obtain the interval locations.

  • +
+
+
Returns:
+

out – Transparency along the axis.

+
+
Return type:
+

array_like

+
+
+
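Opacity and transparency are complementary views of the same credible range; continuing the sketch, both return one value per cell along the chosen axis.

alpha = H.opacity(percent=95.0, axis=0)        # near 1 where the histogram is tightly constrained
trans = H.transparency(percent=95.0, axis=0)   # near 1 where it is poorly constrained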
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/statistics/MvNormalDistribution.html b/docs/content/api/classes/statistics/MvNormalDistribution.html new file mode 100644 index 00000000..413021c9 --- /dev/null +++ b/docs/content/api/classes/statistics/MvNormalDistribution.html @@ -0,0 +1,226 @@ + + + + + + + MvNormal — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

MvNormal

+digraph inheritanceea6c1a6edf { +bgcolor=transparent; +rankdir=LR; +size="8.0, 12.0"; + "ABC" [fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",tooltip="Helper class that provides a standard way to create an ABC using"]; + "MvNormal" [URL="#geobipy.src.classes.statistics.MvNormalDistribution.MvNormal",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Class extension to geobipy.baseDistribution"]; + "baseDistribution" -> "MvNormal" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "baseDistribution" [URL="baseDistribution.html#geobipy.src.classes.statistics.baseDistribution.baseDistribution",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Define an abstract base distribution class"]; + "myObject" -> "baseDistribution" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "myObject" [URL="../core/myObject.html#geobipy.src.classes.core.myObject.myObject",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top"]; + "ABC" -> "myObject" [arrowsize=0.5,style="setlinewidth(0.5)"]; +} +

@MvNormalDistribution +Module defining a multivariate normal distribution with statistical procedures

+
+
+class geobipy.src.classes.statistics.MvNormalDistribution.MvNormal(mean, variance, ndim=None, prng=None, **kwargs)
+

Class extension to geobipy.baseDistribution

+

Handles a multivariate normal distribution. Uses Scipy to evaluate probabilities, +but Numpy to generate random samples since scipy is slow.

+

MvNormal(mean, variance, ndim, prng)

+
+
Parameters:
+
    +
  • mean (scalar or array_like) – Mean(s) for each dimension

  • +
  • variance (scalar or array_like) – Variance for each dimension

  • +
  • ndim (int, optional) – The number of dimensions in the multivariate normal. +Only used if mean and variance are scalars that are constant for all dimensions

  • +
  • prng (numpy.random.RandomState, optional) – A random state to generate random numbers. Required for parallel instantiation.

  • +
+
+
Returns:
+

out – Multivariate normal distribution.

+
+
Return type:
+

MvNormal

+
+
+
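A small construction sketch based on the signature above, using the fully qualified class path shown in the header; the seed and values are arbitrary.

import numpy as np
from geobipy.src.classes.statistics.MvNormalDistribution import MvNormal

prng = np.random.RandomState(0)                       # any RandomState will do
dist = MvNormal(mean=0.0, variance=1.0, ndim=3, prng=prng)
edges = dist.bins(nBins=99, nStd=4.0)                 # discretize mean +- nStd, per the bins docstring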
+
+bins(nBins=99, nStd=4.0, axis=None, relative=False)
+

Discretizes a range given the mean and variance of the distribution

+
+
Parameters:
+
    +
  • nBins (int, optional) – Number of bins to return.

  • +
  • nStd (float, optional) – The bin edges = mean +- nStd * variance.

  • +
  • dim (int, optional) – Get the bins of this dimension, if None, returns bins for all dimensions.

  • +
+
+
Returns:
+

bins – The bin edges.

+
+
Return type:
+

geobipy.StatArray

+
+
+
+ +
+
+property ndim
+

Place Holder for children

+
+ +
+
+pad(N)
+

Pads the mean and variance to the given size +N: Padded size

+
+ +
+
+probability(x, log, axis=None, **kwargs)
+

For a realization x, compute the probability

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/statistics/NormalDistribution.html b/docs/content/api/classes/statistics/NormalDistribution.html new file mode 100644 index 00000000..1fc47d3e --- /dev/null +++ b/docs/content/api/classes/statistics/NormalDistribution.html @@ -0,0 +1,216 @@ + + + + + + + Normal distribution — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Normal distribution

+digraph inheritance0a76c53a1d { +bgcolor=transparent; +rankdir=LR; +size="8.0, 12.0"; + "ABC" [fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",tooltip="Helper class that provides a standard way to create an ABC using"]; + "Normal" [URL="#geobipy.src.classes.statistics.NormalDistribution.Normal",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Univariate normal distribution"]; + "baseDistribution" -> "Normal" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "baseDistribution" [URL="baseDistribution.html#geobipy.src.classes.statistics.baseDistribution.baseDistribution",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Define an abstract base distribution class"]; + "myObject" -> "baseDistribution" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "myObject" [URL="../core/myObject.html#geobipy.src.classes.core.myObject.myObject",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top"]; + "ABC" -> "myObject" [arrowsize=0.5,style="setlinewidth(0.5)"]; +} +

@NormalDistribution +Module defining a normal distribution with statistical procedures

+
+
+class geobipy.src.classes.statistics.NormalDistribution.Normal(mean=0.0, variance=1.0, log=False, prng=None, **kwargs)
+

Univariate normal distribution

+

Normal(mean, variance)

+
+
Parameters:
+
    +
  • mean (numpy.float) – The mean of the distribution

  • +
  • variance (numpy.float) – The variance of the distribution

  • +
+
+
+
+
+bins(nBins=99, nStd=4.0)
+

Discretizes a range given the mean and variance of the distribution

+
+ +
+
+cdf(x)
+

For a realization x, compute the cumulative probability

+
+ +
+
+property ndim
+

Place Holder for children

+
+ +
+
+probability(x, log)
+

For a realization x, compute the probability

+
+ +
+
+rng(size=1)
+

Generate random numbers

+
+
Parameters:
+

N (int or sequence of ints) – Number of samples to generate

+
+
Returns:
+

out

+
+
Return type:
+

numpy.ndarray

+
+
+
+ +
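A short usage sketch combining the methods above (fully qualified import path as documented; values are arbitrary).

from geobipy.src.classes.statistics.NormalDistribution import Normal

dist = Normal(mean=0.0, variance=2.0)
samples = dist.rng(size=1000)                # draw random numbers from the distribution
p = dist.probability(samples, log=False)     # evaluate the density at each sample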
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/statistics/OrderStatistics.html b/docs/content/api/classes/statistics/OrderStatistics.html new file mode 100644 index 00000000..4424a134 --- /dev/null +++ b/docs/content/api/classes/statistics/OrderStatistics.html @@ -0,0 +1,168 @@ + + + + + + + Order Statistics — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Order Statistics

+digraph inheritance76de29de59 { +bgcolor=transparent; +rankdir=LR; +size="8.0, 12.0"; + "ABC" [fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",tooltip="Helper class that provides a standard way to create an ABC using"]; + "Order" [URL="#geobipy.src.classes.statistics.OrderStatistics.Order",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Class defining Order Statistics"]; + "baseDistribution" -> "Order" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "baseDistribution" [URL="baseDistribution.html#geobipy.src.classes.statistics.baseDistribution.baseDistribution",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Define an abstract base distribution class"]; + "myObject" -> "baseDistribution" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "myObject" [URL="../core/myObject.html#geobipy.src.classes.core.myObject.myObject",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top"]; + "ABC" -> "myObject" [arrowsize=0.5,style="setlinewidth(0.5)"]; +} +

@OrderStatistics +Module defining modified order statistics as in Malinverno (2002), Parsimonious Bayesian Markov chain Monte Carlo inversion in a nonlinear geophysical problem, Geophysical Journal International

+
+
+class geobipy.src.classes.statistics.OrderStatistics.Order(denominator, **kwargs)
+

Class defining Order Statistics +Specific application to Bayesian inversion of EM data

+
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/statistics/UniformDistribution.html b/docs/content/api/classes/statistics/UniformDistribution.html new file mode 100644 index 00000000..2c1f19a5 --- /dev/null +++ b/docs/content/api/classes/statistics/UniformDistribution.html @@ -0,0 +1,198 @@ + + + + + + + Uniform distribution — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Uniform distribution

+digraph inheritancea4de341619 { +bgcolor=transparent; +rankdir=LR; +size="8.0, 12.0"; + "ABC" [fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",tooltip="Helper class that provides a standard way to create an ABC using"]; + "Uniform" [URL="#geobipy.src.classes.statistics.UniformDistribution.Uniform",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Class defining a uniform distribution"]; + "baseDistribution" -> "Uniform" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "baseDistribution" [URL="baseDistribution.html#geobipy.src.classes.statistics.baseDistribution.baseDistribution",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Define an abstract base distribution class"]; + "myObject" -> "baseDistribution" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "myObject" [URL="../core/myObject.html#geobipy.src.classes.core.myObject.myObject",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top"]; + "ABC" -> "myObject" [arrowsize=0.5,style="setlinewidth(0.5)"]; +} +

@UniformDistribution +Module defining a uniform distribution with statistical procedures

+
+
+class geobipy.src.classes.statistics.UniformDistribution.Uniform(min=0.0, max=1.0, log=False, prng=None)
+

Class defining a uniform distribution

+
+
+bins(nBins=99, dim=None)
+

Discretizes a range given the min and max of the distribution

+
+
Parameters:
+
    +
  • nBins (int, optional) – Number of bins to return.

  • +
  • dim (int, optional) – Get the bins of this dimension, if None, returns bins for all dimensions.

  • +
+
+
Returns:
+

bins – The bin edges.

+
+
Return type:
+

array_like

+
+
+
+ +
+
+cdf(x, log=False)
+

Get the value of the cumulative distribution function for x

+
+ +
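A short sketch of the class above (fully qualified import path as documented; values are arbitrary).

from geobipy.src.classes.statistics.UniformDistribution import Uniform

dist = Uniform(min=0.0, max=10.0)
edges = dist.bins(nBins=99)   # evenly spaced bin edges between min and max
c = dist.cdf(5.0)             # cumulative probability at x = 5.0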
+
+property ndim
+

Place Holder for children

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/statistics/baseDistribution.html b/docs/content/api/classes/statistics/baseDistribution.html new file mode 100644 index 00000000..19f2316c --- /dev/null +++ b/docs/content/api/classes/statistics/baseDistribution.html @@ -0,0 +1,186 @@ + + + + + + + baseDistribution — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

baseDistribution

+digraph inheritance78d6cc9397 { +bgcolor=transparent; +rankdir=LR; +size="8.0, 12.0"; + "ABC" [fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",tooltip="Helper class that provides a standard way to create an ABC using"]; + "baseDistribution" [URL="#geobipy.src.classes.statistics.baseDistribution.baseDistribution",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Define an abstract base distribution class"]; + "myObject" -> "baseDistribution" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "myObject" [URL="../core/myObject.html#geobipy.src.classes.core.myObject.myObject",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top"]; + "ABC" -> "myObject" [arrowsize=0.5,style="setlinewidth(0.5)"]; +} +
+
+class geobipy.src.classes.statistics.baseDistribution.baseDistribution(prng)
+

Define an abstract base distribution class

+
+
+bins()
+

Place Holder for children

+
+ +
+
+deepcopy()
+

Place holder for children

+
+ +
+
+property moment
+

Place Holder for children

+
+ +
+
+property ndim
+

Place Holder for children

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/statistics/statistics.html b/docs/content/api/classes/statistics/statistics.html new file mode 100644 index 00000000..84ea7e9e --- /dev/null +++ b/docs/content/api/classes/statistics/statistics.html @@ -0,0 +1,157 @@ + + + + + + + Statistics classes — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/docs/content/api/classes/system/CircularLoop.html b/docs/content/api/classes/system/CircularLoop.html new file mode 100644 index 00000000..bb7e9c68 --- /dev/null +++ b/docs/content/api/classes/system/CircularLoop.html @@ -0,0 +1,236 @@ + + + + + + + Circular Loop — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Circular Loop

+digraph inheritancea1db000af0 { +bgcolor=transparent; +rankdir=LR; +size="8.0, 12.0"; + "ABC" [fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",tooltip="Helper class that provides a standard way to create an ABC using"]; + "CircularLoop" [URL="#geobipy.src.classes.system.CircularLoop.CircularLoop",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Defines a circular loop for EM acquisition systems"]; + "EmLoop" -> "CircularLoop" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "EmLoop" [URL="EmLoop.html#geobipy.src.classes.system.EmLoop.EmLoop",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Defines a loop in an EM system e.g. transmitter or reciever"]; + "Point" -> "EmLoop" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "ABC" -> "EmLoop" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "Point" [URL="../pointcloud/Point.html#geobipy.src.classes.pointcloud.Point.Point",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="3D Point Cloud with x,y,z co-ordinates"]; + "myObject" -> "Point" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "myObject" [URL="../core/myObject.html#geobipy.src.classes.core.myObject.myObject",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top"]; + "ABC" -> "myObject" [arrowsize=0.5,style="setlinewidth(0.5)"]; +} +
+
+class geobipy.src.classes.system.CircularLoop.CircularLoop(x=None, y=None, z=None, elevation=None, orientation=None, moment=None, pitch=None, roll=None, yaw=None, radius=None, **kwargs)
+

Defines a circular loop for EM acquisition systems

+

CircularLoop(orient, moment, x, y, z, pitch, roll, yaw, radius)

+
+
Parameters:
+
    +
  • orient (str) – Orientation of the loop, ‘x’ or ‘z’

  • +
  • moment (int) – Moment of the loop

  • +
  • x (float) – X location of the loop relative to an observation location

  • +
  • y (float) – Y location of the loop relative to an observation location

  • +
  • z (float) – Z location of the loop relative to an observation location

  • +
  • pitch (float) – Pitch of the loop

  • +
  • roll (float) – Roll of the loop

  • +
  • yaw (float) – Yaw of the loop

  • +
  • radius (float) – Radius of the loop

  • +
+
+
+
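A single-loop construction sketch following the signature above: a z-oriented transmitter of unit moment at the observation location. All values are illustrative; the top-level import mirrors the data examples elsewhere in this documentation.

from geobipy import CircularLoop

transmitter = CircularLoop(orientation='z', moment=1,
                           x=0.0, y=0.0, z=0.0,
                           pitch=0.0, roll=0.0, yaw=0.0,
                           radius=1.0)
print(transmitter.summary)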
+
+Bcast(world, root=0)
+

Broadcast using MPI

+
+
Parameters:
+

world (mpi4py.MPI.COMM_WORLD) – An MPI communicator

+
+
Returns:
+

out – A CircularLoop on each core

+
+
Return type:
+

CircularLoop

+
+
+
+ +
+
+append(other)
+

Append pointclouds together

+
+
Parameters:
+

other (geobipy.PointCloud3D) – 3D pointcloud

+
+
+
+ +
+
+createHdf(parent, name, withPosterior=True, add_axis=None, fillvalue=None)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+classmethod fromHdf(grp, index=None)
+

Reads in the object from a HDF file

+
+ +
+
+property summary
+

Print a summary

+
+ +
+
+writeHdf(parent, name, withPosterior=True, index=None)
+

Write the StatArray to an HDF object +parent: Upper hdf file or group +myName: object hdf name. Assumes createHdf has already been called +create: optionally create the data set as well before writing

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/system/EmLoop.html b/docs/content/api/classes/system/EmLoop.html new file mode 100644 index 00000000..21ee9a8e --- /dev/null +++ b/docs/content/api/classes/system/EmLoop.html @@ -0,0 +1,254 @@ + + + + + + + EmLoop — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+ +
+
+ +
+

EmLoop

+digraph inheritancef4d5cd9044 { +bgcolor=transparent; +rankdir=LR; +size="8.0, 12.0"; + "ABC" [fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",tooltip="Helper class that provides a standard way to create an ABC using"]; + "EmLoop" [URL="#geobipy.src.classes.system.EmLoop.EmLoop",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Defines a loop in an EM system e.g. transmitter or reciever"]; + "Point" -> "EmLoop" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "ABC" -> "EmLoop" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "Point" [URL="../pointcloud/Point.html#geobipy.src.classes.pointcloud.Point.Point",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="3D Point Cloud with x,y,z co-ordinates"]; + "myObject" -> "Point" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "myObject" [URL="../core/myObject.html#geobipy.src.classes.core.myObject.myObject",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top"]; + "ABC" -> "myObject" [arrowsize=0.5,style="setlinewidth(0.5)"]; +} +
+
+class geobipy.src.classes.system.EmLoop.EmLoop(x=None, y=None, z=None, elevation=None, orientation=None, moment=None, pitch=None, roll=None, yaw=None, **kwargs)
+

Defines a loop in an EM system, e.g. transmitter or receiver

+

This is an abstract base class and should not be instantiated

+

EmLoop()

+
+
+append(other)
+

Append pointclouds together

+
+
Parameters:
+

other (geobipy.PointCloud3D) – 3D pointcloud

+
+
+
+ +
+
+createHdf(parent, name, withPosterior=True, add_axis=None, fillvalue=None)
+

Create the hdf group metadata in file +parent: HDF object to create a group inside +myName: Name of the group

+
+ +
+
+classmethod fromHdf(grp, index=None)
+

Reads in the object from a HDF file

+
+ +
+
+perturb()
+

Propose a new point given the attached proposal distributions

+
+ +
+
+property probability
+

Evaluate the probability for the EM data point given the specified attached priors

+
+
Parameters:
+
    +
  • rEerr (bool) – Include the relative error when evaluating the prior

  • +
  • aEerr (bool) – Include the additive error when evaluating the prior

  • +
  • height (bool) – Include the elevation when evaluating the prior

  • +
  • calibration (bool) – Include the calibration parameters when evaluating the prior

  • +
  • verbose (bool) – Return the components of the probability, i.e. the individually evaluated priors

  • +
+
+
Returns:
+

out – The evaluation of the probability using all assigned priors

+
+
Return type:
+

float64

+
+
+

Notes

+

For each boolean, the associated prior must have been set.

+
+
Raises:
+

TypeError – If a prior has not been set on a requested parameter

+
+
+
+ +
+
+set_pitch_posterior()
+
+ +
+
+set_roll_posterior()
+
+ +
+
+set_yaw_posterior()
+
+ +
+
+property summary
+

Print a summary

+
+ +
+
+writeHdf(parent, name, withPosterior=True, index=None)
+

Write the StatArray to an HDF object +parent: Upper hdf file or group +myName: object hdf name. Assumes createHdf has already been called +create: optionally create the data set as well before writing

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/system/FdemSystem.html b/docs/content/api/classes/system/FdemSystem.html new file mode 100644 index 00000000..eeed89d1 --- /dev/null +++ b/docs/content/api/classes/system/FdemSystem.html @@ -0,0 +1,219 @@ + + + + + + + Frequency domain system — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Frequency domain system

+digraph inheritance3626e3b5d4 { +bgcolor=transparent; +rankdir=LR; +size="8.0, 12.0"; + "ABC" [fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",tooltip="Helper class that provides a standard way to create an ABC using"]; + "FdemSystem" [URL="#geobipy.src.classes.system.FdemSystem.FdemSystem",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Defines a Frequency Domain ElectroMagnetic acquisition system"]; + "myObject" -> "FdemSystem" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "myObject" [URL="../core/myObject.html#geobipy.src.classes.core.myObject.myObject",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top"]; + "ABC" -> "myObject" [arrowsize=0.5,style="setlinewidth(0.5)"]; +} +

@FdemSystem_Class +Module describing a frequency domain EM acquisition system

+
+
+class geobipy.src.classes.system.FdemSystem.FdemSystem(frequencies, transmitter, receiver, n_frequencies=None)
+

Defines a Frequency Domain ElectroMagnetic acquisition system

+
+
+Bcast(world, root=0)
+

Broadcast the FdemSystem using MPI

+
+ +
+
+property component_id
+

For each coil orientation pair, adds the index of the frequency to the appropriate list +e.g. two coils at the i-th frequency with ‘x’ as their orientation cause i to be added to the ‘xx’ list.

+
+ +
+
+fileInformation()
+

Description of the system file.

+
+ +
+
+classmethod fromHdf(grp)
+

Reads the object from a HDF file

+
+ +
+
+classmethod read(filename)
+

Read in a file containing the system information

+

The system file is structured using columns with the first line containing header information +Each subsequent row contains the information for each measurement frequency +freq tor tmom tx ty tz ror rmom rx ry rz +378 z 1 0 0 0 z 1 7.93 0 0 +1776 z 1 0 0 0 z 1 7.91 0 0 +…

+

where tor and ror are the orientations of the transmitter/reciever loops [x or z]. +tmom and rmom are the moments of the loops. +t/rx,y,z are the loop offsets from the observation locations in the data file.

+
+ +
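A reading sketch; the file name refers to one of the example system files mentioned in the data examples, so substitute your own path.

from geobipy import FdemSystem

system = FdemSystem.read('FdemSystem2.stm')   # path to a frequency-domain system file
print(system.summary)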
+
+property summary
+

Summary of the FdemSystem

+
+ +
+
+property tensor_id
+

For each coil orientation pair, adds the index of the frequency to the appropriate list +e.g. two coils at the i-th frequency with ‘x’ as their orientation cause i to be added to the ‘xx’ list.

+
+ +
+
+toHdf(h5obj, name)
+

Write the object to a HDF file

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/system/TdemSystem.html b/docs/content/api/classes/system/TdemSystem.html new file mode 100644 index 00000000..4d5152ab --- /dev/null +++ b/docs/content/api/classes/system/TdemSystem.html @@ -0,0 +1,200 @@ + + + + + + + Time domain system — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Time domain system

+digraph inheritance997b7fa283 { +bgcolor=transparent; +rankdir=LR; +size="8.0, 12.0"; + "ABC" [fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",tooltip="Helper class that provides a standard way to create an ABC using"]; + "TDAEMSystem" [fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",tooltip="TDAEMSystem Class"]; + "TdemSystem" [URL="#geobipy.src.classes.system.TdemSystem.TdemSystem",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top",tooltip="Initialize a Time domain system class"]; + "TdemSystem_GAAEM" -> "TdemSystem" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "TdemSystem_GAAEM" [fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",tooltip="Initialize a Time domain system class"]; + "myObject" -> "TdemSystem_GAAEM" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "TDAEMSystem" -> "TdemSystem_GAAEM" [arrowsize=0.5,style="setlinewidth(0.5)"]; + "myObject" [URL="../core/myObject.html#geobipy.src.classes.core.myObject.myObject",fillcolor=white,fontname="Vera Sans, DejaVu Sans, Liberation Sans, Arial, Helvetica, sans",fontsize=10,height=0.25,shape=box,style="setlinewidth(0.5),filled",target="_top"]; + "ABC" -> "myObject" [arrowsize=0.5,style="setlinewidth(0.5)"]; +} +
+
+class geobipy.src.classes.system.TdemSystem.TdemSystem(offTimes=None, transmitterLoop=None, receiverLoop=None, loopOffset=None, waveform=None, offTimeFilters=None, components=['z'], system_filename=None)
+

Initialize a Time domain system class

+

TdemSystem(systemFileName)

+
+
Parameters:
+

systemFileName (str) – The system file to read from

+
+
Returns:
+

out – A time domain system class

+
+
Return type:
+

TdemSystem

+
+
+
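An instantiation sketch based on the signature above; the import path and system file name are assumptions for illustration and require the gatdaem1d backend to be installed.

from geobipy.src.classes.system.TdemSystem import TdemSystem

T = TdemSystem(system_filename='SkytemHM.stm')   # hypothetical .stm time-domain system file
print(T.off_time)                                # the measurement time gates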
+
+property get_modellingTimes
+

Generates regularly log-spaced times that cover both the waveform and measurement times.

+
+
Parameters:
+
    +
  • waveformTimes (array_like) – Times of the waveform change points

  • +
  • measurementTimes (array_like) – Measurement times for the system

  • +
+
+
Returns:
+

out – Times spanning both the waveform and measurement times

+
+
Return type:
+

array_like

+
+
+
+ +
+
+property off_time
+

Time windows.

+
+ +
+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/api/classes/system/system.html b/docs/content/api/classes/system/system.html new file mode 100644 index 00000000..23c912e3 --- /dev/null +++ b/docs/content/api/classes/system/system.html @@ -0,0 +1,149 @@ + + + + + + + System classes — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

System classes

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/getting_started/getting_started.html b/docs/content/getting_started/getting_started.html new file mode 100644 index 00000000..d13cf45f --- /dev/null +++ b/docs/content/getting_started/getting_started.html @@ -0,0 +1,128 @@ + + + + + + + Getting Started — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Getting Started

+ +
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/content/getting_started/installation.html b/docs/content/getting_started/installation.html new file mode 100644 index 00000000..c9e5f509 --- /dev/null +++ b/docs/content/getting_started/installation.html @@ -0,0 +1,274 @@ + + + + + + + Installing GeoBIPy — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Installing GeoBIPy

+

First things first, install a Python 3.5+ distribution. This is the minimum version that we have tested with. +You will also need to install Numpy and a Fortran compiler.

+

This package has a few requirements depending on what you wish to do with it.

+

If you require a serial version of the code, see Installing a serial version of GeoBIPy.

+

If you require a parallel implementation, you will need to install an MPI library, and Python’s mpi4py module. See Installing MPI and mpi4py.

+

If you require parallel file reading and writing, you will also need to install an MPI enabled HDF5 library, as well as Python’s h5py wrapper to that library. It is important to read the notes below on installing h5py on top of a parallel HDF library. The traditional “pip install h5py” will not work correctly. See Installing parallel HDF5 and h5py to do this correctly.

+

If you need to install the parallel IO version of the code, we would recommend that you start with a clean install of Python. This makes it easier to determine whether you have installed and linked the correct version of the parallel HDF5 library.

+

There are two versions when installing GeoBIPy, a serial version, and a parallel version. Since GeoBIPy uses a Fortran backend for forward modelling frequency domain data, you will need to have a Fortran compiler installed. Make sure that the compiler can handle derived data types since I make use of object oriented programming in Fortran.

+
+

Installing a serial version of GeoBIPy

+

This is the easiest installation and provides access to a serial implementation of the code.

+

Simply clone the git repository, navigate to the package folder that contains the setup.py file, and type “pip install .”

+

You should then be able to import modules from geobipy. For this type of installation mpi will not need to be installed, and the serial version of h5py will suffice i.e. the standard “pip install h5py” is fine. h5py will automatically be installed during the install of GeoBIPy since it is a dependency.

+

Side note: Let’s say you ran a production run on a parallel machine with MPI and parallel HDF capabilities. You generated all the results, copied them back to your local machine, and wish to make plots and images. You will only need to install the serial version of the code on your local machine to do this.

+
+
+

Installing a parallel version of GeoBIPy

+

Installing the parallel version of the code is a little trickier due to the dependencies necessary between the OpenMPI and/or HDF libraries, and how Python’s mpi4py and h5py wrap around those.

+
+

Installing MPI and mpi4py

+

To run this code in parallel you will need both an MPI library and the python wrapper, mpi4py. You must install MPI first before mpi4py.

+
+

MPI

+

If you are installing GeoBIPy on a parallel machine, I would think that you have access to prebuilt MPI libraries. +If you are on a local laptop, you will need to install one.

+
+
+

mpi4py

+

At this point, if you have an mpi4py module already installed, please remove it (you can check with “pip list”). +If you started with a clean installation you should not have to worry about this. +To test whether a new install of mpi4py will see the mpi library you have, just type “which mpicc”. +The path that you see should point to the implementation that you want mpi4py to link to. +Make sure you are about to install mpi4py to the correct python installation. +If you type ‘which python’ it should return the path to the correct python distribution. +If you are using environments, make sure you have activated the correct one.

+

Next, use “env MPICC=<Your mpicc compiler> python -m pip install mpi4py”. This last option is very important; without it, pip might install its own MPI library called MPICH2. +I would try to avoid this because if you need to install the HDF5 library you will need to know which directories to link to (see Installing parallel HDF5 and h5py).

+

At the end of the day, h5py needs to communicate with both the correct HDF5 library and mpi4py, and both of those need to communicate with the same MPI library.

+
+
+
+

Installing parallel HDF5 and h5py

+

If a parallel HDF5 library is not available, you will need to install one. First make sure you follow Installing MPI and mpi4py so that an MPI library is available to you. You must install a HDF5 library first before h5py.

+
+

HDF5

+

When you install HDF5, make sure that the correct MPI library can be seen by typing “which mpicc”. When you configure the HDF5 library, be sure to use the –enable-parallel option.

+
+
+

h5py

+

Once the HDF5 library is installed, you will need to install a parallel-enabled h5py package.

+

Make sure you are about to install h5py to the correct python installation. If you type ‘which python’ it should return the path to the correct python installation.

+

First check the following

+
    +
  • HDF5_DIR = Get the path to the HDF5 installation.

  • +
  • Check that ‘which mpicc’ returns the correct version of an mpi enabled compiler. This needs to point to the same MPI library that mpi4py was installed on top of.

  • +
  • Do the following, replacing items in < > with your mpicc compiler and your HDF5 install directory.

  • +
+

This will install h5py and compile the source.

+
CC=<Your mpicc compiler> HDF5_MPI="ON" HDF5_DIR=<Your HDF5_DIR> pip install --no-binary=h5py h5py
+
+
+
+
+
+
+

Installing the time domain forward modeller

+

Ross Brodie at Geoscience Australia has written a great forward modeller, gatdaem1D, in C++ with a python interface. +You can obtain that code here at the GA repository

+

Go ahead and “git clone” that repository.

+

These instructions only describe how to install Ross’ forward modeller, but it is part of a larger code base for inversion. +If you wish to install his entire package, please follow his instructions.

+
+

Prerequisites

+

To compile his forward modeller, you will need a C++ compiler and FFTW

+

On a Mac, installing these two items is easy if you use a package manager such as homebrew

+

If you use brew, simply do the following

+
brew install gcc
+brew install fftw
+
+
+

If you do not have brew, or use a package manager, you can install fftw from source instead.

+

Download fftw-3.3.7.tar.gz from the FFTW downloads page.

+

Untar the folder and install fftw using the following.

+
tar -zxvf fftw-3.3.7.tar.gz
+cd fftw-3.3.7
+mkdir build
+cd build
+../configure --prefix=path-to-install-to/fftw-3.3.7 --enable-threads
+make
+make install
+
+
+

where, path-to-install-to is the location where you want fftw to be installed.

+
+
+

Compile the gatdaem1d shared library

+

Next, within the gatdaem1d folder, navigate to the makefiles folder and modify the top part of the file “gatdaem1d_python.make” to the following

+
SHELL = /bin/sh
+.SUFFIXES:
+.SUFFIXES: .cpp .o
+cxx = g++
+cxxflags = -std=c++11 -O3 -Wall -fPIC
+FFTW_DIR = path-to-fftw
+
+ldflags    += -shared
+bindir     = ../python/gatdaem1d
+
+srcdir     = ../src
+objdir     = ./obj
+includes   = -I$(srcdir) -I$(FFTW_DIR)/include
+libs       = -L$(FFTW_DIR)/lib -lfftw3
+library    = $(bindir)/gatdaem1d.so
+
+
+

You can find out where brew installed fftw by typing

+
brew info fftw
+
+
+

Which may return something like “/usr/local/Cellar/fftw/3.3.5”

+

In this case, path-to-fftw is “/usr/local/Cellar/fftw/3.3.5”

+

If you installed fftw from source, then path-to-fftw is that install path.

+

Next, type the following to compile the gatdaem1d c++ code.

+
make -f gatdaem1d_python.make
+
+
+
+
+

Installing the Python Bindings

+

Finally, to install the python wrapper to gatdaem1d, navigate to the python folder of the gatdaem1d repository. +Type,

+
pip install .
+
+
+

You should now have access to the time domain forward modeller within geobipy.

+
+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/examples/Data/plot_frequency_dataset.html b/docs/examples/Data/plot_frequency_dataset.html new file mode 100644 index 00000000..431dea8b --- /dev/null +++ b/docs/examples/Data/plot_frequency_dataset.html @@ -0,0 +1,575 @@ + + + + + + + Frequency domain dataset — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

Frequency domain dataset

+
import matplotlib.pyplot as plt
+from geobipy import CircularLoop
+from geobipy import FdemSystem
+from geobipy import FdemData
+import h5py
+import numpy as np
+
+
+
+

Defining data using a frequency domain system

+

We can start by defining the frequencies, transmitter loops, and receiver loops. +For each frequency we need to define a pair of loops.

+
frequencies = np.asarray([395.0, 822.0, 3263.0, 8199.0, 38760.0, 128755.0])
+
+
+

Transmitter positions are defined relative to the observation locations in the data +This is usually a constant offset for all data points.

+
transmitters = CircularLoop(orientation=['z','z','x','z','z','z'],
+                             moment=np.r_[1, 1, -1, 1, 1, 1],
+                             x = np.r_[0,0,0,0,0,0],
+                             y = np.r_[0,0,0,0,0,0],
+                             z = np.r_[0,0,0,0,0,0],
+                             pitch = np.r_[0,0,0,0,0,0],
+                             roll = np.r_[0,0,0,0,0,0],
+                             yaw = np.r_[0,0,0,0,0,0],
+                             radius = np.r_[1,1,1,1,1,1])
+
+
+

Receiver positions are defined relative to the transmitter

+
receivers = CircularLoop(orientation=['z','z','x','z','z','z'],
+                             moment=np.r_[1, 1, -1, 1, 1, 1],
+                             x = np.r_[7.91, 7.91, 9.03, 7.91, 7.91, 7.89],
+                             y = np.r_[0,0,0,0,0,0],
+                             z = np.r_[0,0,0,0,0,0],
+                             pitch = np.r_[0,0,0,0,0,0],
+                             roll = np.r_[0,0,0,0,0,0],
+                             yaw = np.r_[0,0,0,0,0,0],
+                             radius = np.r_[1,1,1,1,1,1])
+
+# Instantiate the system for the data
+system = FdemSystem(frequencies=frequencies, transmitter=transmitters, receiver=receivers)
+
+# Create some data with random co-ordinates
+x = np.random.randn(100)
+y = np.random.randn(100)
+z = np.random.randn(100)
+
+data = FdemData(x=x, y=-y, z=z, system = system)
+
+
+
+
+

Reading in the Data

+

Of course measured field data is stored on disk. So instead we can read data from file.

+
dataFolder = "..//..//supplementary//data//"
+# The data file name
+dataFile = dataFolder + 'Resolve2.txt'
+# The EM system file name
+systemFile = dataFolder + 'FdemSystem2.stm'
+
+
+

Read in a data set from file.

+
FD1 = FdemData.read_csv(dataFile, systemFile)
+
+
+

Take a look at the channel names

+
for name in FD1.channel_names:
+    print(name)
+
+# #%%
+# # Get data points by slicing
+# FDa = FD1[10:]
+# FD1 = FD1[:10]
+
+# #%%
+# # Append data sets together
+# FD1.append(FDa)
+
+
+# #%%
+# # Plot the locations of the data points
+# plt.figure(figsize=(8,6))
+# _ = FD1.scatter2D();
+
+# #%%
+# # Plot all the data along the specified line
+# plt.figure(figsize=(8,6))
+# _ = FD1.plotLine(30010.0, log=10);
+
+# #%%
+# # Or, plot specific channels in the data
+# plt.figure(figsize=(8,6))
+# _ = FD1.plot(channels=[0,11,8], log=10, linewidth=0.5);
+
+
+
In_Phase 380.0
+In_Phase 1776.0
+In_Phase 3345.0
+In_Phase 8171.0
+In_Phase 41020.0
+In_Phase 129550.0
+Quadrature 380.0
+Quadrature 1776.0
+Quadrature 3345.0
+Quadrature 8171.0
+Quadrature 41020.0
+Quadrature 129550.0
+
+
+

Read in a second data set

+
FD2 = FdemData.read_csv(dataFilename=dataFolder + 'Resolve1.txt', system=dataFolder + 'FdemSystem1.stm')
+
+
+
Warning: Your data contains values that are <= 0.0
+
+
+

We can create maps of the elevations in two separate figures

+
plt.figure(figsize=(8,6))
+_ = FD1.map(dx=50.0, dy=50.0, mask = 200.0)
+plt.axis('equal');
+
+
+plot frequency dataset
surface [WARNING]: 66659 unusable points were supplied; these will be ignored.
+surface [WARNING]: You should have pre-processed the data with block-mean, -median, or -mode.
+surface [WARNING]: Check that previous processing steps write results with enough decimals.
+surface [WARNING]: Possibly some data were half-way between nodes and subject to IEEE 754 rounding.
+
+(np.float64(584494.28), np.float64(590194.28), np.float64(4639054.24), np.float64(4661854.24))
+
+
+
plt.figure(figsize=(8,6))
+_ = FD2.map(dx=50.0, dy=50.0, mask = 200.0)
+plt.axis('equal');
+
+
+plot frequency dataset
surface [WARNING]: 123487 unusable points were supplied; these will be ignored.
+surface [WARNING]: You should have pre-processed the data with block-mean, -median, or -mode.
+surface [WARNING]: Check that previous processing steps write results with enough decimals.
+surface [WARNING]: Possibly some data were half-way between nodes and subject to IEEE 754 rounding.
+
+(np.float64(662822.398), np.float64(668372.398), np.float64(4560028.655), np.float64(4600678.655))
+
+
+

Or, we can plot both data sets in one figure to see their positions relative +to each other.

+

In this case, I use a 2D scatter plot of the data point co-ordinates, and pass +one of the channels as the colour.

+
plt.figure(figsize=(8,6))
+_ = FD1.scatter2D(s=1.0, c=FD1.data[:, 0])
+_ = FD2.scatter2D(s=1.0, c=FD2.data[:, 0], cmap='jet');
+
+
+plot frequency dataset

Or, interpolate the values to create a gridded “map”. mapChannel will +interpolate the specified channel number.

+
plt.figure(figsize=(8,6))
+_ = FD1.mapData(channel=3, system=0, dx=200, dy=200, mask=250)
+plt.axis('equal');
+
+
+In_Phase 8171.0
surface [WARNING]: 70336 unusable points were supplied; these will be ignored.
+surface [WARNING]: You should have pre-processed the data with block-mean, -median, or -mode.
+surface [WARNING]: Check that previous processing steps write results with enough decimals.
+surface [WARNING]: Possibly some data were half-way between nodes and subject to IEEE 754 rounding.
+
+(np.float64(584419.28), np.float64(590219.28), np.float64(4638979.24), np.float64(4661979.24))
+
+
+

Export the data to VTK

+
FD1.to_vtk('FD_one.vtk')
+# FD2.to_vtk('FD_two.vtk')
+
+
+
+
+

Obtain a line from the data set

+

Take a look at the line numbers in the dataset

+
print(np.unique(FD1.lineNumber))
+
+
+
[30010 30020 30030 ... 30100 39010 39020]
+
+
+
L = FD1.line(30010.0)
+
+
+

A summary will now show the properties of the line.

+
print(L.summary)
+
+
+
FdemData
+x:
+|   StatArray
+|   Name:   Easting (m)
+|   Address:['0x156368350']
+|   Shape:  (6710,)
+|   Values: [586852.29 586852.23 586852.17 ... 586123.57 586123.2  586122.82]
+|   Min:    586122.82
+|   Max:    586852.29
+|   has_posterior: False
+
+y:
+|   StatArray
+|   Name:   Northing (m)
+|   Address:['0x156369ed0']
+|   Shape:  (6710,)
+|   Values: [4639119.38 4639122.68 4639125.98 ... 4661765.26 4661768.84 4661772.42]
+|   Min:    4639119.38
+|   Max:    4661772.42
+|   has_posterior: False
+
+z:
+|   StatArray
+|   Name:   Height (m)
+|   Address:['0x1563681d0']
+|   Shape:  (6710,)
+|   Values: [36.115 36.498 36.835 ... 27.799 27.704 27.601]
+|   Min:    23.830000000000002
+|   Max:    50.567
+|   has_posterior: False
+
+elevation:
+|   StatArray
+|   Name:   Elevation (m)
+|   Address:['0x15468a450']
+|   Shape:  (6710,)
+|   Values: [1246.84 1246.71 1246.61 ... 1337.94 1337.96 1338.02]
+|   Min:    1213.18
+|   Max:    1338.02
+|   has_posterior: False
+
+channel names:
+|   In_Phase 380.0, In_Phase 1776.0, In_Phase 3345.0, In_Phase 8171.0, In_Phase 41020.0,
+|   In_Phase 129550.0, Quadrature 380.0, Quadrature 1776.0, Quadrature 3345.0, Quadrature 8171.0,
+|   Quadrature 41020.0, Quadrature 129550.0
+data:
+|   DataArray
+|   Name:   Data (ppm)
+|   Address:['0x17eb61950']
+|   Shape:  (80520,)
+|   Values: [145.3 435.8 260.6 ... 749.2 976.5 928.3]
+|   Min:    37.7
+|   Max:    3726.9
+
+predicted data:
+|   DataArray
+|   Name:   Predicted Data (ppm)
+|   Address:['0x17eb61950']
+|   Shape:  (80520,)
+|   Values: [0. 0. 0. ... 0. 0. 0.]
+|   Min:    0.0
+|   Max:    0.0
+
+std:
+|   DataArray
+|   Name:   std (ppm)
+|   Address:['0x17eb604d0']
+|   Shape:  (80520,)
+|   Values: [1.453 4.358 2.606 ... 7.492 9.765 9.283]
+|   Min:    0.37700000000000006
+|   Max:    37.269
+
+line number:
+|   DataArray
+|   Name:   Line number
+|   Address:['0x17ec01150']
+|   Shape:  (6710,)
+|   Values: [30010. 30010. 30010. ... 30010. 30010. 30010.]
+|   Min:    30010.0
+|   Max:    30010.0
+
+fiducial:
+|   DataArray
+|   Name:   Fiducial
+|   Address:['0x156368250']
+|   Shape:  (6710,)
+|   Values: [30000 30000 30000 ... 30670 30670 30670]
+|   Min:    30000
+|   Max:    30670
+
+relative error:
+|   DataArray
+|   Name:   Relative error (%)
+|   Address:['0x15468ad50']
+|   Shape:  (6710, 1)
+|   Values: [[0.01]
+|    [0.01]
+|    [0.01]
+|    ...
+|    [0.01]
+|    [0.01]
+|    [0.01]]
+|   Min:    0.01
+|   Max:    0.01
+
+additive error:
+|   DataArray
+|   Name:   Additive error (ppm)
+|   Address:['0x15468ac50']
+|   Shape:  (6710, 1)
+|   Values: [[0.]
+|    [0.]
+|    [0.]
+|    ...
+|    [0.]
+|    [0.]
+|    [0.]]
+|   Min:    0.0
+|   Max:    0.0
+
+
+

And we can scatter2D the points in the line.

+
plt.figure(figsize=(8,6))
+_ = L.scatter2D();
+
+
+plot frequency dataset

We can specify the axis along which to plot. +xAxis can be index, x, y, z, r2d, r3d

+
plt.figure(figsize=(8,6))
+_ = FD1.plot_data(channels=np.r_[0, 11, 8], log=10, linewidth=0.5);
+
+with h5py.File('fdem.h5', 'w') as f:
+    FD1.createHdf(f, 'fdem')
+    FD1.writeHdf(f, 'fdem')
+
+with h5py.File('fdem.h5', 'r') as f:
+    FD3 = FdemData.fromHdf(f['fdem'])
+
+with h5py.File('fdem.h5', 'r') as f:
+    fdp = FdemData.fromHdf(f['fdem'], index=0)
+
+
+# #%%
+# # Obtain a single datapoint from the data set
+# # +++++++++++++++++++++++++++++++++++++++++++
+# #
+# # Checkout :ref:`Frequency domain datapoint` for an example
+# # about how to use a datapoint once it is instantiated.
+# dp = FD1.datapoint(0)
+
+# # Prepare the dataset so that we can read a point at a time.
+# Dataset = FdemData._initialize_sequential_reading(dataFile, systemFile)
+# # Get a datapoint from the file.
+# DataPoint = Dataset._read_record()
+
+plt.show()
+
+
+plot frequency dataset
+
+

File Format for frequency domain data

+

Here we describe the file format for frequency domain data.

+

The data columns are read in according to the column names in the first line.

+

In this description, the column name or its alternatives are given followed by what the name represents. +Optional columns are also described.

+
+

Required columns

+
+
line

Line number for the data point

+
+
fid

Unique identification number of the data point

+
+
x or northing or n

Northing co-ordinate of the data point, (m)

+
+
y or easting or e

Easting co-ordinate of the data point, (m)

+
+
z or alt

Altitude of the transmitter coil above ground level (m)

+
+
elevation

Elevation of the ground at the data point (m)

+
+
I_<frequency[0]> Q_<frequency[0]> … I_<frequency[last]> Q_<frequency[last]> - with the number and square brackets

The measurements for each frequency specified in the accompanying system file. +I is the real inphase measurement in (ppm) +Q is the imaginary quadrature measurement in (ppm)

+
+
+
+
+

Optional columns

+
+
InphaseErr[0] QuadratureErr[0] … InphaseErr[nFrequencies] QuadratureErr[nFrequencies]

Estimates of standard deviation for each inphase and quadrature measurement. +These must appear after the data columns.

+
+
+
+
+

Example Header

+

Line fid easting northing elevation height I_380 Q_380 … … I_129550 Q_129550

+
+
+
+

File Format for a frequency domain system

+

The system file is structured using columns with the first line containing header information

+

Each subsequent row contains the information for each measurement frequency

+
+
freq

Frequency of the channel

+
+
tor

Orientation of the transmitter loop ‘x’, or ‘z’

+
+
tmom

Transmitter moment

+
+
tx, ty, tz

Offset of the transmitter with respect to the observation locations

+
+
ror

Orientation of the receiver loop ‘x’, or ‘z’

+
+
rmom

Receiver moment

+
+
rx, ry, rz

Offset of the receiver with respect to the transmitter location

+
+
+

Example system files are contained in +the supplementary folder in this repository

+

See the Resolve.stm files.

+

Total running time of the script: (0 minutes 3.257 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/examples/Data/plot_pointcloud3d.html b/docs/examples/Data/plot_pointcloud3d.html new file mode 100644 index 00000000..040d58cd --- /dev/null +++ b/docs/examples/Data/plot_pointcloud3d.html @@ -0,0 +1,353 @@ + + + + + + + 3D Point Cloud class — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

3D Point Cloud class

+
from geobipy import Point
+from os.path import join
+import numpy as np
+import matplotlib.pyplot as plt
+import h5py
+
+nPoints = 200
+
+
+

Create a quick test example using random points $z = x(1-x)\cos(4\pi x)\sin(4\pi y^{2})^{2}$

+
x = -np.abs((2.0 * np.random.rand(nPoints)) - 1.0)
+y = -np.abs((2.0 * np.random.rand(nPoints)) - 1.0)
+z = x * (1.0 - x) * np.cos(np.pi * x) * np.sin(np.pi * y)
+
+PC3D = Point(x=x, y=y, z=z)
+
+
+

Append pointclouds together

+
x = np.abs((2.0 * np.random.rand(nPoints)) - 1.0)
+y = np.abs((2.0 * np.random.rand(nPoints)) - 1.0)
+z = x * (1.0 - x) * np.cos(np.pi * x) * np.sin(np.pi * y)
+
+Other_PC = Point(x=x, y=y, z=z)
+PC3D.append(Other_PC)
+
+
+
<geobipy.src.classes.pointcloud.Point.Point object at 0x10efdda80>
+
+
+

Write a summary of the contents of the point cloud

+
print(PC3D.summary)
+
+
+
Point
+x:
+|   StatArray
+|   Name:   Easting (m)
+|   Address:['0x1462b7250']
+|   Shape:  (400,)
+|   Values: [-0.75575613 -0.37227056 -0.5082486  ...  0.91062364  0.56073342
+|     0.52761544]
+|   Min:    -0.9805553827696698
+|   Max:    0.9916814356778278
+|   has_posterior: False
+
+y:
+|   StatArray
+|   Name:   Northing (m)
+|   Address:['0x1462b72d0']
+|   Shape:  (400,)
+|   Values: [-0.15201495 -0.55717559 -0.53823673 ...  0.30290861  0.17254113
+|     0.23398822]
+|   Min:    -0.9899649834962019
+|   Max:    0.9979698382486921
+|   has_posterior: False
+
+z:
+|   StatArray
+|   Name:   Height (m)
+|   Address:['0x1462b7350']
+|   Shape:  (400,)
+|   Values: [-0.43897973  0.19632561 -0.01971922 ... -0.06368326 -0.02409831
+|    -0.01448342]
+|   Min:    -1.9063110648103332
+|   Max:    0.22889687797869024
+|   has_posterior: False
+
+elevation:
+|   StatArray
+|   Name:   Elevation (m)
+|   Address:['0x1462b73d0']
+|   Shape:  (400,)
+|   Values: [0. 0. 0. ... 0. 0. 0.]
+|   Min:    0.0
+|   Max:    0.0
+|   has_posterior: False
+
+
+

Get a single location from the point as a 3x1 vector

+
point = PC3D[50]
+# Print the point to the screen
+print(point)
+
+
+

Plot the locations with Height as colour

+
plt.figure()
+PC3D.scatter2D(edgecolor='k')
+
+
+plot pointcloud3d
(<Axes: xlabel='Easting (m)', ylabel='Northing (m)'>, <matplotlib.collections.PathCollection object at 0x143a91b20>, <matplotlib.colorbar.Colorbar object at 0x144cb0c50>)
+
+
+

Plotting routines take matplotlib arguments for customization

+

For example, plotting the size of the points according to the absolute value of height

+
plt.figure()
+ax = PC3D.scatter2D(s=100*np.abs(PC3D.z), edgecolor='k')
+
+
+plot pointcloud3d

Interpolate the points to a 2D rectilinear mesh

+
mesh, dum = PC3D.interpolate(0.01, 0.01, values=PC3D.z, method='sibson', mask=0.03)
+
+# We can save that mesh to VTK
+PC3D.to_vtk('pc3d.vtk')
+mesh.to_vtk('interpolated_pc3d.vtk')
+
+
+

Grid the points using a triangulated CloughTocher, or minimum curvature interpolation

+
plt.figure()
+plt.subplot(331)
+PC3D.map(dx=0.01, dy=0.01, method='ct')
+plt.subplot(332)
+PC3D.map(dx=0.01, dy=0.01, method='mc')
+plt.subplot(333)
+PC3D.map(dx=0.01, dy=0.01, method='sibson')
+
+plt.subplot(334)
+PC3D.map(dx=0.01, dy=0.01, method='ct', mask=0.03)
+plt.subplot(335)
+PC3D.map(dx=0.01, dy=0.01, method='mc', mask=0.3)
+plt.subplot(336)
+PC3D.map(dx=0.01, dy=0.01, method='sibson', mask=0.03)
+
+
+plot pointcloud3d
surface [WARNING]: 5 unusable points were supplied; these will be ignored.
+surface [WARNING]: You should have pre-processed the data with block-mean, -median, or -mode.
+surface [WARNING]: Check that previous processing steps write results with enough decimals.
+surface [WARNING]: Possibly some data were half-way between nodes and subject to IEEE 754 rounding.
+surface [WARNING]: 5 unusable points were supplied; these will be ignored.
+surface [WARNING]: You should have pre-processed the data with block-mean, -median, or -mode.
+surface [WARNING]: Check that previous processing steps write results with enough decimals.
+surface [WARNING]: Possibly some data were half-way between nodes and subject to IEEE 754 rounding.
+
+(<Axes: >, <matplotlib.collections.QuadMesh object at 0x17eb0c470>, <matplotlib.colorbar.Colorbar object at 0x17eca7650>)
+
+
+

For lots of points, these surfaces can look noisy. Using a block filter will help

+
PCsub = PC3D.block_median(0.05, 0.05)
+plt.subplot(337)
+PCsub.map(dx=0.01, dy=0.01, method='ct', mask=0.03)
+plt.subplot(338)
+PCsub.map(dx=0.01, dy=0.01, method='mc', mask=0.03)
+plt.subplot(339)
+PCsub.map(dx=0.01, dy=0.01, method='sibson', mask=0.03)
+
+
+plot pointcloud3d
surface [WARNING]: 2 unusable points were supplied; these will be ignored.
+surface [WARNING]: You should have pre-processed the data with block-mean, -median, or -mode.
+surface [WARNING]: Check that previous processing steps write results with enough decimals.
+surface [WARNING]: Possibly some data were half-way between nodes and subject to IEEE 754 rounding.
+
+(<Axes: >, <matplotlib.collections.QuadMesh object at 0x17ec7aea0>, <matplotlib.colorbar.Colorbar object at 0x17eeb34d0>)
+
+
+

We can perform spatial searches on the 3D point cloud

+
PC3D.set_kdtree(ndim=2)
+p = PC3D.nearest((0.0,0.0), k=200, p=2, radius=0.3)
+
+
+

.nearest returns the distances and indices into the point cloud of the nearest points. We can then obtain those points as another point cloud.

+
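A minimal sketch of using that result, assuming p unpacks into (distances, indices) as described:

distances, indices = p
pNear = PC3D[indices]
print(pNear.summary)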
# pNear = PC3D[p[1]]
+# plt.figure()
+# ax1 = plt.subplot(1,2,1)
+# pNear.scatter2D()
+# plt.plot(0.0, 0.0, 'x')
+# plt.subplot(1,2,2, sharex=ax1, sharey=ax1)
+# ax, sc, cb = PC3D.scatter2D(edgecolor='k')
+# searchRadius = plt.Circle((0.0, 0.0), 0.3, color='b', fill=False)
+# ax.add_artist(searchRadius)
+# plt.plot(0.0, 0.0, 'x')
+
+
+

Read in the xyz co-ordinates in columns 2,3,4 from a file. Skip 1 header line.

+
dataFolder = "..//..//supplementary//Data//"
+
+PC3D.read_csv(filename=dataFolder + 'Resolve1.txt')
+
+
+
<geobipy.src.classes.pointcloud.Point.Point object at 0x17ed3c0e0>
+
+
+
plt.figure()
+f = PC3D.scatter2D(s=10)
+
+with h5py.File('test.h5', 'w') as f:
+    PC3D.createHdf(f, 'test')
+    PC3D.writeHdf(f, 'test')
+
+with h5py.File('test.h5', 'r') as f:
+    PC3D1 = Point.fromHdf(f['test'])
+
+with h5py.File('test.h5', 'r') as f:
+    point = Point.fromHdf(f['test'], index=0)
+
+plt.show()
+
+
+plot pointcloud3d

Total running time of the script: (0 minutes 19.063 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/examples/Data/plot_skytem_dataset.html b/docs/examples/Data/plot_skytem_dataset.html new file mode 100644 index 00000000..0500e2cf --- /dev/null +++ b/docs/examples/Data/plot_skytem_dataset.html @@ -0,0 +1,507 @@ + + + + + + + Skytem dataset — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

Skytem dataset

+
from geobipy import plotting as cP
+from os.path import join
+import matplotlib.pyplot as plt
+import numpy as np
+from geobipy import StatArray
+from geobipy import TdemData
+import h5py
+
+
+
+

Reading in the Data

+
dataFolder = "..//..//supplementary//data//"
+# The data file name
+dataFiles=dataFolder + 'skytem_saline_clay.csv'
+# dataFiles = dataFolder + 'Skytem.csv'
+# The EM system file name
+systemFiles=[dataFolder + 'SkytemHM.stm', dataFolder + 'SkytemLM.stm']
+
+from pathlib import Path
+for f in systemFiles[:1]:
+    txt = Path(f).read_text()
+    print(txt)
+
+
+
System Begin
+        Name = SkyTemHighMoment-ElkHills
+        Type = Time Domain
+        Transmitter Begin
+                NumberOfTurns = 1
+                PeakCurrent   = 1
+                LoopArea      = 1
+                BaseFrequency = 30.0
+                WaveformDigitisingFrequency = 491520
+                WaveFormCurrent Begin
+-4.00E-03       0.00E+00
+-3.91E-03       3.17E-01
+-3.81E-03       6.30E-01
+-3.72E-03       8.79E-01
+-3.68E-03       9.61E-01
+-2.30E-03       9.74E-01
+-1.01E-03       9.88E-01
+0.00E+00        1.00E+00
+3.25E-06        9.91E-01
+1.00E-04        7.02E-01
+2.02E-04        3.78E-01
+2.82E-04        1.16E-01
+3.08E-04        2.79E-02
+3.13E-04        1.21E-02
+3.15E-04        6.61E-03
+3.17E-04        3.03E-03
+3.19E-04        0.00E+00
+0.012666667     0.00E+00
+
+                WaveFormCurrent End
+        Transmitter End
+Receiver Begin
+        NumberOfWindows = 26
+        WindowWeightingScheme = AreaUnderCurve
+        WindowTimes Begin
+3.796E-04       3.872E-04
+3.876E-04       3.972E-04
+3.976E-04       4.102E-04
+4.106E-04       4.262E-04
+4.266E-04       4.462E-04
+4.466E-04       4.712E-04
+4.716E-04       5.022E-04
+5.026E-04       5.422E-04
+5.426E-04       5.932E-04
+5.936E-04       6.562E-04
+6.566E-04       7.372E-04
+7.376E-04       8.382E-04
+8.386E-04       9.652E-04
+9.656E-04       1.126E-03
+1.127E-03       1.328E-03
+1.329E-03       1.583E-03
+1.584E-03       1.905E-03
+1.906E-03       2.311E-03
+2.312E-03       2.822E-03
+2.823E-03       3.468E-03
+3.469E-03       4.260E-03
+4.261E-03       5.228E-03
+5.229E-03       6.413E-03
+6.414E-03       7.865E-03
+7.866E-03       9.641E-03
+9.642E-03       1.182E-02
+
+                WindowTimes End
+                LowPassFilter Begin
+                        CutOffFrequency = 300000 210000
+                        Order           = 1       2
+                LowPassFilter End
+        Receiver End
+ForwardModelling Begin
+                //TX loop area is was 340.82 m^2 -> r = sqrt(340.82/pi)
+                ModellingLoopRadius = 10.416
+                OutputType = dB/dt
+                XOutputScaling = 0
+                YOutputScaling = 0
+                ZOutputScaling = 1
+                SecondaryFieldNormalisation  =  none
+                FrequenciesPerDecade = 5
+                NumberOfAbsiccaInHankelTransformEvaluation = 21
+        ForwardModelling End
+
+System End
+
+
+

Read in the data from file

+
TD = TdemData.read_csv(dataFiles, systemFiles)
+
+
+
self.n_components=1, self.nTimes=array([26, 19])
+
+
+

Plot the locations of the data points

+
plt.figure(1, figsize=(8,6))
+_ = TD.scatter2D()
+
+
+plot skytem dataset

Plot all the data along the specified line

+
plt.figure(2, figsize=(8,6))
+_ = TD.plotLine(0.0, log=10)
+
+
+plot skytem dataset
self.n_components=1, self.nTimes=array([26, 19])
+
+
+

Or, plot specific channels in the data

+
plt.figure(3, figsize=(8,6))
+_ = TD.plot_data(system=0, channels=[1, 3, 5], log=10)
+
+
+plot skytem dataset
self.n_components=1, self.nTimes=array([26, 19])
+
+
+
plt.figure(4)
+plt.subplot(211)
+_ = TD.pcolor(system=0, xscale='log', log=10)
+plt.subplot(212)
+_ = TD.pcolor(system=1, xscale='log', log=10)
+
+
+plot skytem dataset
self.n_components=1, self.nTimes=array([26, 19])
+
+
+
plt.figure(5)
+ax = TD.scatter2D(c=TD.secondary_field[:, TD.channel_index(system=0, channel=6)], log=10)
+plt.axis('equal')
+
+
+# with h5py.File('tdem.h5', 'w') as f:
+#     TD.createHdf(f, 'tdem')
+#     TD.writeHdf(f, 'tdem')
+
+# with h5py.File('tdem.h5', 'r') as f:
+#     TD3 = TdemData.fromHdf(f['tdem'])
+
+# with h5py.File('tdem.h5', 'r') as f:
+#     tdp = TdemData.fromHdf(f['tdem'], index=0)
+
+
+# #%%
+# # Obtain a line from the data set
+# # +++++++++++++++++++++++++++++++
+# line = TD.line(0.0)
+
+# #%%
+# plt.figure(6)
+# _ = line.scatter2D(c=line.secondary_field[:, line.channel_index(system=0, channel=6)], log=10)
+
+# #%%
+# plt.figure(7)
+# _ = line.plot(xAxis='index', log=10)
+
+# Prepare the dataset so that we can read a point at a time.
+Dataset = TdemData._initialize_sequential_reading(dataFiles, systemFiles)
+# Get a datapoint from the file.
+DataPoint = Dataset._read_record()
+
+plt.show()
+
+
+plot skytem dataset
self.n_components=1, self.nTimes=array([26, 19])
+
+
+
+
+

File Format for time domain data

+

Here we describe the file format for time domain data.

+

The data columns are read in according to the column names in the first line

+

In this description, the column name or its alternatives are given, followed by what the name represents. Optional columns are also described.

+
+

Required columns

+
+
line

Line number for the data point

+
+
fid

Unique identification number of the data point

+
+
x or northing or n

Northing co-ordinate of the data point, (m)

+
+
y or easting or e

Easting co-ordinate of the data point, (m)

+
+
z or alt

Altitude of the transmitter coil above ground level (m)

+
+
elevation

Elevation of the ground at the data point (m)

+
+
txrx_dx

Distance in x between transmitter and receiver (m)

+
+
txrx_dy

Distance in y between transmitter and receiver (m)

+
+
txrx_dz

Distance in z between transmitter and receiver (m)

+
+
Tx_Pitch

Pitch of the transmitter loop

+
+
Tx_Roll

Roll of the transmitter loop

+
+
Tx_Yaw

Yaw of the transmitter loop

+
+
Rx_Pitch

Pitch of the receiver loop

+
+
Rx_Roll

Roll of the receiver loop

+
+
Rx_Yaw

Yaw of the receiver loop

+
+
Off_time[0] Off_time[1] … Off_time[last] - with the number and square brackets

The measurements for each time gate specified in the accompanying system file under Receiver Window Times. The total number of Off_time columns should equal the sum of the receiver windows in all system files; for the two SkyTEM systems used above (26 and 19 gates), that is 45 columns.

+
+
+
+
+

Optional columns

+
+
Off_time_Error[0] Off_time_Error[1] … Off_time_Error[last]

Estimates of standard deviation for each off time measurement

+
+
+
+
+

Example Header

+

Line fid easting northing elevation height txrx_dx txrx_dy txrx_dz TxPitch TxRoll TxYaw RxPitch RxRoll RxYaw Off[0] Off[1]

+
+
+
+

File Format for a time domain system

+

Please see Page 13 of Ross Brodie’s instructions

+

We use GA-AEM for our airborne time domain forward modeller.

+

Example system files are contained in the supplementary folder in this repository.

+

Total running time of the script: (0 minutes 2.295 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/examples/Data/plot_tempest_dataset.html b/docs/examples/Data/plot_tempest_dataset.html new file mode 100644 index 00000000..0a0965bd --- /dev/null +++ b/docs/examples/Data/plot_tempest_dataset.html @@ -0,0 +1,370 @@ + + + + + + + Tempest dataset — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

Tempest dataset

+
import h5py
+from geobipy import plotting as cP
+from os.path import join
+import matplotlib.pyplot as plt
+import numpy as np
+from geobipy import TempestData
+
+
+
+

Reading in the Data

+
dataFolder = "..//..//supplementary//data//"
+
+# # The data file name
+# dataFiles = dataFolder + 'Tempest.nc'
+# # The EM system file name
+# systemFiles = dataFolder + 'Tempest.stm'
+
+# #%%
+# # Read in the data from file
+# TD = TempestData.read_netcdf(dataFiles, systemFiles)
+
+# TD.write_csv(dataFolder + 'Tempest.csv')
+TD = TempestData.read_csv(dataFolder + 'tempest_saline_clay.csv', system_filename=dataFolder + 'Tempest.stm')
+
+
+
self.n_components=2, self.nTimes=array([15])
+
+
+

Plot the locations of the data points

+
plt.figure(figsize=(8,6))
+_ = TD.scatter2D()
+plt.title("Scatter plot")
+
+
+Scatter plot
Text(0.5, 1.0, 'Scatter plot')
+
+
+

Plot all the data along the specified line

+
plt.figure(figsize=(8,6))
+_ = TD.plotLine(0.0)
+plt.title('Line {}'.format(225401.0))
+
+
+Line 225401.0
self.n_components=2, self.nTimes=array([15])
+
+Text(0.5, 1.0, 'Line 225401.0')
+
+
+

Or, plot specific channels in the data

+
plt.figure(figsize=(8,6))
+_ = TD.plot_data(system=0, channels=[0, 6, 18])
+plt.title("3 channels of data")
+
+
+3 channels of data
self.n_components=2, self.nTimes=array([15])
+
+Text(0.5, 1.0, '3 channels of data')
+
+
+
plt.figure()
+_ = TD.pcolor(system=0)
+plt.title('Data as an array')
+
+
+Data as an array
self.n_components=2, self.nTimes=array([15])
+
+Text(0.5, 1.0, 'Data as an array')
+
+
+
plt.figure()
+ax = TD.scatter2D(c=TD.data[:, TD.channel_index(system=0, channel=10)], equalize=True)
+plt.axis('equal')
+plt.title(f"scatter plot of channel {TD.channel_index(system=0, channel=10)}")
+
+with h5py.File('tdem.h5', 'w') as f:
+    TD.createHdf(f, 'tdem')
+    TD.writeHdf(f, 'tdem')
+
+with h5py.File('tdem.h5', 'r') as f:
+    TD3 = TempestData.fromHdf(f['tdem'])
+
+with h5py.File('tdem.h5', 'r') as f:
+    tdp = TempestData.fromHdf(f['tdem'], index=0)
+
+
+# #%%
+# # Obtain a line from the data set
+# # +++++++++++++++++++++++++++++++
+# line = TD.line(0.0)
+
+# #%%
+# plt.figure()
+# _ = line.scatter2D()
+# plt.title('Channel')
+
+# #%%
+# plt.figure()
+# _ = line.plot_data(xAxis='index', log=10)
+# plt.title("All data along line")
+
+plt.show()
+
+
+scatter plot of channel 10
self.n_components=2, self.nTimes=array([15])
+
+
+

Total running time of the script: (0 minutes 4.125 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/examples/Data/readme.html b/docs/examples/Data/readme.html new file mode 100644 index 00000000..55939109 --- /dev/null +++ b/docs/examples/Data/readme.html @@ -0,0 +1,115 @@ + + + + + + + Data — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Data

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/examples/Data/sg_execution_times.html b/docs/examples/Data/sg_execution_times.html new file mode 100644 index 00000000..2b2ae5b3 --- /dev/null +++ b/docs/examples/Data/sg_execution_times.html @@ -0,0 +1,155 @@ + + + + + + + Computation times — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Computation times

+

00:28.741 total execution time for 4 files from examples/Data:

+
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + +

Example

Time

Mem (MB)

3D Point Cloud class (plot_pointcloud3d.py)

00:19.063

0.0

Tempest dataset (plot_tempest_dataset.py)

00:04.125

0.0

Frequency domain dataset (plot_frequency_dataset.py)

00:03.257

0.0

Skytem dataset (plot_skytem_dataset.py)

00:02.295

0.0

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/examples/Datapoints/plot_resolve_datapoint.html b/docs/examples/Datapoints/plot_resolve_datapoint.html new file mode 100644 index 00000000..e273522f --- /dev/null +++ b/docs/examples/Datapoints/plot_resolve_datapoint.html @@ -0,0 +1,393 @@ + + + + + + + Frequency domain datapoint — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

Frequency domain datapoint

+
from os.path import join
+import numpy as np
+import h5py
+import matplotlib.pyplot as plt
+from geobipy import CircularLoop
+from geobipy import FdemSystem
+from geobipy import FdemData
+from geobipy import FdemDataPoint
+from geobipy import RectilinearMesh1D
+from geobipy import Model
+from geobipy import StatArray
+from geobipy import Distribution
+
+# Instantiating a frequency domain data point
+# +++++++++++++++++++++++++++++++++++++++++++
+#
+# To instantiate a frequency domain datapoint we need to define some
+# characteristics of the acquisition system.
+#
+# We need to define the frequencies in Hz of the transmitter,
+# and the geometry of the loops used for each frequency.
+
+frequencies = np.asarray([380.0, 1776.0, 3345.0, 8171.0, 41020.0, 129550.0])
+
+# Transmitter positions are defined relative to the observation locations in the data
+# This is usually a constant offset for all data points.
+transmitters = CircularLoop(orientation=['z','z','x','z','z','z'],
+                             moment=np.r_[1, 1, -1, 1, 1, 1],
+                             x = np.r_[0,0,0,0,0,0],
+                             y = np.r_[0,0,0,0,0,0],
+                             z = np.r_[0,0,0,0,0,0],
+                             pitch = np.r_[0,0,0,0,0,0],
+                             roll = np.r_[0,0,0,0,0,0],
+                             yaw = np.r_[0,0,0,0,0,0],
+                             radius = np.r_[1,1,1,1,1,1])
+
+# Receiver positions are defined relative to the transmitter
+receivers = CircularLoop(orientation=['z','z','x','z','z','z'],
+                             moment=np.r_[1, 1, -1, 1, 1, 1],
+                             x = np.r_[7.91, 7.91, 9.03, 7.91, 7.91, 7.89],
+                             y = np.r_[0,0,0,0,0,0],
+                             z = np.r_[0,0,0,0,0,0],
+                             pitch = np.r_[0,0,0,0,0,0],
+                             roll = np.r_[0,0,0,0,0,0],
+                             yaw = np.r_[0,0,0,0,0,0],
+                             radius = np.r_[1,1,1,1,1,1])
+
+# Now we can instantiate the system.
+fds = FdemSystem(frequencies, transmitters, receivers)
+
+# And use the system to instantiate a datapoint
+#
+# Note the extra arguments that can be used to create the data point.
+# data is for any observed data one might have, while std are the estimated standard
+# deviations of those observed data.
+#
+# Define some in-phase then quadrature data for each frequency.
+data = np.r_[145.3, 435.8, 260.6, 875.1, 1502.7, 1516.9,
+             217.9, 412.5, 178.7, 516.5, 405.7, 255.7]
+
+fdp = FdemDataPoint(x=0.0, y=0.0, z=30.0, elevation=0.0,
+                    data=data, std=None, predictedData=None,
+                    system=fds, lineNumber=0.0, fiducial=0.0)
+
+# plt.figure()
+# _ = fdp.plot()
+
+# Obtaining a datapoint from a dataset
+# ++++++++++++++++++++++++++++++++++++
+#
+# More often than not, our observed data is stored in a file on disk.
+# We can read in a dataset and pull datapoints from it.
+#
+# For more information about the frequency domain data set see :ref:`Frequency domain dataset`
+
+# Set some paths and file names
+dataFolder = "..//..//supplementary//Data//"
+# The data file name
+dataFile = dataFolder + 'Resolve2.txt'
+# The EM system file name
+systemFile = dataFolder + 'FdemSystem2.stm'
+
+
+

Initialize and read an EM data set. Prepare the dataset so that we can read a point at a time.

+
Dataset = FdemData._initialize_sequential_reading(dataFile, systemFile)
+# Get a datapoint from the file.
+fdp = Dataset._read_record()
+
+
+
# # Initialize and read an EM data set
+# D = FdemData.read_csv(dataFile,systemFile)
+
+# # Get a data point from the dataset
+# fdp = D.datapoint(0)
+# plt.figure()
+# _ = fdp.plot()
+
+# Using a resolve datapoint
+# +++++++++++++++++++++++++
+
+# We can define a 1D layered earth model, and use it to predict some data
+nCells = 19
+par = StatArray(np.linspace(0.01, 0.1, nCells), "Conductivity", "$\frac{S}{m}$")
+depth = StatArray(np.arange(nCells+1) * 10.0, "Depth", 'm')
+depth[-1] = np.inf
+mod = Model(mesh=RectilinearMesh1D(edges=depth), values=par)
+
+# Forward model the data
+fdp.forward(mod)
+
+plt.figure()
+plt.subplot(121)
+_ = mod.pcolor(transpose=True)
+plt.subplot(122)
+_ = fdp.plot_predicted()
+plt.tight_layout()
+
+# Compute the sensitivity matrix for a given model
+J = fdp.sensitivity(mod)
+
+plt.figure()
+_ = np.abs(J).pcolor(equalize=True, log=10, flipY=True)
+
+# Attaching statistical descriptors to the resolve datapoint
+# ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
+from numpy.random import Generator
+from numpy.random import PCG64DXSM
+generator = PCG64DXSM(seed=0)
+prng = Generator(generator)
+
+# Set values of relative and additive error for both systems.
+fdp.relative_error = 0.05
+fdp.additive_error = 10.0
+# Define a multivariate log normal distribution as the prior on the predicted data.
+fdp.predictedData.prior = Distribution('MvLogNormal', fdp.data[fdp.active], fdp.std[fdp.active]**2.0, prng=prng)
+
+# This allows us to evaluate the likelihood of the predicted data
+print(fdp.likelihood(log=True))
+# Or the misfit
+print(fdp.data_misfit())
+
+# Plot the misfits for a range of half space conductivities
+plt.figure()
+_ = fdp.plot_halfspace_responses(-6.0, 4.0, 200)
+
+plt.title("Halfspace responses");
+
+# We can perform a quick search for the best fitting half space
+halfspace = fdp.find_best_halfspace()
+print('Best half space conductivity is {} $S/m$'.format(halfspace.values))
+plt.figure()
+_ = fdp.plot()
+_ = fdp.plot_predicted()
+
+# Compute the misfit between observed and predicted data
+print(fdp.data_misfit())
+
+# We can attach priors to the height of the datapoint,
+# the relative error multiplier, and the additive error noise floor
+
+
+# Define the distributions used as priors.
+zPrior = Distribution('Uniform', min=fdp.z - 2.0, max=fdp.z + 2.0, prng=prng)
+relativePrior = Distribution('Uniform', min=0.01, max=0.5, prng=prng)
+additivePrior = Distribution('Uniform', min=5, max=15, prng=prng)
+fdp.set_priors(z_prior=zPrior, relative_error_prior=relativePrior, additive_error_prior=additivePrior, prng=prng)
+
+
+# In order to perturb our solvable parameters, we need to attach proposal distributions
+z_proposal = Distribution('Normal', mean=fdp.z, variance = 0.01, prng=prng)
+relativeProposal = Distribution('MvNormal', mean=fdp.relative_error, variance=2.5e-7, prng=prng)
+additiveProposal = Distribution('MvLogNormal', mean=fdp.additive_error, variance=1e-4, prng=prng)
+fdp.set_proposals(relativeProposal, additiveProposal, z_proposal=z_proposal)
+
+# With priors set we can auto generate the posteriors
+fdp.set_posteriors()
+
+nCells = 19
+par = StatArray(np.linspace(0.01, 0.1, nCells), "Conductivity", "$\frac{S}{m}$")
+depth = StatArray(np.arange(nCells+1) * 10.0, "Depth", 'm')
+depth[-1] = np.inf
+mod = Model(mesh=RectilinearMesh1D(edges=depth), values=par)
+fdp.forward(mod)
+
+# Perturb the datapoint and record the perturbations
+for i in range(10):
+    fdp.perturb()
+    fdp.update_posteriors()
+
+
+# Plot the posterior distributions
+fig = plt.figure()
+fdp.plot_posteriors(overlay=fdp)
+
+import h5py
+with h5py.File('fdp.h5', 'w') as f:
+    fdp.createHdf(f, 'fdp', withPosterior=True)
+    fdp.writeHdf(f, 'fdp', withPosterior=True)
+
+with h5py.File('fdp.h5', 'r') as f:
+    fdp1 = FdemDataPoint.fromHdf(f['fdp'])
+
+plt.figure()
+fdp1.plot_posteriors(overlay=fdp1)
+
+import h5py
+with h5py.File('fdp.h5', 'w') as f:
+    fdp.createHdf(f, 'fdp', withPosterior=True, add_axis=np.arange(10.0))
+
+    for i in range(10):
+        fdp.writeHdf(f, 'fdp', withPosterior=True, index=i)
+
+from geobipy import FdemData
+with h5py.File('fdp.h5', 'r') as f:
+    fdp1 = FdemDataPoint.fromHdf(f['fdp'], index=0)
+    fdp2 = FdemData.fromHdf(f['fdp'])
+
+fdp1.plot_posteriors(overlay=fdp1)
+
+plt.show()
+# %%
+
+
+
Figures: Frequency Domain EM Data (data fits and posteriors), plot resolve datapoint, Halfspace responses
+
-733.5454886696688
+1367.81945885548
+Best half space conductivity is [0.097701] $S/m$
+45286.828623928755
+
+
+

Total running time of the script: (0 minutes 7.271 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/examples/Datapoints/plot_skytem_datapoint.html b/docs/examples/Datapoints/plot_skytem_datapoint.html new file mode 100644 index 00000000..15979a4e --- /dev/null +++ b/docs/examples/Datapoints/plot_skytem_datapoint.html @@ -0,0 +1,406 @@ + + + + + + + Skytem Datapoint Class — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

Skytem Datapoint Class

+

Credits: We would like to thank Ross Brodie at Geoscience Australia for his airborne time domain forward modeller: https://github.com/GeoscienceAustralia/ga-aem

+

For ground-based time domain data, we are using Dieter Werthmuller’s python package Empymod: https://empymod.github.io/

+

Thanks to Dieter for his help getting Empymod ready for incorporation into GeoBIPy

+
from os.path import join
+import numpy as np
+import h5py
+import matplotlib.pyplot as plt
+from geobipy import Waveform
+from geobipy import SquareLoop, CircularLoop
+from geobipy import butterworth
+from geobipy import TdemSystem
+from geobipy import TdemData
+from geobipy import TdemDataPoint
+from geobipy import RectilinearMesh1D
+from geobipy import Model
+from geobipy import StatArray
+from geobipy import Distribution
+
+dataFolder = "..//..//supplementary//data//"
+
+# Obtaining a datapoint from a dataset
+# ++++++++++++++++++++++++++++++++++++
+# More often than not, our observed data is stored in a file on disk.
+# We can read in a dataset and pull datapoints from it.
+#
+# For more information about the time domain data set, see :ref:`Time domain dataset`
+
+# The data file name
+dataFile=dataFolder + 'skytem_saline_clay.csv'
+# The EM system file name
+systemFile=[dataFolder + 'SkytemHM.stm', dataFolder + 'SkytemLM.stm']
+
+
+

Initialize and read an EM data set. Prepare the dataset so that we can read a point at a time.

+
Dataset = TdemData._initialize_sequential_reading(dataFile, systemFile)
+# Get a datapoint from the file.
+tdp = Dataset._read_record()
+
+Dataset._file.close()
+
+
+
self.n_components=1, self.nTimes=array([26, 19])
+
+
+
+

Using a time domain datapoint

+

We can define a 1D layered earth model, and use it to predict some data

+
par = StatArray(np.r_[500.0, 20.0], "Conductivity", "$\frac{S}{m}$")
+mod = Model(RectilinearMesh1D(edges=np.r_[0, 75.0, np.inf]), values=par)
+
+
+

Forward model the data

+
tdp.forward(mod)
+
+
+
plt.figure()
+plt.subplot(121)
+_ = mod.pcolor()
+plt.subplot(122)
+_ = tdp.plot()
+_ = tdp.plot_predicted()
+plt.tight_layout()
+
+
+Time Domain EM Data
/Users/nfoks/codes/repositories/geobipy_docs/geobipy/src/classes/data/datapoint/TdemDataPoint.py:363: RuntimeWarning: divide by zero encountered in log
+  additive_error = exp(log(self.additive_error[i]) - 0.5 * (log(off_times) - log(1e-3)))
+
+
+
plt.figure()
+tdp.plotDataResidual(yscale='log', xscale='log')
+plt.title('new')
+
+
+new
Text(0.5, 1.0, 'new')
+
+
+

Compute the sensitivity matrix for a given model

+
J = tdp.sensitivity(mod)
+plt.figure()
+_ = np.abs(J).pcolor(equalize=True, log=10, flipY=True)
+
+
+plot skytem datapoint
+
+

Attaching statistical descriptors to the skytem datapoint

+
from numpy.random import Generator
+from numpy.random import PCG64DXSM
+generator = PCG64DXSM(seed=0)
+prng = Generator(generator)
+
+# Set values of relative and additive error for both systems.
+tdp.relative_error = np.r_[0.05, 0.05]
+tdp.additive_error = np.r_[1e-14, 1e-13]
+# Define a multivariate normal distribution as the prior on the predicted data.
+data_prior = Distribution('MvNormal', tdp.data[tdp.active], tdp.std[tdp.active]**2.0, prng=prng)
+
+tdp.set_priors(data_prior=data_prior)
+
+
+

This allows us to evaluate the likelihood of the predicted data

+
print(tdp.likelihood(log=True))
+# Or the misfit
+print(tdp.data_misfit())
+
+
+
-320327.7331520327
+643134.8665682999
+
+
+

Plot the misfits for a range of half space conductivities

+
plt.figure()
+_ = tdp.plot_halfspace_responses(-6.0, 4.0, 200)
+plt.title("Halfspace responses")
+
+
+Halfspace responses
Text(0.5, 1.0, 'Halfspace responses')
+
+
+

We can perform a quick search for the best fitting half space

+
halfspace = tdp.find_best_halfspace()
+
+print('Best half space conductivity is {} $S/m$'.format(halfspace.values))
+plt.figure()
+_ = tdp.plot()
+_ = tdp.plot_predicted()
+
+
+Time Domain EM Data
Best half space conductivity is [0.01047616] $S/m$
+
+
+

Compute the misfit between observed and predicted data

+
print(tdp.data_misfit())
+
+
+
19656.31514467744
+
+
+

We can attach priors to the height of the datapoint, the relative error multiplier, and the additive error noise floor.

+
# Define the distributions used as priors.
+z_prior = Distribution('Uniform', min=np.float64(tdp.z) - 2.0, max=np.float64(tdp.z) + 2.0, prng=prng)
+relativePrior = Distribution('Uniform', min=np.r_[0.01, 0.01], max=np.r_[0.5, 0.5], prng=prng)
+additivePrior = Distribution('Uniform', min=np.r_[1e-16, 1e-16], max=np.r_[1e-10, 1e-10], log=True, prng=prng)
+tdp.set_priors(relative_error_prior=relativePrior, additive_error_prior=additivePrior, z_prior=z_prior, prng=prng)
+
+
+
/Users/nfoks/codes/repositories/geobipy_docs/documentation_source/source/examples/Datapoints/plot_skytem_datapoint.py:135: DeprecationWarning: Conversion of an array with ndim > 0 to a scalar is deprecated, and will error in future. Ensure you extract a single element from your array before performing this operation. (Deprecated NumPy 1.25.)
+  z_prior = Distribution('Uniform', min=np.float64(tdp.z) - 2.0, max=np.float64(tdp.z) + 2.0, prng=prng)
+
+
+

In order to perturb our solvable parameters, we need to attach proposal distributions

+
z_proposal = Distribution('Normal', mean=tdp.z, variance = 0.01, prng=prng)
+relativeProposal = Distribution('MvNormal', mean=tdp.relative_error, variance=2.5e-7, prng=prng)
+additiveProposal = Distribution('MvLogNormal', mean=tdp.additive_error, variance=2.5e-3, linearSpace=True, prng=prng)
+tdp.set_proposals(relativeProposal, additiveProposal, z_proposal=z_proposal, prng=prng)
+
+
+

With priors set we can auto generate the posteriors

+
tdp.set_posteriors()
+
+
+

Perturb the datapoint and record the perturbations. Note we are not using the priors to accept or reject perturbations.

+
for i in range(10):
+    tdp.perturb()
+    tdp.update_posteriors()
+
+
+

Plot the posterior distributions

+
plt.figure()
+tdp.plot_posteriors(overlay=tdp)
+
+plt.show()
+
+
+Time Domain EM Data
+
+

File Format for a time domain datapoint

+

Here we describe the file format for a time domain datapoint.

+

For individual datapoints we are using the AarhusInv data format.

+

Here we take the description for the AarhusInv TEM data file, modified to reflect what we can +currently handle in GeoBIPy.

+
+
Line 1 :: string

User-defined label describing the TEM datapoint. This line must contain the following entries, separated by semicolons: XUTM=, YUTM=, Elevation=, StationNumber=, LineNumber=, Current=.

+
+
Line 2 :: first integer, sourceType

7 = Rectangular loop source parallel to the x - y plane

+
+
Line 2 :: second integer, polarization

3 = Vertical magnetic field

+
+
Line 3 :: 6 floats, transmitter and receiver offsets relative to X/Y UTM location.

If sourceType = 7, Position of the center loop sounding.

+
+
Line 4 :: Transmitter loop dimensions

If sourceType = 7, 2 floats. Loop side length in the x and y directions

+
+
Line 5 :: Fixed

3 3 3

+
+
Line 6 :: first integer, transmitter waveform type. Fixed

3 = User defined waveform.

+
+
Line 6 :: second integer, number of transmitter waveforms. Fixed

1

+
+
Line 7 :: transmitter waveform definition

A user-defined waveform with piecewise linear segments. A full transmitter waveform definition consists of a number of linear segments. This line contains an integer as the first entry, which specifies the number of segments, followed by 4 floats for each segment. The 4 floats per segment are the start and end times, and the start and end amplitudes of the waveform, e.g. 3 -8.333e-03 -8.033e-03 0.0 1.0 -8.033e-03 0.0 1.0 1.0 0.0 5.4e-06 1.0 0.0

+
+
Line 8 :: On time information. Not used but needs specifying.

1 1 1

+
+
Line 9 :: On time low-pass filters. Not used but need specifying.

0

+
+
Line 10 :: On time high-pass filters. Not used but need specifying.

0

+
+
Line 11 :: Front-gate time. Not used but needs specifying.

0.0

+
+
Line 12 :: first integer, Number of off time filters

Number of filters

+
+
Line 12 :: second integer, Order of the butterworth filter

1 or 2

+
+
Line 12 :: cutoff frequencies Hz, one per the number of filters

e.g. 4.5e5

+
+
Line 13 :: Off time high pass filters.

See Line 12

+
+
+

Lines after 13 contain 3 columns that pertain to Measurement Time, Data Value, and Estimated Standard Deviation.

+

Example data files are contained in the supplementary folder in this repository.

+

Total running time of the script: (0 minutes 2.306 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/examples/Datapoints/plot_tempest_datapoint.html b/docs/examples/Datapoints/plot_tempest_datapoint.html new file mode 100644 index 00000000..be765fd1 --- /dev/null +++ b/docs/examples/Datapoints/plot_tempest_datapoint.html @@ -0,0 +1,400 @@ + + + + + + + Tempest Datapoint Class — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

Tempest Datapoint Class

+

Credits: We would like to thank Ross Brodie at Geoscience Australia for his airborne time domain forward modeller: https://github.com/GeoscienceAustralia/ga-aem

+

For ground-based time domain data, we are using Dieter Werthmuller’s python package Empymod: https://empymod.github.io/

+

Thanks to Dieter for his help getting Empymod ready for incorporation into GeoBIPy

+
from os.path import join
+import numpy as np
+import h5py
+import matplotlib.pyplot as plt
+from geobipy import TempestData
+# from geobipy import TemDataPoint
+from geobipy import RectilinearMesh1D
+from geobipy import Model
+from geobipy import StatArray
+from geobipy import Distribution
+from geobipy import get_prng
+
+dataFolder = "..//..//supplementary//data//"
+# dataFolder = "source//examples//supplementary//Data"
+
+# Obtaining a tempest datapoint from a dataset
+# ++++++++++++++++++++++++++++++++++++++++++++
+# More often than not, our observed data is stored in a file on disk.
+# We can read in a dataset and pull datapoints from it.
+#
+# For more information about the time domain data set, see :ref:`Time domain dataset`
+
+# The data file name
+dataFile = dataFolder + 'tempest_saline_clay.csv'
+# The EM system file name
+systemFile = dataFolder + 'Tempest.stm'
+
+# Prepare the dataset so that we can read a point at a time.
+Dataset = TempestData._initialize_sequential_reading(dataFile, systemFile)
+# Get a datapoint from the file.
+tdp = Dataset._read_record(0)
+
+plt.figure()
+tdp.plot()
+
+prng = get_prng(seed=146100583096709124601953385843316024947)
+
+
+Time Domain EM Data
self.n_components=2, self.nTimes=array([15])
+
+
+
+

Using a tempest domain datapoint

+

We can define a 1D layered earth model, and use it to predict some data

+
par = StatArray(np.r_[0.01, 0.1, 1.], "Conductivity", "$\frac{S}{m}$")
+mod = Model(mesh=RectilinearMesh1D(edges=np.r_[0.0, 50.0, 75.0, np.inf]), values=par)
+
+par = StatArray(np.logspace(-3, 3, 30), "Conductivity", "$\frac{S}{m}$")
+e = np.linspace(0, 350, 31); e[-1] = np.inf
+mod = Model(mesh=RectilinearMesh1D(edges=e), values=par)
+
+
+

Forward model the data

+
tdp.forward(mod)
+
+print('primary', tdp.primary_field)
+print('sx', tdp.secondary_field[:15])
+print('sz', tdp.secondary_field[15:])
+
+# #%%
+# plt.figure()
+# plt.subplot(121)
+# _ = mod.pcolor(transpose=True)
+# plt.subplot(122)
+# _ = tdp.plot()
+# _ = tdp.plot_predicted()
+# plt.tight_layout()
+# plt.suptitle('Model and response')
+
+# #%%
+# # plt.figure()
+# # tdp.plotDataResidual(xscale='log')
+# # plt.title('data residual')
+
+# #%%
+# # Compute the sensitivity matrix for a given model
+J = tdp.sensitivity(mod)
+# plt.figure()
+# _ = np.abs(J).pcolor(equalize=True, log=10, flipY=True)
+
+print('J', J)
+# print('J shape', J.shape)
+# print('sx 0', J[:16, 0])
+
+tdp.fm_dlogc(mod)
+
+print('new primary', tdp.primary_field)
+print('sx', tdp.secondary_field[:15])
+print('sz', tdp.secondary_field[15:])
+
+print('new J', tdp.sensitivity_matrix)
+
+
+
primary [34.27253219 17.55503397]
+sx [4.46362582 2.52720951 2.10544857 ... 0.34346631 0.27359586 0.19875285]
+sz [6.47100177 4.53101158 3.87594468 ... 1.05345525 0.79969548 0.56994112]
+J [[ 1.13463137e-01  1.49920887e-01  1.76789170e-01 ... -1.01809840e-09
+   1.13341751e-11  7.27489718e-13]
+ [ 2.09383016e-02  3.20412212e-02  4.74815387e-02 ... -1.02489023e-09
+   1.15994185e-11  7.25910166e-13]
+ [ 1.04188675e-02  1.61552555e-02  2.45575508e-02 ... -1.03167228e-09
+   1.18645190e-11  7.24296662e-13]
+ ...
+ [ 7.20880061e-05  1.13758034e-04  1.79138645e-04 ... -6.62044639e-09
+   1.91310127e-10  4.83737910e-13]
+ [ 3.95655826e-05  6.25753935e-05  9.87713313e-05 ... -6.33418159e-09
+   2.26727066e-10 -1.05451995e-12]
+ [ 1.60007270e-05  3.08385747e-05  5.05626410e-05 ... -1.28316523e-09
+   1.11041966e-10 -2.41585978e-12]]
+new primary [34.27253219 17.55503397]
+sx [4.46362582 2.52720951 2.10544857 ... 0.34346631 0.27359586 0.19875285]
+sz [6.47100177 4.53101158 3.87594468 ... 1.05345525 0.79969548 0.56994112]
+new J [[ 1.13463137e-01  1.49920887e-01  1.76789170e-01 ... -1.01809840e-09
+   1.13341751e-11  7.27489718e-13]
+ [ 2.09383016e-02  3.20412212e-02  4.74815387e-02 ... -1.02489023e-09
+   1.15994185e-11  7.25910166e-13]
+ [ 1.04188675e-02  1.61552555e-02  2.45575508e-02 ... -1.03167228e-09
+   1.18645190e-11  7.24296662e-13]
+ ...
+ [ 7.20880061e-05  1.13758034e-04  1.79138645e-04 ... -6.62044639e-09
+   1.91310127e-10  4.83737910e-13]
+ [ 3.95655826e-05  6.25753935e-05  9.87713313e-05 ... -6.33418159e-09
+   2.26727066e-10 -1.05451995e-12]
+ [ 1.60007270e-05  3.08385747e-05  5.05626410e-05 ... -1.28316523e-09
+   1.11041966e-10 -2.41585978e-12]]
+
+
+
+
+

Attaching statistical descriptors to the tempest datapoint

+
from numpy.random import Generator
+from numpy.random import PCG64DXSM
+generator = PCG64DXSM(seed=0)
+prng = Generator(generator)
+
+# Set relative errors for the primary fields, and secondary fields.
+tdp.relative_error = np.r_[0.001, 0.001]
+
+# Set the additive errors for
+tdp.additive_error = np.hstack([[0.011474, 0.012810, 0.008507, 0.005154, 0.004742, 0.004477, 0.004168, 0.003539, 0.003352, 0.003213, 0.003161, 0.003122, 0.002587, 0.002038, 0.002201],
+                                [0.007383, 0.005693, 0.005178, 0.003659, 0.003426, 0.003046, 0.003095, 0.003247, 0.002775, 0.002627, 0.002460, 0.002178, 0.001754, 0.001405, 0.001283]])
+# Define a multivariate log normal distribution as the prior on the predicted data.
+tdp.predictedData.prior = Distribution('MvLogNormal', tdp.data[tdp.active], tdp.std[tdp.active]**2.0, prng=prng)
+
+
+

This allows us to evaluate the likelihood of the predicted data

+
print(tdp.likelihood(log=True))
+# Or the misfit
+print(tdp.data_misfit())
+
+
+
-36389.6500813217
+72940.71365767403
+
+
+

Plot the misfits for a range of half space conductivities

+
plt.figure()
+plt.subplot(1, 2, 1)
+_ = tdp.plot_halfspace_responses(-6.0, 4.0, 200)
+plt.title("Halfspace responses")
+
+
+Halfspace responses
Text(0.5, 1.0, 'Halfspace responses')
+
+
+

We can perform a quick search for the best fitting half space

+
halfspace = tdp.find_best_halfspace()
+print('Best half space conductivity is {} $S/m$'.format(halfspace.values))
+plt.subplot(1, 2, 2)
+_ = tdp.plot()
+_ = tdp.plot_predicted()
+
+plt.figure()
+tdp.plot_secondary_field()
+tdp.plot_predicted_secondary_field()
+
+# #%%
+# # We can attach priors to the height of the datapoint,
+# # the relative error multiplier, and the additive error noise floor
+
+# Define the distributions used as priors.
+relative_prior = Distribution('Uniform', min=np.r_[0.01, 0.01], max=np.r_[0.5, 0.5], prng=prng)
+receiver_x_prior = Distribution('Uniform', min=np.float64(tdp.receiver.x) - 1.0, max=np.float64(tdp.receiver.x) + 1.0, prng=prng)
+receiver_z_prior = Distribution('Uniform', min=np.float64(tdp.receiver.z) - 1.0, max=np.float64(tdp.receiver.z) + 1.0, prng=prng)
+receiver_pitch_prior = Distribution('Uniform', min=tdp.receiver.pitch - 5.0, max=tdp.receiver.pitch + 5.0, prng=prng)
+tdp.set_priors(relative_error_prior=relative_prior, receiver_x_prior=receiver_x_prior, receiver_z_prior=receiver_z_prior, receiver_pitch_prior=receiver_pitch_prior, prng=prng)
+
+
+
Figures: Time Domain EM Data, plot tempest datapoint
+
Best half space conductivity is [0.01830738] $S/m$
+/Users/nfoks/codes/repositories/geobipy_docs/documentation_source/source/examples/Datapoints/plot_tempest_datapoint.py:156: DeprecationWarning: Conversion of an array with ndim > 0 to a scalar is deprecated, and will error in future. Ensure you extract a single element from your array before performing this operation. (Deprecated NumPy 1.25.)
+  receiver_x_prior = Distribution('Uniform', min=np.float64(tdp.receiver.x) - 1.0, max=np.float64(tdp.receiver.x) + 1.0, prng=prng)
+/Users/nfoks/codes/repositories/geobipy_docs/documentation_source/source/examples/Datapoints/plot_tempest_datapoint.py:157: DeprecationWarning: Conversion of an array with ndim > 0 to a scalar is deprecated, and will error in future. Ensure you extract a single element from your array before performing this operation. (Deprecated NumPy 1.25.)
+  receiver_z_prior = Distribution('Uniform', min=np.float64(tdp.receiver.z) - 1.0, max=np.float64(tdp.receiver.z) + 1.0, prng=prng)
+
+
+

In order to perturb our solvable parameters, we need to attach proposal distributions

+
relative_proposal = Distribution('MvNormal', mean=tdp.relative_error, variance=2.5e-4, prng=prng)
+receiver_x_proposal = Distribution('Normal', mean=tdp.receiver.x, variance = 0.01, prng=prng)
+receiver_z_proposal = Distribution('Normal', mean=tdp.receiver.z, variance = 0.01, prng=prng)
+receiver_pitch_proposal = Distribution('Normal', mean=tdp.receiver.pitch, variance = 0.01, prng=prng)
+tdp.set_proposals(relative_error_proposal=relative_proposal,
+                  receiver_x_proposal=receiver_x_proposal,
+                  receiver_z_proposal=receiver_z_proposal,
+                  receiver_pitch_proposal=receiver_pitch_proposal,
+                  solve_additive_error=True, additive_error_proposal_variance=1e-4, prng=prng)
+
+
+

With priors set we can auto generate the posteriors

+
tdp.set_posteriors()
+
+
+

Perturb the datapoint and record the perturbations. Note we are not using the priors to accept or reject perturbations.

+
for i in range(10):
+    tdp.perturb()
+    tdp.update_posteriors()
+
+plt.show()
+
+
+

Total running time of the script: (0 minutes 1.653 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/examples/Datapoints/readme.html b/docs/examples/Datapoints/readme.html new file mode 100644 index 00000000..7687e145 --- /dev/null +++ b/docs/examples/Datapoints/readme.html @@ -0,0 +1,115 @@ + + + + + + + Datapoints — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Datapoints

+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/examples/Datapoints/sg_execution_times.html b/docs/examples/Datapoints/sg_execution_times.html new file mode 100644 index 00000000..cdb90a2d --- /dev/null +++ b/docs/examples/Datapoints/sg_execution_times.html @@ -0,0 +1,151 @@ + + + + + + + Computation times — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + +

Computation times

+

00:11.231 total execution time for 3 files from examples/Datapoints:

+
Example                                                    Time        Mem (MB)
Frequency domain datapoint (plot_resolve_datapoint.py)     00:07.271   0.0
Skytem Datapoint Class (plot_skytem_datapoint.py)          00:02.306   0.0
Tempest Datapoint Class (plot_tempest_datapoint.py)        00:01.653   0.0

\ No newline at end of file
diff --git a/docs/examples/Distributions/plot_distributions.html b/docs/examples/Distributions/plot_distributions.html new file mode 100644 index 00000000..7a3930c0 --- /dev/null +++ b/docs/examples/Distributions/plot_distributions.html @@ -0,0 +1,208 @@

Distribution Class

+

Handles the initialization of different statistical distributions.

+
from geobipy import Distribution
+from geobipy import plotting as cP
+import matplotlib.pyplot as plt
+import numpy as np
+
+from numpy.random import Generator
+from numpy.random import PCG64DXSM
+generator = PCG64DXSM(seed=0)
+prng = Generator(generator)
+
+
+
+
+

Univariate Normal Distribution

+
D = Distribution('Normal', 0.0, 1.0, prng=prng)
+
+# Get the bins of the Distribution from +- 4 standard deviations of the mean
+bins = D.bins()
+
+# Grab random samples from the distribution
+D.rng(10)
+
+# We can then get the Probability Density Function for those bins
+pdf = D.probability(bins, log=False)
+
+# And we can plot that PDF
+plt.figure()
+plt.plot(bins, pdf)
+
+
[Output figure: plot distributions]
[<matplotlib.lines.Line2D object at 0x17f679190>]
+
+
+
+
+

Multivariate Normal Distribution

+
D = Distribution('MvNormal',[0.0,1.0,2.0],[1.0,1.0,1.0], prng=prng)
+D.rng()
+
+
+
array([ 0.64050649,  1.77177243, -0.34500474])
+
+
+

Uniform Distribution

+
D = Distribution('Uniform', 0.0, 1.0, prng=prng)
+D.bins()
+
+
+
DataArray([0.        , 0.01010101, 0.02020202, ..., 0.97979798,
+           0.98989899, 1.        ])
+
+
+
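The probability call used above for the Normal distribution can be evaluated on these bins as well. A minimal sketch (not part of the original example, and assuming the Uniform wrapper exposes the same probability method as the Normal one):

D = Distribution('Uniform', 0.0, 1.0, prng=prng)
bins = D.bins()
# For a Uniform distribution on [0, 1] the density over the bins is flat.
pdf = D.probability(bins, log=False)
plt.figure()
plt.plot(bins, pdf)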

Total running time of the script: (0 minutes 0.050 seconds)


Gallery generated by Sphinx-Gallery

\ No newline at end of file
diff --git a/docs/examples/Distributions/readme.html b/docs/examples/Distributions/readme.html new file mode 100644 index 00000000..685f3361 --- /dev/null +++ b/docs/examples/Distributions/readme.html @@ -0,0 +1,115 @@

Distributions

\ No newline at end of file
diff --git a/docs/examples/Distributions/sg_execution_times.html b/docs/examples/Distributions/sg_execution_times.html new file mode 100644 index 00000000..0a20ee33 --- /dev/null +++ b/docs/examples/Distributions/sg_execution_times.html @@ -0,0 +1,143 @@

Computation times

+

00:00.050 total execution time for 1 file from examples/Distributions:

+
Example                                       Time        Mem (MB)
Distribution Class (plot_distributions.py)    00:00.050   0.0

\ No newline at end of file
diff --git a/docs/examples/HDF5/hdf5.html b/docs/examples/HDF5/hdf5.html new file mode 100644 index 00000000..c92db005 --- /dev/null +++ b/docs/examples/HDF5/hdf5.html @@ -0,0 +1,241 @@

Using HDF5 within GeoBIPy

+

Inference for large scale datasets in GeoBIPy is handled using MPI and distributed memory systems. A common bottleneck with large parallel algorithms is the input/output of information to disk. We use HDF5 to read and write data in order to leverage the parallel capabilities of the HDF5 API.

+

Each object within GeoBIPy has a create_hdf, write_hdf, and read_hdf routine.

+
import numpy as np
+import h5py
+from geobipy import StatArray
+
+
+

StatArray

+
# Instantiate a StatArray
+x = StatArray(np.arange(10.0), name = 'an Array', units = 'some units')
+
+# Write the StatArray to a HDF file.
+with h5py.File("x.h5", 'w') as f:
+    x.toHdf(f, "x")
+
+# Read the StatArray back in.
+with h5py.File("x.h5", 'r') as f:
+    y = StatArray.fromHdf(f, 'x')
+
+print('x', x)
+print('y', y)
+
+
+

There are actually two steps within the “toHdf” function. First, space is created within the HDF file and second, the data is written to that space. These functions are split because, during the execution of a parallel enabled program, all the space within the HDF file needs to be allocated before we can write to the file using multiple cores.

+
# Write the StatArray to a HDF file.
+with h5py.File("x.h5", 'w') as f:
+    x.createHdf(f, "x")
+    x.writeHdf(f, "x")
+
+# Read the StatArray back in.
+with h5py.File("x.h5", 'r') as f:
+    y = StatArray.fromHdf(f, 'x')
+
+print('x', x)
+print('y', y)
+
+
+

The create and write HDF methods also allow extra space to be allocated so that the extra memory can be written later, perhaps by multiple cores. Here we specify space for 2 arrays; the memory is stored contiguously as a numpy array. We then write to only the first index.

+
# Write the StatArray to a HDF file.
+with h5py.File("x.h5", 'w') as f:
+    x.createHdf(f, "x", nRepeats=2)
+    x.writeHdf(f, "x", index=0)
+
+# Read the StatArray back in.
+with h5py.File("x.h5", 'r') as f:
+    y = StatArray.fromHdf(f, 'x', index=0)
+
+print('x', x)
+print('y', y)
+
+
+
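The allocate-then-write split above is what makes parallel output possible. The following is a minimal sketch of that idea; it is not part of the original example and assumes mpi4py plus an MPI-enabled build of h5py. Every rank opens the same file with the MPI driver, the collective createHdf call allocates one slot per rank, and each rank then writes only the index it owns.

from mpi4py import MPI
import h5py
import numpy as np
from geobipy import StatArray

comm = MPI.COMM_WORLD

# Each rank builds its own array; the values simply mark which rank wrote them.
x = StatArray(np.full(10, float(comm.rank)), name='an Array', units='some units')

with h5py.File('x_parallel.h5', 'w', driver='mpio', comm=comm) as f:
    # Dataset creation must be collective, so every rank allocates the same space.
    x.createHdf(f, 'x', nRepeats=comm.size)
    # Each rank then fills only its own slot.
    x.writeHdf(f, 'x', index=comm.rank)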

The duplication can also be a shape.

+
# Write the StatArray to a HDF file.
+with h5py.File("x.h5", 'w') as f:
+    x.createHdf(f, "x", nRepeats=(2, 2))
+    x.writeHdf(f, "x", index=(0, 0))
+
+# Read the StatArray back in.
+with h5py.File("x.h5", 'r') as f:
+    y = StatArray.fromHdf(f, 'x', index=(0, 0))
+
+print('x', x)
+print('y', y)
+
+
+

Similarly, we can duplicate a 2D array with an extra 2D duplication

+
x = StatArray(np.random.randn(2, 2), name = 'an Array', units = 'some units')
+# Write the StatArray to a HDF file.
+with h5py.File("x.h5", 'w') as f:
+    x.createHdf(f, "x", nRepeats=(2, 2))
+    x.writeHdf(f, "x", index=(0, 0))
+
+# Read the StatArray back in.
+with h5py.File("x.h5", 'r') as f:
+    y = StatArray.fromHdf(f, 'x', index=(0, 0))
+
+print('x', x)
+print('y', y)
+
+

Gallery generated by Sphinx-Gallery

\ No newline at end of file
diff --git a/docs/examples/HDF5/readme.html b/docs/examples/HDF5/readme.html new file mode 100644 index 00000000..e2dafd36 --- /dev/null +++ b/docs/examples/HDF5/readme.html @@ -0,0 +1,115 @@

HDF 5

\ No newline at end of file
diff --git a/docs/examples/HDF5/sg_execution_times.html b/docs/examples/HDF5/sg_execution_times.html new file mode 100644 index 00000000..ae9d89ec --- /dev/null +++ b/docs/examples/HDF5/sg_execution_times.html @@ -0,0 +1,143 @@

Computation times

+

00:00.000 total execution time for 1 file from examples/HDF5:

+
Example                                Time        Mem (MB)
Using HDF5 within GeoBIPy (hdf5.py)    00:00.000   0.0

\ No newline at end of file
diff --git a/docs/examples/Inference_1D/plot_inference_1d_resolve.html b/docs/examples/Inference_1D/plot_inference_1d_resolve.html new file mode 100644 index 00000000..228812aa --- /dev/null +++ b/docs/examples/Inference_1D/plot_inference_1d_resolve.html @@ -0,0 +1,253 @@

Running GeoBIPy to invert Resolve data

+
import os
+import sys
+import pathlib
+from datetime import timedelta
+import time
+import numpy as np
+from geobipy import Inference3D
+from geobipy import user_parameters
+from geobipy import get_prng
+
+def checkCommandArguments():
+    """Check the users command line arguments. """
+    import argparse
+    # warnings.filterwarnings('error')
+
+    Parser = argparse.ArgumentParser(description="GeoBIPy",
+                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    Parser.add_argument('--index', default=0, type=int, help='job array index 0-18')
+    Parser.add_argument('--data', default=None, help="Data type. Choose from ['skytem_512', 'tempest', 'resolve']")
+    Parser.add_argument('--model', default=None, help="Model type. Choose from ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']")
+
+    return Parser.parse_args()
+
+
+
np.random.seed(0)
+
+args = checkCommandArguments()
+sys.path.append(os.getcwd())
+
+models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']
+data_type = "Resolve"
+model_type = models[args.index]
+
+
+

The directory where HDF files will be stored

+
file_path = os.path.join(data_type, model_type)
+pathlib.Path(file_path).mkdir(parents=True, exist_ok=True)
+
+# Clear out any files left over from a previous run.
+for filename in os.listdir(file_path):
+    path = os.path.join(file_path, filename)
+    try:
+        if os.path.isfile(path) or os.path.islink(path):
+            os.unlink(path)
+    except Exception as e:
+        print('Failed to delete %s. Reason: %s' % (path, e))
+
+output_directory = file_path
+
+data_filename = data_type + '_' + model_type
+
+supplementary = "..//..//supplementary//"
+
+parameter_file = supplementary + "//options_files//{}_options".format(data_type)
+inputFile = pathlib.Path(parameter_file)
+assert inputFile.exists(), Exception("Cannot find input file {}".format(inputFile))
+
+output_directory = pathlib.Path(output_directory)
+assert output_directory.exists(), Exception("Make sure the output directory exists {}".format(output_directory))
+
+print('Using user input file {}'.format(parameter_file))
+print('Output files will be produced at {}'.format(output_directory))
+
+kwargs = user_parameters.read(inputFile)
+
+kwargs['n_markov_chains'] = 5000
+
+kwargs['data_filename'] = supplementary + '//data//' + data_filename + '.csv'
+kwargs['system_filename'] = supplementary + "//data//" + kwargs['system_filename']
+
+# Everyone needs the system classes read in early.
+data = kwargs['data_type']._initialize_sequential_reading(kwargs['data_filename'], kwargs['system_filename'])
+
+# Start keeping track of time.
+t0 = time.time()
+
+seed = 146100583096709124601953385843316024947
+prng = get_prng(seed=seed)
+
+inference3d = Inference3D(data, prng=prng)
+inference3d.create_hdf5(directory=output_directory, **kwargs)
+
+print("Created hdf5 files in {} h:m:s".format(str(timedelta(seconds=time.time()-t0))))
+
+inference3d.infer(index=30, **kwargs)
+
+
[Output figure: Fiducial [30], Frequency Domain EM Data]
Using user input file ..//..//supplementary////options_files//Resolve_options
+Output files will be produced at Resolve/glacial
+Creating HDF5 files, this may take a few minutes...
+Files are being created for data files ..//..//supplementary////data//Resolve_glacial.csv and system files ..//..//supplementary////data//..//data/FdemSystem2.stm
+Created hdf5 file for line 0.0 with 79 data points
+Created hdf5 files 79 total data points
+Created hdf5 files in 0:00:00.164874 h:m:s
+i=5000, k=1, acc=*49.200, 0.006 s/Model, 28.026 s Elapsed, eta=--:--:-- h:m:s
+
+Remaining Points -30/1 || Elapsed Time: 0:00:28.993323 h:m:s || ETA 0:00:00.935268 h:m:s
+
+
+

Total running time of the script: (0 minutes 29.449 seconds)


Gallery generated by Sphinx-Gallery

\ No newline at end of file
diff --git a/docs/examples/Inference_1D/plot_inference_1d_skytem.html b/docs/examples/Inference_1D/plot_inference_1d_skytem.html new file mode 100644 index 00000000..87666570 --- /dev/null +++ b/docs/examples/Inference_1D/plot_inference_1d_skytem.html @@ -0,0 +1,326 @@

Running GeoBIPy to invert Skytem data

+
import os
+import sys
+import pathlib
+from datetime import timedelta
+import time
+import numpy as np
+from geobipy import Inference3D
+from geobipy import user_parameters
+from geobipy import get_prng
+
+def checkCommandArguments():
+    """Check the users command line arguments. """
+    import argparse
+    # warnings.filterwarnings('error')
+
+    Parser = argparse.ArgumentParser(description="GeoBIPy",
+                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    Parser.add_argument('--index', default=0, type=int, help='job array index 0-18')
+    Parser.add_argument('--data', default=None, help="Data type. Choose from ['skytem', 'tempest', 'resolve']")
+    Parser.add_argument('--model', default=None, help="Model type. Choose from ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']")
+
+    return Parser.parse_args()
+
+
+
np.random.seed(0)
+
+args = checkCommandArguments()
+sys.path.append(os.getcwd())
+
+models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']
+
+data_type = "Skytem"
+model_type = models[args.index]
+
+
+

The directory where HDF files will be stored

+
file_path = os.path.join(data_type, model_type)
+pathlib.Path(file_path).mkdir(parents=True, exist_ok=True)
+
+# Clear out any files left over from a previous run.
+for filename in os.listdir(file_path):
+    path = os.path.join(file_path, filename)
+    try:
+        if os.path.isfile(path) or os.path.islink(path):
+            os.unlink(path)
+    except Exception as e:
+        print('Failed to delete %s. Reason: %s' % (path, e))
+
+output_directory = file_path
+
+data_filename = data_type + '_' + model_type
+
+supplementary = "..//..//supplementary//"
+parameter_file = supplementary + "//options_files//{}_options".format(data_type)
+inputFile = pathlib.Path(parameter_file)
+assert inputFile.exists(), Exception("Cannot find input file {}".format(inputFile))
+
+output_directory = pathlib.Path(output_directory)
+assert output_directory.exists(), Exception("Make sure the output directory exists {}".format(output_directory))
+
+print('Using user input file {}'.format(parameter_file))
+print('Output files will be produced at {}'.format(output_directory))
+
+kwargs = user_parameters.read(inputFile)
+
+kwargs['n_markov_chains'] = 5000
+
+kwargs['data_filename'] = supplementary + '//data//' + data_filename + '.csv'
+kwargs['system_filename'] = [supplementary + "//data//" + x for x in kwargs['system_filename']]
+
+# Everyone needs the system classes read in early.
+data = kwargs['data_type']._initialize_sequential_reading(kwargs['data_filename'], kwargs['system_filename'])
+
+# Start keeping track of time.
+t0 = time.time()
+
+seed = 146100583096709124601953385843316024947
+prng = get_prng(seed=seed)
+
+inference3d = Inference3D(data, prng=prng)
+inference3d.create_hdf5(directory=output_directory, **kwargs)
+
+print("Created hdf5 files in {} h:m:s".format(str(timedelta(seconds=time.time()-t0))))
+
+inference3d.infer(index=2, **kwargs)
+
+
[Output figure: Fiducial [2.], Time Domain EM Data]
Using user input file ..//..//supplementary////options_files//Skytem_options
+Output files will be produced at Skytem/glacial
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+Creating HDF5 files, this may take a few minutes...
+Files are being created for data files ..//..//supplementary////data//Skytem_glacial.csv and system files ['..//..//supplementary////data//..//data//SkytemHM.stm', '..//..//supplementary////data//..//data//SkytemLM.stm']
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+Created hdf5 file for line 0.0 with 79 data points
+Created hdf5 files 79 total data points
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+Created hdf5 files in 0:00:00.368412 h:m:s
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+i=5000, k=6, acc=*25.760, 0.019 s/Model, 94.498 s Elapsed, eta=--:--:-- h:m:s
+
+Remaining Points -2/1 || Elapsed Time: 0:01:36.342864 h:m:s || ETA 0:00:32.114288 h:m:s
+
+
+

Total running time of the script: (1 minutes 37.169 seconds)


Gallery generated by Sphinx-Gallery

\ No newline at end of file
diff --git a/docs/examples/Inference_1D/plot_inference_1d_tempest.html b/docs/examples/Inference_1D/plot_inference_1d_tempest.html new file mode 100644 index 00000000..fedb9565 --- /dev/null +++ b/docs/examples/Inference_1D/plot_inference_1d_tempest.html @@ -0,0 +1,327 @@

Running GeoBIPy to invert Tempest data

+
import os
+import sys
+import pathlib
+from datetime import timedelta
+import time
+import numpy as np
+from geobipy import Inference3D
+from geobipy import user_parameters
+from geobipy import get_prng
+
+def checkCommandArguments():
+    """Check the users command line arguments. """
+    import argparse
+    # warnings.filterwarnings('error')
+
+    Parser = argparse.ArgumentParser(description="GeoBIPy",
+                                     formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    Parser.add_argument('--index', default=0, type=int, help='job array index 0-18')
+    Parser.add_argument('--data', default=None, help="Data type. Choose from ['skytem_512', 'tempest', 'resolve']")
+    Parser.add_argument('--model', default=None, help="Model type. Choose from ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']")
+
+    return Parser.parse_args()
+
+
+
np.random.seed(0)
+
+args = checkCommandArguments()
+sys.path.append(os.getcwd())
+
+models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']
+
+data_type = "Tempest"
+model_type = models[args.index]
+
+
+

The directory where HDF files will be stored

+
file_path = os.path.join(data_type, model_type)
+pathlib.Path(file_path).mkdir(parents=True, exist_ok=True)
+
+# Clear out any files left over from a previous run.
+for filename in os.listdir(file_path):
+    path = os.path.join(file_path, filename)
+    try:
+        if os.path.isfile(path) or os.path.islink(path):
+            os.unlink(path)
+    except Exception as e:
+        print('Failed to delete %s. Reason: %s' % (path, e))
+
+output_directory = file_path
+
+data_filename = data_type + '_' + model_type
+
+supplementary = "..//..//supplementary//"
+
+parameter_file = supplementary + "//options_files//{}_options".format(data_type)
+inputFile = pathlib.Path(parameter_file)
+assert inputFile.exists(), Exception("Cannot find input file {}".format(inputFile))
+
+output_directory = pathlib.Path(output_directory)
+assert output_directory.exists(), Exception("Make sure the output directory exists {}".format(output_directory))
+
+print('Using user input file {}'.format(parameter_file))
+print('Output files will be produced at {}'.format(output_directory))
+
+kwargs = user_parameters.read(inputFile)
+
+kwargs['n_markov_chains'] = 5000
+
+kwargs['data_filename'] = supplementary + '//data//' + data_filename + '.csv'
+kwargs['system_filename'] = supplementary + "//data//" + kwargs['system_filename']
+
+# Everyone needs the system classes read in early.
+data = kwargs['data_type']._initialize_sequential_reading(kwargs['data_filename'], kwargs['system_filename'])
+
+# Start keeping track of time.
+t0 = time.time()
+
+seed = 146100583096709124601953385843316024947
+prng = get_prng(seed=seed)
+
+inference3d = Inference3D(data, prng=prng)
+inference3d.create_hdf5(directory=output_directory, **kwargs)
+
+print("Created hdf5 files in {} h:m:s".format(str(timedelta(seconds=time.time()-t0))))
+
+inference3d.infer(index=2, **kwargs)
+
+
[Output figure: Fiducial [2.], Time Domain EM Data]
Using user input file ..//..//supplementary////options_files//Tempest_options
+Output files will be produced at Tempest/glacial
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+Creating HDF5 files, this may take a few minutes...
+Files are being created for data files ..//..//supplementary////data//Tempest_glacial.csv and system files ..//..//supplementary////data//..//data/tempest.stm
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+Created hdf5 file for line 0.0 with 79 data points
+Created hdf5 files 79 total data points
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+Created hdf5 files in 0:00:00.544066 h:m:s
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+self.n_components=2, self.nTimes=array([15])
+i=5000, k=5, acc=*24.800, 0.023 s/Model, 113.655 s Elapsed, eta=--:--:-- h:m:s
+
+Remaining Points -2/1 || Elapsed Time: 0:01:55.563057 h:m:s || ETA 0:00:38.521019 h:m:s
+
+
+

Total running time of the script: (1 minutes 56.531 seconds)


Gallery generated by Sphinx-Gallery

\ No newline at end of file
diff --git a/docs/examples/Inference_1D/readme.html b/docs/examples/Inference_1D/readme.html new file mode 100644 index 00000000..de7d97e9 --- /dev/null +++ b/docs/examples/Inference_1D/readme.html @@ -0,0 +1,121 @@

1D Inference

+

There are a couple of ways to run an inference using geobipy. The first is via the command line using

+
geobipy skytem_options.py <output folder>
+
+
+

The other is with a Python script similar to the examples in this folder. In both cases, you will need to write an options file (also shown in these examples).
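As a rough outline of the script-based route (condensed from the example scripts in this folder; the options file name, output folder, and index below are placeholders):

from geobipy import Inference3D, user_parameters, get_prng

# Read the options file; it supplies the data, system, and inversion settings.
kwargs = user_parameters.read('skytem_options.py')
data = kwargs['data_type']._initialize_sequential_reading(
    kwargs['data_filename'], kwargs['system_filename'])

# Create the HDF5 output files, then invert a single data point.
prng = get_prng(seed=0)
inference3d = Inference3D(data, prng=prng)
inference3d.create_hdf5(directory='output_folder', **kwargs)
inference3d.infer(index=0, **kwargs)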

\ No newline at end of file
diff --git a/docs/examples/Inference_1D/sg_execution_times.html b/docs/examples/Inference_1D/sg_execution_times.html new file mode 100644 index 00000000..0a297009 --- /dev/null +++ b/docs/examples/Inference_1D/sg_execution_times.html @@ -0,0 +1,151 @@

Computation times

+

04:03.149 total execution time for 3 files from examples/Inference_1D:

+
Example                                                                   Time        Mem (MB)
Running GeoBIPy to invert Tempest data (plot_inference_1d_tempest.py)     01:56.531   0.0
Running GeoBIPy to invert Skytem data (plot_inference_1d_skytem.py)       01:37.169   0.0
Running GeoBIPy to invert Resolve data (plot_inference_1d_resolve.py)     00:29.449   0.0

\ No newline at end of file
diff --git a/docs/examples/Inference_2D/plot_inference_2d_resolve.html b/docs/examples/Inference_2D/plot_inference_2d_resolve.html new file mode 100644 index 00000000..b58c9f93 --- /dev/null +++ b/docs/examples/Inference_2D/plot_inference_2d_resolve.html @@ -0,0 +1,308 @@

2D Posterior analysis of Resolve inference

+

All plotting in GeoBIPy can be carried out using the 3D inference class

+
[Output figures: resolve glacial, saline_clay, resistive_dolomites, resistive_basement, coastal_salt_water, and ice_over_salt_water; each shows the Best model, 5%, 50%, 95%, P(# of Layers), and P(Interface)]
+
import matplotlib.pyplot as plt
+import numpy as np
+from geobipy import Model
+from geobipy import Inference2D
+
+def plot_2d_summary(folder, data_type, model_type):
+   #%%
+   # Inference for a line of inferences
+   # ++++++++++++++++++++++++++++++++++
+   #
+   # We can instantiate the inference handler by providing a path to the directory containing
+   # HDF5 files generated by GeoBIPy.
+   #
+   # The InferenceXD classes are low memory.  They only read information from the HDF5 files
+   # as and when it is needed.
+   #
+   # The first time you use these classes to create plots, expect longer initial processing times.
+   # I precompute expensive properties and store them in the HDF5 files for later use.
+
+   from numpy.random import Generator
+   from numpy.random import PCG64DXSM
+   generator = PCG64DXSM(seed=0)
+   prng = Generator(generator)
+
+   #%%
+   results_2d = Inference2D.fromHdf('{}/{}/{}/0.0.h5'.format(folder, data_type, model_type), prng=prng)
+
+   kwargs = {
+         "log" : 10,
+         "cmap" : 'jet'
+         }
+
+   fig = plt.figure(figsize=(16, 8))
+   plt.suptitle("{} {}".format(data_type, model_type))
+   gs0 = fig.add_gridspec(6, 2, hspace=1.0)
+
+   true_model = Model.create_synthetic_model(model_type)
+   true_model.mesh.y_edges = true_model.mesh.y_edges / 10.0
+
+   kwargs['vmin'] = np.log10(np.min(true_model.values))
+   kwargs['vmax'] = np.log10(np.max(true_model.values))
+
+   ax = fig.add_subplot(gs0[0, 0])
+   true_model.pcolor(flipY=True, ax=ax, wrap_clabel=True, **kwargs)
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False);
+
+   plt.ylim([-160, 60])
+
+   ax1 = fig.add_subplot(gs0[0, 1], sharex=ax, sharey=ax)
+   results_2d.plot_mean_model(ax=ax1, wrap_clabel=True, **kwargs);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   # By adding the useVariance keyword, we can make regions of lower confidence more transparent
+   ax1 = fig.add_subplot(gs0[1, 1], sharex=ax, sharey=ax)
+   results_2d.plot_mode_model(ax=ax1, wrap_clabel=True, **kwargs);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   # # # # # We can also choose to keep parameters above the DOI opaque.
+   # # # # results_2d.compute_doi()
+   # # # # plt.subplot(313)
+   # # # # results_2d.plot_mean_model(use_variance=True, mask_below_doi=True, **kwargs);
+   # # # # results_2d.plot_data_elevation(linewidth=0.3);
+   # # # # results_2d.plot_elevation(linewidth=0.3);
+
+   ax1 = fig.add_subplot(gs0[2, 1], sharex=ax, sharey=ax)
+   results_2d.plot_best_model(ax=ax1, wrap_clabel=True, **kwargs);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+   ax1.set_title('Best model')
+
+   del kwargs['vmin']
+   del kwargs['vmax']
+
+   ax1 = fig.add_subplot(gs0[3, 1], sharex=ax, sharey=ax); ax1.set_title('5%')
+   results_2d.plot_percentile(ax=ax1, percent=0.05, wrap_clabel=True, **kwargs)
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   ax1 = fig.add_subplot(gs0[4, 1], sharex=ax, sharey=ax); ax1.set_title('50%')
+   results_2d.plot_percentile(ax=ax1, percent=0.5, wrap_clabel=True, **kwargs)
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   ax1 = fig.add_subplot(gs0[5, 1], sharex=ax, sharey=ax); ax1.set_title('95%')
+   results_2d.plot_percentile(ax=ax1, percent=0.95, wrap_clabel=True, **kwargs)
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   #%%
+   # We can plot the parameter values that produced the highest posterior
+   ax1 = fig.add_subplot(gs0[2, 0], sharex=ax)
+   results_2d.plot_k_layers(ax=ax1, wrap_ylabel=True)
+
+   ax1 = fig.add_subplot(gs0[1, 0], sharex=ax)
+
+   ll, bb, ww, hh = ax1.get_position().bounds
+   ax1.set_position([ll, bb, ww*0.8, hh])
+
+   results_2d.plot_channel_saturation(ax=ax1, wrap_ylabel=True)
+   results_2d.plot_burned_in(ax=ax1, underlay=True)
+
+   #%%
+   # Now we can start plotting some more interesting posterior properties.
+   # How about the confidence?
+   ax1 = fig.add_subplot(gs0[3, 0], sharex=ax, sharey=ax)
+   results_2d.plot_confidence(ax=ax1);
+   results_2d.plot_data_elevation(ax=ax1, linewidth=0.3);
+   results_2d.plot_elevation(ax=ax1, linewidth=0.3);
+
+   #%%
+   # We can take the interface depth posterior for each data point,
+   # and display an interface probability cross section
+   # This posterior can be washed out, so the clim_scaling keyword lets me saturate
+   # the top and bottom 0.5% of the colour range
+   ax1 = fig.add_subplot(gs0[4, 0], sharex=ax, sharey=ax)
+   ax1.set_title('P(Interface)')
+   results_2d.plot_interfaces(cmap='Greys', clim_scaling=0.5, ax=ax1);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   ax1 = fig.add_subplot(gs0[5, 0], sharex=ax, sharey=ax)
+   results_2d.plot_entropy(cmap='Greys', clim_scaling=0.5, ax=ax1);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+
+   plt.show()
+   # plt.savefig('{}_{}.png'.format(data_type, model_type), dpi=300)
+
+if __name__ == '__main__':
+   models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']
+
+   for model in models:
+      try:
+         plot_2d_summary("../../../Parallel_Inference/", "resolve", model)
+      except Exception as e:
+         print(model)
+         print(e)
+         pass
+
+
+

Total running time of the script: (0 minutes 21.524 seconds)


Gallery generated by Sphinx-Gallery

\ No newline at end of file
diff --git a/docs/examples/Inference_2D/plot_inference_2d_skytem.html b/docs/examples/Inference_2D/plot_inference_2d_skytem.html new file mode 100644 index 00000000..5f012cbe --- /dev/null +++ b/docs/examples/Inference_2D/plot_inference_2d_skytem.html @@ -0,0 +1,1023 @@

2D Posterior analysis of Skytem inference

+

All plotting in GeoBIPy can be carried out using the 3D inference class

+
[Output figures: skytem glacial, saline_clay, resistive_dolomites, resistive_basement, coastal_salt_water, and ice_over_salt_water; each shows the Best model, 5%, 50%, 95%, P(# of Layers), and P(Interface)]
+
self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+Model:
+mesh:
+|   RectilinearMesh2D:
+|   Shape: : (79, 132)
+|   x
+|   RectilinearMesh1D
+|   Number of Cells:
+|   |   79
+|   Cell Centres:
+|   |   StatArray
+|   |   Name:   Easting (m)
+|   |   Address:['0x17f00d0d0']
+|   |   Shape:  (79,)
+|   |   Values: [ 0.  1.  2. ... 76. 77. 78.]
+|   |   Min:    0.0
+|   |   Max:    78.0
+|   |   has_posterior: False
+|
+|   Cell Edges:
+|   |   StatArray
+|   |   Name:   Easting (m)
+|   |   Address:['0x17f00ebd0']
+|   |   Shape:  (80,)
+|   |   Values: [-0.5  0.5  1.5 ... 76.5 77.5 78.5]
+|   |   Min:    -0.5
+|   |   Max:    78.5
+|   |   has_posterior: False
+|
+|   log:
+|   |   None
+|   relative_to:
+|   |   StatArray
+|   |   Name:
+|   |   Address:['0x182cba8d0']
+|   |   Shape:  (1,)
+|   |   Values: [0.]
+|   |   Min:    0.0
+|   |   Max:    0.0
+|   |   has_posterior: False
+|
+|   y
+|   RectilinearMesh1D
+|   Number of Cells:
+|   |   132
+|   Cell Centres:
+|   |   StatArray
+|   |   Name:   elevation (m)
+|   |   Address:['0x17f00fa50']
+|   |   Shape:  (132,)
+|   |   Values: [  -3.1875   -7.7625  -12.3375 ... -593.3625 -597.9375 -602.5125]
+|   |   Min:    -602.5125
+|   |   Max:    -3.1875
+|   |   has_posterior: False
+|
+|   Cell Edges:
+|   |   StatArray
+|   |   Name:   elevation (m)
+|   |   Address:['0x17f00e9d0']
+|   |   Shape:  (133,)
+|   |   Values: [  -0.9     -5.475  -10.05  ... -595.65  -600.225 -604.8  ]
+|   |   Min:    -604.8
+|   |   Max:    -0.9
+|   |   has_posterior: False
+|
+|   log:
+|   |   None
+|   relative_to:
+|   |   StatArray
+|   |   Name:   Elevation (m)
+|   |   Address:['0x17ec00950']
+|   |   Shape:  (79,)
+|   |   Values: [0. 0. 0. ... 0. 0. 0.]
+|   |   Min:    0.0
+|   |   Max:    0.0
+|   |   has_posterior: False
+|
+values:
+|   StatArray
+|   Name:   Mean Conductivity ($\frac{S}{m}$)
+|   Address:['0x17f00d650']
+|   Shape:  (79, 132)
+|   Values: [[0.01120778 0.01120778 0.01089737 ... 0.01842322 0.01842322 0.01842322]
+|    [0.01004315 0.01004315 0.01001518 ... 0.01627435 0.01627435 0.01627435]
+|    [0.0098209  0.0098209  0.00983342 ... 0.01667789 0.01667789 0.01667789]
+|    ...
+|    [0.06755182 0.06755182 0.11932536 ... 0.0915802  0.0915802  0.0915802 ]
+|    [0.08104576 0.08104576 0.09688295 ... 0.09217019 0.09217019 0.09217019]
+|    [0.09121028 0.09121028 0.09943253 ... 0.11213815 0.11213815 0.11213815]]
+|   Min:    0.007485002271267226
+|   Max:    0.21717666705189165
+|   has_posterior: False
+
+
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+Model:
+mesh:
+|   RectilinearMesh2D:
+|   Shape: : (79, 132)
+|   x
+|   RectilinearMesh1D
+|   Number of Cells:
+|   |   79
+|   Cell Centres:
+|   |   StatArray
+|   |   Name:   Easting (m)
+|   |   Address:['0x182cb9c50']
+|   |   Shape:  (79,)
+|   |   Values: [ 0.  1.  2. ... 76. 77. 78.]
+|   |   Min:    0.0
+|   |   Max:    78.0
+|   |   has_posterior: False
+|
+|   Cell Edges:
+|   |   StatArray
+|   |   Name:   Easting (m)
+|   |   Address:['0x17f114d50']
+|   |   Shape:  (80,)
+|   |   Values: [-0.5  0.5  1.5 ... 76.5 77.5 78.5]
+|   |   Min:    -0.5
+|   |   Max:    78.5
+|   |   has_posterior: False
+|
+|   log:
+|   |   None
+|   relative_to:
+|   |   StatArray
+|   |   Name:
+|   |   Address:['0x182c2d4d0']
+|   |   Shape:  (1,)
+|   |   Values: [0.]
+|   |   Min:    0.0
+|   |   Max:    0.0
+|   |   has_posterior: False
+|
+|   y
+|   RectilinearMesh1D
+|   Number of Cells:
+|   |   132
+|   Cell Centres:
+|   |   StatArray
+|   |   Name:   elevation (m)
+|   |   Address:['0x17f115b50']
+|   |   Shape:  (132,)
+|   |   Values: [  -3.1875   -7.7625  -12.3375 ... -593.3625 -597.9375 -602.5125]
+|   |   Min:    -602.5125
+|   |   Max:    -3.1875
+|   |   has_posterior: False
+|
+|   Cell Edges:
+|   |   StatArray
+|   |   Name:   elevation (m)
+|   |   Address:['0x17ec01c50']
+|   |   Shape:  (133,)
+|   |   Values: [  -0.9     -5.475  -10.05  ... -595.65  -600.225 -604.8  ]
+|   |   Min:    -604.8
+|   |   Max:    -0.9
+|   |   has_posterior: False
+|
+|   log:
+|   |   None
+|   relative_to:
+|   |   StatArray
+|   |   Name:   Elevation (m)
+|   |   Address:['0x17ec036d0']
+|   |   Shape:  (79,)
+|   |   Values: [0. 0. 0. ... 0. 0. 0.]
+|   |   Min:    0.0
+|   |   Max:    0.0
+|   |   has_posterior: False
+|
+values:
+|   StatArray
+|   Name:   Mean Conductivity ($\frac{S}{m}$)
+|   Address:['0x17ec006d0']
+|   Shape:  (79, 132)
+|   Values: [[0.01022279 0.01022279 0.01016321 ... 0.0112021  0.0112021  0.0112021 ]
+|    [0.01121804 0.01121804 0.01062572 ... 0.0110357  0.0110357  0.0110357 ]
+|    [0.01049917 0.01049917 0.01007536 ... 0.01354095 0.01354095 0.01354095]
+|    ...
+|    [0.06896877 0.06896877 0.082946   ... 0.07112381 0.07112381 0.07112381]
+|    [0.08055856 0.08055856 0.0985627  ... 0.07903887 0.07903887 0.07903887]
+|    [0.09166633 0.09166633 0.09878492 ... 0.11154601 0.11154601 0.11154601]]
+|   Min:    0.006775009348709323
+|   Max:    0.8077378262817626
+|   has_posterior: False
+
+
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+Model:
+mesh:
+|   RectilinearMesh2D:
+|   Shape: : (79, 132)
+|   x
+|   RectilinearMesh1D
+|   Number of Cells:
+|   |   79
+|   Cell Centres:
+|   |   StatArray
+|   |   Name:   Easting (m)
+|   |   Address:['0x18229f350']
+|   |   Shape:  (79,)
+|   |   Values: [ 0.  1.  2. ... 76. 77. 78.]
+|   |   Min:    0.0
+|   |   Max:    78.0
+|   |   has_posterior: False
+|
+|   Cell Edges:
+|   |   StatArray
+|   |   Name:   Easting (m)
+|   |   Address:['0x17f21de50']
+|   |   Shape:  (80,)
+|   |   Values: [-0.5  0.5  1.5 ... 76.5 77.5 78.5]
+|   |   Min:    -0.5
+|   |   Max:    78.5
+|   |   has_posterior: False
+|
+|   log:
+|   |   None
+|   relative_to:
+|   |   StatArray
+|   |   Name:
+|   |   Address:['0x182600c50']
+|   |   Shape:  (1,)
+|   |   Values: [0.]
+|   |   Min:    0.0
+|   |   Max:    0.0
+|   |   has_posterior: False
+|
+|   y
+|   RectilinearMesh1D
+|   Number of Cells:
+|   |   132
+|   Cell Centres:
+|   |   StatArray
+|   |   Name:   elevation (m)
+|   |   Address:['0x18229d5d0']
+|   |   Shape:  (132,)
+|   |   Values: [  -3.1875   -7.7625  -12.3375 ... -593.3625 -597.9375 -602.5125]
+|   |   Min:    -602.5125
+|   |   Max:    -3.1875
+|   |   has_posterior: False
+|
+|   Cell Edges:
+|   |   StatArray
+|   |   Name:   elevation (m)
+|   |   Address:['0x18229dbd0']
+|   |   Shape:  (133,)
+|   |   Values: [  -0.9     -5.475  -10.05  ... -595.65  -600.225 -604.8  ]
+|   |   Min:    -604.8
+|   |   Max:    -0.9
+|   |   has_posterior: False
+|
+|   log:
+|   |   None
+|   relative_to:
+|   |   StatArray
+|   |   Name:   Elevation (m)
+|   |   Address:['0x18229e6d0']
+|   |   Shape:  (79,)
+|   |   Values: [0. 0. 0. ... 0. 0. 0.]
+|   |   Min:    0.0
+|   |   Max:    0.0
+|   |   has_posterior: False
+|
+values:
+|   StatArray
+|   Name:   Mean Conductivity ($\frac{S}{m}$)
+|   Address:['0x18229cb50']
+|   Shape:  (79, 132)
+|   Values: [[0.02104953 0.02104953 0.02099504 ... 0.01952426 0.01952426 0.01952426]
+|    [0.02008789 0.02008789 0.01997577 ... 0.02017019 0.02017019 0.02017019]
+|    [0.0198301  0.0198301  0.01986773 ... 0.0145253  0.0145253  0.0145253 ]
+|    ...
+|    [0.00511314 0.00511314 0.0029654  ... 0.01258484 0.01258484 0.01258484]
+|    [0.00392961 0.00392961 0.00385047 ... 0.00870505 0.00870505 0.00870505]
+|    [0.00279718 0.00279718 0.00269725 ... 0.00924302 0.00924302 0.00924302]]
+|   Min:    0.0008600086897131668
+|   Max:    9.774547726888374
+|   has_posterior: False
+
+
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+Model:
+mesh:
+|   RectilinearMesh2D:
+|   Shape: : (79, 132)
+|   x
+|   RectilinearMesh1D
+|   Number of Cells:
+|   |   79
+|   Cell Centres:
+|   |   StatArray
+|   |   Name:   Easting (m)
+|   |   Address:['0x17eab3d50']
+|   |   Shape:  (79,)
+|   |   Values: [ 0.  1.  2. ... 76. 77. 78.]
+|   |   Min:    0.0
+|   |   Max:    78.0
+|   |   has_posterior: False
+|
+|   Cell Edges:
+|   |   StatArray
+|   |   Name:   Easting (m)
+|   |   Address:['0x17eab07d0']
+|   |   Shape:  (80,)
+|   |   Values: [-0.5  0.5  1.5 ... 76.5 77.5 78.5]
+|   |   Min:    -0.5
+|   |   Max:    78.5
+|   |   has_posterior: False
+|
+|   log:
+|   |   None
+|   relative_to:
+|   |   StatArray
+|   |   Name:
+|   |   Address:['0x17f2d60d0']
+|   |   Shape:  (1,)
+|   |   Values: [0.]
+|   |   Min:    0.0
+|   |   Max:    0.0
+|   |   has_posterior: False
+|
+|   y
+|   RectilinearMesh1D
+|   Number of Cells:
+|   |   132
+|   Cell Centres:
+|   |   StatArray
+|   |   Name:   elevation (m)
+|   |   Address:['0x17eab31d0']
+|   |   Shape:  (132,)
+|   |   Values: [  -3.1875   -7.7625  -12.3375 ... -593.3625 -597.9375 -602.5125]
+|   |   Min:    -602.5125
+|   |   Max:    -3.1875
+|   |   has_posterior: False
+|
+|   Cell Edges:
+|   |   StatArray
+|   |   Name:   elevation (m)
+|   |   Address:['0x17eab0bd0']
+|   |   Shape:  (133,)
+|   |   Values: [  -0.9     -5.475  -10.05  ... -595.65  -600.225 -604.8  ]
+|   |   Min:    -604.8
+|   |   Max:    -0.9
+|   |   has_posterior: False
+|
+|   log:
+|   |   None
+|   relative_to:
+|   |   StatArray
+|   |   Name:   Elevation (m)
+|   |   Address:['0x182fd6bd0']
+|   |   Shape:  (79,)
+|   |   Values: [0. 0. 0. ... 0. 0. 0.]
+|   |   Min:    0.0
+|   |   Max:    0.0
+|   |   has_posterior: False
+|
+values:
+|   StatArray
+|   Name:   Mean Conductivity ($\frac{S}{m}$)
+|   Address:['0x182fd5650']
+|   Shape:  (79, 132)
+|   Values: [[0.01147797 0.01147797 0.01033699 ... 0.00131732 0.00131732 0.00131732]
+|    [0.00999299 0.00999299 0.01000167 ... 0.00120333 0.00120333 0.00120333]
+|    [0.01007022 0.01007022 0.01005285 ... 0.00844038 0.00844038 0.00844038]
+|    ...
+|    [0.06763163 0.06763163 0.11942951 ... 0.09924038 0.09924038 0.09924038]
+|    [0.08122468 0.08122468 0.09651811 ... 0.09608009 0.09608009 0.09608009]
+|    [0.09162505 0.09162505 0.09883382 ... 0.11272764 0.11272764 0.11272764]]
+|   Min:    0.0007661982574834428
+|   Max:    0.21045876958147633
+|   has_posterior: False
+
+
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+self.n_components=1, self.nTimes=array([26, 19])
+Model:
+mesh:
+|   RectilinearMesh2D:
+|   Shape: : (79, 132)
+|   x
+|   RectilinearMesh1D
+|   Number of Cells:
+|   |   79
+|   Cell Centres:
+|   |   StatArray
+|   |   Name:   Easting (m)
+|   |   Address:['0x181fa95d0']
+|   |   Shape:  (79,)
+|   |   Values: [ 0.  1.  2. ... 76. 77. 78.]
+|   |   Min:    0.0
+|   |   Max:    78.0
+|   |   has_posterior: False
+|
+|   Cell Edges:
+|   |   StatArray
+|   |   Name:   Easting (m)
+|   |   Address:['0x181fa88d0']
+|   |   Shape:  (80,)
+|   |   Values: [-0.5  0.5  1.5 ... 76.5 77.5 78.5]
+|   |   Min:    -0.5
+|   |   Max:    78.5
+|   |   has_posterior: False
+|
+|   log:
+|   |   None
+|   relative_to:
+|   |   StatArray
+|   |   Name:
+|   |   Address:['0x181f49a50']
+|   |   Shape:  (1,)
+|   |   Values: [0.]
+|   |   Min:    0.0
+|   |   Max:    0.0
+|   |   has_posterior: False
+|
+|   y
+|   RectilinearMesh1D
+|   Number of Cells:
+|   |   132
+|   Cell Centres:
+|   |   StatArray
+|   |   Name:   elevation (m)
+|   |   Address:['0x181fa8450']
+|   |   Shape:  (132,)
+|   |   Values: [  -3.1875   -7.7625  -12.3375 ... -593.3625 -597.9375 -602.5125]
+|   |   Min:    -602.5125
+|   |   Max:    -3.1875
+|   |   has_posterior: False
+|
+|   Cell Edges:
+|   |   StatArray
+|   |   Name:   elevation (m)
+|   |   Address:['0x181fa8150']
+|   |   Shape:  (133,)
+|   |   Values: [  -0.9     -5.475  -10.05  ... -595.65  -600.225 -604.8  ]
+|   |   Min:    -604.8
+|   |   Max:    -0.9
+|   |   has_posterior: False
+|
+|   log:
+|   |   None
+|   relative_to:
+|   |   StatArray
+|   |   Name:   Elevation (m)
+|   |   Address:['0x181fa9150']
+|   |   Shape:  (79,)
+|   |   Values: [0. 0. 0. ... 0. 0. 0.]
+|   |   Min:    0.0
+|   |   Max:    0.0
+|   |   has_posterior: False
+|
+values:
+|   StatArray
+|   Name:   Mean Conductivity ($\frac{S}{m}$)
+|   Address:['0x181fa87d0']
+|   Shape:  (79, 132)
+|   Values: [[1.02310541 1.02310541 1.02438661 ... 1.05490484 1.05490484 1.05490484]
+|    [1.02229478 1.02229478 1.02483553 ... 0.83077926 0.83077926 0.83077926]
+|    [1.02256309 1.02256309 1.02298923 ... 1.16206135 1.16206135 1.16206135]
+|    ...
+|    [0.18317234 0.18317234 0.18317234 ... 0.04611609 0.04611609 0.04611609]
+|    [0.13296225 0.13296225 0.13296225 ... 0.02326468 0.02326468 0.02326468]
+|    [0.08897776 0.08897776 0.08897776 ... 0.02807133 0.02807133 0.02807133]]
+|   Min:    0.005922638381206899
+|   Max:    15.778868401978407
+|   has_posterior: False
+
+
+self.n_components=1, self.nTimes=array([26, 19])
+Model:
+mesh:
+|   RectilinearMesh2D:
+|   Shape: : (79, 132)
+|   x
+|   RectilinearMesh1D
+|   Number of Cells:
+|   |   79
+|   Cell Centres:
+|   |   StatArray
+|   |   Name:   Easting (m)
+|   |   Address:['0x180d2a5d0']
+|   |   Shape:  (79,)
+|   |   Values: [ 0.  1.  2. ... 76. 77. 78.]
+|   |   Min:    0.0
+|   |   Max:    78.0
+|   |   has_posterior: False
+|
+|   Cell Edges:
+|   |   StatArray
+|   |   Name:   Easting (m)
+|   |   Address:['0x180d2aad0']
+|   |   Shape:  (80,)
+|   |   Values: [-0.5  0.5  1.5 ... 76.5 77.5 78.5]
+|   |   Min:    -0.5
+|   |   Max:    78.5
+|   |   has_posterior: False
+|
+|   log:
+|   |   None
+|   relative_to:
+|   |   StatArray
+|   |   Name:
+|   |   Address:['0x17f866cd0']
+|   |   Shape:  (1,)
+|   |   Values: [0.]
+|   |   Min:    0.0
+|   |   Max:    0.0
+|   |   has_posterior: False
+|
+|   y
+|   RectilinearMesh1D
+|   Number of Cells:
+|   |   132
+|   Cell Centres:
+|   |   StatArray
+|   |   Name:   elevation (m)
+|   |   Address:['0x180d2a750']
+|   |   Shape:  (132,)
+|   |   Values: [  -3.1875   -7.7625  -12.3375 ... -593.3625 -597.9375 -602.5125]
+|   |   Min:    -602.5125
+|   |   Max:    -3.1875
+|   |   has_posterior: False
+|
+|   Cell Edges:
+|   |   StatArray
+|   |   Name:   elevation (m)
+|   |   Address:['0x180d2ac50']
+|   |   Shape:  (133,)
+|   |   Values: [  -0.9     -5.475  -10.05  ... -595.65  -600.225 -604.8  ]
+|   |   Min:    -604.8
+|   |   Max:    -0.9
+|   |   has_posterior: False
+|
+|   log:
+|   |   None
+|   relative_to:
+|   |   StatArray
+|   |   Name:   Elevation (m)
+|   |   Address:['0x180d216d0']
+|   |   Shape:  (79,)
+|   |   Values: [0. 0. 0. ... 0. 0. 0.]
+|   |   Min:    0.0
+|   |   Max:    0.0
+|   |   has_posterior: False
+|
+values:
+|   StatArray
+|   Name:   Mean Conductivity ($\frac{S}{m}$)
+|   Address:['0x180d21350']
+|   Shape:  (79, 132)
+|   Values: [[0.00056777 0.00056777 0.00056795 ... 0.00059446 0.00059446 0.00059446]
+|    [0.00052266 0.00052266 0.00052266 ... 0.00071041 0.00071041 0.00071041]
+|    [0.00080071 0.00080071 0.00080071 ... 0.00104502 0.00104502 0.00104502]
+|    ...
+|    [0.00854703 0.00854703 0.00857136 ... 0.00167267 0.00167267 0.00167267]
+|    [0.00894549 0.00894549 0.00908742 ... 0.00255047 0.00255047 0.00255047]
+|    [0.00925526 0.00925526 0.00948505 ... 0.00507188 0.00507188 0.00507188]]
+|   Min:    0.0004901610032814771
+|   Max:    0.7309893437585234
+|   has_posterior: False
+
+
+self.n_components=1, self.nTimes=array([26, 19])
+
+
+
+

+
+
import argparse
+import matplotlib.pyplot as plt
+import numpy as np
+from geobipy import Model
+from geobipy import Inference2D
+
+def plot_2d_summary(folder, data_type, model_type):
+   #%%
+   # Inference for a line of inferences
+   # ++++++++++++++++++++++++++++++++++
+   #
+   # We can instantiate the inference handler by providing a path to the directory containing
+   # HDF5 files generated by GeoBIPy.
+   #
+   # The InferenceXD classes are low memory. They only read information from the HDF5 files
+   # as and when it is needed.
+   #
+   # The first time you use these classes to create plots, expect longer initial processing times.
+   # I precompute expensive properties and store them in the HDF5 files for later use.
+
+   from numpy.random import Generator
+   from numpy.random import PCG64DXSM
+   generator = PCG64DXSM(seed=0)
+   prng = Generator(generator)
+
+   #%%
+   results_2d = Inference2D.fromHdf('{}/{}/{}/0.0.h5'.format(folder, data_type, model_type), prng=prng)
+
+   kwargs = {
+         "log" : 10,
+         "cmap" : 'jet'
+         }
+
+   fig = plt.figure(figsize=(16, 8))
+   plt.suptitle("{} {}".format(data_type, model_type))
+   gs0 = fig.add_gridspec(6, 2, hspace=1.0)
+
+   true_model = Model.create_synthetic_model(model_type)
+
+   kwargs['vmin'] = np.log10(np.min(true_model.values))
+   kwargs['vmax'] = np.log10(np.max(true_model.values))
+
+   ax = fig.add_subplot(gs0[0, 0])
+   true_model.pcolor(flipY=True, ax=ax, wrap_clabel=True, **kwargs)
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False);
+
+   plt.ylim([-550, 60])
+
+   print(results_2d.mean_parameters().summary)
+
+   ax1 = fig.add_subplot(gs0[0, 1], sharex=ax, sharey=ax)
+   results_2d.plot_mean_model(ax=ax1, wrap_clabel=True, **kwargs);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   # By adding the use_variance keyword, we can make regions of lower confidence more transparent
+   ax1 = fig.add_subplot(gs0[1, 1], sharex=ax, sharey=ax)
+   results_2d.plot_mode_model(ax=ax1, wrap_clabel=True, **kwargs);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   # We can also choose to keep parameters above the DOI opaque.
+   # results_2d.compute_doi()
+   # plt.subplot(313)
+   # results_2d.plot_mean_model(use_variance=True, mask_below_doi=True, **kwargs);
+   # results_2d.plot_data_elevation(linewidth=0.3);
+   # results_2d.plot_elevation(linewidth=0.3);
+
+   ax1 = fig.add_subplot(gs0[2, 1], sharex=ax, sharey=ax)
+   results_2d.plot_best_model(ax=ax1, wrap_clabel=True, **kwargs);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+   ax1.set_title('Best model')
+
+   del kwargs['vmin']
+   del kwargs['vmax']
+
+   ax1 = fig.add_subplot(gs0[3, 1], sharex=ax, sharey=ax); ax1.set_title('5%')
+   results_2d.plot_percentile(ax=ax1, percent=0.05, wrap_clabel=True, **kwargs)
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   ax1 = fig.add_subplot(gs0[4, 1], sharex=ax, sharey=ax); ax1.set_title('50%')
+   results_2d.plot_percentile(ax=ax1, percent=0.5, wrap_clabel=True, **kwargs)
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   ax1 = fig.add_subplot(gs0[5, 1], sharex=ax, sharey=ax); ax1.set_title('95%')
+   results_2d.plot_percentile(ax=ax1, percent=0.95, wrap_clabel=True, **kwargs)
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   #%%
+   # We can plot the parameter values that produced the highest posterior
+   ax1 = fig.add_subplot(gs0[2, 0], sharex=ax)
+   results_2d.plot_k_layers(ax=ax1, wrap_ylabel=True)
+
+   ax1 = fig.add_subplot(gs0[1, 0], sharex=ax)
+
+   ll, bb, ww, hh = ax1.get_position().bounds
+   ax1.set_position([ll, bb, ww*0.8, hh])
+
+   results_2d.plot_channel_saturation(ax=ax1, wrap_ylabel=True)
+   results_2d.plot_burned_in(ax=ax1, underlay=True)
+
+   #%%
+   # Now we can start plotting some more interesting posterior properties.
+   # How about the confidence?
+   ax1 = fig.add_subplot(gs0[3, 0], sharex=ax, sharey=ax)
+   results_2d.plot_confidence(ax=ax1);
+   results_2d.plot_data_elevation(ax=ax1, linewidth=0.3);
+   results_2d.plot_elevation(ax=ax1, linewidth=0.3);
+
+   #%%
+   # We can take the interface depth posterior for each data point,
+   # and display an interface probability cross section
+   # This posterior can be washed out, so the clim_scaling keyword lets me saturate
+   # the top and bottom 0.5% of the colour range
+   ax1 = fig.add_subplot(gs0[4, 0], sharex=ax, sharey=ax)
+   ax1.set_title('P(Interface)')
+   results_2d.plot_interfaces(cmap='Greys', clim_scaling=0.5, ax=ax1);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   ax1 = fig.add_subplot(gs0[5, 0], sharex=ax, sharey=ax)
+   results_2d.plot_entropy(cmap='Greys', clim_scaling=0.5, ax=ax1);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   # plt.show()
+   plt.savefig('{}_{}.png'.format(data_type, model_type), dpi=300)
+
+
+if __name__ == '__main__':
+   types = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']
+
+   for model in types:
+      plot_2d_summary('../../../Parallel_Inference/', "skytem", model)
+
+
+

Total running time of the script: (0 minutes 18.965 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Inference_2D/plot_inference_2d_tempest.html b/docs/examples/Inference_2D/plot_inference_2d_tempest.html
new file mode 100644
index 00000000..ad755810
--- /dev/null
+++ b/docs/examples/Inference_2D/plot_inference_2d_tempest.html
@@ -0,0 +1,495 @@
+2D Posterior analysis of Tempest inference — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

2D Posterior analysis of Tempest inference

+

All plotting in GeoBIPy can be carried out using the 3D inference class

+
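
A minimal sketch of that workflow is shown below; it assumes the Inference2D API used in the full script at the bottom of this page, and the HDF5 file path is illustrative only.

+
import matplotlib.pyplot as plt
+from numpy.random import Generator, PCG64DXSM
+from geobipy import Inference2D
+
+# Open one line of inference results (illustrative path)
+prng = Generator(PCG64DXSM(seed=0))
+results_2d = Inference2D.fromHdf('path/to/tempest/line/0.0.h5', prng=prng)
+
+# Posterior mean model with the data and ground elevations overlain
+plt.figure()
+results_2d.plot_mean_model(log=10, cmap='jet')
+results_2d.plot_data_elevation(linewidth=0.3)
+results_2d.plot_elevation(linewidth=0.3)
+plt.show()
+
+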
    +
  • tempest glacial, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface)
  • +
  • tempest saline_clay, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface)
  • +
  • tempest resistive_dolomites, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface)
  • +
  • tempest resistive_basement, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface)
  • +
  • tempest coastal_salt_water, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface)
  • +
  • tempest ice_over_salt_water, Best model, 5%, 50%, 95%, P(# of Layers), P(Interface)
  • +
+
self.n_components=2, self.nTimes=array([15])
+
+
+
+

+
+
import argparse
+import matplotlib.pyplot as plt
+import numpy as np
+from geobipy import Model
+from geobipy import Inference2D
+
+def plot_2d_summary(folder, data_type, model_type):
+   #%%
+   # Inference for a line of inferences
+   # ++++++++++++++++++++++++++++++++++
+   #
+   # We can instantiate the inference handler by providing a path to the directory containing
+   # HDF5 files generated by GeoBIPy.
+   #
+   # The InferenceXD classes are low memory. They only read information from the HDF5 files
+   # as and when it is needed.
+   #
+   # The first time you use these classes to create plots, expect longer initial processing times.
+   # I precompute expensive properties and store them in the HDF5 files for later use.
+
+   from numpy.random import Generator
+   from numpy.random import PCG64DXSM
+   generator = PCG64DXSM(seed=0)
+   prng = Generator(generator)
+
+   #%%
+   results_2d = Inference2D.fromHdf('{}/{}/{}/0.0.h5'.format(folder, data_type, model_type), prng=prng)
+
+   kwargs = {
+         "log" : 10,
+         "cmap" : 'jet'
+         }
+
+   fig = plt.figure(figsize=(16, 8))
+   plt.suptitle("{} {}".format(data_type, model_type))
+   gs0 = fig.add_gridspec(6, 2, hspace=1.0)
+
+   true_model = Model.create_synthetic_model(model_type)
+
+   kwargs['vmin'] = np.log10(np.min(true_model.values))
+   kwargs['vmax'] = np.log10(np.max(true_model.values))
+
+   ax = fig.add_subplot(gs0[0, 0])
+   true_model.pcolor(flipY=True, ax=ax, wrap_clabel=True, **kwargs)
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax, xlabel=False, ylabel=False);
+
+   plt.ylim([-550, 60])
+
+   ax1 = fig.add_subplot(gs0[0, 1], sharex=ax, sharey=ax)
+   results_2d.plot_mean_model(ax=ax1, wrap_clabel=True, **kwargs);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   # By adding the use_variance keyword, we can make regions of lower confidence more transparent
+   ax1 = fig.add_subplot(gs0[1, 1], sharex=ax, sharey=ax)
+   results_2d.plot_mode_model(ax=ax1, wrap_clabel=True, **kwargs);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   # We can also choose to keep parameters above the DOI opaque.
+   # results_2d.compute_doi()
+   # plt.subplot(313)
+   # results_2d.plot_mean_model(use_variance=True, mask_below_doi=True, **kwargs);
+   # results_2d.plot_data_elevation(linewidth=0.3);
+   # results_2d.plot_elevation(linewidth=0.3);
+
+   ax1 = fig.add_subplot(gs0[2, 1], sharex=ax, sharey=ax)
+   results_2d.plot_best_model(ax=ax1, wrap_clabel=True, **kwargs);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+   ax1.set_title('Best model')
+
+   del kwargs['vmin']
+   del kwargs['vmax']
+
+   ax1 = fig.add_subplot(gs0[3, 1], sharex=ax, sharey=ax); ax1.set_title('5%')
+   results_2d.plot_percentile(ax=ax1, percent=0.05, wrap_clabel=True, **kwargs)
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   ax1 = fig.add_subplot(gs0[4, 1], sharex=ax, sharey=ax); ax1.set_title('50%')
+   results_2d.plot_percentile(ax=ax1, percent=0.5, wrap_clabel=True, **kwargs)
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   ax1 = fig.add_subplot(gs0[5, 1], sharex=ax, sharey=ax); ax1.set_title('95%')
+   results_2d.plot_percentile(ax=ax1, percent=0.95, wrap_clabel=True, **kwargs)
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   #%%
+   # We can plot the parameter values that produced the highest posterior
+   ax1 = fig.add_subplot(gs0[2, 0], sharex=ax)
+   results_2d.plot_k_layers(ax=ax1, wrap_ylabel=True)
+
+   ax1 = fig.add_subplot(gs0[1, 0], sharex=ax)
+
+   ll, bb, ww, hh = ax1.get_position().bounds
+   ax1.set_position([ll, bb, ww*0.8, hh])
+
+   results_2d.plot_channel_saturation(ax=ax1, wrap_ylabel=True)
+   results_2d.plot_burned_in(ax=ax1, underlay=True)
+
+   #%%
+   # Now we can start plotting some more interesting posterior properties.
+   # How about the confidence?
+   ax1 = fig.add_subplot(gs0[3, 0], sharex=ax, sharey=ax)
+   results_2d.plot_confidence(ax=ax1);
+   results_2d.plot_data_elevation(ax=ax1, linewidth=0.3);
+   results_2d.plot_elevation(ax=ax1, linewidth=0.3);
+
+   #%%
+   # We can take the interface depth posterior for each data point,
+   # and display an interface probability cross section
+   # This posterior can be washed out, so the clim_scaling keyword lets me saturate
+   # the top and bottom 0.5% of the colour range
+   ax1 = fig.add_subplot(gs0[4, 0], sharex=ax, sharey=ax)
+   ax1.set_title('P(Interface)')
+   results_2d.plot_interfaces(cmap='Greys', clim_scaling=0.5, ax=ax1);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   ax1 = fig.add_subplot(gs0[5, 0], sharex=ax, sharey=ax)
+   results_2d.plot_entropy(cmap='Greys', clim_scaling=0.5, ax=ax1);
+   results_2d.plot_data_elevation(linewidth=0.3, ax=ax1);
+   results_2d.plot_elevation(linewidth=0.3, ax=ax1);
+
+   # plt.show()
+   plt.savefig('{}_{}.png'.format(data_type, model_type), dpi=300)
+
+
+if __name__ == '__main__':
+   models = ['glacial', 'saline_clay', 'resistive_dolomites', 'resistive_basement', 'coastal_salt_water', 'ice_over_salt_water']
+
+   # import warnings
+   # warnings.filterwarnings('error')
+   for model in models:
+      try:
+         plot_2d_summary('../../../Parallel_Inference/', "tempest", model)
+      except Exception as e:
+         print(model)
+         print(e)
+
+
+

Total running time of the script: (0 minutes 19.999 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Inference_2D/readme.html b/docs/examples/Inference_2D/readme.html
new file mode 100644
index 00000000..c2617ee9
--- /dev/null
+++ b/docs/examples/Inference_2D/readme.html
@@ -0,0 +1,115 @@
+2D Inference — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

2D Inference

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Inference_2D/sg_execution_times.html b/docs/examples/Inference_2D/sg_execution_times.html
new file mode 100644
index 00000000..05fd9f17
--- /dev/null
+++ b/docs/examples/Inference_2D/sg_execution_times.html
@@ -0,0 +1,151 @@
+Computation times — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Computation times

+

01:00.488 total execution time for 3 files from examples/Inference_2D:

+
+ + + + + + + + + + + + + + + + + + + + + + + + + +

Example

Time

Mem (MB)

2D Posterior analysis of Resolve inference (plot_inference_2d_resolve.py)

00:21.524

0.0

2D Posterior analysis of Tempest inference (plot_inference_2d_tempest.py)

00:19.999

0.0

2D Posterior analysis of Skytem inference (plot_inference_2d_skytem.py)

00:18.965

0.0

+
+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Meshes/plot_rectilinear_mesh_1d.html b/docs/examples/Meshes/plot_rectilinear_mesh_1d.html
new file mode 100644
index 00000000..a36f7c3a
--- /dev/null
+++ b/docs/examples/Meshes/plot_rectilinear_mesh_1d.html
@@ -0,0 +1,540 @@
+1D Rectilinear Mesh — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

1D Rectilinear Mesh

+
from copy import deepcopy
+from geobipy import DataArray, StatArray
+from geobipy import RectilinearMesh1D, RectilinearMesh2D, RectilinearMesh2D_stitched
+import matplotlib.gridspec as gridspec
+import matplotlib.pyplot as plt
+import numpy as np
+import h5py
+
+
+
+

The basics

+

Instantiate a new 1D rectilinear mesh by specifying cell centres, edges, or widths.

+
x = StatArray(np.cumsum(np.arange(0.0, 10.0)), 'Depth', 'm')
+
+
+

Cell edges

+
rm = RectilinearMesh1D(edges=x, centres=None, widths=None)
+
+
+
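
The same mesh can also be built from cell centres or cell widths. A brief sketch, using the centres and widths keywords that appear elsewhere in this example; the widths-based mesh is assumed to start at zero, matching the first edge above.

+
centres = StatArray(0.5 * (x[:-1] + x[1:]), 'Depth', 'm')   # mid-points of the edges defined above
+rm_centres = RectilinearMesh1D(centres=centres)
+
+widths = StatArray(np.diff(x), 'Depth', 'm')                # cell widths from the edges defined above
+rm_widths = RectilinearMesh1D(widths=widths)
+
+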

We can plot the grid of the mesh, or pcolor it using an array of cell values as the colour.

+
arr = StatArray(np.random.randn(*rm.shape), "Name", "Units")
+p=0; plt.figure(p)
+plt.subplot(121)
+_ = rm.plot_grid(transpose=True, flip=True)
+plt.subplot(122)
+_ = rm.pcolor(arr, grid=True, transpose=True, flip=True)
+
+# Mask the mesh cells by a distance
+rm_masked, indices, arr2 = rm.mask_cells(2.0, values=arr)
+p+=1; plt.figure(p)
+_ = rm_masked.pcolor(StatArray(arr2), grid=True, transpose=True, flip=True)
+
+# Writing and reading to/from HDF5
+# ++++++++++++++++++++++++++++++++
+with h5py.File('rm1d.h5', 'w') as f:
+    rm.toHdf(f, 'rm1d')
+
+with h5py.File('rm1d.h5', 'r') as f:
+    rm1 = RectilinearMesh1D.fromHdf(f['rm1d'])
+
+p+=1; plt.figure(p)
+plt.subplot(121)
+_ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True)
+plt.subplot(122)
+_ = rm1.pcolor(StatArray(arr), grid=True, transpose=True, flip=True)
+
+with h5py.File('rm1d.h5', 'w') as f:
+    rm.createHdf(f, 'rm1d', add_axis=10)
+    for i in range(10):
+        rm.writeHdf(f, 'rm1d', index=i)
+
+with h5py.File('rm1d.h5', 'r') as f:
+    rm1 = RectilinearMesh1D.fromHdf(f['rm1d'], index=0)
+with h5py.File('rm1d.h5', 'r') as f:
+    rm2 = RectilinearMesh2D.fromHdf(f['rm1d'])
+
+p+=1; plt.figure(p)
+plt.subplot(131)
+_ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True)
+plt.subplot(132)
+_ = rm1.pcolor(arr, grid=True, transpose=True, flip=True)
+plt.subplot(133)
+_ = rm2.pcolor(np.repeat(arr[None, :], 10, 0), grid=True, flipY=True)
+
+
+
    +
  • plot rectilinear mesh 1d
  • +
  • plot rectilinear mesh 1d
  • +
  • plot rectilinear mesh 1d
  • +
  • plot rectilinear mesh 1d
  • +
+
+
+

Log-space rectilinear mesh

+

Instantiate a new 1D rectilinear mesh by specifying cell centres or edges. Here we use edges.

+
x = StatArray(np.logspace(-3, 3, 10), 'Depth', 'm')
+
+
+
rm = RectilinearMesh1D(edges=x, log=10)
+
+# We can plot the grid of the mesh
+# Or pcolor it using an array of cell values as the colour.
+p+=1; plt.figure(p)
+plt.subplot(121)
+_ = rm.plot_grid(transpose=True, flip=True)
+plt.subplot(122)
+arr = StatArray(np.random.randn(rm.nCells), "Name", "Units")
+_ = rm.pcolor(arr, grid=True, transpose=True, flip=True)
+
+# Writing and reading to/from HDF5
+# ++++++++++++++++++++++++++++++++
+with h5py.File('rm1d.h5', 'w') as f:
+    rm.toHdf(f, 'rm1d')
+
+with h5py.File('rm1d.h5', 'r') as f:
+    rm1 = RectilinearMesh1D.fromHdf(f['rm1d'])
+
+p+=1; plt.figure(p)
+plt.subplot(121)
+_ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True)
+plt.subplot(122)
+_ = rm1.pcolor(StatArray(arr), grid=True, transpose=True, flip=True)
+
+with h5py.File('rm1d.h5', 'w') as f:
+    rm.createHdf(f, 'rm1d', add_axis=10)
+    for i in range(10):
+        rm.writeHdf(f, 'rm1d', index=i)
+
+with h5py.File('rm1d.h5', 'r') as f:
+    rm1 = RectilinearMesh1D.fromHdf(f['rm1d'], index=0)
+with h5py.File('rm1d.h5', 'r') as f:
+    rm2 = RectilinearMesh2D.fromHdf(f['rm1d'])
+
+p+=1; plt.figure(p)
+plt.subplot(131)
+_ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True)
+plt.subplot(132)
+_ = rm1.pcolor(arr, grid=True, transpose=True, flip=True)
+plt.subplot(133)
+_ = rm2.pcolor(np.repeat(arr[None, :], 10, 0), grid=True, flipY=True)
+
+
+
    +
  • plot rectilinear mesh 1d
  • +
  • plot rectilinear mesh 1d
  • +
  • plot rectilinear mesh 1d
  • +
+
+
+

relative_to

+

Instantiate a new 1D rectilinear mesh by specifying cell centres or edges. Here we use edges.

+
x = StatArray(np.arange(11.0), 'Deviation', 'm')
+
+
+
rm = RectilinearMesh1D(edges=x, relative_to=5.0)
+
+
+

We can plot the grid of the mesh, or pcolor it using an array of cell values as the colour.

+
p+=1; plt.figure(p)
+plt.subplot(121)
+_ = rm.plot_grid(transpose=True, flip=True)
+plt.subplot(122)
+arr = StatArray(np.random.randn(rm.nCells), "Name", "Units")
+_ = rm.pcolor(arr, grid=True, transpose=True, flip=True)
+
+# Writing and reading to/from HDF5
+# ++++++++++++++++++++++++++++++++
+with h5py.File('rm1d.h5', 'w') as f:
+    rm.createHdf(f, 'rm1d')
+    rm.writeHdf(f, 'rm1d')
+
+with h5py.File('rm1d.h5', 'r') as f:
+    rm1 = RectilinearMesh1D.fromHdf(f['rm1d'])
+
+p+=1; plt.figure(p)
+plt.subplot(121)
+_ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True)
+plt.subplot(122)
+_ = rm1.pcolor(StatArray(arr), grid=True, transpose=True, flip=True)
+
+with h5py.File('rm1d.h5', 'w') as f:
+    rm.createHdf(f, 'rm1d', add_axis=3)
+    for i in range(3):
+        rm.relative_to += 0.5
+        rm.writeHdf(f, 'rm1d', index=i)
+
+with h5py.File('rm1d.h5', 'r') as f:
+    rm1 = RectilinearMesh1D.fromHdf(f['rm1d'], index=0)
+with h5py.File('rm1d.h5', 'r') as f:
+    rm2 = RectilinearMesh2D.fromHdf(f['rm1d'])
+
+p+=1; plt.figure(p)
+plt.subplot(131)
+_ = rm.pcolor(StatArray(arr), grid=True, transpose=True, flip=True)
+plt.subplot(132)
+_ = rm1.pcolor(arr, grid=True, transpose=True, flip=True)
+plt.subplot(133)
+_ = rm2.pcolor(np.repeat(arr[None, :], 3, 0), grid=True, flipY=True)
+
+
+# Making a mesh perturbable
+# +++++++++++++++++++++++++
+n_cells = 2
+widths = DataArray(np.full(n_cells, fill_value=10.0), 'test')
+rm = RectilinearMesh1D(widths=widths, relative_to=0.0)
+
+
+
    +
  • plot rectilinear mesh 1d
  • +
  • plot rectilinear mesh 1d
  • +
  • plot rectilinear mesh 1d
  • +
+
+
+

Randomness and Model Perturbations

+

We can set the priors on the 1D mesh by assigning minimum and maximum layer depths and a maximum number of layers. These are used to create priors on the number of cells in the mesh and on the locations of new cell edges.

+
from numpy.random import Generator
+from numpy.random import PCG64DXSM
+generator = PCG64DXSM(seed=0)
+prng = Generator(generator)
+
+# Set the priors
+rm.set_priors(min_edge = 1.0,
+              max_edge = 150.0,
+              max_cells = 30,
+              prng = prng)
+
+
+

We can evaluate the prior of the model using depths only

+
print('Log probability of the Mesh given its priors: ', rm.probability)
+
+
+
Log probability of the Mesh given its priors:  -3.367295829986474
+
+
+

To propose new meshes, we specify the probabilities of creating, removing, perturbing, and not changing an edge interface. Here each option is given equal probability.

+
rm.set_proposals(probabilities = [0.25, 0.25, 0.25, 0.25], prng=prng)
+rm.set_posteriors()
+
+rm0 = deepcopy(rm)
+
+
+

We can then perturb the layers of the model

+
for i in range(1000):
+    rm = rm.perturb()
+    rm.update_posteriors()
+
+
+
p+=1; fig = plt.figure(p)
+ax = rm._init_posterior_plots(fig)
+
+rm.plot_posteriors(axes=ax)
+
+with h5py.File('rm1d.h5', 'w') as f:
+    rm.createHdf(f, 'rm1d', withPosterior = True)
+    rm.writeHdf(f, 'rm1d', withPosterior = True)
+
+with h5py.File('rm1d.h5', 'r') as f:
+    rm1 = RectilinearMesh1D.fromHdf(f['rm1d'])
+
+p+=1; plt.figure(p)
+plt.subplot(121)
+_ = rm.pcolor(StatArray(rm.shape), grid=True, transpose=True, flip=True)
+plt.subplot(122)
+_ = rm1.pcolor(StatArray(rm1.shape), grid=True, transpose=True, flip=True)
+
+p+=1; fig = plt.figure(p)
+ax = rm1._init_posterior_plots(fig)
+rm1.plot_posteriors(axes=ax)
+
+
+
    +
  • plot rectilinear mesh 1d
  • +
  • plot rectilinear mesh 1d
  • +
  • plot rectilinear mesh 1d
  • +
+
[<Axes: xlabel='# of Layers', ylabel='Density'>, <Axes: xlabel='test', ylabel='Density'>]
+
+
+

Expanded

+
with h5py.File('rm1d.h5', 'w') as f:
+    tmp = rm.pad(rm.max_cells)
+    tmp.createHdf(f, 'rm1d', withPosterior=True, add_axis=DataArray(np.arange(3.0), name='Easting', units="m"))
+
+    print(list(f['rm1d'].keys()))
+
+    rm.relative_to = 5.0
+    print(rm.summary)
+    rm.writeHdf(f, 'rm1d', withPosterior = True, index=0)
+
+    rm = deepcopy(rm0)
+    for i in range(1000):
+        rm = rm.perturb(); rm.update_posteriors()
+    rm.relative_to = 10.0
+    rm.writeHdf(f, 'rm1d', withPosterior = True, index=1)
+
+    rm = deepcopy(rm0)
+    for i in range(1000):
+        rm = rm.perturb(); rm.update_posteriors()
+    rm.relative_to = 25.0
+    rm.writeHdf(f, 'rm1d', withPosterior = True, index=2)
+
+with h5py.File('rm1d.h5', 'r') as f:
+    rm2 = RectilinearMesh2D.fromHdf(f['rm1d'])
+
+p+=1; plt.figure(p)
+plt.subplot(121)
+arr = np.random.randn(3, rm.max_cells) * 10
+_ = rm0.pcolor(arr[0, :rm0.nCells.item()], grid=True, transpose=True, flip=True)
+plt.subplot(122)
+_ = rm2.pcolor(arr, grid=True, flipY=True, equalize=True)
+
+from geobipy import RectilinearMesh2D
+with h5py.File('rm1d.h5', 'r') as f:
+    rm2 = RectilinearMesh2D.fromHdf(f['rm1d'], index=0)
+
+plt.figure()
+plt.subplot(121)
+rm2.plot_grid(transpose=True, flip=True)
+plt.subplot(122)
+rm2.edges.posterior.pcolor(transpose=True, flip=True)
+
+plt.show()
+
+
+
    +
  • plot rectilinear mesh 1d
  • +
  • plot rectilinear mesh 1d
  • +
+
['nCells', 'x', 'y']
+RectilinearMesh1D
+Number of Cells:
+|   StatArray
+|   Name:   Number of cells
+|   Address:['0x154b0fd50' '0x182c1a750' '0x180970e10' '0x1809733f0']
+|   Shape:  (1,)
+|   Values: [15]
+|   Min:    15
+|   Max:    15
+|   Prior:
+|   |   Uniform Distribution:
+|   |   Min: 1
+|   |   Max: 30
+|   has_posterior: True
+
+Cell Centres:
+|   StatArray
+|   Name:   test
+|   Address:['0x17f3b4850']
+|   Shape:  (15,)
+|   Values: [  1.22331766   3.28906551   6.36608642 ...  66.85590133 108.99206549
+|    148.449825  ]
+|   Min:    1.2233176602046558
+|   Max:    148.4498250024555
+|   has_posterior: False
+
+Cell Edges:
+|   StatArray
+|   Name:   test
+|   Address:['0x1834e3950' '0x17ebbc1a0' '0x182174450' '0x17f387aa0' '0x1808cd750'
+|    '0x1808ced70']
+|   Shape:  (16,)
+|   Values: [  0.           2.44663532   4.13149571 ...  70.19008745 147.79404353
+|    149.10560647]
+|   Min:    0.0
+|   Max:    149.1056064716983
+|   Prior:
+|   |   Order Statistics:
+|   |   None
+|   Proposal:
+|   |   Uniform Distribution:
+|   |   Min: 1.0
+|   |   Max: 149.99999999999997
+|   has_posterior: True
+
+log:
+|   None
+relative_to:
+|   StatArray
+|   Name:
+|   Address:['0x17ef83e50']
+|   Shape:  (1,)
+|   Values: [5.]
+|   Min:    5.0
+|   Max:    5.0
+|   has_posterior: False
+
+
+

Total running time of the script: (0 minutes 4.585 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Meshes/plot_rectilinear_mesh_2d.html b/docs/examples/Meshes/plot_rectilinear_mesh_2d.html
new file mode 100644
index 00000000..82c8678c
--- /dev/null
+++ b/docs/examples/Meshes/plot_rectilinear_mesh_2d.html
@@ -0,0 +1,405 @@
+2D Rectilinear Mesh — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

2D Rectilinear Mesh

+

This 2D rectilinear mesh defines a grid with straight cell boundaries.

+

It can be instantiated in two ways.

+

The first is by providing the cell centres or +cell edges in two dimensions.

+

The second embeds the 2D mesh in 3D by providing the cell centres or edges in three dimensions. The first two dimensions specify the mesh coordinates in the horizontal Cartesian plane while the third discretizes in depth. This allows us to characterize a mesh whose horizontal coordinates do not follow a line that is parallel to either the “x” or “y” axis.

+
import h5py
+from geobipy import StatArray
+from geobipy import RectilinearMesh1D, RectilinearMesh2D, RectilinearMesh3D
+import matplotlib.pyplot as plt
+import numpy as np
+
+
+

Specify some cell centres in x and y

+
x = StatArray(np.arange(10.0), 'Easting', 'm')
+y = StatArray(np.arange(20.0), 'Depth', 'm')
+rm = RectilinearMesh2D(x_centres=x, y_centres=y)
+
+
+
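
An equivalent mesh can be specified with cell edges rather than centres. A brief sketch, using the x_edges and y_edges keywords that appear later in this example.

+
x_edges = StatArray(np.arange(-0.5, 10.5), 'Easting', 'm')
+y_edges = StatArray(np.arange(-0.5, 20.5), 'Depth', 'm')
+rm_from_edges = RectilinearMesh2D(x_edges=x_edges, y_edges=y_edges)
+
+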

We can plot the grid lines of the mesh.

+
p=0;
+plt.figure(p)
+_  = rm.plot_grid(flipY=True, linewidth=0.5)
+
+# Intersecting multisegment lines with a mesh
+arr = np.zeros(rm.shape)
+i = rm.line_indices([0.0, 3.0, 6.0, 9], [2.0, 6.0, 0.0, 10])
+arr[i[:, 0], i[:, 1]] = 1
+p += 1; plt.figure(p)
+rm.pcolor(values = arr)
+
+
+
    +
  • plot rectilinear mesh 2d
  • +
  • plot rectilinear mesh 2d
  • +
+
(<Axes: xlabel='Easting (m)', ylabel='Depth (m)'>, <matplotlib.collections.QuadMesh object at 0x180d19bb0>, <matplotlib.colorbar.Colorbar object at 0x1824970b0>)
+
+
+

We can pcolor the mesh by providing cell values.

+
xx, yy = np.meshgrid(rm.y.centres, rm.x.centres)
+arr = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "Values")
+
+rm2, values2 = rm.resample(0.5, 0.5, arr, method='linear')
+
+
+p += 1; plt.figure(p)
+_ = rm.pcolor(arr, grid=True, flipY=True, linewidth=0.5)
+
+
+plot rectilinear mesh 2d

Mask the x axis cells by a distance

+
rm_masked, x_indices, z_indices, arr2 = rm.mask_cells(x_distance=0.4, values=arr)
+p += 1; plt.figure(p)
+_ = rm_masked.pcolor(StatArray(arr2), grid=True, flipY=True)
+
+
+plot rectilinear mesh 2d

Mask the z axis cells by a distance

+
rm_masked, x_indices, z_indices, arr2 = rm.mask_cells(y_distance=0.2, values=arr)
+p += 1; plt.figure(p)
+_ = rm_masked.pcolor(StatArray(arr2), grid=True, flipY=True)
+
+
+plot rectilinear mesh 2d

Mask axes by a distance

+
rm_masked, x_indices, z_indices, arr2 = rm.mask_cells(x_distance=0.4, y_distance=0.2, values=arr)
+p += 1; plt.figure(p)
+_ = rm_masked.pcolor(StatArray(arr2), grid=True, flipY=True)
+
+x = StatArray(np.arange(10.0), 'Easting', 'm')
+y = StatArray(np.cumsum(np.arange(15.0)), 'Depth', 'm')
+rm = RectilinearMesh2D(x_centres=x, y_centres=y)
+
+
+plot rectilinear mesh 2d

We can perform some interval statistics on the cell values of the mesh. First, generate some values.

+
a = np.repeat(np.arange(1.0, np.float64(rm.x.nCells+1))[:, np.newaxis], rm.y.nCells, 1)
+
+
+

Compute the mean over an interval for the mesh.

+
rm.intervalStatistic(a, intervals=[6.8, 12.4], axis=0, statistic='mean')
+
+
+
(array([[9., 9., 9., ..., 9., 9., 9.]]), [6.8, 12.4])
+
+
+

Compute the mean over multiple intervals for the mesh.

+
rm.intervalStatistic(a, intervals=[6.8, 12.4, 20.0, 40.0], axis=0, statistic='mean')
+
+
+
(array([[ 9.,  9.,  9., ...,  9.,  9.,  9.],
+       [nan, nan, nan, ..., nan, nan, nan],
+       [nan, nan, nan, ..., nan, nan, nan]]), [6.8, 12.4, 20.0, 40.0])
+
+
+

We can specify either axis

+
rm.intervalStatistic(a, intervals=[2.8, 4.2], axis=1, statistic='mean')
+
+
+
(array([[ 1.],
+       [ 2.],
+       [ 3.],
+       ...,
+       [ 8.],
+       [ 9.],
+       [10.]]), [2.8, 4.2])
+
+
+
rm.intervalStatistic(a, intervals=[2.8, 4.2, 5.1, 8.4], axis=1, statistic='mean')
+
+
+
(array([[ 1., nan,  1.],
+       [ 2., nan,  2.],
+       [ 3., nan,  3.],
+       ...,
+       [ 8., nan,  8.],
+       [ 9., nan,  9.],
+       [10., nan, 10.]]), [2.8, 4.2, 5.1, 8.4])
+
+
+

Slice the 2D mesh to retrieve either a 2D mesh or 1D mesh

+
rm2 = rm[:5, :5]
+rm3 = rm[:5, 5]
+rm4 = rm[5, :5]
+
+p += 1; plt.figure(p)
+plt.subplot(131)
+rm2.plot_grid()
+plt.subplot(132)
+rm3.plot_grid()
+plt.subplot(133)
+rm4.plot_grid(transpose=True)
+
+
+plot rectilinear mesh 2d

Resample a grid

+
values = StatArray(np.random.randn(*rm.shape))
+rm2, values2 = rm.resample(0.5, 0.5, values)
+
+p += 1; plt.figure(p)
+plt.subplot(121)
+rm.pcolor(values)
+plt.subplot(122)
+rm2.pcolor(values2)
+
+
+plot rectilinear mesh 2d
(<Axes: >, <matplotlib.collections.QuadMesh object at 0x17faebb00>, <matplotlib.colorbar.Colorbar object at 0x17f487b60>)
+
+
+
+

Axes in log space

+
x = StatArray(np.logspace(-1, 4, 10), 'x')
+y = StatArray(np.logspace(0, 3, 10), 'y')
+rm = RectilinearMesh2D(x_edges=x, x_log=10, y_edges=y, y_log=10)
+
+# We can plot the grid lines of the mesh.
+p += 1; plt.figure(p)
+_  = rm.plot_grid(linewidth=0.5)
+
+
+plot rectilinear mesh 2d
with h5py.File('rm2d.h5', 'w') as f:
+    rm.toHdf(f, 'test')
+
+with h5py.File('rm2d.h5', 'r') as f:
+    rm2 = RectilinearMesh2D.fromHdf(f['test'])
+
+arr = np.random.randn(*rm.shape)
+p += 1; plt.figure(p)
+plt.subplot(211)
+rm.pcolor(arr)
+plt.subplot(212)
+rm2.pcolor(arr)
+
+
+plot rectilinear mesh 2d
(<Axes: xlabel='x', ylabel='y'>, <matplotlib.collections.QuadMesh object at 0x182aca390>, <matplotlib.colorbar.Colorbar object at 0x17f6df0b0>)
+
+
+
+
+

relative_to

+
x = StatArray(np.arange(10.0), 'Northing', 'm')
+y = StatArray(np.arange(20.0), 'Depth', 'm')
+
+rm = RectilinearMesh2D(x_centres=x, y_centres=y)
+
+p += 1; plt.figure(p)
+plt.subplot(121)
+_  = rm.plot_grid(linewidth=0.5, flipY=True)
+rm = RectilinearMesh2D(x_centres=x, x_relative_to=0.2*np.random.randn(y.size), y_centres=y, y_relative_to=0.2*np.random.randn(x.size))
+plt.subplot(122)
+_  = rm.plot_grid(linewidth=0.5, flipY=True)
+
+# relative_to single
+with h5py.File('rm2d.h5', 'w') as f:
+    rm.toHdf(f, 'test')
+
+with h5py.File('rm2d.h5', 'r') as f:
+    rm2 = RectilinearMesh2D.fromHdf(f['test'])
+
+arr = np.random.randn(*rm.shape)
+p += 1; plt.figure(p)
+plt.subplot(211)
+rm.pcolor(arr, flipY=True)
+plt.subplot(212)
+rm2.pcolor(arr, flipY=True)
+
+# relative_to expanded
+with h5py.File('rm2d.h5', 'w') as f:
+    rm.createHdf(f, 'test', add_axis=RectilinearMesh1D(centres=StatArray(np.arange(3.0), name='Easting', units="m"), relative_to = 0.2*np.random.randn(x.size, y.size)))
+    for i in range(3):
+        rm.x.relative_to += 0.5
+        rm.y.relative_to += 0.5
+        rm.writeHdf(f, 'test', index=i)
+
+with h5py.File('rm2d.h5', 'r') as f:
+    rm2 = RectilinearMesh2D.fromHdf(f['test'], index=0)
+
+with h5py.File('rm2d.h5', 'r') as f:
+    rm3 = RectilinearMesh3D.fromHdf(f['test'])
+
+p += 1; plt.figure(p)
+plt.subplot(311)
+rm.pcolor(arr, flipY=True)
+plt.subplot(312)
+rm2.pcolor(arr, flipY=True)
+
+p += 1; plt.figure(p)
+arr = np.random.randn(*rm3.shape)
+plt.subplot(311)
+mesh = rm3[0, :, :]
+mesh.pcolor(arr[0, :, :], flipY=True)
+plt.subplot(312)
+mesh = rm3[:, 0, :]
+mesh.pcolor(arr[:, 0, :], flipY=True)
+plt.subplot(313)
+rm3[:, :, 0].pcolor(arr[:, :, 0])
+
+plt.show()
+
+
+
    +
  • plot rectilinear mesh 2d
  • +
  • plot rectilinear mesh 2d
  • +
  • plot rectilinear mesh 2d
  • +
  • plot rectilinear mesh 2d
  • +
+

Total running time of the script: (0 minutes 1.747 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Meshes/plot_rectilinear_mesh_3d.html b/docs/examples/Meshes/plot_rectilinear_mesh_3d.html
new file mode 100644
index 00000000..bb6348d5
--- /dev/null
+++ b/docs/examples/Meshes/plot_rectilinear_mesh_3d.html
@@ -0,0 +1,301 @@
+3D Rectilinear Mesh — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

3D Rectilinear Mesh

+

This 3D rectilinear mesh defines a grid with straight cell boundaries.

+
from geobipy import StatArray
+from geobipy import RectilinearMesh3D
+import matplotlib.pyplot as plt
+import numpy as np
+import h5py
+
+
+

Specify some cell centres in x and y

+
x = StatArray(np.arange(10.0), 'Easting', 'm')
+y = StatArray(np.arange(15.0), 'Northing', 'm')
+z = StatArray(np.arange(20.0), 'Depth', 'm')
+
+rm = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z)
+
+rm1 = rm[:5, :5, :5]
+rm2 = rm[:, :, 5]
+rm3 = rm[:, 5, :]
+rm4 = rm[5, :, :]
+
+plt.figure()
+plt.subplot(231)
+rm2.plot_grid()
+plt.subplot(232)
+rm3.plot_grid()
+plt.subplot(233)
+rm4.plot_grid()
+
+
+plot rectilinear mesh 3d
rm2 = rm[:, 5, 5]
+rm3 = rm[5, :, 5]
+rm4 = rm[5, 5, :]
+
+plt.subplot(234)
+rm2.plot_grid()
+plt.subplot(235)
+rm3.plot_grid()
+plt.subplot(236)
+rm4.plot_grid()
+
+
+plot rectilinear mesh 3d
with h5py.File('rm3d.h5', 'w') as f:
+    rm.createHdf(f, 'test')
+    rm.writeHdf(f, 'test')
+
+with h5py.File('rm3d.h5', 'r') as f:
+    rm2 = RectilinearMesh3D.fromHdf(f['test'])
+
+rm.pyvista_mesh().save('rm3d.vtk')
+
+
+xx, yy = np.meshgrid(rm.y.centres, rm.x.centres)
+z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re")
+rm = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z, z_relative_to=z_re)
+
+rm1 = rm[:5, :5, :5]
+rm2 = rm[:, :, 5]
+rm3 = rm[:, 5, :]
+rm4 = rm[5, :, :]
+
+plt.figure()
+plt.subplot(231)
+rm2.plot_grid()
+plt.subplot(232)
+rm3.plot_grid()
+plt.subplot(233)
+rm4.plot_grid()
+
+
+plot rectilinear mesh 3d

We can plot the mesh in 3D!

+
pv = rm.pyvista_plotter()
+
+
+
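
The returned object is a pyvista plotter; opening the interactive window is a one-liner. A sketch only, since it assumes a display and a working VTK backend.

+
pv.show()
+
+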

We can also export the mesh to a VTK file.

+
mesh = rm.pyvista_mesh().save('rm3d_re1.vtk')
+
+x_re = StatArray(np.sin(np.repeat(rm.y.centres[:, None], rm.z.nCells, 1)), "x_re")
+
+xx, yy = np.meshgrid(rm.y.centres, rm.x.centres)
+z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re")
+rm = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, z_edges=z, z_relative_to=z_re)
+
+rm1 = rm[:5, :5, :5]
+rm2 = rm[:, :, 5]
+rm3 = rm[:, 5, :]
+rm4 = rm[5, :, :]
+
+plt.figure()
+plt.subplot(231)
+rm2.plot_grid()
+plt.subplot(232)
+rm3.plot_grid()
+plt.subplot(233)
+rm4.plot_grid()
+
+
+plot rectilinear mesh 3d

We can plot the mesh in 3D!

+
pv = rm.pyvista_plotter()
+
+
+

We can also export the mesh to a VTK file.

+
mesh = rm.pyvista_mesh().save('rm3d_re2.vtk')
+
+
+xx, yy = np.meshgrid(rm.z.centres, rm.y.centres)
+x_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "x_re")
+
+xx, yy = np.meshgrid(rm.z.centres, rm.x.centres)
+y_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "y_re")
+
+xx, yy = np.meshgrid(rm.y.centres, rm.x.centres)
+z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re")
+rm = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, y_relative_to=y_re, z_edges=z, z_relative_to=z_re)
+
+rm1 = rm[:5, :5, :5]
+rm2 = rm[:, :, 5]
+rm3 = rm[:, 5, :]
+rm4 = rm[5, :, :]
+
+plt.figure()
+plt.subplot(231)
+rm2.plot_grid()
+plt.subplot(232)
+rm3.plot_grid()
+plt.subplot(233)
+rm4.plot_grid()
+
+
+plot rectilinear mesh 3d

We can plot the mesh in 3D!

+
pv = rm.pyvista_plotter()
+
+
+

We can also export the mesh to a VTK file.

+
mesh = rm.pyvista_mesh().save('rm3d_re3.vtk')
+
+with h5py.File('rm3d.h5', 'w') as f:
+    rm.toHdf(f, 'test')
+
+with h5py.File('rm3d.h5', 'r') as f:
+    rm2 = RectilinearMesh3D.fromHdf(f['test'])
+
+rm2.pyvista_mesh().save('rm3d_read.vtk')
+
+plt.show()
+
+
+

Total running time of the script: (0 minutes 1.378 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Meshes/readme.html b/docs/examples/Meshes/readme.html
new file mode 100644
index 00000000..af813fbd
--- /dev/null
+++ b/docs/examples/Meshes/readme.html
@@ -0,0 +1,115 @@
+Meshes — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Meshes

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Meshes/sg_execution_times.html b/docs/examples/Meshes/sg_execution_times.html
new file mode 100644
index 00000000..f35888c7
--- /dev/null
+++ b/docs/examples/Meshes/sg_execution_times.html
@@ -0,0 +1,151 @@
+Computation times — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Computation times

+

00:07.710 total execution time for 3 files from examples/Meshes:

+
+ + + + + + + + + + + + + + + + + + + + + + + + + +

Example

Time

Mem (MB)

1D Rectilinear Mesh (plot_rectilinear_mesh_1d.py)

00:04.585

0.0

2D Rectilinear Mesh (plot_rectilinear_mesh_2d.py)

00:01.747

0.0

3D Rectilinear Mesh (plot_rectilinear_mesh_3d.py)

00:01.378

0.0

+
+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Models/plot_model_1d.html b/docs/examples/Models/plot_model_1d.html
new file mode 100644
index 00000000..90338be2
--- /dev/null
+++ b/docs/examples/Models/plot_model_1d.html
@@ -0,0 +1,323 @@
+1D Model with an infinite halfspace — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

1D Model with an infinite halfspace

+
from copy import deepcopy
+from geobipy import StatArray
+from geobipy import RectilinearMesh1D
+from geobipy import Model
+from geobipy import Distribution
+import matplotlib.pyplot as plt
+import numpy as np
+
+
+
+

Instantiate the 1D Model with a Half Space

+
# Make a test model with 2 layers, and increasing parameter values
+nLayers = 2
+par = StatArray(np.linspace(0.001, 0.02, nLayers), "Conductivity", "$\\frac{S}{m}$")
+thk = StatArray(np.full(nLayers, fill_value=10.0))
+thk[-1] = np.inf
+mesh = RectilinearMesh1D(widths = thk)
+
+mod = Model(mesh = mesh, values=par)
+
+plt.figure()
+mod.plot_grid(transpose=True, flip=True)
+
+
+plot model 1d
+
+

Randomness and Model Perturbations

+

We can set the priors on the 1D model by assigning minimum and maximum layer +depths and a maximum number of layers. These are used to create priors on +the number of cells in the model, a new depth interface, new parameter values +and the vertical gradient of those parameters. +The halfSpaceValue is used as a reference value for the parameter prior.

+
from numpy.random import Generator
+from numpy.random import PCG64DXSM
+generator = PCG64DXSM(seed=0)
+prng = Generator(generator)
+
+# Set the priors
+mod.set_priors(value_mean=0.01,
+              min_edge=1.0,
+              max_edge=150.0,
+              max_cells=30,
+              solve_value=True,
+              solve_gradient=True,
+              prng=prng)
+
+
+

We can evaluate the prior of the model using depths only

+
print('Log probability of the Model given its priors: ', mod.probability(False, False))
+# Or with priors on its parameters, and parameter gradient with depth.
+print('Log probability of the Model given its priors: ', mod.probability(True, True))
+
+
+
Log probability of the Model given its priors:  -3.367295829986474
+Log probability of the Model given its priors:  -9.157731937350919
+
+
+

To propose new models, we specify the probabilities of creating, removing, perturbing, and not changing +a layer interface

+
pProposal = Distribution('LogNormal', 0.01, np.log(2.0)**2.0, linearSpace=True, prng=prng)
+mod.set_proposals(probabilities=[0.25, 0.25, 0.5, 0.25], proposal=pProposal, prng=prng)
+
+
+

We can then perturb the layers of the model

+
remapped, perturbed = mod.perturb()
+
+
+
fig = plt.figure(figsize=(8, 6))
+ax = plt.subplot(121)
+mod.pcolor(transpose=True, flip=True, log=10)  # , grid=True)
+ax = plt.subplot(122)
+perturbed.pcolor(transpose=True, flip=True, log=10)  # , grid=True)
+
+
+plot model 1d
<Axes: >
+
+
+

We can evaluate the prior of the model using depths only

+
print('Log probability of the Model given its priors: ',perturbed.probability(False, False))
+# Or with priors on its parameters, and parameter gradient with depth.
+print('Log probability of the Model given its priors: ',perturbed.probability(True, True))
+
+
+
Log probability of the Model given its priors:  -3.367295829986474
+Log probability of the Model given its priors:  -8.559817917094882
+
+
+
+
+

Perturbing a model multiple times

+

In the stochastic inference process, we perturb the model structure and parameter values multiple times. Each time the model is perturbed, we can record its state in a posterior distribution.

+

For a 1D model, the parameter posterior is a 2D hitmap with depth in one dimension +and the parameter value in the other. +We also attach a 1D histogram for the number of layers, +and a 1D histogram for the locations of interfaces.

+

Since we have already set the priors on the Model, we can set the posteriors based on bins from the priors.

+
mod.set_posteriors()
+
+mod0 = deepcopy(mod)
+
+
+

Now we randomly perturb the model, and update its posteriors.

+
mod.update_posteriors()
+for i in range(1001):
+    remapped, perturbed = mod.perturb()
+
+    # And update the model posteriors
+    perturbed.update_posteriors()
+
+    mod = perturbed
+
+
+
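
Each attached posterior can also be inspected on its own. A short sketch, using the attribute names from the commented plotting calls further below.

+
plt.figure(); mod.nCells.posterior.plot()                     # histogram of the number of layers
+plt.figure(); mod.mesh.edges.posterior.plot(transpose=True)   # histogram of interface locations
+plt.figure(); mod.values.posterior.pcolor(cmap='gray_r')      # 2D depth vs parameter hitmap
+
+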

We can now plot the posteriors of the model.

+

Remember that in this case we are simply perturbing the model structure and parameter values. The proposal for the parameter values is fixed and centred around a single value.

+
# plt.subplot(131)
+# mod.nCells.posterior.plot()
+# ax = plt.subplot(132)
+# mod.values.posterior.pcolor(cmap='gray_r', colorbar=False, flipY=True, logX=10)
+# plt.subplot(133, sharey=ax)
+# mod.mesh.edges.posterior.plot(transpose=True, flipY=True)
+
+# plt.figure()
+# mod.plot_posteriors(**{"cmap": 'gray_r',
+#                   "xscale": 'log',
+#                   "noColorbar": True,
+#                   "flipY": True,
+#                   'credible_interval_kwargs':{'axis': 1,
+#                                           'reciprocate': True,
+#                                           'xscale': 'log'}})
+# mod.par.posterior.plotCredibleIntervals(xscale='log', axis=1)
+
+
+fig = plt.figure(figsize=(8, 6))
+# gs = fig.add_gridspec(nrows=1, ncols=1)
+mod.plot_posteriors(axes=fig,
+                    edges_kwargs = {
+                        "transpose":True,
+                        "flipY":True
+                    },
+                    parameter_kwargs = {
+                        "cmap": 'gray_r',
+                        "xscale": 'log',
+                        "colorbar": False,
+                        "flipY": True,
+                        'credible_interval_kwargs':{
+                              'reciprocate':True,
+                            #   'axis': 1,
+                              'xscale': 'log'
+                        }
+                    },
+                    best = mod)
+
+
+plt.show()
+
+
+plot model 1d

Total running time of the script: (0 minutes 3.051 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Models/plot_model_2d.html b/docs/examples/Models/plot_model_2d.html
new file mode 100644
index 00000000..37c24a92
--- /dev/null
+++ b/docs/examples/Models/plot_model_2d.html
@@ -0,0 +1,209 @@
+2D Rectilinear Model — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

2D Rectilinear Model

+

This 2D rectilinear model defines a grid with straight cell boundaries.

+
from geobipy import StatArray
+from geobipy import RectilinearMesh2D
+from geobipy import Model
+import h5py
+import matplotlib.pyplot as plt
+import numpy as np
+
+
+

Specify some cell edges in x and y

+
x = StatArray(np.arange(11.0), 'Easting', 'm')
+y = StatArray(np.arange(11.0), 'Northing', 'm')
+mesh = RectilinearMesh2D(x_edges=x, y_edges=y)
+
+xx, yy = np.meshgrid(mesh.x.centres, mesh.y.centres)
+values = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "Values")
+
+mod = Model(mesh=mesh, values = values)
+
+plt.figure()
+mod.pcolor()
+
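# Resample the model onto new grids. The two arguments are taken here to be the new
# cell widths in x and y (an assumption based on the 0.5 and 1.5 values used below),
# giving one finer and one coarser version of the same model.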
+mod2 = mod.resample(0.5, 0.5)
+mod3 = mod.resample(1.5, 1.5)
+plt.figure()
+plt.subplot(121)
+mod2.pcolor()
+plt.axis('equal')
+plt.subplot(122)
+mod3.pcolor()
+plt.axis('equal')
+
+
+# #%%
+# # We can plot the mesh in 3D!
+# pv = rm.pyvista_plotter()
+# pv.show()
+
+# rm.to_vtk('Model3D.vtk')
+
+with h5py.File('Model2D.h5', 'w') as f:
+    mod.toHdf(f, 'model')
+
+with h5py.File('Model2D.h5', 'r') as f:
+    mod2 = Model.fromHdf(f['model'])
+
+
+plt.show()
+
+
+
    +
  • plot model 2d
  • +
  • plot model 2d
  • +
+

Total running time of the script: (0 minutes 0.212 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Models/plot_model_3d.html b/docs/examples/Models/plot_model_3d.html
new file mode 100644
index 00000000..69898032
--- /dev/null
+++ b/docs/examples/Models/plot_model_3d.html
@@ -0,0 +1,338 @@
3D Rectilinear Model — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

3D Rectilinear Model

+

This 3D rectilinear model defines a grid with straight cell boundaries.

+
from geobipy import StatArray
+from geobipy import RectilinearMesh3D
+from geobipy import Model
+import matplotlib.pyplot as plt
+import numpy as np
+import h5py
+
+
+"""
+3D Rectilinear Mesh
+-------------------
+This 3D rectilinear mesh defines a grid with straight cell boundaries.
+
+"""
+
+
+
'\n3D Rectilinear Mesh\n-------------------\nThis 3D rectilinear mesh defines a grid with straight cell boundaries.\n\n'
+
+
+
from geobipy import StatArray
+from geobipy import RectilinearMesh3D
+from geobipy import Model
+import matplotlib.pyplot as plt
+import numpy as np
+import h5py
+
+
+

Specify some cell edges in x, y, and z

+
x = StatArray(np.arange(10.0), 'Easting', 'm')
+y = StatArray(np.arange(15.0), 'Northing', 'm')
+z = StatArray(np.arange(20.0), 'Depth', 'm')
+
+mesh = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z)
+
+xx, yy = np.meshgrid(mesh.y.centres, mesh.x.centres)
+values = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "Height")
+values = np.repeat(values[:, :, None], mesh.z.nCells, 2)
+
+model = Model(mesh=mesh, values=values)
+
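# Models appear to support numpy-style indexing: model1 is a 3D sub-volume, while
# fixing a single index (e.g. model[5, :, :]) returns a 2D slice. Judging by the axis
# labels on the figures below, the index order is (x, y, z).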
+model1 = model[:5, :5, :5]
+model2 = model[:, :, 5]
+model3 = model[:, 5, :]
+model4 = model[5, :, :]
+
+plt.figure()
+plt.subplot(231)
+model2.pcolor()
+plt.subplot(232)
+model3.pcolor()
+plt.subplot(233)
+model4.pcolor()
+
+
+plot model 3d
(<Axes: xlabel='Northing (m)', ylabel='Depth (m)'>, <matplotlib.collections.QuadMesh object at 0x1828e1340>, <matplotlib.colorbar.Colorbar object at 0x182d6cb30>)
+
+
+
model2 = model[:, 5, 5]
+model3 = model[5, :, 5]
+model4 = model[5, 5, :]
+
+plt.subplot(234)
+model2.pcolor()
+plt.subplot(235)
+model3.pcolor()
+plt.subplot(236)
+model4.pcolor()
+
+
+plot model 3d
<Axes: xlabel='Depth (m)'>
+
+
+
with h5py.File('model3d.h5', 'w') as f:
+    model.createHdf(f, 'test')
+    model.writeHdf(f, 'test')
+
+with h5py.File('model3d.h5', 'r') as f:
+    model2 = Model.fromHdf(f['test'])
+
+model.pyvista_mesh().save('model3d.vtk')
+
+
+xx, yy = np.meshgrid(mesh.y.centres, mesh.x.centres)
+z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re")
+mesh = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z, z_relative_to=z_re)
+model = Model(mesh=mesh, values=values)
+
+model1 = model[:5, :5, :5]
+model2 = model[:, :, 5]
+model3 = model[:, 5, :]
+model4 = model[5, :, :]
+
+plt.figure()
+plt.subplot(231)
+model2.pcolor()
+plt.subplot(232)
+model3.pcolor()
+plt.subplot(233)
+model4.pcolor()
+
+
+plot model 3d
(<Axes: xlabel='Northing (m)', ylabel='z_re (m)'>, <matplotlib.collections.QuadMesh object at 0x17f18acc0>, <matplotlib.colorbar.Colorbar object at 0x182510410>)
+
+
+

We can plot the mesh in 3D!

+
pv = model.pyvista_plotter()
+
+
+

We can also save the model to a VTK file.

+
model.pyvista_mesh().save('model3d_re1.vtk')
+
+
+x_re = StatArray(np.sin(np.repeat(mesh.y.centres[:, None], mesh.z.nCells, 1)), "x_re")
+mesh = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, z_edges=z, z_relative_to=z_re)
+model = Model(mesh=mesh, values=values)
+
+model1 = model[:5, :5, :5]
+model2 = model[:, :, 5]
+model3 = model[:, 5, :]
+model4 = model[5, :, :]
+
+plt.figure()
+plt.subplot(231)
+model2.pcolor()
+plt.subplot(232)
+model3.pcolor()
+plt.subplot(233)
+model4.pcolor()
+
+
+plot model 3d
(<Axes: xlabel='Northing (m)', ylabel='z_re (m)'>, <matplotlib.collections.QuadMesh object at 0x17ee3a0f0>, <matplotlib.colorbar.Colorbar object at 0x154bb9a30>)
+
+
+

We can plot the mesh in 3D!

+
pv = model.pyvista_plotter()
+
+
+

We can also save the model to a VTK file.

+
model.pyvista_mesh().save('model3d_re2.vtk')
+
+
+xx, yy = np.meshgrid(mesh.z.centres, mesh.x.centres)
+y_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "y_re")
+
+mesh = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, y_relative_to=y_re, z_edges=z, z_relative_to=z_re)
+model = Model(mesh=mesh, values=values)
+
+model1 = model[:5, :5, :5]
+model2 = model[:, :, 5]
+model3 = model[:, 5, :]
+model4 = model[5, :, :]
+
+plt.figure()
+plt.subplot(231)
+model2.pcolor()
+plt.subplot(232)
+model3.pcolor()
+plt.subplot(233)
+model4.pcolor()
+
+
+plot model 3d
(<Axes: xlabel='y_re (m)', ylabel='z_re (m)'>, <matplotlib.collections.QuadMesh object at 0x17faea990>, <matplotlib.colorbar.Colorbar object at 0x15e95dc10>)
+
+
+

We can plot the mesh in 3D!

+
pv = model.pyvista_plotter()
+
+
+

We can also save the model to a VTK file.

+
model.pyvista_mesh().save('model3d_re3.vtk')
+
+# with h5py.File('mesh3d.h5', 'w') as f:
+#     mesh.toHdf(f, 'test')
+
+# with h5py.File('mesh3d.h5', 'r') as f:
+#     mesh2 = RectilinearMesh3D.fromHdf(f['test'])
+
+# mesh2.pyvista_mesh().save('mesh3d_read.vtk')
+
+plt.show()
+
+
+

Total running time of the script: (0 minutes 0.788 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Models/readme.html b/docs/examples/Models/readme.html
new file mode 100644
index 00000000..987fd79d
--- /dev/null
+++ b/docs/examples/Models/readme.html
@@ -0,0 +1,115 @@
Models — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Models

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Models/sg_execution_times.html b/docs/examples/Models/sg_execution_times.html
new file mode 100644
index 00000000..4fffeaa1
--- /dev/null
+++ b/docs/examples/Models/sg_execution_times.html
@@ -0,0 +1,151 @@
Computation times — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Computation times

+

00:04.051 total execution time for 3 files from examples/Models:

+
Example                                                  Time       Mem (MB)
1D Model with an infinite halfspace (plot_model_1d.py)   00:03.051  0.0
3D Rectilinear Model (plot_model_3d.py)                  00:00.788  0.0
2D Rectilinear Model (plot_model_2d.py)                  00:00.212  0.0
+
+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Statistics/plot_DataArray.html b/docs/examples/Statistics/plot_DataArray.html
new file mode 100644
index 00000000..bc2cb777
--- /dev/null
+++ b/docs/examples/Statistics/plot_DataArray.html
@@ -0,0 +1,278 @@
DataArray Class — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

DataArray Class

+

Extends the numpy ndarray class to add extra attributes such as names and units, and allows us to attach statistical descriptors to the array. The direct extension of numpy maintains the speed and functionality of numpy arrays.

+
DataArray
+Name:   1
+Address:['0x17f629d50']
+Shape:  (1,)
+Values: [0.]
+Min:    0.0
+Max:    0.0
+
+DataArray
+Name:   10
+Address:['0x17f629450']
+Shape:  (10,)
+Values: [0. 0. 0. ... 0. 0. 0.]
+Min:    0.0
+Max:    0.0
+
+DataArray
+Name:   (2, 10)
+Address:['0x17f62aad0']
+Shape:  (2, 10)
+Values: [[0. 0. 0. ... 0. 0. 0.]
+ [0. 0. 0. ... 0. 0. 0.]]
+Min:    0.0
+Max:    0.0
+
+DataArray
+Name:   (2, 10)
+Address:['0x154ab7a50']
+Shape:  (2,)
+Values: [ 2 10]
+Min:    2
+Max:    10
+
+DataArray
+Name:   45.454
+Address:['0x154ab6dd0']
+Shape:  (1,)
+Values: [45.454]
+Min:    45.454
+Max:    45.454
+
+DataArray
+Name:   45.454
+Address:['0x17f62aad0']
+Shape:  (1,)
+Values: [45.454]
+Min:    45.454
+Max:    45.454
+
+DataArray
+Name:   test ($\frac{g}{cc}$)
+Address:['0x154ab6dd0']
+Shape:  (1,)
+Values: [0.00257118]
+Min:    0.002571182431510025
+Max:    0.002571182431510025
+
+DataArray
+Name:   test ($\frac{g}{cc}$)
+Address:['0x17f62aad0']
+Shape:  (10,)
+Values: [0. 1. 2. ... 7. 8. 9.]
+Min:    0.0
+Max:    9.0
+
+DataArray
+Name:   test ($\frac{g}{cc}$)
+Address:['0x154ab7a50']
+Shape:  (10,)
+Values: [0. 1. 2. ... 7. 8. 9.]
+Min:    0.0
+Max:    9.0
+
+
+
+

+
+
import  numpy as np
+from geobipy import DataArray, StatArray
+
+# Integer
+test = DataArray(1, name='1')
+assert isinstance(test, DataArray) and test.size ==  1 and test.item() == 0.0, TypeError("da 0")
+print(test.summary)
+test = DataArray(10, name='10')
+assert isinstance(test, DataArray) and test.size ==  10 and np.all(test == 0.0), TypeError("da 1")
+print(test.summary)
+# tuple/Shape
+test = DataArray((2, 10), name='(2, 10)')
+assert isinstance(test, DataArray) and np.all(test.shape ==  (2, 10)) and np.all(test == 0.0), TypeError("da 2")
+print(test.summary)
+
+test = DataArray([2, 10], name='(2, 10)')
+assert isinstance(test, DataArray) and np.all(test ==  [2, 10]), TypeError("da 2")
+print(test.summary)
+
+# float
+test = DataArray(45.454, name='45.454')
+assert isinstance(test, DataArray) and test.size ==  1 and test.item() == 45.454, TypeError("da 3")
+print(test.summary)
+test = DataArray(np.float64(45.454), name='45.454')
+assert isinstance(test, DataArray) and test.size ==  1 and test.item() == 45.454, TypeError("da 4")
+print(test.summary)
+
+# array
+test = DataArray(np.random.randn(1), name="test", units="$\frac{g}{cc}$")
+assert isinstance(test, DataArray) and test.size ==  1, TypeError("da 5")
+print(test.summary)
+
+test = DataArray(np.arange(10.0), name="test", units="$\frac{g}{cc}$")
+assert isinstance(test, DataArray) and test.size ==  10, TypeError("da 6")
+print(test.summary)
+
+test = DataArray(test)
+assert isinstance(test, DataArray) and test.size ==  10, TypeError("da 6")
+print(test.summary)
+
+
+

Total running time of the script: (0 minutes 0.003 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Statistics/plot_StatArray.html b/docs/examples/Statistics/plot_StatArray.html
new file mode 100644
index 00000000..0c10b556
--- /dev/null
+++ b/docs/examples/Statistics/plot_StatArray.html
@@ -0,0 +1,894 @@
StatArray Class — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

StatArray Class

+

Extends the numpy ndarray class to add extra attributes such as names and units, and allows us to attach statistical descriptors to the array. The direct extension of numpy maintains the speed and functionality of numpy arrays.

+
import numpy as np
+import matplotlib.pyplot as plt
+import h5py
+from geobipy import DataArray, StatArray, Histogram, Distribution, RectilinearMesh1D
+
+
+# plt.style.use('seaborn-pastel')
+
+
+
+

Instantiating a new StatArray class

+
# Integer
+test = StatArray(1, name='1')
+assert isinstance(test, StatArray) and test.size ==  1 and test.item() == 0.0, TypeError("da 0")
+print(test.summary)
+test = StatArray(10, name='10')
+assert isinstance(test, StatArray) and test.size ==  10 and np.all(test == 0.0), TypeError("da 1")
+print(test.summary)
+# tuple/Shape
+test = StatArray((2, 10), name='(2, 10)')
+assert isinstance(test, StatArray) and np.all(test.shape ==  (2, 10)) and np.all(test == 0.0), TypeError("da 2")
+print(test.summary)
+
+test = StatArray([2, 10], name='(2, 10)')
+assert isinstance(test, StatArray) and np.all(test ==  [2, 10]), TypeError("da 2")
+print(test.summary)
+
+# float
+test = StatArray(45.454, name='45.454')
+assert isinstance(test, StatArray) and test.size ==  1 and test.item() == 45.454, TypeError("da 3")
+print(test.summary)
+test = StatArray(np.float64(45.454), name='45.454')
+assert isinstance(test, StatArray) and test.size ==  1 and test.item() == 45.454, TypeError("da 4")
+print(test.summary)
+
+# array
+test = StatArray(np.random.randn(1), name="test", units="$\frac{g}{cc}$")
+assert isinstance(test, StatArray) and test.size ==  1, TypeError("da 5")
+print(test.summary)
+
+test = StatArray(np.arange(10.0), name="test", units="$\frac{g}{cc}$")
+assert isinstance(test, StatArray) and test.size ==  10, TypeError("da 6")
+print(test.summary)
+
+
+test = DataArray(np.arange(10.0), name="test", units="$\frac{g}{cc}$")
+test = StatArray(test)
+assert isinstance(test, StatArray) and test.size ==  10, TypeError("da 6")
+print(test.summary)
+
+
+
+
+# The StatArray can take any numpy function that returns an array as an input.
+# The name and units of the variable can be assigned to the StatArray.
+
+
+
StatArray
+Name:   1
+Address:['0x154bfe950']
+Shape:  (1,)
+Values: [0.]
+Min:    0.0
+Max:    0.0
+has_posterior: False
+
+StatArray
+Name:   10
+Address:['0x17f089a50']
+Shape:  (10,)
+Values: [0. 0. 0. ... 0. 0. 0.]
+Min:    0.0
+Max:    0.0
+has_posterior: False
+
+StatArray
+Name:   (2, 10)
+Address:['0x17f08a7d0']
+Shape:  (2, 10)
+Values: [[0. 0. 0. ... 0. 0. 0.]
+ [0. 0. 0. ... 0. 0. 0.]]
+Min:    0.0
+Max:    0.0
+has_posterior: False
+
+StatArray
+Name:   (2, 10)
+Address:['0x17f6825d0']
+Shape:  (2,)
+Values: [ 2 10]
+Min:    2
+Max:    10
+has_posterior: False
+
+StatArray
+Name:   45.454
+Address:['0x17f08a7d0']
+Shape:  (1,)
+Values: [45.454]
+Min:    45.454
+Max:    45.454
+has_posterior: False
+
+StatArray
+Name:   45.454
+Address:['0x17f680a50']
+Shape:  (1,)
+Values: [45.454]
+Min:    45.454
+Max:    45.454
+has_posterior: False
+
+StatArray
+Name:   test ($\frac{g}{cc}$)
+Address:['0x17f08a7d0']
+Shape:  (1,)
+Values: [-0.70419088]
+Min:    -0.7041908829549965
+Max:    -0.7041908829549965
+has_posterior: False
+
+StatArray
+Name:   test ($\frac{g}{cc}$)
+Address:['0x17f6819d0']
+Shape:  (10,)
+Values: [0. 1. 2. ... 7. 8. 9.]
+Min:    0.0
+Max:    9.0
+has_posterior: False
+
+StatArray
+Name:   test ($\frac{g}{cc}$)
+Address:['0x17f680a50']
+Shape:  (10,)
+Values: [0. 1. 2. ... 7. 8. 9.]
+Min:    0.0
+Max:    9.0
+has_posterior: False
+
+
+
+
+

Attaching Prior and Proposal Distributions to a StatArray

+

The StatArray class has been built so that we may easily attach not only names and units, but statistical distributions too. We won't go into too much detail about the different distribution classes here.

+

Two types of distributions can be attached to the StatArray.

+ +
# Obtain an instantiation of a random number generator.
+# This is optional, but is an important consideration for parallel programming.
+from numpy.random import Generator
+from numpy.random import PCG64DXSM
+generator = PCG64DXSM(seed=0)
+prng = Generator(generator)
+
+Density = StatArray(10.0, name="test", units="$\frac{g}{cc}$")
+
+Density.prior = Distribution('Uniform', -2.0, 2.0, prng=prng)
+
+
+

We can also attach a proposal distribution

+
Density.proposal = Distribution('Normal', 0.0, 1.0, prng=prng)
+print(Density.summary)
+print("Class type of the prior: ",type(Density.prior))
+print("Class type of the proposal: ",type(Density.proposal))
+
+
+
StatArray
+Name:   test ($\frac{g}{cc}$)
+Address:['0x154bfd7d0' '0x17f652360' '0x181310930' ... '0x17f652180' '0x181313b10'
+ '0x1813134b0']
+Shape:  (1,)
+Values: [10.]
+Min:    10.0
+Max:    10.0
+Prior:
+|   Uniform Distribution:
+|   Min: -2.0
+|   Max: 2.0
+Proposal:
+|   Normal
+|       Mean:0.0
+|   Variance:1.0
+has_posterior: False
+
+Class type of the prior:  <class 'geobipy.src.classes.statistics.UniformDistribution.Uniform'>
+Class type of the proposal:  <class 'geobipy.src.classes.statistics.NormalDistribution.Normal'>
+
+
+

The values in the variable can be evaluated against the prior. Because the prior here is univariate, each element in the variable is evaluated independently, giving one probability per element (a single value in this case, since Density has one element).

+
print(Density.probability(log=False))
+
+
+
0.0
+
+
+

The univariate proposal distribution can generate random samples from itself.

+
print(Density.propose())
+
+
+
1.1375024404290368
+
+
+

From a sampling standpoint, we can either sample using only the proposal, or we can generate only samples that simultaneously satisfy the prior.

+
print(Density.propose(relative=True))
+
+
+
[10.53816627]
+
+
+

We can perturb the variable by drawing from the attached proposal distribution.

+
Density.perturb()
+print(Density.summary)
+
+
+
StatArray
+Name:   test ($\frac{g}{cc}$)
+Address:['0x154bfd7d0' '0x17f652360' '0x181310930' ... '0x17f652180' '0x181313b10'
+ '0x1813134b0']
+Shape:  (1,)
+Values: [0.38188467]
+Min:    0.38188466718060166
+Max:    0.38188466718060166
+Prior:
+|   Uniform Distribution:
+|   Min: -2.0
+|   Max: 2.0
+Proposal:
+|   Normal
+|       Mean:0.0
+|   Variance:1.0
+has_posterior: False
+
+
+
+
+

Attaching a Histogram to capture the posterior distribution

+

The StatArray can perturb itself and evaluate its current probability given its priors, and a histogram can be attached to capture its posterior distribution. As an example, let's create a Histogram class with bins generated from the prior.

+
bins = Density.prior.bins()
+
+
+

Attach the histogram

+
Density.posterior = Histogram(mesh = RectilinearMesh1D(edges=bins))
+
+
+

In an iterative sense, we can propose and evaluate new values, and update the posterior

+
for i in range(1000):
+    Density.perturb()
+    p = Density.probability(log=False)
+
+    if p > 0.0: # This is a simple example!
+        Density.update_posterior()
+
+
+
plt.figure()
+Density.summaryPlot()
+
+
+Prior, Proposal, Posterior
+
+

Attach a multivariate normal distribution as the prior and proposal

+

Attach the multivariate prior

+
mean = np.random.randn(Density.size)
+variance = np.ones(Density.size)
+Density.prior = Distribution('MvNormal', mean, variance, prng=prng)
+
+
+

Since the prior is multivariate, the appropriate equations are used to +evaluate the probability for all elements in the StatArray. +This produces a single probability.

+
print(Density.probability(log=False))
+
+
+
0.16541198455442488
+
+
+

Attach the multivariate proposal

+
mean = np.random.randn(Density.size)
+variance = np.ones(Density.size)
+Density.proposal = Distribution('MvNormal', mean, variance, prng=prng)
+
+
+

Perturb the variables using the multivariate proposal.

+
Density.perturb()
+Density.summary
+
+with h5py.File('statarray.h5', 'w') as f:
+    Density.createHdf(f, 'statarray', withPosterior=True, add_axis=3)
+    Density.writeHdf(f, 'statarray', withPosterior=True, index=0)
+
+with h5py.File('statarray.h5', 'r') as f:
+    tmp = StatArray.fromHdf(f, 'statarray', index=0, skip_posterior=False)
+
+with h5py.File('statarray.h5', 'r') as f:
+    tmp = StatArray.fromHdf(f, 'statarray', skip_posterior=False)
+
+
+
+
+

Basic manipulation

+

The StatArray contains other functions to perform basic array manipulations

+

These routines essentially wrap around numpy functions, but the result will have the same name and units, and if any prior or proposal are set, those will be carried through too.

+
+
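As a quick check of that behaviour, here is a minimal sketch (assuming the name and units properties that the summaries above print as Name) showing that the metadata survives one of the wrapped operations:

# Metadata is preserved through a wrapped numpy operation.
import numpy as np
from geobipy import StatArray

depth = StatArray(np.arange(5.0), name='Depth', units='m')
extended = depth.prepend(999.0)          # one of the wrapped routines shown below
print(extended.name, extended.units)     # expected: Depth m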

1D example

+
x = StatArray(-np.cumsum(np.arange(10.0)))
+print(x)
+
+
+
[ -0.  -1.  -3. ... -28. -36. -45.]
+
+
+
print(x.insert(i=[0, 9], values=[999.0, 999.0]))
+
+
+
[999.  -0.  -1. ... -36. 999. -45.]
+
+
+
print(x.prepend(999.0))
+
+
+
[999.  -0.  -1. ... -28. -36. -45.]
+
+
+
print(x.prepend([998.0, 999.0]))
+
+
+
[998. 999.  -0. ... -28. -36. -45.]
+
+
+
print(x.append([998.0, 999.0]))
+
+
+
[ -0.  -1.  -3. ... -45. 998. 999.]
+
+
+
print(x.resize(14))
+
+
+
[-0. -1. -3. ... -1. -3. -6.]
+
+
+
print(x.delete([5,8]))
+
+
+
[ -0.  -1.  -3. ... -21. -28. -45.]
+
+
+
print(x.edges())
+
+
+
[  0.5  -0.5  -2.  ... -32.  -40.5 -49.5]
+
+
+
print(x.internalEdges())
+
+
+
[ -0.5  -2.   -4.5 ... -24.5 -32.  -40.5]
+
+
+
print(x.firstNonZero())
+
+
+
1
+
+
+
print(x.lastNonZero())
+
+
+
10
+
+
+
print(x.abs())
+
+
+
[ 0.  1.  3. ... 28. 36. 45.]
+
+
+
+
+

2D example

+
x = StatArray(np.asarray([[0, -2, 3],[3, 0, -1],[1, 2, 0]]))
+print(x)
+
+
+
[[ 0 -2  3]
+ [ 3  0 -1]
+ [ 1  2  0]]
+
+
+
print(x.insert(i=0, values=4))
+
+
+
[[ 4  4  4]
+ [ 0 -2  3]
+ [ 3  0 -1]
+ [ 1  2  0]]
+
+
+
print(x.insert(i=[2, 3], values=5, axis=1))
+
+
+
[[ 0 -2  5  3  5]
+ [ 3  0  5 -1  5]
+ [ 1  2  5  0  5]]
+
+
+
print(x.insert(i=2, values=[10, 11, 12], axis=1))
+
+
+
[[ 0 -2 10  3]
+ [ 3  0 11 -1]
+ [ 1  2 12  0]]
+
+
+
print(x.prepend(999))
+
+
+
[[999 999 999]
+ [  0  -2   3]
+ [  3   0  -1]
+ [  1   2   0]]
+
+
+
print(x.prepend([999, 998, 997], axis=1))
+
+
+
[[999 998 997   0  -2   3]
+ [999 998 997   3   0  -1]
+ [999 998 997   1   2   0]]
+
+
+
print(x.append([[999, 998, 997]]))
+
+
+
[[  0  -2   3]
+ [  3   0  -1]
+ [  1   2   0]
+ [999 998 997]]
+
+
+
print(x.resize([5,5]))
+
+
+
[[ 0 -2  3  3  0]
+ [-1  1  2  0  0]
+ [-2  3  3  0 -1]
+ [ 1  2  0  0 -2]
+ [ 3  3  0 -1  1]]
+
+
+
print(x.delete(5))
+
+
+
[ 0 -2  3 ...  1  2  0]
+
+
+
print(x.delete(2, axis=0))
+
+
+
[[ 0 -2  3]
+ [ 3  0 -1]]
+
+
+
print(x.firstNonZero(axis=0))
+
+
+
[1 0 0]
+
+
+
print(x.lastNonZero(axis=0))
+
+
+
[3 3 2]
+
+
+
print(x.firstNonZero(axis=1))
+
+
+
[1 0 0]
+
+
+
print(x.lastNonZero(axis=1))
+
+
+
[3 3 2]
+
+
+
print(x.abs())
+
+
+
[[0 2 3]
+ [3 0 1]
+ [1 2 0]]
+
+
+
+
+
+

Plotting

+

We can easily plot the StatArray with its built-in plotting functions. All plotting functions can take matplotlib keywords.

+
# The simplest is to just plot the array
+
+Density = StatArray(np.random.randn(100),name="Density",units="$\frac{g}{cc}$")
+Time = StatArray(np.linspace(0, 100, Density.size), name='Time', units='s')
+Depth = StatArray(np.random.exponential(size=Density.size), name='Depth', units='m')
+
+
+
plt.figure()
+_ = Density.plot(linewidth=0.5, marker='x', markersize=1.0)
+
+
+plot StatArray

We can quickly plot a bar graph.

+
plt.figure()
+_ = Density.bar()
+
+
+plot StatArray

We can scatter the contents of the StatArray if it is 1D

+
plt.figure()
+_ = Density.scatter(alpha=0.7)
+
+
+plot StatArray
+

Histogram Equalization

+

A neat trick with colourmaps is histogram equalization. This approach forces all colours in the image to have an equal weight. This distorts the colour bar, but can really highlight the lower and higher ends of whatever you are plotting. Just add the equalize keyword!

+
plt.figure()
+_ = Density.scatter(alpha=0.7, equalize=True)
+
+
+plot StatArray

Take the log base(x) of the data

+

We can also take the data to a log, log10, log2, or a custom number!

+
plt.figure()
+_ = Density.scatter(alpha=0.7,edgecolor='k',log='e') # could also use log='e', log=2, log=x) where x is the base you require
+
+
+plot StatArray

X and Y axes

+

We can specify the x axis of the scatter plot.

+
plt.figure()
+_ = Density.scatter(x=Time, alpha=0.7, edgecolor='k')
+
+
+plot StatArray

Notice that I never specified the y axis, so the y axis defaulted to the values in the StatArray. +In this case, any operations applied to the colours, are also applied to the y axis, e.g. log=10. +When I take the values of Density to log base 10, because I do not specify the y plotting locations, those locations are similarly affected.

+

I can however force the y co-ordinates by specifying it as input. +In the second subplot I explicitly plot distance on the y axis. +In the first subplot, the y axis is the same as the colourbar.

+
plt.figure()
+ax1 = plt.subplot(211)
+Density.scatter(x=Time, alpha=0.7, edgecolor='k', log=10)
+plt.subplot(212, sharex=ax1)
+_ = Density.scatter(x=Time, y=Depth, alpha=0.7, edgecolor='k', log=10)
+
+
+plot StatArray

Point sizes

+

Since the plotting functions take matplotlib keywords, I can also specify the size of each points.

+
s = np.ceil(100*(np.abs(np.random.randn(Density.size))))
+plt.figure()
+plt.tight_layout()
+ax1 = plt.subplot(211)
+Density.scatter(x=Time, y=Depth, s=s, alpha=0.7,edgecolor='k', legend_size=2)
+plt.subplot(212, sharex=ax1)
+#Density.scatter(x=Time, y=Depth, s=s, alpha=0.7,edgecolor='k', sizeLegend=[1.0, 100, 200, 300])
+v = np.abs(Density)+1.0
+_ = Density.scatter(x=Time, y=Depth, s=s, alpha=0.7,edgecolor='k', legend_size=[1.0, 100, 200, 300], log=10)
+
+
+plot StatArray

Of course we can still take the log, or equalize the colour histogram

+
plt.figure()
+_ = Density.scatter(x=Time, y=Depth, s=s, alpha=0.7,edgecolor='k',equalize=True,log=10)
+
+
+plot StatArray

Typically pcolor only works with 2D arrays. The StatArray has a pcolor method that will pcolor a 1D array

+
plt.figure()
+plt.subplot(221)
+Density.pcolor()
+plt.subplot(222)
+Density.pcolor(y=Time)
+plt.subplot(223)
+Density.pcolor(y=Time, flip=True)
+plt.subplot(224)
+_ = Density.pcolor(y=Time, log=10, equalize=True)
+
+
+plot StatArray

We can add grid lines, and add opacity to each element in the pcolor image

+

This is useful if the colour values need to be scaled by another variable e.g. variance.

+
plt.figure()
+plt.subplot(121)
+Density.pcolor(grid=True, cmap='jet')
+plt.subplot(122)
+a = np.linspace(1.0, 0.0, Density.size)
+_ = Density.pcolor(grid=True, alpha=a, cmap='jet')
+
+
+plot StatArray

We can plot a histogram of the StatArray

+
plt.figure()
+_ = Density.hist(100)
+
+
+plot StatArray

We can write the StatArray to a HDF5 file. HDF5 files are binary files that can include compression. They allow quick and easy access to parts of the file, and can also be written to and read from in parallel!

+
with h5py.File('1Dtest.h5','w') as f:
+    Density.toHdf(f,'test')
+
+
+
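The createHdf/writeHdf pair used earlier for the posterior is the indexed form of the same idea: allocate space for several entries once, then write each entry to its own index. A minimal serial sketch of that pattern follows; truly parallel writes would additionally need an MPI-enabled h5py build, which is an assumption not shown here.

with h5py.File('1Dtest_indexed.h5', 'w') as f:
    # Allocate room for three entries of Density in one call,
    Density.createHdf(f, 'test', add_axis=3)
    # then write each entry to its own index (in parallel, each rank would own an index).
    for i in range(3):
        Density.writeHdf(f, 'test', index=i)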

We can then read the StatArray from the file. Here, x is a new variable read in from the HDF5 file we just wrote.

+
x = StatArray.fromHdf('1Dtest.h5', 'test')
+print('x has the same values as Density? ',np.all(x == Density))
+x[2] = 5.0 # Change one of the values in x
+print('x has its own memory allocated (not a reference/pointer)? ', id(x) != id(Density))
+
+
+
x has the same values as Density?  True
+x has its own memory allocated (not a reference/pointer)?  True
+
+
+

We can also define a 2D array

+
Density = StatArray(np.random.randn(50,100),"Density","$\frac{g}{cc}$")
+Density.summary
+
+
+
"StatArray\nName:   Density ($\\frac{g}{cc}$)\nAddress:['0x17f4e05d0']\nShape:  (50, 100)\nValues: [[ 0.14104158  0.47155933  1.95502179 ...  1.76214533  1.24396239\n   0.1215391 ]\n [-1.85725407 -1.83481902  1.04987135 ...  0.85985878 -0.34529634\n  -0.85558253]\n [ 0.76292177  1.13113089 -1.35816931 ...  1.31159774  0.46698778\n   1.68630769]\n ...\n [ 0.48384681  0.22774246 -0.51285518 ... -0.24617681  0.81498656\n   1.57095942]\n [-0.13056782  1.72508382 -1.33506224 ... -1.17319374  0.27482725\n   0.39378825]\n [ 1.36799972  0.26714623 -0.02588039 ... -0.28979426 -0.8747471\n  -0.98512505]]\nMin:    -3.2750329304041186\nMax:    3.445969826454679\nhas_posterior: False\n"
+
+
+

The StatArray Class’s functions work whether it is 1D or 2D

+

We can still do a histogram

+
plt.figure()
+_ = Density.hist()
+
+
+plot StatArray

And we can use pcolor to plot the 2D array

+
plt.figure()
+_ = Density.pcolor()
+
+
+plot StatArray

The StatArray comes with extra plotting options

+

Here we specify the x and y axes for the 2D array using two other 1D StatArrays

+
plt.figure()
+x = StatArray(np.arange(101),name='x Axis',units = 'mm')
+y = StatArray(np.arange(51),name='y Axis',units = 'elephants')
+_ = Density.pcolor(x=x, y=y)
+
+
+plot StatArray

We can plot using a log scale; in this case, we have values that are less than or equal to 0.0. Plotting with the log option will, by default, mask any of those values and will let you know that it has done so!

+
plt.figure()
+_ = Density.pcolor(x=x,y=y,log=2)
+
+
+plot StatArray

A neat trick with colourmaps is histogram equalization. +This approach forces all colours in the image to have an equal amount. +This distorts the colours, but can really highlight the lower and higher +ends of whatever you are plotting

+
plt.figure()
+_ = Density.pcolor(x=x, y=y, equalize=True)
+
+
+plot StatArray

We can equalize the log10 plot too :)

+
plt.figure()
+_ = Density.pcolor(x=x,y=y,equalize=True, log=10)
+
+
+plot StatArray

We can add opacity to each pixel in the image

+
a = StatArray(np.random.random(Density.shape), 'Opacity from 0.0 to 1.0')
+
+
+
plt.figure()
+ax1 = plt.subplot(131)
+ax = Density.pcolor(x=x, y=y, flipY=True, linewidth=0.1, colorbar=False)
+plt.subplot(132, sharex=ax1, sharey=ax1)
+ax = Density.pcolor(x=x, y=y, alpha=a, flipY=True, linewidth=0.1, colorbar=False)
+plt.subplot(133, sharex=ax1, sharey=ax1)
+_ = a.pcolor(x=x, y=y, flipY=True)
+
+
+plot StatArray

If the array potentially has a lot of white space around the edges, we can trim the image

+
Density[:10, :] = 0.0
+Density[-10:, :] = 0.0
+Density[:, :10] = 0.0
+Density[:, -10:] = 0.0
+plt.figure()
+plt.subplot(121)
+Density.pcolor()
+plt.subplot(122)
+_ = Density.pcolor(trim=0.0)
+
+
+plot StatArray

Create a stacked area plot of a 2D StatArray

+
A = StatArray(np.abs(np.random.randn(13,100)), name='Variable', units="units")
+x = StatArray(np.arange(100),name='x Axis',units = 'mm')
+plt.figure()
+ax1 = plt.subplot(211)
+A.stackedAreaPlot(x=x, axis=1)
+plt.subplot(212, sharex=ax1)
+_ = A.stackedAreaPlot(x=x, i=np.s_[[1,3,4],:], axis=1, labels=['a','b','c'])
+
+plt.show()
+
+
+plot StatArray

Total running time of the script: (0 minutes 2.767 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Statistics/plot_histogram_1d.html b/docs/examples/Statistics/plot_histogram_1d.html
new file mode 100644
index 00000000..424aa842
--- /dev/null
+++ b/docs/examples/Statistics/plot_histogram_1d.html
@@ -0,0 +1,371 @@
Histogram 1D — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

Histogram 1D

+

This histogram class allows efficient updating of histograms, plotting, and saving as HDF5.

+
from geobipy.src.classes.mesh.RectilinearMesh1D import RectilinearMesh1D
+import h5py
+from geobipy import StatArray
+from geobipy import Histogram
+import numpy as np
+import matplotlib.pyplot as plt
+
+
+
+

Histogram with regular bins

+
# Create regularly spaced bins
+mesh = RectilinearMesh1D(edges=StatArray(np.linspace(-3.0, 3.0, 101), 'bins', 'm'))
+
+
+

Set the histogram using the bins, and update

+
H = Histogram(mesh=mesh)
+
+
+

We can update the histogram with some new values

+
H.update(np.random.randn(1000), trim=True)
+
+# Plot the histogram
+plt.figure()
+plt.subplot(221)
+_ = H.plot()
+plt.subplot(222)
+_ = H.pdf.bar()
+plt.subplot(223)
+H.pmf.bar()
+plt.subplot(224)
+H.cdf().bar()
+
+
+plot histogram 1d
<Axes: xlabel='bins (m)', ylabel='Cumulative Density Function'>
+
+
+

Get the median, and 95% confidence values

+
print(H.credible_intervals(percent=95.0))
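# The tuple printed below appears to be (median, lower bound, upper bound);
# for these standard-normal samples that is roughly (0.0, -1.8, 1.8) at the 95% level.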
+
+plt.figure()
+H.plot()
+H.plotCredibleIntervals()
+H.plotMean()
+H.plotMedian()
+
+
+plot histogram 1d
(np.float64(-0.030000000000000027), np.float64(-1.83), np.float64(1.8299999999999996))
+
+
+
+
+

Histogram with irregular bins

+
# Create irregularly spaced bins
+x = np.cumsum(np.arange(10, dtype=np.float64))
+irregularBins = np.hstack([-x[::-1], x[1:]])
+
+
+

Create a named StatArray

+
edges = StatArray(irregularBins, 'irregular bins')
+mesh = RectilinearMesh1D(edges = edges)
+
+
+

Instantiate the histogram with bin edges

+
H = Histogram(mesh=mesh)
+
+# Update the histogram
+H.update((np.random.randn(10000)*20.0) - 10.0)
+
+
+

Plot the histogram

+
plt.figure()
+plt.subplot(211)
+_ = H.plot()
+plt.subplot(212)
+_ = H.plot(normalize=True)
+
+plt.figure()
+H.plot()
+H.plotCredibleIntervals()
+H.plotMean()
+H.plotMedian()
+
+
+
    +
  • plot histogram 1d
  • +
  • plot histogram 1d
  • +
+

We can plot the histogram as a pcolor plot

+
plt.figure()
+_ = H.pcolor(grid=True, transpose=True)
+
+
+plot histogram 1d
+
+

Histogram with linear space entries that are logged internally

+

Create some bins spaced logarithmically

+
mesh = RectilinearMesh1D(edges = StatArray(np.logspace(-5, 3), 'positive bins'), log=10)
+
+
+

Instantiate the Histogram with log=10

+
H = Histogram(mesh)
+
+
+

The update takes numbers in linear space and internally takes their log base 10.

+
H.update(10.0**(np.random.randn(1000)*2.0), trim=True)
+
+
+
plt.figure()
+plt.subplot(211)
+_ = H.plot()
+
+import h5py
+with h5py.File('h1d.h5', 'w') as f:
+    H.toHdf(f, 'h1d')
+
+with h5py.File('h1d.h5', 'r') as f:
+    H1 = Histogram.fromHdf(f['h1d'])
+
+plt.subplot(212)
+_ = H1.plot()
+
+
+plot histogram 1d
mesh = RectilinearMesh1D(edges=StatArray(np.linspace(-3.0, 3.0, 101), 'bins', 'm'))
+
+
+

Set the histogram using the bins, and update

+
H = Histogram(mesh=mesh)
+
+
+

We can update the histogram with some new values

+
H.update(np.random.randn(1000), trim=True)
+
+import h5py
+with h5py.File('h1d.h5', 'w') as f:
+    H.createHdf(f, 'h1d', add_axis=StatArray(np.arange(3.0), "Name", "Units"))
+    H.writeHdf(f, 'h1d', index=0)
+    H.update(np.random.randn(1000), trim=True)
+    H.writeHdf(f, 'h1d', index=1)
+    H.update(np.random.randn(1000), trim=True)
+    H.writeHdf(f, 'h1d', index=2)
+
+with h5py.File('h1d.h5', 'r') as f:
+    H1 = Histogram.fromHdf(f['h1d'])
+    H2 = Histogram.fromHdf(f['h1d'], index=0)
+    H3 = Histogram.fromHdf(f['h1d'], index=1)
+    H4 = Histogram.fromHdf(f['h1d'], index=2)
+
+
+print(H4.summary)
+
+# plt.figure()
+# plt.subplot(211)
+# _ = H1.plot()
+# plt.subplot(212)
+# _ = H4.plot()
+
+plt.show()
+
+
+
Histogram:
+mesh:
+|   RectilinearMesh1D
+|   Number of Cells:
+|   |   100
+|   Cell Centres:
+|   |   StatArray
+|   |   Name:   bins (m)
+|   |   Address:['0x17f6bebd0']
+|   |   Shape:  (100,)
+|   |   Values: [-2.97 -2.91 -2.85 ...  2.85  2.91  2.97]
+|   |   Min:    -2.9699999999999998
+|   |   Max:    2.9699999999999998
+|   |   has_posterior: False
+|
+|   Cell Edges:
+|   |   StatArray
+|   |   Name:   bins (m)
+|   |   Address:['0x17f6bfad0']
+|   |   Shape:  (101,)
+|   |   Values: [-3.   -2.94 -2.88 ...  2.88  2.94  3.  ]
+|   |   Min:    -3.0
+|   |   Max:    3.0
+|   |   has_posterior: False
+|
+|   log:
+|   |   None
+|   relative_to:
+|   |   StatArray
+|   |   Name:
+|   |   Address:['0x17f088dd0']
+|   |   Shape:  (1,)
+|   |   Values: [0.]
+|   |   Min:    0.0
+|   |   Max:    0.0
+|   |   has_posterior: False
+|
+values:
+|   DataArray
+|   Name:   Frequency
+|   Address:['0x17f08a550']
+|   Shape:  (100,)
+|   Values: [2 0 0 ... 0 2 1]
+|   Min:    0
+|   Max:    88
+
+
+

Total running time of the script: (0 minutes 0.972 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Statistics/plot_histogram_2d.html b/docs/examples/Statistics/plot_histogram_2d.html
new file mode 100644
index 00000000..87a6aa0b
--- /dev/null
+++ b/docs/examples/Statistics/plot_histogram_2d.html
@@ -0,0 +1,489 @@
Histogram 2D — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

Histogram 2D

+

This 2D histogram class allows efficient updating of histograms, plotting and +saving as HDF5.

+
import h5py
+import geobipy
+from geobipy import StatArray
+from geobipy import Histogram
+import matplotlib.pyplot as plt
+import matplotlib.gridspec as gridspec
+from geobipy import RectilinearMesh2D
+import numpy as np
+
+
+

Create some histogram bins in x and y

+
x = StatArray(np.linspace(-4.0, 4.0, 100), 'Variable 1')
+y = StatArray(np.linspace(-4.0, 4.0, 105), 'Variable 2')
+
+mesh = RectilinearMesh2D(x_edges=x, y_edges=y)
+
+
+

Instantiate

+
H = Histogram(mesh)
+
+
+

Generate some random numbers

+
a = np.random.randn(1000000)
+b = np.random.randn(1000000)
+
+
+

Update the histogram counts

+
H.update(a, b)
+
+
+
plt.figure()
+plt.subplot(131)
+plt.title("2D Histogram")
+_ = H.plot(cmap='gray_r')
+plt.subplot(132)
+H.pdf.plot(cmap='gray_r')
+plt.subplot(133)
+H.pmf.plot(cmap='gray_r')
+
+
+plt.figure()
+plt.subplot(131)
+H.cdf(axis=0).plot()
+plt.subplot(132)
+H.cdf(axis=1).plot()
+plt.subplot(133)
+H.cdf().plot()
+
+
+
    +
  • 2D Histogram
  • +
  • plot histogram 2d
  • +
+
(<Axes: xlabel='Variable 1', ylabel='Variable 2'>, <matplotlib.collections.QuadMesh object at 0x17ed24a40>, <matplotlib.colorbar.Colorbar object at 0x17f1a32c0>)
+
+
+

We can overlay the histogram with its credible intervals

+
plt.figure()
+plt.title("90% credible intervals overlain")
+H.pcolor(cmap='gray_r')
+H.plotCredibleIntervals(axis=0, percent=95.0)
+_ = H.plotCredibleIntervals(axis=1, percent=95.0)
+
+
+90% credible intervals overlain

Generate marginal histograms along an axis

+
h1 = H.marginalize(axis=0)
+h2 = H.marginalize(axis=1)
+
+
+

Note that the names of the variables are automatically displayed

+
plt.figure()
+plt.suptitle("Marginals along each axis")
+plt.subplot(121)
+h1.plot()
+plt.subplot(122)
+_ = h2.plot()
+
+
+Marginals along each axis

Create a combination plot with marginal histograms.
sphinx_gallery_thumbnail_number = 3

+
plt.figure()
+gs = gridspec.GridSpec(5, 5)
+gs.update(wspace=0.3, hspace=0.3)
+ax = [plt.subplot(gs[1:, :4])]
+H.pcolor(colorbar = False)
+
+ax.append(plt.subplot(gs[:1, :4]))
+h = H.marginalize(axis=0).plot()
+plt.xlabel(''); plt.ylabel('')
+plt.xticks([]); plt.yticks([])
+ax[-1].spines["left"].set_visible(False)
+
+ax.append(plt.subplot(gs[1:, 4:]))
+h = H.marginalize(axis=1).plot(transpose=True)
+plt.ylabel(''); plt.xlabel('')
+plt.yticks([]); plt.xticks([])
+ax[-1].spines["bottom"].set_visible(False)
+
+
+plot histogram 2d

Take the mean or median estimates from the histogram

+
mean = H.mean()
+median = H.median()
+
+
+
plt.figure(figsize=(9.5, 5))
+plt.suptitle("Mean, median, and credible interval overlain")
+ax = plt.subplot(121)
+H.pcolor(cmap='gray_r', colorbar=False)
+H.plotCredibleIntervals(axis=0)
+H.plotMedian(axis=0, color='g')
+H.plotMean(axis=0, color='y')
+plt.legend()
+
+plt.subplot(122, sharex=ax, sharey=ax)
+H.pcolor(cmap='gray_r', colorbar=False)
+H.plotCredibleIntervals(axis=1)
+H.plotMedian(axis=1, color='g')
+H.plotMean(axis=1, color='y')
+plt.legend()
+
+
+Mean, median, and credible interval overlain
<matplotlib.legend.Legend object at 0x182f538f0>
+
+
+

Get the range between credible intervals

+
H.credible_range(percent=95.0)
+
+
+
StatArray([3.47474747, 4.44444444, 3.71717172, ..., 3.47474747,
+           3.47474747, 4.2020202 ])
+
+
+

We can map the credible range to an opacity or transparency

+
H.opacity()
+H.transparency()
+
+# H.animate(0, 'test.mp4')
+
+import h5py
+with h5py.File('h2d.h5', 'w') as f:
+    H.toHdf(f, 'h2d')
+
+with h5py.File('h2d.h5', 'r') as f:
+    H1 = Histogram.fromHdf(f['h2d'])
+
+plt.close('all')
+
+x = StatArray(5.0 + np.linspace(-4.0, 4.0, 100), 'Variable 1')
+y = StatArray(10.0 + np.linspace(-4.0, 4.0, 105), 'Variable 2')
+
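# Assumed interpretation, mirroring the relative_to option on the rectilinear meshes:
# x_relative_to / y_relative_to store the bin edges relative to a reference value,
# here the means (5.0 and 10.0) of the shifted samples used below.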
+mesh = RectilinearMesh2D(x_edges=x, x_relative_to=5.0, y_edges=y, y_relative_to=10.0)
+
+
+

Instantiate

+
H = Histogram(mesh)
+
+
+

Generate some random numbers

+
a = np.random.randn(1000000) + 5.0
+b = np.random.randn(1000000) + 10.0
+
+
+

Update the histogram counts

+
H.update(a, b)
+
+
+
plt.figure()
+plt.subplot(131)
+plt.title("2D Histogram")
+_ = H.plot(cmap='gray_r')
+plt.subplot(132)
+H.pdf.plot(cmap='gray_r')
+plt.subplot(133)
+H.pmf.plot(cmap='gray_r')
+
+plt.figure()
+plt.subplot(131)
+H.cdf(axis=0).plot()
+plt.subplot(132)
+H.cdf(axis=1).plot()
+plt.subplot(133)
+H.cdf().plot()
+
+
+
    +
  • 2D Histogram
  • +
  • plot histogram 2d
  • +
+
(<Axes: xlabel='Variable 1', ylabel='Variable 2'>, <matplotlib.collections.QuadMesh object at 0x182ff9a00>, <matplotlib.colorbar.Colorbar object at 0x181bcaf00>)
+
+
+

We can overlay the histogram with its credible intervals

+
plt.figure()
+plt.title("90% credible intervals overlain")
+H.pcolor(cmap='gray_r')
+H.plotCredibleIntervals(axis=0, percent=95.0)
+_ = H.plotCredibleIntervals(axis=1, percent=95.0)
+
+# Generate marginal histograms along an axis
+h1 = H.marginalize(axis=0)
+h2 = H.marginalize(axis=1)
+
+
+90% credible intervals overlain

Note that the names of the variables are automatically displayed

+
plt.figure()
+plt.suptitle("Marginals along each axis")
+plt.subplot(121)
+h1.plot()
+plt.subplot(122)
+_ = h2.plot()
+
+
+Marginals along each axis

Create a combination plot with marginal histograms.
sphinx_gallery_thumbnail_number = 3

+
plt.figure()
+gs = gridspec.GridSpec(5, 5)
+gs.update(wspace=0.3, hspace=0.3)
+ax = [plt.subplot(gs[1:, :4])]
+H.pcolor(colorbar = False)
+
+ax.append(plt.subplot(gs[:1, :4]))
+h = H.marginalize(axis=0).plot()
+plt.xlabel(''); plt.ylabel('')
+plt.xticks([]); plt.yticks([])
+ax[-1].spines["left"].set_visible(False)
+
+ax.append(plt.subplot(gs[1:, 4:]))
+h = H.marginalize(axis=1).plot(transpose=True)
+plt.ylabel(''); plt.xlabel('')
+plt.yticks([]); plt.xticks([])
+ax[-1].spines["bottom"].set_visible(False)
+
+
+plot histogram 2d

Take the mean or median estimates from the histogram

+
mean = H.mean()
+median = H.median()
+
+
+
plt.figure(figsize=(9.5, 5))
+plt.suptitle("Mean, median, and credible interval overlain")
+ax = plt.subplot(121)
+H.pcolor(cmap='gray_r', colorbar=False)
+H.plotCredibleIntervals(axis=0)
+H.plotMedian(axis=0, color='g')
+H.plotMean(axis=0, color='y')
+plt.legend()
+
+plt.subplot(122, sharex=ax, sharey=ax)
+H.pcolor(cmap='gray_r', colorbar=False)
+H.plotCredibleIntervals(axis=1)
+H.plotMedian(axis=1, color='g')
+H.plotMean(axis=1, color='y')
+plt.legend()
+
+
+Mean, median, and credible interval overlain
<matplotlib.legend.Legend object at 0x181cf6b10>
+
+
+

Get the range between credible intervals

+
H.credible_range(percent=95.0)
+
+
+
StatArray([4.2020202 , 4.44444444, 3.31313131, ..., 2.50505051,
+           4.52525253, 4.12121212])
+
+
+

We can map the credible range to an opacity or transparency

+
H.opacity()
+H.transparency()
+
+# # H.animate(0, 'test.mp4')
+
+with h5py.File('h2d.h5', 'w') as f:
+    H.toHdf(f, 'h2d')
+
+with h5py.File('h2d.h5', 'r') as f:
+    H1 = Histogram.fromHdf(f['h2d'])
+
+plt.figure(figsize=(9.5, 5))
+plt.suptitle("Mean, median, and credible interval overlain")
+ax = plt.subplot(121)
+H1.pcolor(cmap='gray_r', colorbar=False)
+H1.plotCredibleIntervals(axis=0)
+H1.plotMedian(axis=0, color='g')
+H1.plotMean(axis=0, color='y')
+plt.legend()
+
+plt.subplot(122, sharex=ax, sharey=ax)
+H1.pcolor(cmap='gray_r', colorbar=False)
+H1.plotCredibleIntervals(axis=1)
+H1.plotMedian(axis=1, color='g')
+H1.plotMean(axis=1, color='y')
+plt.legend()
+
+with h5py.File('h2d.h5', 'w') as f:
+    H.createHdf(f, 'h2d', add_axis=StatArray(np.arange(3.0), name='Easting', units="m"))
+    for i in range(3):
+        H.writeHdf(f, 'h2d', index=i)
+
+with h5py.File('h2d.h5', 'r') as f:
+    H1 = Histogram.fromHdf(f['h2d'], index=0)
+
+plt.figure(figsize=(9.5, 5))
+plt.suptitle("Mean, median, and credible interval overlain")
+ax = plt.subplot(121)
+H1.pcolor(cmap='gray_r', colorbar=False)
+H1.plotCredibleIntervals(axis=0)
+H1.plotMedian(axis=0, color='g')
+H1.plotMean(axis=0, color='y')
+plt.legend()
+
+plt.subplot(122, sharex=ax, sharey=ax)
+H1.pcolor(cmap='gray_r', colorbar=False)
+H1.plotCredibleIntervals(axis=1)
+H1.plotMedian(axis=1, color='g')
+H1.plotMean(axis=1, color='y')
+plt.legend()
+
+with h5py.File('h2d.h5', 'r') as f:
+    H1 = Histogram.fromHdf(f['h2d'])
+
+# H1.pyvista_mesh().save('h3d_read.vtk')
+
+plt.show()
+
+
+
    +
  • Mean, median, and credible interval overlain
  • +
  • Mean, median, and credible interval overlain
  • +
+

Total running time of the script: (0 minutes 5.012 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Statistics/plot_histogram_3d.html b/docs/examples/Statistics/plot_histogram_3d.html
new file mode 100644
index 00000000..96c1e64a
--- /dev/null
+++ b/docs/examples/Statistics/plot_histogram_3d.html
@@ -0,0 +1,312 @@
Histogram 3D — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ + +
+

Histogram 3D

+

This 3D histogram class allows efficient updating of histograms, plotting and +saving as HDF5.

+
import geobipy
+from geobipy import StatArray
+from geobipy import Histogram
+import matplotlib.pyplot as plt
+from geobipy import RectilinearMesh3D
+import numpy as np
+
+
+

Create some histogram bins in x and y

+
x = StatArray(np.linspace(-4.0, 4.0, 11), 'Variable 1')
+y = StatArray(np.linspace(-4.0, 4.0, 21), 'Variable 2')
+z = StatArray(np.linspace(-4.0, 4.0, 31), 'Variable 3')
+
+mesh = RectilinearMesh3D(x_edges=x, y_edges=y, z_edges=z)
+
+
+

Instantiate

+
H = Histogram(mesh=mesh)
+
+
+

Generate some random numbers

+
a = np.random.randn(100000)
+b = np.random.randn(100000)
+c = np.random.randn(100000)
+# x = np.asarray([a, b, c])
+
+
+

Update the histogram counts

+
H.update(a, b, c)
+
+
+
plt.figure()
+plt.suptitle("Slice half way along each dimension")
+for axis in range(3):
+    plt.subplot(1, 3, axis+1)
+    s = [5 if i  == axis else np.s_[:] for i in range(3)]
+    _ = H[tuple(s)].pcolor(cmap='gray_r')
+
+
+Slice half way along each dimension

Generate marginal histograms along an axis

+
plt.figure()
+plt.suptitle("Marginals along each axis")
+for axis in range(3):
+    plt.subplot(1, 3, axis+1)
+    _ = H.marginalize(axis=axis).plot()
+
+
+Marginals along each axis

Take the mean estimate from the histogram

+
plt.figure()
+plt.suptitle("Mean along each axis")
+for axis in range(3):
+    plt.subplot(1, 3, axis+1)
+    _ = H.mean(axis=axis).pcolor()
+
+
+Mean along each axis

Take the median estimate from the histogram

+
plt.figure()
+plt.suptitle("Median along each axis")
+for axis in range(3):
+    plt.subplot(1, 3, axis+1)
+    _ = H.median(axis=axis).pcolor()
+
+# #%%
+# # We can map the credible range to an opacity or transparency
+# H.opacity()
+# H.transparency()
+
+H.animate(0, 'test.mp4')
+
+H.to_vtk('h3d.vtk')
+
+
+
+
+# Create some histogram bins in x and y
+xx, yy = np.meshgrid(mesh.z.centres, mesh.y.centres)
+x_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "x_re")
+
+xx, yy = np.meshgrid(mesh.z.centres, mesh.x.centres)
+y_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "y_re")
+
+xx, yy = np.meshgrid(mesh.y.centres, mesh.x.centres)
+z_re = StatArray(np.sin(np.sqrt(xx ** 2.0 + yy ** 2.0)), "z_re")
+
+mesh = RectilinearMesh3D(x_edges=x, x_relative_to=x_re, y_edges=y, y_relative_to=y_re, z_edges=z, z_relative_to=z_re)
+
+
+
    +
  • Median along each axis
  • +
  • 3.60
  • +
+

Instantiate

+
H = Histogram(mesh=mesh)
+
+
+

Generate some random numbers

+
a = np.random.randn(100000)
+b = np.random.randn(100000)
+c = np.random.randn(100000)
+# x = np.asarray([a, b, c])
+
+
+

Update the histogram counts

+
H.update(a, b, c)
+
+
+
plt.figure()
+plt.suptitle("Slice half way along each dimension")
+for axis in range(3):
+    plt.subplot(1, 3, axis+1)
+    s = [5 if i  == axis else np.s_[:] for i in range(3)]
+    _ = H[tuple(s)].pcolor(cmap='gray_r')
+
+
+Slice half way along each dimension

Generate marginal histograms along an axis

+
plt.figure()
+plt.suptitle("Marginals along each axis")
+for axis in range(3):
+    plt.subplot(1, 3, axis+1)
+    _ = H.marginalize(axis=axis).plot()
+
+
+Marginals along each axis

Take the mean estimate from the histogram

+
plt.figure()
+plt.suptitle("Mean along each axis")
+for axis in range(3):
+    plt.subplot(1, 3, axis+1)
+    _ = H.mean(axis=axis).pcolor()
+
+
+Mean along each axis

Take the median estimate from the histogram

+
plt.figure()
+plt.suptitle("Median along each axis")
+for axis in range(3):
+    plt.subplot(1, 3, axis+1)
+    _ = H.median(axis=axis).pcolor()
+
+# #%%
+# # We can map the credible range to an opacity or transparency
+# H.opacity()
+# H.transparency()
+
+H.animate(0, 'test.mp4')
+
+plt.show()
+
+# H.to_vtk('h3d.vtk')
+
+
+
    +
  • Median along each axis
  • +
  • 3.60
  • +
+

Total running time of the script: (0 minutes 3.875 seconds)

+ +

Gallery generated by Sphinx-Gallery

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Statistics/readme.html b/docs/examples/Statistics/readme.html
new file mode 100644
index 00000000..2a9bd5b5
--- /dev/null
+++ b/docs/examples/Statistics/readme.html
@@ -0,0 +1,115 @@
Statistics — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Statistics

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/Statistics/sg_execution_times.html b/docs/examples/Statistics/sg_execution_times.html
new file mode 100644
index 00000000..7a631943
--- /dev/null
+++ b/docs/examples/Statistics/sg_execution_times.html
@@ -0,0 +1,159 @@
Computation times — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Computation times

+

00:12.629 total execution time for 5 files from examples/Statistics:

+
Example                               Time       Mem (MB)
Histogram 2D (plot_histogram_2d.py)   00:05.012  0.0
Histogram 3D (plot_histogram_3d.py)   00:03.875  0.0
StatArray Class (plot_StatArray.py)   00:02.767  0.0
Histogram 1D (plot_histogram_1d.py)   00:00.972  0.0
DataArray Class (plot_DataArray.py)   00:00.003  0.0
+
+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/index.html b/docs/examples/index.html
new file mode 100644
index 00000000..a4f9f766
--- /dev/null
+++ b/docs/examples/index.html
@@ -0,0 +1,315 @@
Examples — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Examples

+
+

Data

+
+

3D Point Cloud class

+
3D Point Cloud class
+
+

Frequency domain dataset

+
Frequency domain dataset
+
+

Skytem dataset

+
Skytem dataset
+
+

Tempest dataset

+
Tempest dataset
+
+
+
+
+

Datapoints

+
+

Frequency domain datapoint

+
Frequency domain datapoint
+
+

Skytem Datapoint Class

+
Skytem Datapoint Class
+
+

Tempest Datapoint Class

+
Tempest Datapoint Class
+
+
+
+
+

Distributions

+
+

Distribution Class

+
Distribution Class
+
+
+
+
+

HDF 5

+
+

Using HDF5 within GeoBIPy

+
Using HDF5 within GeoBIPy
+
+
+
+
+

1D Inference

+

There are a couple of ways to run an inference using geobipy; the first is via the command line using

+
geobipy skytem_options.py <output folder>
+
+
+

The other is with a python script similar to the examples in this folder. In both cases, you will need to write an options file (also shown in these examples).

+
+

Running GeoBIPy to invert Resolve data

+
Running GeoBIPy to invert Resolve data
+
+

Running GeoBIPy to invert Skytem data

+
Running GeoBIPy to invert Skytem data
+
+

Running GeoBIPy to invert Tempest data

+
Running GeoBIPy to invert Tempest data
+
+
+
+
+

2D Inference

+
+

2D Posterior analysis of Resolve inference

+
2D Posterior analysis of Resolve inference
+
+

2D Posterior analysis of Skytem inference

+
2D Posterior analysis of Skytem inference
+
+

2D Posterior analysis of Tempest inference

+
2D Posterior analysis of Tempest inference
+
+
+
+
+

Meshes

+
+

1D Rectilinear Mesh

+
1D Rectilinear Mesh
+
+

2D Rectilinear Mesh

+
2D Rectilinear Mesh
+
+

3D Rectilinear Mesh

+
3D Rectilinear Mesh
+
+
+
+
+

Models

+
+

1D Model with an infinite halfspace

+
1D Model with an infinite halfspace
+
+

2D Rectilinear Model

+
2D Rectilinear Model
+
+

3D Rectilinear Model

+
3D Rectilinear Model
+
+
+
+
+

Statistics

+
+

DataArray Class

+
DataArray Class
+
+

Histogram 1D

+
Histogram 1D
+
+

Histogram 2D

+
Histogram 2D
+
+

Histogram 3D

+
Histogram 3D
+
+

StatArray Class

+
StatArray Class
+
+
+ +

Gallery generated by Sphinx-Gallery

+
+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/readme.html b/docs/examples/readme.html
new file mode 100644
index 00000000..118318a6
--- /dev/null
+++ b/docs/examples/readme.html
@@ -0,0 +1,115 @@
Examples — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Examples

+
+ + +
+
+ +
+
+
+
\ No newline at end of file
diff --git a/docs/examples/sg_execution_times.html b/docs/examples/sg_execution_times.html
new file mode 100644
index 00000000..36ae07d8
--- /dev/null
+++ b/docs/examples/sg_execution_times.html
@@ -0,0 +1,143 @@
Computation times — GeoBIPy 2.3.1 documentation
+ + +
+ +
+
+
+ +
+
+
+
+ +
+

Computation times

+

00:00.000 total execution time for 0 files from examples:

+
Example   Time   Mem (MB)
N/A       N/A    N/A
+
+
+ + +
+
+ +
+
+
+
+ + + + \ No newline at end of file diff --git a/docs/genindex.html b/docs/genindex.html index faeb0d8e..f1779d0b 100644 --- a/docs/genindex.html +++ b/docs/genindex.html @@ -1,99 +1,1639 @@ - - - - - - Index — geobipy 2.3.1 documentation - - - - - - - - - - + + + + + Index — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + - - + +
+ + +
+ +
+
+
+
    +
  • + +
  • +
  • +
+
+
+
+
+

Index

+ A + | B + | C + | D + | E + | F + | G + | H + | I + | L + | M + | N + | O + | P + | R + | S + | T + | U + | V + | W + | X + | Y + | Z
+

A

+ + + +
+

B

+ + + +
+

C

+ + + +
+ +

D

+ + + +
+ +

E

+ + + +
+ +

F

+ + + +
+ +

G

+ + + +
    +
  • Gamma (class in geobipy.src.classes.statistics.GammaDistribution) +
  • +
  • generate_subplots() (in module geobipy.src.base.plotting) +
  • +
  • + geobipy.src.base.fileIO + +
  • +
  • + geobipy.src.base.HDF.hdfRead + +
  • +
  • + geobipy.src.base.HDF.hdfWrite + +
  • +
  • + geobipy.src.base.interpolation + +
  • +
  • + geobipy.src.base.MPI + +
  • +
  • + geobipy.src.base.plotting + +
  • +
  • + geobipy.src.base.utilities + +
  • +
  • + geobipy.src.classes.core.myObject + +
  • +
  • + geobipy.src.classes.data.datapoint.DataPoint + +
  • +
  • + geobipy.src.classes.data.datapoint.EmDataPoint + +
  • +
  • + geobipy.src.classes.data.datapoint.FdemDataPoint + +
  • +
  • + geobipy.src.classes.data.datapoint.TdemDataPoint + +
  • +
  • + geobipy.src.classes.data.datapoint.Tempest_datapoint + +
  • +
  • + geobipy.src.classes.data.dataset.Data + +
  • +
  • + geobipy.src.classes.data.dataset.FdemData + +
  • +
  • + geobipy.src.classes.data.dataset.TdemData + +
  • +
  • + geobipy.src.classes.data.dataset.TempestData + +
  • +
  • + geobipy.src.classes.mesh.RectilinearMesh1D + +
  • +
  • + geobipy.src.classes.mesh.RectilinearMesh2D + +
  • +
  • + geobipy.src.classes.mesh.RectilinearMesh2D_stitched + +
  • +
  • + geobipy.src.classes.mesh.RectilinearMesh3D + +
  • +
  • + geobipy.src.classes.model.Model + +
  • +
+

H

+ + + +
+

I

+ + + +
+ +

L

+ + + +
+ +

M

+ + + +
+ +

N

+ + + +
+ +

O

+ + + +
+ +

P

+ + + +
+ +

R

+ + + +
+ +

S

+ + + +
+ +

T

+ + + +
+ +

U

+ + + +
+ +

V

+ + + +
+ +

W

+ + +
+ +

X

+ + + +
+ +

Y

+ + + +
+ +

Z

+ + +
+ + + +
+
+
-
-
- - - - - - + + + + + \ No newline at end of file diff --git a/docs/index.html b/docs/index.html index 9aef5656..89a74f86 100644 --- a/docs/index.html +++ b/docs/index.html @@ -1,107 +1,159 @@ - - - - - - - Welcome to GeoBIPy: Geophysical Bayesian Inference in Python — geobipy 2.3.1 documentation - - - - - - - - - - + + + + + + Welcome to GeoBIPy: Geophysical Bayesian Inference in Python — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + - - + +
+ + +
+ +
+
+
+
    +
  • + +
  • + View page source +
  • +
+
+
+
+
+
-

Welcome to GeoBIPy: Geophysical Bayesian Inference in Python

+

Welcome to GeoBIPy: Geophysical Bayesian Inference in Python

+ This package uses a Bayesian formulation and Markov chain Monte Carlo sampling methods to derive posterior distributions of subsurface and measured data properties. The current implementation is applied to time and frequency domain electromagnetic data. Application outside of these data types is in development.
+ Currently there are two types of data that we have implemented: frequency domain electromagnetic data and time domain electromagnetic data. The package comes with a frequency domain forward modeller, but it does not come with a time domain forward modeller.
\ No newline at end of file
diff --git a/docs/objects.inv b/docs/objects.inv
index f69386b1..7c86c850 100644
Binary files a/docs/objects.inv and b/docs/objects.inv differ
diff --git a/docs/py-modindex.html b/docs/py-modindex.html
new file mode 100644
index 00000000..3898520b
--- /dev/null
+++ b/docs/py-modindex.html
@@ -0,0 +1,306 @@
+ Python Module Index — GeoBIPy 2.3.1 documentation
+ Python Module Index
+ g
+ geobipy
+ geobipy.src.base.fileIO
+ geobipy.src.base.HDF.hdfRead
+ geobipy.src.base.HDF.hdfWrite
+ geobipy.src.base.interpolation
+ geobipy.src.base.MPI
+ geobipy.src.base.plotting
+ geobipy.src.base.utilities
+ geobipy.src.classes.core.myObject
+ geobipy.src.classes.data.datapoint.DataPoint
+ geobipy.src.classes.data.datapoint.EmDataPoint
+ geobipy.src.classes.data.datapoint.FdemDataPoint
+ geobipy.src.classes.data.datapoint.TdemDataPoint
+ geobipy.src.classes.data.datapoint.Tempest_datapoint
+ geobipy.src.classes.data.dataset.Data
+ geobipy.src.classes.data.dataset.FdemData
+ geobipy.src.classes.data.dataset.TdemData
+ geobipy.src.classes.data.dataset.TempestData
+ geobipy.src.classes.mesh.RectilinearMesh1D
+ geobipy.src.classes.mesh.RectilinearMesh2D
+ geobipy.src.classes.mesh.RectilinearMesh2D_stitched
+ geobipy.src.classes.mesh.RectilinearMesh3D
+ geobipy.src.classes.model.Model
+ geobipy.src.classes.pointcloud.Point
+ geobipy.src.classes.statistics.baseDistribution
+ geobipy.src.classes.statistics.Distribution
+ geobipy.src.classes.statistics.GammaDistribution
+ geobipy.src.classes.statistics.Histogram
+ geobipy.src.classes.statistics.MvNormalDistribution
+ geobipy.src.classes.statistics.NormalDistribution
+ geobipy.src.classes.statistics.OrderStatistics
+ geobipy.src.classes.statistics.UniformDistribution
+ geobipy.src.classes.system.CircularLoop
+ geobipy.src.classes.system.EmLoop
+ geobipy.src.classes.system.FdemSystem
+ geobipy.src.classes.system.TdemSystem
\ No newline at end of file
diff --git a/docs/search.html b/docs/search.html
index a3e6c64e..ac8d63b1 100644
--- a/docs/search.html
+++ b/docs/search.html
@@ -1,116 +1,131 @@
- Search — geobipy 2.3.1 documentation
+ Search — GeoBIPy 2.3.1 documentation
+ + + + + - - - + \ No newline at end of file diff --git a/docs/searchindex.js b/docs/searchindex.js index e27e848c..2a3fd940 100644 --- a/docs/searchindex.js +++ b/docs/searchindex.js @@ -1 +1 @@ -Search.setIndex({"alltitles": {"Welcome to GeoBIPy: Geophysical Bayesian Inference in Python": [[0, null]]}, "docnames": ["index"], "envversion": {"sphinx": 62, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2}, "filenames": ["index.rst"], "indexentries": {}, "objects": {}, "objnames": {}, "objtypes": {}, "terms": {"The": 0, "appli": 0, "applic": 0, "ar": 0, "carlo": 0, "chain": 0, "come": 0, "current": 0, "data": 0, "deriv": 0, "develop": 0, "distribut": 0, "doe": 0, "domain": 0, "electromagnet": 0, "formul": 0, "forward": 0, "frequenc": 0, "have": 0, "i": 0, "implement": 0, "markov": 0, "measur": 0, "method": 0, "model": 0, "mont": 0, "outsid": 0, "packag": 0, "posterior": 0, "properti": 0, "sampl": 0, "subsurfac": 0, "thi": 0, "time": 0, "two": 0, "type": 0, "us": 0, "we": 0}, "titles": ["Welcome to GeoBIPy: Geophysical Bayesian Inference in Python"], "titleterms": {"bayesian": 0, "geobipi": 0, "geophys": 0, "infer": 0, "python": 0, "welcom": 0}}) \ No newline at end of file +Search.setIndex({"alltitles": {"1D Inference": [[69, null], [93, "d-inference"]], "1D Model with an infinite halfspace": [[81, null]], "1D Rectilinear Mesh": [[76, null]], "1D example": [[87, "d-example"]], "2D Inference": [[74, null], [93, "id1"]], "2D Posterior analysis of Resolve inference": [[71, null]], "2D Posterior analysis of Skytem inference": [[72, null]], "2D Posterior analysis of Tempest inference": [[73, null]], "2D Rectilinear Mesh": [[77, null]], "2D Rectilinear Model": [[82, null]], "2D example": [[87, "id1"]], "3D Point Cloud class": [[50, null]], "3D Rectilinear Mesh": [[78, null]], "3D Rectilinear Model": [[83, null]], "API": [[0, null]], "Attach a multivariate normal distribution as the prior and proposal": [[87, "attach-a-multivariate-normal-distribution-as-the-prior-and-proposal"]], "Attaching Prior and Proposal Distributions to a StatArray": [[87, "attaching-prior-and-proposal-distributions-to-a-statarray"]], "Attaching a Histogram to capture the posterior distribution": [[87, "attaching-a-histogram-to-capture-the-posterior-distribution"]], "Attaching statistical descriptors to the skytem datapoint": [[56, "attaching-statistical-descriptors-to-the-skytem-datapoint"]], "Attaching statistical descriptors to the tempest datapoint": [[57, "attaching-statistical-descriptors-to-the-tempest-datapoint"]], "Axes in log space": [[77, "axes-in-log-space"]], "Basic manipulation": [[87, "basic-manipulation"]], "Circular Loop": [[42, null]], "Classes used in GeoBIPy": [[8, null]], "Compile the gatdaem1d shared library": [[48, "compile-the-gatdaem1d-shared-library"]], "Computation times": [[54, null], [59, null], [62, null], [65, null], [70, null], [75, null], [80, null], [85, null], [92, null], [95, null], [97, null]], "Core classes": [[10, null]], "Core object class": [[11, null]], "Core routines needed for GeoBIPy": [[3, null]], "Data": [[19, null], [53, null], [93, "data"]], "Data classes": [[12, null]], "DataArray Class": [[86, null]], "DataPoint": [[17, null]], "Datapoint classes": [[18, null]], "Datapoints": [[58, null], [93, "datapoints"]], "Dataset classes": [[23, null]], "Defining data 
using a frequency domain system": [[49, "defining-data-using-a-frequency-domain-system"]], "Distribution Class": [[60, null]], "Distribution Wrapper": [[33, null]], "Distributions": [[61, null], [93, "distributions"]], "EmDataPoint": [[13, null]], "EmLoop": [[43, null]], "Example Header": [[49, "example-header"], [51, "example-header"]], "Examples": [[93, null], [94, null]], "FdemData": [[20, null]], "FdemDataPoint": [[14, null]], "File Format for a frequency domain system": [[49, "file-format-for-a-frequency-domain-system"]], "File Format for a time domain datapoint": [[56, "file-format-for-a-time-domain-datapoint"]], "File Format for a time domain system": [[51, "file-format-for-a-time-domain-system"]], "File Format for frequency domain data": [[49, "file-format-for-frequency-domain-data"]], "File Format for time domain data": [[51, "file-format-for-time-domain-data"]], "Frequency domain datapoint": [[55, null]], "Frequency domain dataset": [[49, null]], "Frequency domain system": [[44, null]], "Gamma Distribution": [[34, null]], "Getting Started": [[47, null]], "HDF 5": [[64, null], [93, "hdf-5"]], "HDF5": [[48, "hdf5"]], "Heirarchical Data Format (HDF)": [[1, null]], "Histogram": [[35, null]], "Histogram 1D": [[88, null]], "Histogram 2D": [[89, null]], "Histogram 3D": [[90, null]], "Histogram Equalization": [[87, "histogram-equalization"]], "Histogram with irregular bins": [[88, "histogram-with-irregular-bins"]], "Histogram with linear space entries that are logged internally": [[88, "histogram-with-linear-space-entries-that-are-logged-internally"]], "Histogram with regular bins": [[88, "histogram-with-regular-bins"]], "Installing GeoBIPy": [[48, null]], "Installing MPI and mpi4py": [[48, "installing-mpi-and-mpi4py"]], "Installing a parallel version of GeoBIPy": [[48, "installing-a-parallel-version-of-geobipy"]], "Installing a serial version of GeoBIPy": [[48, "installing-a-serial-version-of-geobipy"]], "Installing parallel HDF5 and h5py": [[48, "installing-parallel-hdf5-and-h5py"]], "Installing the Python Bindings": [[48, "installing-the-python-bindings"]], "Installing the time domain forward modeller": [[48, "installing-the-time-domain-forward-modeller"]], "Instantiate the 1D Model with a Half Space": [[81, "instantiate-the-1d-model-with-a-half-space"]], "Instantiating a new StatArray class": [[87, "instantiating-a-new-statarray-class"]], "Interpolation": [[5, null]], "Log-space rectilinear mesh": [[76, "log-space-rectilinear-mesh"]], "MPI": [[48, "mpi"]], "MPI wrapper functions": [[2, null]], "Mesh classes": [[28, null]], "Meshes": [[79, null], [93, "meshes"]], "Model": [[29, null]], "Model classes": [[30, null]], "Models": [[84, null], [93, "models"]], "Multivariate Normal Distribution": [[60, "multivariate-normal-distribution"]], "MvNormal": [[36, null]], "Normal distribution": [[37, null]], "Obtain a line from the data set": [[49, "obtain-a-line-from-the-data-set"]], "Optional columns": [[49, "optional-columns"], [51, "optional-columns"]], "Order Statistics": [[38, null]], "Perturbing a model multiple times": [[81, "perturbing-a-model-multiple-times"]], "Plotting": [[87, "plotting"]], "Point": [[31, null]], "Pointcloud classes": [[32, null]], "Prerequisites": [[48, "prerequisites"]], "Randomness and Model Perturbations": [[76, "randomness-and-model-perturbations"], [81, "randomness-and-model-perturbations"]], "Reading in the Data": [[49, "reading-in-the-data"], [51, "reading-in-the-data"], [52, "reading-in-the-data"]], "RectilinearMesh1D": [[24, null]], "RectilinearMesh2D": [[25, 
null]], "RectilinearMesh2D_stitched": [[26, null]], "RectilinearMesh3D": [[27, null]], "Required columns": [[49, "required-columns"], [51, "required-columns"]], "Running GeoBIPy to invert Resolve data": [[66, null]], "Running GeoBIPy to invert Skytem data": [[67, null]], "Running GeoBIPy to invert Tempest data": [[68, null]], "Skytem Datapoint Class": [[56, null]], "Skytem dataset": [[51, null]], "StatArray": [[9, null]], "StatArray Class": [[87, null]], "Statistics": [[91, null], [93, "statistics"]], "Statistics classes": [[41, null]], "System classes": [[46, null]], "TdemData": [[21, null]], "TdemDataPoint": [[15, null]], "Tempest Datapoint Class": [[57, null]], "Tempest dataset": [[52, null]], "TempestData": [[22, null]], "Tempest_datapoint": [[16, null]], "The basics": [[76, "the-basics"]], "Time domain system": [[45, null]], "Uniform distribution": [[39, null]], "Univariate Normal Distribution": [[60, "univariate-normal-distribution"]], "Using GeoBIPy on Yeti": [[96, "using-geobipy-on-yeti"]], "Using HDF5 within GeoBIPy": [[63, null]], "Using a tempest domain datapoint": [[57, "using-a-tempest-domain-datapoint"]], "Using a time domain datapoint": [[56, "using-a-time-domain-datapoint"]], "Welcome to GeoBIPy: Geophysical Bayesian Inference in Python": [[96, null]], "baseDistribution": [[40, null]], "fileIO": [[4, null]], "h5py": [[48, "h5py"]], "mpi4py": [[48, "mpi4py"]], "plotting": [[6, null]], "relative_to": [[76, "relative-to"], [77, "relative-to"]], "utilities": [[7, null]]}, "docnames": ["content/api/api", "content/api/base/HDF", "content/api/base/MPI", "content/api/base/base", "content/api/base/fileIO", "content/api/base/interpolation", "content/api/base/plotting", "content/api/base/utilities", "content/api/classes/classes", "content/api/classes/core/StatArray", "content/api/classes/core/core", "content/api/classes/core/myObject", "content/api/classes/data/data", "content/api/classes/data/datapoint/EmDataPoint", "content/api/classes/data/datapoint/FdemDataPoint", "content/api/classes/data/datapoint/TdemDataPoint", "content/api/classes/data/datapoint/Tempest_dataPoint", "content/api/classes/data/datapoint/datapoint", "content/api/classes/data/datapoint/datapointrst", "content/api/classes/data/dataset/Data", "content/api/classes/data/dataset/FdemData", "content/api/classes/data/dataset/TdemData", "content/api/classes/data/dataset/TempestData", "content/api/classes/data/dataset/dataset", "content/api/classes/mesh/RectilinearMesh1D", "content/api/classes/mesh/RectilinearMesh2D", "content/api/classes/mesh/RectilinearMesh2D_stitched", "content/api/classes/mesh/RectilinearMesh3D", "content/api/classes/mesh/mesh", "content/api/classes/model/Model_", "content/api/classes/model/model", "content/api/classes/pointcloud/Point", "content/api/classes/pointcloud/pointcloud", "content/api/classes/statistics/Distribution", "content/api/classes/statistics/GammaDistribution", "content/api/classes/statistics/Histogram", "content/api/classes/statistics/MvNormalDistribution", "content/api/classes/statistics/NormalDistribution", "content/api/classes/statistics/OrderStatistics", "content/api/classes/statistics/UniformDistribution", "content/api/classes/statistics/baseDistribution", "content/api/classes/statistics/statistics", "content/api/classes/system/CircularLoop", "content/api/classes/system/EmLoop", "content/api/classes/system/FdemSystem", "content/api/classes/system/TdemSystem", "content/api/classes/system/system", "content/getting_started/getting_started", "content/getting_started/installation", 
"examples/Data/plot_frequency_dataset", "examples/Data/plot_pointcloud3d", "examples/Data/plot_skytem_dataset", "examples/Data/plot_tempest_dataset", "examples/Data/readme", "examples/Data/sg_execution_times", "examples/Datapoints/plot_resolve_datapoint", "examples/Datapoints/plot_skytem_datapoint", "examples/Datapoints/plot_tempest_datapoint", "examples/Datapoints/readme", "examples/Datapoints/sg_execution_times", "examples/Distributions/plot_distributions", "examples/Distributions/readme", "examples/Distributions/sg_execution_times", "examples/HDF5/hdf5", "examples/HDF5/readme", "examples/HDF5/sg_execution_times", "examples/Inference_1D/plot_inference_1d_resolve", "examples/Inference_1D/plot_inference_1d_skytem", "examples/Inference_1D/plot_inference_1d_tempest", "examples/Inference_1D/readme", "examples/Inference_1D/sg_execution_times", "examples/Inference_2D/plot_inference_2d_resolve", "examples/Inference_2D/plot_inference_2d_skytem", "examples/Inference_2D/plot_inference_2d_tempest", "examples/Inference_2D/readme", "examples/Inference_2D/sg_execution_times", "examples/Meshes/plot_rectilinear_mesh_1d", "examples/Meshes/plot_rectilinear_mesh_2d", "examples/Meshes/plot_rectilinear_mesh_3d", "examples/Meshes/readme", "examples/Meshes/sg_execution_times", "examples/Models/plot_model_1d", "examples/Models/plot_model_2d", "examples/Models/plot_model_3d", "examples/Models/readme", "examples/Models/sg_execution_times", "examples/Statistics/plot_DataArray", "examples/Statistics/plot_StatArray", "examples/Statistics/plot_histogram_1d", "examples/Statistics/plot_histogram_2d", "examples/Statistics/plot_histogram_3d", "examples/Statistics/readme", "examples/Statistics/sg_execution_times", "examples/index", "examples/readme", "examples/sg_execution_times", "index", "sg_execution_times"], "envversion": {"sphinx": 62, "sphinx.domains.c": 3, "sphinx.domains.changeset": 1, "sphinx.domains.citation": 1, "sphinx.domains.cpp": 9, "sphinx.domains.index": 1, "sphinx.domains.javascript": 3, "sphinx.domains.math": 2, "sphinx.domains.python": 4, "sphinx.domains.rst": 2, "sphinx.domains.std": 2, "sphinx.ext.todo": 2}, "filenames": ["content/api/api.rst", "content/api/base/HDF.rst", "content/api/base/MPI.rst", "content/api/base/base.rst", "content/api/base/fileIO.rst", "content/api/base/interpolation.rst", "content/api/base/plotting.rst", "content/api/base/utilities.rst", "content/api/classes/classes.rst", "content/api/classes/core/StatArray.rst", "content/api/classes/core/core.rst", "content/api/classes/core/myObject.rst", "content/api/classes/data/data.rst", "content/api/classes/data/datapoint/EmDataPoint.rst", "content/api/classes/data/datapoint/FdemDataPoint.rst", "content/api/classes/data/datapoint/TdemDataPoint.rst", "content/api/classes/data/datapoint/Tempest_dataPoint.rst", "content/api/classes/data/datapoint/datapoint.rst", "content/api/classes/data/datapoint/datapointrst.rst", "content/api/classes/data/dataset/Data.rst", "content/api/classes/data/dataset/FdemData.rst", "content/api/classes/data/dataset/TdemData.rst", "content/api/classes/data/dataset/TempestData.rst", "content/api/classes/data/dataset/dataset.rst", "content/api/classes/mesh/RectilinearMesh1D.rst", "content/api/classes/mesh/RectilinearMesh2D.rst", "content/api/classes/mesh/RectilinearMesh2D_stitched.rst", "content/api/classes/mesh/RectilinearMesh3D.rst", "content/api/classes/mesh/mesh.rst", "content/api/classes/model/Model_.rst", "content/api/classes/model/model.rst", "content/api/classes/pointcloud/Point.rst", 
"content/api/classes/pointcloud/pointcloud.rst", "content/api/classes/statistics/Distribution.rst", "content/api/classes/statistics/GammaDistribution.rst", "content/api/classes/statistics/Histogram.rst", "content/api/classes/statistics/MvNormalDistribution.rst", "content/api/classes/statistics/NormalDistribution.rst", "content/api/classes/statistics/OrderStatistics.rst", "content/api/classes/statistics/UniformDistribution.rst", "content/api/classes/statistics/baseDistribution.rst", "content/api/classes/statistics/statistics.rst", "content/api/classes/system/CircularLoop.rst", "content/api/classes/system/EmLoop.rst", "content/api/classes/system/FdemSystem.rst", "content/api/classes/system/TdemSystem.rst", "content/api/classes/system/system.rst", "content/getting_started/getting_started.rst", "content/getting_started/installation.rst", "examples/Data/plot_frequency_dataset.rst", "examples/Data/plot_pointcloud3d.rst", "examples/Data/plot_skytem_dataset.rst", "examples/Data/plot_tempest_dataset.rst", "examples/Data/readme.rst", "examples/Data/sg_execution_times.rst", "examples/Datapoints/plot_resolve_datapoint.rst", "examples/Datapoints/plot_skytem_datapoint.rst", "examples/Datapoints/plot_tempest_datapoint.rst", "examples/Datapoints/readme.rst", "examples/Datapoints/sg_execution_times.rst", "examples/Distributions/plot_distributions.rst", "examples/Distributions/readme.rst", "examples/Distributions/sg_execution_times.rst", "examples/HDF5/hdf5.rst", "examples/HDF5/readme.rst", "examples/HDF5/sg_execution_times.rst", "examples/Inference_1D/plot_inference_1d_resolve.rst", "examples/Inference_1D/plot_inference_1d_skytem.rst", "examples/Inference_1D/plot_inference_1d_tempest.rst", "examples/Inference_1D/readme.rst", "examples/Inference_1D/sg_execution_times.rst", "examples/Inference_2D/plot_inference_2d_resolve.rst", "examples/Inference_2D/plot_inference_2d_skytem.rst", "examples/Inference_2D/plot_inference_2d_tempest.rst", "examples/Inference_2D/readme.rst", "examples/Inference_2D/sg_execution_times.rst", "examples/Meshes/plot_rectilinear_mesh_1d.rst", "examples/Meshes/plot_rectilinear_mesh_2d.rst", "examples/Meshes/plot_rectilinear_mesh_3d.rst", "examples/Meshes/readme.rst", "examples/Meshes/sg_execution_times.rst", "examples/Models/plot_model_1d.rst", "examples/Models/plot_model_2d.rst", "examples/Models/plot_model_3d.rst", "examples/Models/readme.rst", "examples/Models/sg_execution_times.rst", "examples/Statistics/plot_DataArray.rst", "examples/Statistics/plot_StatArray.rst", "examples/Statistics/plot_histogram_1d.rst", "examples/Statistics/plot_histogram_2d.rst", "examples/Statistics/plot_histogram_3d.rst", "examples/Statistics/readme.rst", "examples/Statistics/sg_execution_times.rst", "examples/index.rst", "examples/readme.rst", "examples/sg_execution_times.rst", "index.rst", "sg_execution_times.rst"], "indexentries": {"active (geobipy.src.classes.data.datapoint.datapoint.datapoint attribute)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.active", false]], "active (geobipy.src.classes.data.datapoint.emdatapoint.emdatapoint property)": [[13, "geobipy.src.classes.data.datapoint.EmDataPoint.EmDataPoint.active", false]], "active (geobipy.src.classes.data.dataset.data.data property)": [[19, "geobipy.src.classes.data.dataset.Data.Data.active", false]], "additive_error (geobipy.src.classes.data.dataset.data.data property)": [[19, "geobipy.src.classes.data.dataset.Data.Data.additive_error", false]], "additive_error (geobipy.src.classes.data.dataset.tempestdata.tempestdata 
property)": [[22, "geobipy.src.classes.data.dataset.TempestData.TempestData.additive_error", false]], "addressof (geobipy.src.classes.data.datapoint.datapoint.datapoint property)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.addressof", false]], "addressof (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint property)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.addressof", false]], "addtovtk() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.addToVTK", false]], "append() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.append", false]], "append() (geobipy.src.classes.data.dataset.fdemdata.fdemdata method)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.append", false]], "append() (geobipy.src.classes.data.dataset.tdemdata.tdemdata method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.append", false]], "append() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.append", false]], "append() (geobipy.src.classes.system.circularloop.circularloop method)": [[42, "geobipy.src.classes.system.CircularLoop.CircularLoop.append", false]], "append() (geobipy.src.classes.system.emloop.emloop method)": [[43, "geobipy.src.classes.system.EmLoop.EmLoop.append", false]], "ax() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.Ax", false]], "axis() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.axis", false]], "banner() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.banner", false]], "bar() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.bar", false]], "basedistribution (class in geobipy.src.classes.statistics.basedistribution)": [[40, "geobipy.src.classes.statistics.baseDistribution.baseDistribution", false]], "bcast() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.Bcast", false]], "bcast() (geobipy.src.classes.data.dataset.fdemdata.fdemdata method)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.Bcast", false]], "bcast() (geobipy.src.classes.data.dataset.tdemdata.tdemdata method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.Bcast", false]], "bcast() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.Bcast", false]], "bcast() (geobipy.src.classes.system.circularloop.circularloop method)": [[42, "geobipy.src.classes.system.CircularLoop.CircularLoop.Bcast", false]], "bcast() (geobipy.src.classes.system.fdemsystem.fdemsystem method)": [[44, "geobipy.src.classes.system.FdemSystem.FdemSystem.Bcast", false]], "bcast() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.Bcast", false]], "bcast_1int() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.Bcast_1int", false]], "bcast_list() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.Bcast_list", false]], "bcasttype() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.bcastType", false]], "bins() (geobipy.src.classes.statistics.basedistribution.basedistribution method)": [[40, "geobipy.src.classes.statistics.baseDistribution.baseDistribution.bins", false]], "bins() (geobipy.src.classes.statistics.mvnormaldistribution.mvnormal method)": [[36, "geobipy.src.classes.statistics.MvNormalDistribution.MvNormal.bins", false]], "bins() 
(geobipy.src.classes.statistics.normaldistribution.normal method)": [[37, "geobipy.src.classes.statistics.NormalDistribution.Normal.bins", false]], "bins() (geobipy.src.classes.statistics.uniformdistribution.uniform method)": [[39, "geobipy.src.classes.statistics.UniformDistribution.Uniform.bins", false]], "block_indices() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.block_indices", false]], "block_median() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.block_median", false]], "block_median_indices() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.block_median_indices", false]], "bounds (geobipy.src.classes.pointcloud.point.point property)": [[31, "geobipy.src.classes.pointcloud.Point.Point.bounds", false]], "bytes2readable() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.bytes2readable", false]], "calibrate() (geobipy.src.classes.data.datapoint.fdemdatapoint.fdemdatapoint method)": [[14, "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint.calibrate", false]], "cdf() (geobipy.src.classes.statistics.normaldistribution.normal method)": [[37, "geobipy.src.classes.statistics.NormalDistribution.Normal.cdf", false]], "cdf() (geobipy.src.classes.statistics.uniformdistribution.uniform method)": [[39, "geobipy.src.classes.statistics.UniformDistribution.Uniform.cdf", false]], "cellindex() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.cellIndex", false]], "cellindex() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.cellIndex", false]], "cellindices() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.cellIndices", false]], "cellindices() (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d method)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.cellIndices", false]], "centred_grid_nodes() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.centred_grid_nodes", false]], "centres() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.centres", false]], "centres() (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d method)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.centres", false]], "channel_index() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.channel_index", false]], "circularloop (class in geobipy.src.classes.system.circularloop)": [[42, "geobipy.src.classes.system.CircularLoop.CircularLoop", false]], "clabel() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.clabel", false]], "component_id (geobipy.src.classes.system.fdemsystem.fdemsystem property)": [[44, "geobipy.src.classes.system.FdemSystem.FdemSystem.component_id", false]], "cossin1() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.cosSin1", false]], "createhdf() (geobipy.src.classes.data.datapoint.datapoint.datapoint method)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.createHdf", false]], "createhdf() 
(geobipy.src.classes.data.datapoint.fdemdatapoint.fdemdatapoint method)": [[14, "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint.createHdf", false]], "createhdf() (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint method)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.createHdf", false]], "createhdf() (geobipy.src.classes.data.datapoint.tempest_datapoint.tempest_datapoint method)": [[16, "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint.createHdf", false]], "createhdf() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.createHdf", false]], "createhdf() (geobipy.src.classes.data.dataset.fdemdata.fdemdata method)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.createHdf", false]], "createhdf() (geobipy.src.classes.data.dataset.tdemdata.tdemdata method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.createHdf", false]], "createhdf() (geobipy.src.classes.data.dataset.tempestdata.tempestdata method)": [[22, "geobipy.src.classes.data.dataset.TempestData.TempestData.createHdf", false]], "createhdf() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.createHdf", false]], "createhdf() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.createHdf", false]], "createhdf() (geobipy.src.classes.mesh.rectilinearmesh2d_stitched.rectilinearmesh2d_stitched method)": [[26, "geobipy.src.classes.mesh.RectilinearMesh2D_stitched.RectilinearMesh2D_stitched.createHdf", false]], "createhdf() (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d method)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.createHdf", false]], "createhdf() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.createHdf", false]], "createhdf() (geobipy.src.classes.system.circularloop.circularloop method)": [[42, "geobipy.src.classes.system.CircularLoop.CircularLoop.createHdf", false]], "createhdf() (geobipy.src.classes.system.emloop.emloop method)": [[43, "geobipy.src.classes.system.EmLoop.EmLoop.createHdf", false]], "credible_intervals() (geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.credible_intervals", false]], "credible_range() (geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.credible_range", false]], "data (class in geobipy.src.classes.data.dataset.data)": [[19, "geobipy.src.classes.data.dataset.Data.Data", false]], "data (geobipy.src.classes.data.dataset.data.data property)": [[19, "geobipy.src.classes.data.dataset.Data.Data.data", false]], "data (geobipy.src.classes.data.dataset.tdemdata.tdemdata property)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.data", false]], "data_misfit() (geobipy.src.classes.data.datapoint.datapoint.datapoint method)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.data_misfit", false]], "data_misfit() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.data_misfit", false]], "datapoint (class in geobipy.src.classes.data.datapoint.datapoint)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint", false]], "datapoint() (geobipy.src.classes.data.dataset.data.data method)": 
[[19, "geobipy.src.classes.data.dataset.Data.Data.datapoint", false]], "datapoint() (geobipy.src.classes.data.dataset.fdemdata.fdemdata method)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.datapoint", false]], "datapoint() (geobipy.src.classes.data.dataset.tdemdata.tdemdata method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.datapoint", false]], "deepcopy() (geobipy.src.classes.statistics.basedistribution.basedistribution method)": [[40, "geobipy.src.classes.statistics.baseDistribution.baseDistribution.deepcopy", false]], "delete_edge() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.delete_edge", false]], "deletefile() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.deleteFile", false]], "deltad (geobipy.src.classes.data.datapoint.datapoint.datapoint property)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.deltaD", false]], "deltad (geobipy.src.classes.data.dataset.data.data property)": [[19, "geobipy.src.classes.data.dataset.Data.Data.deltaD", false]], "det() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.Det", false]], "direxists() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.dirExists", false]], "distance (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d property)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.distance", false]], "distribution() (in module geobipy.src.classes.statistics.distribution)": [[33, "geobipy.src.classes.statistics.Distribution.Distribution", false]], "dualmoment() (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint method)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.dualMoment", false]], "edges() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.edges", false]], "edges() (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d method)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.edges", false]], "emdatapoint (class in geobipy.src.classes.data.datapoint.emdatapoint)": [[13, "geobipy.src.classes.data.datapoint.EmDataPoint.EmDataPoint", false]], "emloop (class in geobipy.src.classes.system.emloop)": [[43, "geobipy.src.classes.system.EmLoop.EmLoop", false]], "estimateadditiveerror() (geobipy.src.classes.data.dataset.tdemdata.tdemdata method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.estimateAdditiveError", false]], "expreal() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.expReal", false]], "fdemdata (class in geobipy.src.classes.data.dataset.fdemdata)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData", false]], "fdemdatapoint (class in geobipy.src.classes.data.datapoint.fdemdatapoint)": [[14, "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint", false]], "fdemsystem (class in geobipy.src.classes.system.fdemsystem)": [[44, "geobipy.src.classes.system.FdemSystem.FdemSystem", false]], "fileexists() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.fileExists", false]], "fileinformation() (geobipy.src.classes.data.dataset.fdemdata.fdemdata method)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.fileInformation", false]], "fileinformation() (geobipy.src.classes.data.dataset.tdemdata.tdemdata static method)": [[21, 
"geobipy.src.classes.data.dataset.TdemData.TdemData.fileInformation", false]], "fileinformation() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.fileInformation", false]], "fileinformation() (geobipy.src.classes.system.fdemsystem.fdemsystem method)": [[44, "geobipy.src.classes.system.FdemSystem.FdemSystem.fileInformation", false]], "filesexist() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.filesExist", false]], "find() (in module geobipy.src.base.hdf.hdfread)": [[1, "geobipy.src.base.HDF.hdfRead.find", false]], "find_best_halfspace() (geobipy.src.classes.data.datapoint.emdatapoint.emdatapoint method)": [[13, "geobipy.src.classes.data.datapoint.EmDataPoint.EmDataPoint.find_best_halfspace", false]], "findfirstlastnotvalue() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.findFirstLastNotValue", false]], "findfirstnonzeros() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.findFirstNonZeros", false]], "findlastnonzeros() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.findLastNonZeros", false]], "findnans() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.findNans", false]], "findnotnans() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.findNotNans", false]], "fit_mixture_to_pdf_1d() (geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.fit_mixture_to_pdf_1d", false]], "forward() (geobipy.src.classes.data.datapoint.fdemdatapoint.fdemdatapoint method)": [[14, "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint.forward", false]], "forward() (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint method)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.forward", false]], "frequencies() (geobipy.src.classes.data.datapoint.fdemdatapoint.fdemdatapoint method)": [[14, "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint.frequencies", false]], "fromhdf() (geobipy.src.classes.data.datapoint.datapoint.datapoint class method)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.fromHdf", false]], "fromhdf() (geobipy.src.classes.data.datapoint.fdemdatapoint.fdemdatapoint class method)": [[14, "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint.fromHdf", false]], "fromhdf() (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint class method)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.fromHdf", false]], "fromhdf() (geobipy.src.classes.data.datapoint.tempest_datapoint.tempest_datapoint class method)": [[16, "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint.fromHdf", false]], "fromhdf() (geobipy.src.classes.data.dataset.data.data class method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.fromHdf", false]], "fromhdf() (geobipy.src.classes.data.dataset.fdemdata.fdemdata class method)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.fromHdf", false]], "fromhdf() (geobipy.src.classes.data.dataset.tdemdata.tdemdata class method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.fromHdf", false]], "fromhdf() (geobipy.src.classes.data.dataset.tempestdata.tempestdata class method)": [[22, "geobipy.src.classes.data.dataset.TempestData.TempestData.fromHdf", false]], "fromhdf() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d class method)": [[24, 
"geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.fromHdf", false]], "fromhdf() (geobipy.src.classes.mesh.rectilinearmesh2d_stitched.rectilinearmesh2d_stitched class method)": [[26, "geobipy.src.classes.mesh.RectilinearMesh2D_stitched.RectilinearMesh2D_stitched.fromHdf", false]], "fromhdf() (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d class method)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.fromHdf", false]], "fromhdf() (geobipy.src.classes.model.model.model class method)": [[29, "geobipy.src.classes.model.Model.Model.fromHdf", false]], "fromhdf() (geobipy.src.classes.pointcloud.point.point class method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.fromHdf", false]], "fromhdf() (geobipy.src.classes.statistics.histogram.histogram class method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.fromHdf", false]], "fromhdf() (geobipy.src.classes.system.circularloop.circularloop class method)": [[42, "geobipy.src.classes.system.CircularLoop.CircularLoop.fromHdf", false]], "fromhdf() (geobipy.src.classes.system.emloop.emloop class method)": [[43, "geobipy.src.classes.system.EmLoop.EmLoop.fromHdf", false]], "fromhdf() (geobipy.src.classes.system.fdemsystem.fdemsystem class method)": [[44, "geobipy.src.classes.system.FdemSystem.FdemSystem.fromHdf", false]], "gamma (class in geobipy.src.classes.statistics.gammadistribution)": [[34, "geobipy.src.classes.statistics.GammaDistribution.Gamma", false]], "generate_subplots() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.generate_subplots", false]], "geobipy.src.base.fileio": [[4, "module-geobipy.src.base.fileIO", false]], "geobipy.src.base.hdf.hdfread": [[1, "module-geobipy.src.base.HDF.hdfRead", false]], "geobipy.src.base.hdf.hdfwrite": [[1, "module-geobipy.src.base.HDF.hdfWrite", false]], "geobipy.src.base.interpolation": [[5, "module-geobipy.src.base.interpolation", false]], "geobipy.src.base.mpi": [[2, "module-geobipy.src.base.MPI", false]], "geobipy.src.base.plotting": [[6, "module-geobipy.src.base.plotting", false]], "geobipy.src.base.utilities": [[7, "module-geobipy.src.base.utilities", false]], "geobipy.src.classes.core.myobject": [[11, "module-geobipy.src.classes.core.myObject", false]], "geobipy.src.classes.data.datapoint.datapoint": [[17, "module-geobipy.src.classes.data.datapoint.DataPoint", false]], "geobipy.src.classes.data.datapoint.emdatapoint": [[13, "module-geobipy.src.classes.data.datapoint.EmDataPoint", false]], "geobipy.src.classes.data.datapoint.fdemdatapoint": [[14, "module-geobipy.src.classes.data.datapoint.FdemDataPoint", false]], "geobipy.src.classes.data.datapoint.tdemdatapoint": [[15, "module-geobipy.src.classes.data.datapoint.TdemDataPoint", false]], "geobipy.src.classes.data.datapoint.tempest_datapoint": [[16, "module-geobipy.src.classes.data.datapoint.Tempest_datapoint", false]], "geobipy.src.classes.data.dataset.data": [[19, "module-geobipy.src.classes.data.dataset.Data", false]], "geobipy.src.classes.data.dataset.fdemdata": [[20, "module-geobipy.src.classes.data.dataset.FdemData", false]], "geobipy.src.classes.data.dataset.tdemdata": [[21, "module-geobipy.src.classes.data.dataset.TdemData", false]], "geobipy.src.classes.data.dataset.tempestdata": [[22, "module-geobipy.src.classes.data.dataset.TempestData", false]], "geobipy.src.classes.mesh.rectilinearmesh1d": [[24, "module-geobipy.src.classes.mesh.RectilinearMesh1D", false]], "geobipy.src.classes.mesh.rectilinearmesh2d": [[25, 
"module-geobipy.src.classes.mesh.RectilinearMesh2D", false]], "geobipy.src.classes.mesh.rectilinearmesh2d_stitched": [[26, "module-geobipy.src.classes.mesh.RectilinearMesh2D_stitched", false]], "geobipy.src.classes.mesh.rectilinearmesh3d": [[27, "module-geobipy.src.classes.mesh.RectilinearMesh3D", false]], "geobipy.src.classes.model.model": [[29, "module-geobipy.src.classes.model.Model", false]], "geobipy.src.classes.pointcloud.point": [[31, "module-geobipy.src.classes.pointcloud.Point", false]], "geobipy.src.classes.statistics.basedistribution": [[40, "module-geobipy.src.classes.statistics.baseDistribution", false]], "geobipy.src.classes.statistics.distribution": [[33, "module-geobipy.src.classes.statistics.Distribution", false]], "geobipy.src.classes.statistics.gammadistribution": [[34, "module-geobipy.src.classes.statistics.GammaDistribution", false]], "geobipy.src.classes.statistics.histogram": [[35, "module-geobipy.src.classes.statistics.Histogram", false]], "geobipy.src.classes.statistics.mvnormaldistribution": [[36, "module-geobipy.src.classes.statistics.MvNormalDistribution", false]], "geobipy.src.classes.statistics.normaldistribution": [[37, "module-geobipy.src.classes.statistics.NormalDistribution", false]], "geobipy.src.classes.statistics.orderstatistics": [[38, "module-geobipy.src.classes.statistics.OrderStatistics", false]], "geobipy.src.classes.statistics.uniformdistribution": [[39, "module-geobipy.src.classes.statistics.UniformDistribution", false]], "geobipy.src.classes.system.circularloop": [[42, "module-geobipy.src.classes.system.CircularLoop", false]], "geobipy.src.classes.system.emloop": [[43, "module-geobipy.src.classes.system.EmLoop", false]], "geobipy.src.classes.system.fdemsystem": [[44, "module-geobipy.src.classes.system.FdemSystem", false]], "geobipy.src.classes.system.tdemsystem": [[45, "module-geobipy.src.classes.system.TdemSystem", false]], "get_column_name() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.get_column_name", false]], "get_modellingtimes (geobipy.src.classes.system.tdemsystem.tdemsystem property)": [[45, "geobipy.src.classes.system.TdemSystem.TdemSystem.get_modellingTimes", false]], "get_real_numbers_from_line() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.get_real_numbers_from_line", false]], "getfileextension() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.getFileExtension", false]], "getfilesize() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.getFileSize", false]], "getfrequency() (geobipy.src.classes.data.datapoint.fdemdatapoint.fdemdatapoint method)": [[14, "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint.getFrequency", false]], "getfrequency() (geobipy.src.classes.data.dataset.fdemdata.fdemdata method)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.getFrequency", false]], "getmeasurementtype() (geobipy.src.classes.data.datapoint.fdemdatapoint.fdemdatapoint method)": [[14, "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint.getMeasurementType", false]], "getmeasurementtype() (geobipy.src.classes.data.dataset.fdemdata.fdemdata method)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.getMeasurementType", false]], "getname() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.getName", false]], "getnameunits() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.getNameUnits", false]], "getncolumns() (in module geobipy.src.base.fileio)": [[4, 
"geobipy.src.base.fileIO.getNcolumns", false]], "getnlines() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.getNlines", false]], "getsizeof() (geobipy.src.classes.core.myobject.myobject method)": [[11, "geobipy.src.classes.core.myObject.myObject.getsizeof", false]], "getunits() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.getUnits", false]], "gradient (geobipy.src.classes.model.model.model property)": [[29, "geobipy.src.classes.model.Model.Model.gradient", false]], "gradient() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.gradient", false]], "gradient_probability() (geobipy.src.classes.model.model.model method)": [[29, "geobipy.src.classes.model.Model.Model.gradient_probability", false]], "hassamesize() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.hasSameSize", false]], "hdfname() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.hdfName", false]], "helloworld() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.helloWorld", false]], "hillshade() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.hillshade", false]], "histogram (class in geobipy.src.classes.statistics.histogram)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram", false]], "histogramequalize() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.histogramEqualize", false]], "hlines() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.hlines", false]], "in_bounds() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.in_bounds", false]], "in_bounds() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.in_bounds", false]], "insert_edge() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.insert_edge", false]], "int2str() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.int2str", false]], "interleave() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.interleave", false]], "interpolate() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.interpolate", false]], "intervalstatistic() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.intervalStatistic", false]], "inv() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.Inv", false]], "iplotactive (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint property)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.iplotActive", false]], "irecv() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.Irecv", false]], "irecv_1int() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.Irecv_1int", false]], "irecvfromleft() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.IrecvFromLeft", false]], "irecvfromright() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.IrecvFromRight", false]], "isend() (in module geobipy.src.base.mpi)": [[2, 
"geobipy.src.base.MPI.Isend", false]], "isend_1int() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.Isend_1int", false]], "isendtoleft() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.IsendToLeft", false]], "isendtoright() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.IsendToRight", false]], "isfileextension() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.isFileExtension", false]], "isint() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.isInt", false]], "isintorslice() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.isIntorSlice", false]], "isnumpy() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.isNumpy", false]], "likelihood() (geobipy.src.classes.data.datapoint.datapoint.datapoint method)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.likelihood", false]], "line() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.line", false]], "loadbalance1d_shrinkingarrays() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.loadBalance1D_shrinkingArrays", false]], "loadbalance3d_shrinkingarrays() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.loadBalance3D_shrinkingArrays", false]], "local_inverse_hessian() (geobipy.src.classes.model.model.model method)": [[29, "geobipy.src.classes.model.Model.Model.local_inverse_hessian", false]], "local_precision() (geobipy.src.classes.model.model.model method)": [[29, "geobipy.src.classes.model.Model.Model.local_precision", false]], "local_variance() (geobipy.src.classes.model.model.model method)": [[29, "geobipy.src.classes.model.Model.Model.local_variance", false]], "logdet() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.LogDet", false]], "make_colourmap() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.make_colourmap", false]], "map() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.map", false]], "map_to_pdf() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.map_to_pdf", false]], "mapchannel() (geobipy.src.classes.data.dataset.tdemdata.tdemdata method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.mapChannel", false]], "mapdata() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.mapData", false]], "mappredicteddata() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.mapPredictedData", false]], "mapstd() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.mapStd", false]], "marginalize() (geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.marginalize", false]], "mask_cells() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.mask_cells", false]], "mask_cells() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.mask_cells", false]], "mean() (geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.mean", false]], "median() 
(geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.median", false]], "mergecomplex() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.mergeComplex", false]], "mode() (geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.mode", false]], "model (class in geobipy.src.classes.model.model)": [[29, "geobipy.src.classes.model.Model.Model", false]], "module": [[1, "module-geobipy.src.base.HDF.hdfRead", false], [1, "module-geobipy.src.base.HDF.hdfWrite", false], [2, "module-geobipy.src.base.MPI", false], [4, "module-geobipy.src.base.fileIO", false], [5, "module-geobipy.src.base.interpolation", false], [6, "module-geobipy.src.base.plotting", false], [7, "module-geobipy.src.base.utilities", false], [11, "module-geobipy.src.classes.core.myObject", false], [13, "module-geobipy.src.classes.data.datapoint.EmDataPoint", false], [14, "module-geobipy.src.classes.data.datapoint.FdemDataPoint", false], [15, "module-geobipy.src.classes.data.datapoint.TdemDataPoint", false], [16, "module-geobipy.src.classes.data.datapoint.Tempest_datapoint", false], [17, "module-geobipy.src.classes.data.datapoint.DataPoint", false], [19, "module-geobipy.src.classes.data.dataset.Data", false], [20, "module-geobipy.src.classes.data.dataset.FdemData", false], [21, "module-geobipy.src.classes.data.dataset.TdemData", false], [22, "module-geobipy.src.classes.data.dataset.TempestData", false], [24, "module-geobipy.src.classes.mesh.RectilinearMesh1D", false], [25, "module-geobipy.src.classes.mesh.RectilinearMesh2D", false], [26, "module-geobipy.src.classes.mesh.RectilinearMesh2D_stitched", false], [27, "module-geobipy.src.classes.mesh.RectilinearMesh3D", false], [29, "module-geobipy.src.classes.model.Model", false], [31, "module-geobipy.src.classes.pointcloud.Point", false], [33, "module-geobipy.src.classes.statistics.Distribution", false], [34, "module-geobipy.src.classes.statistics.GammaDistribution", false], [35, "module-geobipy.src.classes.statistics.Histogram", false], [36, "module-geobipy.src.classes.statistics.MvNormalDistribution", false], [37, "module-geobipy.src.classes.statistics.NormalDistribution", false], [38, "module-geobipy.src.classes.statistics.OrderStatistics", false], [39, "module-geobipy.src.classes.statistics.UniformDistribution", false], [40, "module-geobipy.src.classes.statistics.baseDistribution", false], [42, "module-geobipy.src.classes.system.CircularLoop", false], [43, "module-geobipy.src.classes.system.EmLoop", false], [44, "module-geobipy.src.classes.system.FdemSystem", false], [45, "module-geobipy.src.classes.system.TdemSystem", false]], "moment (geobipy.src.classes.statistics.basedistribution.basedistribution property)": [[40, "geobipy.src.classes.statistics.baseDistribution.baseDistribution.moment", false]], "move() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.move", false]], "mvnormal (class in geobipy.src.classes.statistics.mvnormaldistribution)": [[36, "geobipy.src.classes.statistics.MvNormalDistribution.MvNormal", false]], "myobject (class in geobipy.src.classes.core.myobject)": [[11, "geobipy.src.classes.core.myObject.myObject", false]], "nactivedata (geobipy.src.classes.data.dataset.fdemdata.fdemdata property)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.nActiveData", false]], "ncells (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d property)": [[25, 
"geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.nCells", false]], "ncells (geobipy.src.classes.mesh.rectilinearmesh2d_stitched.rectilinearmesh2d_stitched property)": [[26, "geobipy.src.classes.mesh.RectilinearMesh2D_stitched.RectilinearMesh2D_stitched.nCells", false]], "ncells (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d property)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.nCells", false]], "ndim (geobipy.src.classes.statistics.basedistribution.basedistribution property)": [[40, "geobipy.src.classes.statistics.baseDistribution.baseDistribution.ndim", false]], "ndim (geobipy.src.classes.statistics.mvnormaldistribution.mvnormal property)": [[36, "geobipy.src.classes.statistics.MvNormalDistribution.MvNormal.ndim", false]], "ndim (geobipy.src.classes.statistics.normaldistribution.normal property)": [[37, "geobipy.src.classes.statistics.NormalDistribution.Normal.ndim", false]], "ndim (geobipy.src.classes.statistics.uniformdistribution.uniform property)": [[39, "geobipy.src.classes.statistics.UniformDistribution.Uniform.ndim", false]], "nearest() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.nearest", false]], "nnodes (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d property)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.nNodes", false]], "nnodes (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d property)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.nNodes", false]], "nodes (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d property)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.nodes", false]], "normal (class in geobipy.src.classes.statistics.normaldistribution)": [[37, "geobipy.src.classes.statistics.NormalDistribution.Normal", false]], "npoints (geobipy.src.classes.data.dataset.tdemdata.tdemdata property)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.nPoints", false]], "npoints (geobipy.src.classes.pointcloud.point.point property)": [[31, "geobipy.src.classes.pointcloud.Point.Point.nPoints", false]], "npointsperline() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.nPointsPerLine", false]], "off_time (geobipy.src.classes.system.tdemsystem.tdemsystem property)": [[45, "geobipy.src.classes.system.TdemSystem.TdemSystem.off_time", false]], "off_time() (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint method)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.off_time", false]], "off_time() (geobipy.src.classes.data.dataset.tdemdata.tdemdata method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.off_time", false]], "opacity() (geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.opacity", false]], "opacity_level() (geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.opacity_level", false]], "order (class in geobipy.src.classes.statistics.orderstatistics)": [[38, "geobipy.src.classes.statistics.OrderStatistics.Order", false]], "ordered_print() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.ordered_print", false]], "pad() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.pad", false]], "pad() 
(geobipy.src.classes.model.model.model method)": [[29, "geobipy.src.classes.model.Model.Model.pad", false]], "pad() (geobipy.src.classes.statistics.mvnormaldistribution.mvnormal method)": [[36, "geobipy.src.classes.statistics.MvNormalDistribution.MvNormal.pad", false]], "parsestring() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.parseString", false]], "pause() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.pause", false]], "pcolor() (geobipy.src.classes.data.dataset.tdemdata.tdemdata method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.pcolor", false]], "pcolor() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.pcolor", false]], "pcolor() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.pcolor", false]], "pcolor() (geobipy.src.classes.mesh.rectilinearmesh2d_stitched.rectilinearmesh2d_stitched method)": [[26, "geobipy.src.classes.mesh.RectilinearMesh2D_stitched.RectilinearMesh2D_stitched.pcolor", false]], "pcolor() (geobipy.src.classes.model.model.model method)": [[29, "geobipy.src.classes.model.Model.Model.pcolor", false]], "pcolor() (geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.pcolor", false]], "pcolor() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.pcolor", false]], "pcolor_1d() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.pcolor_1D", false]], "pcolor_as_bar() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.pcolor_as_bar", false]], "pdf() (geobipy.src.classes.statistics.gammadistribution.gamma method)": [[34, "geobipy.src.classes.statistics.GammaDistribution.Gamma.pdf", false]], "percentile() (geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.percentile", false]], "perturb() (geobipy.src.classes.data.datapoint.datapoint.datapoint method)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.perturb", false]], "perturb() (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint method)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.perturb", false]], "perturb() (geobipy.src.classes.data.datapoint.tempest_datapoint.tempest_datapoint method)": [[16, "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint.perturb", false]], "perturb() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.perturb", false]], "perturb() (geobipy.src.classes.model.model.model method)": [[29, "geobipy.src.classes.model.Model.Model.perturb", false]], "perturb() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.perturb", false]], "perturb() (geobipy.src.classes.system.emloop.emloop method)": [[43, "geobipy.src.classes.system.EmLoop.EmLoop.perturb", false]], "piecewise_constant_interpolate() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.piecewise_constant_interpolate", false]], "plot() (geobipy.src.classes.data.datapoint.fdemdatapoint.fdemdatapoint method)": [[14, "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint.plot", false]], "plot() 
(geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint method)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.plot", false]], "plot() (geobipy.src.classes.data.datapoint.tempest_datapoint.tempest_datapoint method)": [[16, "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint.plot", false]], "plot() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.plot", false]], "plot() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.plot", false]], "plot() (geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.plot", false]], "plot() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.plot", false]], "plot_data() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.plot_data", false]], "plot_data() (geobipy.src.classes.data.dataset.fdemdata.fdemdata method)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.plot_data", false]], "plot_data() (geobipy.src.classes.data.dataset.tdemdata.tdemdata method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.plot_data", false]], "plot_data() (geobipy.src.classes.data.dataset.tempestdata.tempestdata method)": [[22, "geobipy.src.classes.data.dataset.TempestData.TempestData.plot_data", false]], "plot_grid() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.plot_grid", false]], "plot_grid() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.plot_grid", false]], "plot_grid() (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d method)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.plot_grid", false]], "plot_halfspace_responses() (geobipy.src.classes.data.datapoint.emdatapoint.emdatapoint method)": [[13, "geobipy.src.classes.data.datapoint.EmDataPoint.EmDataPoint.plot_halfspace_responses", false]], "plot_predicted() (geobipy.src.classes.data.datapoint.fdemdatapoint.fdemdatapoint method)": [[14, "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint.plot_predicted", false]], "plot_predicted() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.plot_predicted", false]], "plot_predicted() (geobipy.src.classes.data.dataset.tdemdata.tdemdata method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.plot_predicted", false]], "plot_predicted() (geobipy.src.classes.data.dataset.tempestdata.tempestdata method)": [[22, "geobipy.src.classes.data.dataset.TempestData.TempestData.plot_predicted", false]], "plot_relative_to() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.plot_relative_to", false]], "plotline() (geobipy.src.classes.data.dataset.fdemdata.fdemdata method)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.plotLine", false]], "point (class in geobipy.src.classes.pointcloud.point)": [[31, "geobipy.src.classes.pointcloud.Point.Point", false]], "predicted_primary_field (geobipy.src.classes.data.dataset.tdemdata.tdemdata property)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.predicted_primary_field", false]], "predicted_secondary_field 
(geobipy.src.classes.data.dataset.tdemdata.tdemdata property)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.predicted_secondary_field", false]], "predicteddata (geobipy.src.classes.data.datapoint.datapoint.datapoint property)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.predictedData", false]], "predicteddata (geobipy.src.classes.data.datapoint.emdatapoint.emdatapoint property)": [[13, "geobipy.src.classes.data.datapoint.EmDataPoint.EmDataPoint.predictedData", false]], "predicteddata (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint property)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.predictedData", false]], "predicteddata (geobipy.src.classes.data.datapoint.tempest_datapoint.tempest_datapoint property)": [[16, "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint.predictedData", false]], "predicteddata (geobipy.src.classes.data.dataset.data.data property)": [[19, "geobipy.src.classes.data.dataset.Data.Data.predictedData", false]], "pretty() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.pretty", false]], "primary_field (geobipy.src.classes.data.dataset.tdemdata.tdemdata property)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.primary_field", false]], "print() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.print", false]], "probability (geobipy.src.classes.data.datapoint.datapoint.datapoint property)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.probability", false]], "probability (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint property)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.probability", false]], "probability (geobipy.src.classes.data.datapoint.tempest_datapoint.tempest_datapoint property)": [[16, "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint.probability", false]], "probability (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d property)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.probability", false]], "probability (geobipy.src.classes.pointcloud.point.point property)": [[31, "geobipy.src.classes.pointcloud.Point.Point.probability", false]], "probability (geobipy.src.classes.system.emloop.emloop property)": [[43, "geobipy.src.classes.system.EmLoop.EmLoop.probability", false]], "probability() (geobipy.src.classes.model.model.model method)": [[29, "geobipy.src.classes.model.Model.Model.probability", false]], "probability() (geobipy.src.classes.statistics.mvnormaldistribution.mvnormal method)": [[36, "geobipy.src.classes.statistics.MvNormalDistribution.MvNormal.probability", false]], "probability() (geobipy.src.classes.statistics.normaldistribution.normal method)": [[37, "geobipy.src.classes.statistics.NormalDistribution.Normal.probability", false]], "proposal_probabilities() (geobipy.src.classes.model.model.model method)": [[29, "geobipy.src.classes.model.Model.Model.proposal_probabilities", false]], "pyvista_mesh() (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d method)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.pyvista_mesh", false]], "range (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d property)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.range", false]], "rankprint() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.rankPrint", false]], "ravelindices() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d 
method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.ravelIndices", false]], "ravelindices() (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d method)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.ravelIndices", false]], "read() (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint method)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.read", false]], "read() (geobipy.src.classes.system.fdemsystem.fdemsystem class method)": [[44, "geobipy.src.classes.system.FdemSystem.FdemSystem.read", false]], "read_all() (in module geobipy.src.base.hdf.hdfread)": [[1, "geobipy.src.base.HDF.hdfRead.read_all", false]], "read_csv() (geobipy.src.classes.data.dataset.data.data class method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.read_csv", false]], "read_csv() (geobipy.src.classes.data.dataset.fdemdata.fdemdata class method)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.read_csv", false]], "read_csv() (geobipy.src.classes.data.dataset.tdemdata.tdemdata class method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.read_csv", false]], "read_csv() (geobipy.src.classes.data.dataset.tempestdata.tempestdata class method)": [[22, "geobipy.src.classes.data.dataset.TempestData.TempestData.read_csv", false]], "read_csv() (geobipy.src.classes.pointcloud.point.point class method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.read_csv", false]], "read_groups_with_tag() (in module geobipy.src.base.hdf.hdfread)": [[1, "geobipy.src.base.HDF.hdfRead.read_groups_with_tag", false]], "read_item() (in module geobipy.src.base.hdf.hdfread)": [[1, "geobipy.src.base.HDF.hdfRead.read_item", false]], "read_netcdf() (geobipy.src.classes.data.dataset.tempestdata.tempestdata class method)": [[22, "geobipy.src.classes.data.dataset.TempestData.TempestData.read_netcdf", false]], "readaarhusfile() (geobipy.src.classes.data.dataset.fdemdata.fdemdata method)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.readAarhusFile", false]], "readkeyfromfile() (in module geobipy.src.base.hdf.hdfread)": [[1, "geobipy.src.base.HDF.hdfRead.readKeyFromFile", false]], "readkeyfromfiles() (in module geobipy.src.base.hdf.hdfread)": [[1, "geobipy.src.base.HDF.hdfRead.readKeyFromFiles", false]], "rectilinearmesh1d (class in geobipy.src.classes.mesh.rectilinearmesh1d)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D", false]], "rectilinearmesh2d (class in geobipy.src.classes.mesh.rectilinearmesh2d)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D", false]], "rectilinearmesh2d_stitched (class in geobipy.src.classes.mesh.rectilinearmesh2d_stitched)": [[26, "geobipy.src.classes.mesh.RectilinearMesh2D_stitched.RectilinearMesh2D_stitched", false]], "rectilinearmesh3d (class in geobipy.src.classes.mesh.rectilinearmesh3d)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D", false]], "relative_error (geobipy.src.classes.data.dataset.data.data property)": [[19, "geobipy.src.classes.data.dataset.Data.Data.relative_error", false]], "relative_error (geobipy.src.classes.data.dataset.tempestdata.tempestdata property)": [[22, "geobipy.src.classes.data.dataset.TempestData.TempestData.relative_error", false]], "rng() (geobipy.src.classes.statistics.normaldistribution.normal method)": [[37, "geobipy.src.classes.statistics.NormalDistribution.Normal.rng", false]], "rosenbrock() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.rosenbrock", false]], 
"sample() (geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.sample", false]], "scatter2d() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.scatter2D", false]], "scatter2d() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.scatter2D", false]], "scatterv() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.Scatterv", false]], "scatterv() (geobipy.src.classes.data.dataset.fdemdata.fdemdata method)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.Scatterv", false]], "scatterv() (geobipy.src.classes.data.dataset.tdemdata.tdemdata method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.Scatterv", false]], "scatterv() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.Scatterv", false]], "scatterv() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.Scatterv", false]], "scatterv_list() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.Scatterv_list", false]], "scatterv_numpy() (in module geobipy.src.base.mpi)": [[2, "geobipy.src.base.MPI.Scatterv_numpy", false]], "secondary_field (geobipy.src.classes.data.dataset.tdemdata.tdemdata property)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.secondary_field", false]], "sensitivity() (geobipy.src.classes.data.datapoint.fdemdatapoint.fdemdatapoint method)": [[14, "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint.sensitivity", false]], "sensitivity() (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint method)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.sensitivity", false]], "set_additive_error_posterior() (geobipy.src.classes.data.datapoint.datapoint.datapoint method)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.set_additive_error_posterior", false]], "set_additive_error_posterior() (geobipy.src.classes.data.datapoint.tempest_datapoint.tempest_datapoint method)": [[16, "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint.set_additive_error_posterior", false]], "set_kdtree() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.set_kdtree", false]], "set_pitch_posterior() (geobipy.src.classes.system.emloop.emloop method)": [[43, "geobipy.src.classes.system.EmLoop.EmLoop.set_pitch_posterior", false]], "set_posteriors() (geobipy.src.classes.data.datapoint.datapoint.datapoint method)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.set_posteriors", false]], "set_posteriors() (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint method)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.set_posteriors", false]], "set_posteriors() (geobipy.src.classes.data.datapoint.tempest_datapoint.tempest_datapoint method)": [[16, "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint.set_posteriors", false]], "set_priors() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.set_priors", false]], "set_priors() (geobipy.src.classes.model.model.model method)": [[29, "geobipy.src.classes.model.Model.Model.set_priors", false]], "set_proposals() (geobipy.src.classes.data.datapoint.datapoint.datapoint method)": [[17, 
"geobipy.src.classes.data.datapoint.DataPoint.DataPoint.set_proposals", false]], "set_proposals() (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint method)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.set_proposals", false]], "set_proposals() (geobipy.src.classes.data.datapoint.tempest_datapoint.tempest_datapoint method)": [[16, "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint.set_proposals", false]], "set_proposals() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.set_proposals", false]], "set_proposals() (geobipy.src.classes.model.model.model method)": [[29, "geobipy.src.classes.model.Model.Model.set_proposals", false]], "set_relative_error_posterior() (geobipy.src.classes.data.datapoint.datapoint.datapoint method)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.set_relative_error_posterior", false]], "set_relative_error_posterior() (geobipy.src.classes.data.datapoint.tempest_datapoint.tempest_datapoint method)": [[16, "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint.set_relative_error_posterior", false]], "set_roll_posterior() (geobipy.src.classes.system.emloop.emloop method)": [[43, "geobipy.src.classes.system.EmLoop.EmLoop.set_roll_posterior", false]], "set_x_posterior() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.set_x_posterior", false]], "set_y_posterior() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.set_y_posterior", false]], "set_yaw_posterior() (geobipy.src.classes.system.emloop.emloop method)": [[43, "geobipy.src.classes.system.EmLoop.EmLoop.set_yaw_posterior", false]], "set_z_posterior() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.set_z_posterior", false]], "setalphaperpcolormeshpixel() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.setAlphaPerPcolormeshPixel", false]], "shape (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d property)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.shape", false]], "shape (geobipy.src.classes.mesh.rectilinearmesh2d_stitched.rectilinearmesh2d_stitched property)": [[26, "geobipy.src.classes.mesh.RectilinearMesh2D_stitched.RectilinearMesh2D_stitched.shape", false]], "shape (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d property)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.shape", false]], "single (geobipy.src.classes.data.dataset.fdemdata.fdemdata attribute)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.single", false]], "single (geobipy.src.classes.data.dataset.tdemdata.tdemdata attribute)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.single", false]], "single (geobipy.src.classes.data.dataset.tempestdata.tempestdata attribute)": [[22, "geobipy.src.classes.data.dataset.TempestData.TempestData.single", false]], "sizelegend() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.sizeLegend", false]], "smooth() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.smooth", false]], "splitcomplex() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.splitComplex", false]], "stackplot2d() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.stackplot2D", false]], "std 
(geobipy.src.classes.data.datapoint.datapoint.datapoint property)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.std", false]], "std (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint property)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.std", false]], "std (geobipy.src.classes.data.datapoint.tempest_datapoint.tempest_datapoint property)": [[16, "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint.std", false]], "std (geobipy.src.classes.data.dataset.data.data property)": [[19, "geobipy.src.classes.data.dataset.Data.Data.std", false]], "std (geobipy.src.classes.data.dataset.fdemdata.fdemdata property)": [[20, "geobipy.src.classes.data.dataset.FdemData.FdemData.std", false]], "std (geobipy.src.classes.data.dataset.tdemdata.tdemdata property)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.std", false]], "step() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.step", false]], "str_to_raw() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.str_to_raw", false]], "summary (geobipy.src.classes.data.datapoint.datapoint.datapoint property)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.summary", false]], "summary (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint property)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.summary", false]], "summary (geobipy.src.classes.data.dataset.data.data property)": [[19, "geobipy.src.classes.data.dataset.Data.Data.summary", false]], "summary (geobipy.src.classes.data.dataset.tdemdata.tdemdata property)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.summary", false]], "summary (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d property)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.summary", false]], "summary (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d property)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.summary", false]], "summary (geobipy.src.classes.mesh.rectilinearmesh2d_stitched.rectilinearmesh2d_stitched property)": [[26, "geobipy.src.classes.mesh.RectilinearMesh2D_stitched.RectilinearMesh2D_stitched.summary", false]], "summary (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d property)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.summary", false]], "summary (geobipy.src.classes.model.model.model property)": [[29, "geobipy.src.classes.model.Model.Model.summary", false]], "summary (geobipy.src.classes.pointcloud.point.point property)": [[31, "geobipy.src.classes.pointcloud.Point.Point.summary", false]], "summary (geobipy.src.classes.system.circularloop.circularloop property)": [[42, "geobipy.src.classes.system.CircularLoop.CircularLoop.summary", false]], "summary (geobipy.src.classes.system.emloop.emloop property)": [[43, "geobipy.src.classes.system.EmLoop.EmLoop.summary", false]], "summary (geobipy.src.classes.system.fdemsystem.fdemsystem property)": [[44, "geobipy.src.classes.system.FdemSystem.FdemSystem.summary", false]], "tanh() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.tanh", false]], "tdemdata (class in geobipy.src.classes.data.dataset.tdemdata)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData", false]], "tdemdatapoint (class in geobipy.src.classes.data.datapoint.tdemdatapoint)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint", false]], "tdemsystem (class in 
geobipy.src.classes.system.tdemsystem)": [[45, "geobipy.src.classes.system.TdemSystem.TdemSystem", false]], "tempest_datapoint (class in geobipy.src.classes.data.datapoint.tempest_datapoint)": [[16, "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint", false]], "tempestdata (class in geobipy.src.classes.data.dataset.tempestdata)": [[22, "geobipy.src.classes.data.dataset.TempestData.TempestData", false]], "tensor_id (geobipy.src.classes.system.fdemsystem.fdemsystem property)": [[44, "geobipy.src.classes.system.FdemSystem.FdemSystem.tensor_id", false]], "tohdf() (geobipy.src.classes.core.myobject.myobject method)": [[11, "geobipy.src.classes.core.myObject.myObject.toHdf", false]], "tohdf() (geobipy.src.classes.system.fdemsystem.fdemsystem method)": [[44, "geobipy.src.classes.system.FdemSystem.FdemSystem.toHdf", false]], "tovtk() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.toVTK", false]], "transparency() (geobipy.src.classes.statistics.histogram.histogram method)": [[35, "geobipy.src.classes.statistics.Histogram.Histogram.transparency", false]], "trim_by_percentile() (in module geobipy.src.base.utilities)": [[7, "geobipy.src.base.utilities.trim_by_percentile", false]], "uniform (class in geobipy.src.classes.statistics.uniformdistribution)": [[39, "geobipy.src.classes.statistics.UniformDistribution.Uniform", false]], "unperturb() (geobipy.src.classes.mesh.rectilinearmesh1d.rectilinearmesh1d method)": [[24, "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D.unperturb", false]], "unravelindex() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.unravelIndex", false]], "unravelindex() (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d method)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.unravelIndex", false]], "update_parameter_posterior() (geobipy.src.classes.model.model.model method)": [[29, "geobipy.src.classes.model.Model.Model.update_parameter_posterior", false]], "update_posteriors() (geobipy.src.classes.data.datapoint.emdatapoint.emdatapoint method)": [[13, "geobipy.src.classes.data.datapoint.EmDataPoint.EmDataPoint.update_posteriors", false]], "update_posteriors() (geobipy.src.classes.data.datapoint.fdemdatapoint.fdemdatapoint method)": [[14, "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint.update_posteriors", false]], "update_posteriors() (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint method)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.update_posteriors", false]], "update_posteriors() (geobipy.src.classes.model.model.model method)": [[29, "geobipy.src.classes.model.Model.Model.update_posteriors", false]], "updatesensitivity() (geobipy.src.classes.data.datapoint.fdemdatapoint.fdemdatapoint method)": [[14, "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint.updateSensitivity", false]], "vlines() (in module geobipy.src.base.plotting)": [[6, "geobipy.src.base.plotting.vlines", false]], "vtkstructure() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.vtkStructure", false]], "wccount() (in module geobipy.src.base.fileio)": [[4, "geobipy.src.base.fileIO.wccount", false]], "write_nd() (in module geobipy.src.base.hdf.hdfwrite)": [[1, "geobipy.src.base.HDF.hdfWrite.write_nd", false]], "writehdf() (geobipy.src.classes.data.datapoint.datapoint.datapoint 
method)": [[17, "geobipy.src.classes.data.datapoint.DataPoint.DataPoint.writeHdf", false]], "writehdf() (geobipy.src.classes.data.datapoint.tdemdatapoint.tdemdatapoint method)": [[15, "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint.writeHdf", false]], "writehdf() (geobipy.src.classes.data.datapoint.tempest_datapoint.tempest_datapoint method)": [[16, "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint.writeHdf", false]], "writehdf() (geobipy.src.classes.data.dataset.data.data method)": [[19, "geobipy.src.classes.data.dataset.Data.Data.writeHdf", false]], "writehdf() (geobipy.src.classes.data.dataset.tdemdata.tdemdata method)": [[21, "geobipy.src.classes.data.dataset.TdemData.TdemData.writeHdf", false]], "writehdf() (geobipy.src.classes.data.dataset.tempestdata.tempestdata method)": [[22, "geobipy.src.classes.data.dataset.TempestData.TempestData.writeHdf", false]], "writehdf() (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d method)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.writeHdf", false]], "writehdf() (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d method)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.writeHdf", false]], "writehdf() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.writeHdf", false]], "writehdf() (geobipy.src.classes.system.circularloop.circularloop method)": [[42, "geobipy.src.classes.system.CircularLoop.CircularLoop.writeHdf", false]], "writehdf() (geobipy.src.classes.system.emloop.emloop method)": [[43, "geobipy.src.classes.system.EmLoop.EmLoop.writeHdf", false]], "x_axis() (geobipy.src.classes.pointcloud.point.point method)": [[31, "geobipy.src.classes.pointcloud.Point.Point.x_axis", false]], "x_centres (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d property)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.x_centres", false]], "x_centres (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d property)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.x_centres", false]], "x_edges (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d property)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.x_edges", false]], "x_edges (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d property)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.x_edges", false]], "xrange() (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d method)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.xRange", false]], "y_centres (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d property)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.y_centres", false]], "y_centres (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d property)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.y_centres", false]], "y_edges (geobipy.src.classes.mesh.rectilinearmesh2d.rectilinearmesh2d property)": [[25, "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D.y_edges", false]], "y_edges (geobipy.src.classes.mesh.rectilinearmesh2d_stitched.rectilinearmesh2d_stitched property)": [[26, "geobipy.src.classes.mesh.RectilinearMesh2D_stitched.RectilinearMesh2D_stitched.y_edges", false]], "y_edges (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d property)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.y_edges", 
false]], "zrange() (geobipy.src.classes.mesh.rectilinearmesh3d.rectilinearmesh3d method)": [[27, "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D.zRange", false]]}, "objects": {"geobipy.src.base": [[2, 0, 0, "-", "MPI"], [4, 0, 0, "-", "fileIO"], [5, 0, 0, "-", "interpolation"], [6, 0, 0, "-", "plotting"], [7, 0, 0, "-", "utilities"]], "geobipy.src.base.HDF": [[1, 0, 0, "-", "hdfRead"], [1, 0, 0, "-", "hdfWrite"]], "geobipy.src.base.HDF.hdfRead": [[1, 1, 1, "", "find"], [1, 1, 1, "", "readKeyFromFile"], [1, 1, 1, "", "readKeyFromFiles"], [1, 1, 1, "", "read_all"], [1, 1, 1, "", "read_groups_with_tag"], [1, 1, 1, "", "read_item"]], "geobipy.src.base.HDF.hdfWrite": [[1, 1, 1, "", "write_nd"]], "geobipy.src.base.MPI": [[2, 1, 1, "", "Bcast"], [2, 1, 1, "", "Bcast_1int"], [2, 1, 1, "", "Bcast_list"], [2, 1, 1, "", "Irecv"], [2, 1, 1, "", "IrecvFromLeft"], [2, 1, 1, "", "IrecvFromRight"], [2, 1, 1, "", "Irecv_1int"], [2, 1, 1, "", "Isend"], [2, 1, 1, "", "IsendToLeft"], [2, 1, 1, "", "IsendToRight"], [2, 1, 1, "", "Isend_1int"], [2, 1, 1, "", "Scatterv"], [2, 1, 1, "", "Scatterv_list"], [2, 1, 1, "", "Scatterv_numpy"], [2, 1, 1, "", "banner"], [2, 1, 1, "", "bcastType"], [2, 1, 1, "", "helloWorld"], [2, 1, 1, "", "loadBalance1D_shrinkingArrays"], [2, 1, 1, "", "loadBalance3D_shrinkingArrays"], [2, 1, 1, "", "ordered_print"], [2, 1, 1, "", "print"], [2, 1, 1, "", "rankPrint"]], "geobipy.src.base.fileIO": [[4, 1, 1, "", "bytes2readable"], [4, 1, 1, "", "deleteFile"], [4, 1, 1, "", "dirExists"], [4, 1, 1, "", "fileExists"], [4, 1, 1, "", "filesExist"], [4, 1, 1, "", "getFileExtension"], [4, 1, 1, "", "getFileSize"], [4, 1, 1, "", "getNcolumns"], [4, 1, 1, "", "getNlines"], [4, 1, 1, "", "get_column_name"], [4, 1, 1, "", "get_real_numbers_from_line"], [4, 1, 1, "", "int2str"], [4, 1, 1, "", "isFileExtension"], [4, 1, 1, "", "parseString"], [4, 1, 1, "", "wccount"]], "geobipy.src.base.plotting": [[6, 1, 1, "", "bar"], [6, 1, 1, "", "clabel"], [6, 1, 1, "", "generate_subplots"], [6, 1, 1, "", "hillshade"], [6, 1, 1, "", "hlines"], [6, 1, 1, "", "make_colourmap"], [6, 1, 1, "", "pause"], [6, 1, 1, "", "pcolor"], [6, 1, 1, "", "pcolor_1D"], [6, 1, 1, "", "pcolor_as_bar"], [6, 1, 1, "", "plot"], [6, 1, 1, "", "pretty"], [6, 1, 1, "", "scatter2D"], [6, 1, 1, "", "setAlphaPerPcolormeshPixel"], [6, 1, 1, "", "sizeLegend"], [6, 1, 1, "", "stackplot2D"], [6, 1, 1, "", "step"], [6, 1, 1, "", "vlines"]], "geobipy.src.base.utilities": [[7, 1, 1, "", "Ax"], [7, 1, 1, "", "Det"], [7, 1, 1, "", "Inv"], [7, 1, 1, "", "LogDet"], [7, 1, 1, "", "cosSin1"], [7, 1, 1, "", "expReal"], [7, 1, 1, "", "findFirstLastNotValue"], [7, 1, 1, "", "findFirstNonZeros"], [7, 1, 1, "", "findLastNonZeros"], [7, 1, 1, "", "findNans"], [7, 1, 1, "", "findNotNans"], [7, 1, 1, "", "getName"], [7, 1, 1, "", "getNameUnits"], [7, 1, 1, "", "getUnits"], [7, 1, 1, "", "histogramEqualize"], [7, 1, 1, "", "interleave"], [7, 1, 1, "", "isInt"], [7, 1, 1, "", "isIntorSlice"], [7, 1, 1, "", "isNumpy"], [7, 1, 1, "", "mergeComplex"], [7, 1, 1, "", "rosenbrock"], [7, 1, 1, "", "smooth"], [7, 1, 1, "", "splitComplex"], [7, 1, 1, "", "str_to_raw"], [7, 1, 1, "", "tanh"], [7, 1, 1, "", "trim_by_percentile"]], "geobipy.src.classes.core": [[11, 0, 0, "-", "myObject"]], "geobipy.src.classes.core.myObject": [[11, 2, 1, "", "myObject"]], "geobipy.src.classes.core.myObject.myObject": [[11, 3, 1, "", "getsizeof"], [11, 3, 1, "", "toHdf"]], "geobipy.src.classes.data.datapoint": [[17, 0, 0, "-", "DataPoint"], [13, 0, 0, "-", "EmDataPoint"], [14, 0, 
0, "-", "FdemDataPoint"], [15, 0, 0, "-", "TdemDataPoint"], [16, 0, 0, "-", "Tempest_datapoint"]], "geobipy.src.classes.data.datapoint.DataPoint": [[17, 2, 1, "", "DataPoint"]], "geobipy.src.classes.data.datapoint.DataPoint.DataPoint": [[17, 4, 1, "", "active"], [17, 5, 1, "", "addressof"], [17, 3, 1, "", "createHdf"], [17, 3, 1, "", "data_misfit"], [17, 5, 1, "", "deltaD"], [17, 3, 1, "", "fromHdf"], [17, 3, 1, "", "likelihood"], [17, 3, 1, "", "perturb"], [17, 5, 1, "", "predictedData"], [17, 5, 1, "", "probability"], [17, 3, 1, "", "set_additive_error_posterior"], [17, 3, 1, "", "set_posteriors"], [17, 3, 1, "", "set_proposals"], [17, 3, 1, "", "set_relative_error_posterior"], [17, 5, 1, "", "std"], [17, 5, 1, "", "summary"], [17, 3, 1, "", "writeHdf"]], "geobipy.src.classes.data.datapoint.EmDataPoint": [[13, 2, 1, "", "EmDataPoint"]], "geobipy.src.classes.data.datapoint.EmDataPoint.EmDataPoint": [[13, 5, 1, "", "active"], [13, 3, 1, "", "find_best_halfspace"], [13, 3, 1, "", "plot_halfspace_responses"], [13, 5, 1, "", "predictedData"], [13, 3, 1, "", "update_posteriors"]], "geobipy.src.classes.data.datapoint.FdemDataPoint": [[14, 2, 1, "", "FdemDataPoint"]], "geobipy.src.classes.data.datapoint.FdemDataPoint.FdemDataPoint": [[14, 3, 1, "", "calibrate"], [14, 3, 1, "", "createHdf"], [14, 3, 1, "", "forward"], [14, 3, 1, "", "frequencies"], [14, 3, 1, "", "fromHdf"], [14, 3, 1, "", "getFrequency"], [14, 3, 1, "", "getMeasurementType"], [14, 3, 1, "", "plot"], [14, 3, 1, "", "plot_predicted"], [14, 3, 1, "", "sensitivity"], [14, 3, 1, "", "updateSensitivity"], [14, 3, 1, "", "update_posteriors"]], "geobipy.src.classes.data.datapoint.TdemDataPoint": [[15, 2, 1, "", "TdemDataPoint"]], "geobipy.src.classes.data.datapoint.TdemDataPoint.TdemDataPoint": [[15, 5, 1, "", "addressof"], [15, 3, 1, "", "createHdf"], [15, 3, 1, "", "dualMoment"], [15, 3, 1, "", "forward"], [15, 3, 1, "", "fromHdf"], [15, 5, 1, "", "iplotActive"], [15, 3, 1, "", "off_time"], [15, 3, 1, "", "perturb"], [15, 3, 1, "", "plot"], [15, 5, 1, "", "predictedData"], [15, 5, 1, "", "probability"], [15, 3, 1, "", "read"], [15, 3, 1, "", "sensitivity"], [15, 3, 1, "", "set_posteriors"], [15, 3, 1, "", "set_proposals"], [15, 5, 1, "", "std"], [15, 5, 1, "", "summary"], [15, 3, 1, "", "update_posteriors"], [15, 3, 1, "", "writeHdf"]], "geobipy.src.classes.data.datapoint.Tempest_datapoint": [[16, 2, 1, "", "Tempest_datapoint"]], "geobipy.src.classes.data.datapoint.Tempest_datapoint.Tempest_datapoint": [[16, 3, 1, "", "createHdf"], [16, 3, 1, "", "fromHdf"], [16, 3, 1, "", "perturb"], [16, 3, 1, "", "plot"], [16, 5, 1, "", "predictedData"], [16, 5, 1, "", "probability"], [16, 3, 1, "", "set_additive_error_posterior"], [16, 3, 1, "", "set_posteriors"], [16, 3, 1, "", "set_proposals"], [16, 3, 1, "", "set_relative_error_posterior"], [16, 5, 1, "", "std"], [16, 3, 1, "", "writeHdf"]], "geobipy.src.classes.data.dataset": [[19, 0, 0, "-", "Data"], [20, 0, 0, "-", "FdemData"], [21, 0, 0, "-", "TdemData"], [22, 0, 0, "-", "TempestData"]], "geobipy.src.classes.data.dataset.Data": [[19, 2, 1, "", "Data"]], "geobipy.src.classes.data.dataset.Data.Data": [[19, 3, 1, "", "Bcast"], [19, 3, 1, "", "Scatterv"], [19, 5, 1, "", "active"], [19, 3, 1, "", "addToVTK"], [19, 5, 1, "", "additive_error"], [19, 3, 1, "", "append"], [19, 3, 1, "", "channel_index"], [19, 3, 1, "", "createHdf"], [19, 5, 1, "", "data"], [19, 3, 1, "", "data_misfit"], [19, 3, 1, "", "datapoint"], [19, 5, 1, "", "deltaD"], [19, 3, 1, "", "fromHdf"], [19, 3, 1, "", "line"], [19, 3, 
1, "", "mapData"], [19, 3, 1, "", "mapPredictedData"], [19, 3, 1, "", "mapStd"], [19, 3, 1, "", "nPointsPerLine"], [19, 3, 1, "", "plot_data"], [19, 3, 1, "", "plot_predicted"], [19, 5, 1, "", "predictedData"], [19, 3, 1, "", "read_csv"], [19, 5, 1, "", "relative_error"], [19, 5, 1, "", "std"], [19, 5, 1, "", "summary"], [19, 3, 1, "", "writeHdf"]], "geobipy.src.classes.data.dataset.FdemData": [[20, 2, 1, "", "FdemData"]], "geobipy.src.classes.data.dataset.FdemData.FdemData": [[20, 3, 1, "", "Bcast"], [20, 3, 1, "", "Scatterv"], [20, 3, 1, "", "append"], [20, 3, 1, "", "createHdf"], [20, 3, 1, "", "datapoint"], [20, 3, 1, "", "fileInformation"], [20, 3, 1, "", "fromHdf"], [20, 3, 1, "", "getFrequency"], [20, 3, 1, "", "getMeasurementType"], [20, 5, 1, "", "nActiveData"], [20, 3, 1, "", "plotLine"], [20, 3, 1, "", "plot_data"], [20, 3, 1, "", "readAarhusFile"], [20, 3, 1, "", "read_csv"], [20, 4, 1, "", "single"], [20, 5, 1, "", "std"]], "geobipy.src.classes.data.dataset.TdemData": [[21, 2, 1, "", "TdemData"]], "geobipy.src.classes.data.dataset.TdemData.TdemData": [[21, 3, 1, "", "Bcast"], [21, 3, 1, "", "Scatterv"], [21, 3, 1, "", "append"], [21, 3, 1, "", "createHdf"], [21, 5, 1, "", "data"], [21, 3, 1, "", "datapoint"], [21, 3, 1, "", "estimateAdditiveError"], [21, 3, 1, "", "fileInformation"], [21, 3, 1, "", "fromHdf"], [21, 3, 1, "", "mapChannel"], [21, 5, 1, "", "nPoints"], [21, 3, 1, "", "off_time"], [21, 3, 1, "", "pcolor"], [21, 3, 1, "", "plot_data"], [21, 3, 1, "", "plot_predicted"], [21, 5, 1, "", "predicted_primary_field"], [21, 5, 1, "", "predicted_secondary_field"], [21, 5, 1, "", "primary_field"], [21, 3, 1, "", "read_csv"], [21, 5, 1, "", "secondary_field"], [21, 4, 1, "", "single"], [21, 5, 1, "", "std"], [21, 5, 1, "", "summary"], [21, 3, 1, "", "writeHdf"]], "geobipy.src.classes.data.dataset.TempestData": [[22, 2, 1, "", "TempestData"]], "geobipy.src.classes.data.dataset.TempestData.TempestData": [[22, 5, 1, "", "additive_error"], [22, 3, 1, "", "createHdf"], [22, 3, 1, "", "fromHdf"], [22, 3, 1, "", "plot_data"], [22, 3, 1, "", "plot_predicted"], [22, 3, 1, "", "read_csv"], [22, 3, 1, "", "read_netcdf"], [22, 5, 1, "", "relative_error"], [22, 4, 1, "", "single"], [22, 3, 1, "", "writeHdf"]], "geobipy.src.classes.mesh": [[24, 0, 0, "-", "RectilinearMesh1D"], [25, 0, 0, "-", "RectilinearMesh2D"], [26, 0, 0, "-", "RectilinearMesh2D_stitched"], [27, 0, 0, "-", "RectilinearMesh3D"]], "geobipy.src.classes.mesh.RectilinearMesh1D": [[24, 2, 1, "", "RectilinearMesh1D"]], "geobipy.src.classes.mesh.RectilinearMesh1D.RectilinearMesh1D": [[24, 3, 1, "", "cellIndex"], [24, 3, 1, "", "createHdf"], [24, 3, 1, "", "delete_edge"], [24, 3, 1, "", "fromHdf"], [24, 3, 1, "", "gradient"], [24, 3, 1, "", "hdfName"], [24, 3, 1, "", "in_bounds"], [24, 3, 1, "", "insert_edge"], [24, 3, 1, "", "map_to_pdf"], [24, 3, 1, "", "mask_cells"], [24, 3, 1, "", "pad"], [24, 3, 1, "", "pcolor"], [24, 3, 1, "", "perturb"], [24, 3, 1, "", "piecewise_constant_interpolate"], [24, 3, 1, "", "plot"], [24, 3, 1, "", "plot_grid"], [24, 5, 1, "", "probability"], [24, 5, 1, "", "range"], [24, 3, 1, "", "set_priors"], [24, 3, 1, "", "set_proposals"], [24, 5, 1, "", "summary"], [24, 3, 1, "", "unperturb"]], "geobipy.src.classes.mesh.RectilinearMesh2D": [[25, 2, 1, "", "RectilinearMesh2D"]], "geobipy.src.classes.mesh.RectilinearMesh2D.RectilinearMesh2D": [[25, 3, 1, "", "cellIndex"], [25, 3, 1, "", "cellIndices"], [25, 3, 1, "", "centres"], [25, 3, 1, "", "createHdf"], [25, 5, 1, "", "distance"], [25, 3, 1, "", 
"edges"], [25, 3, 1, "", "hasSameSize"], [25, 3, 1, "", "in_bounds"], [25, 3, 1, "", "intervalStatistic"], [25, 3, 1, "", "mask_cells"], [25, 5, 1, "", "nCells"], [25, 5, 1, "", "nNodes"], [25, 5, 1, "", "nodes"], [25, 3, 1, "", "pcolor"], [25, 3, 1, "", "plot_grid"], [25, 3, 1, "", "plot_relative_to"], [25, 3, 1, "", "ravelIndices"], [25, 5, 1, "", "shape"], [25, 5, 1, "", "summary"], [25, 3, 1, "", "unravelIndex"], [25, 3, 1, "", "writeHdf"], [25, 5, 1, "", "x_centres"], [25, 5, 1, "", "x_edges"], [25, 5, 1, "", "y_centres"], [25, 5, 1, "", "y_edges"]], "geobipy.src.classes.mesh.RectilinearMesh2D_stitched": [[26, 2, 1, "", "RectilinearMesh2D_stitched"]], "geobipy.src.classes.mesh.RectilinearMesh2D_stitched.RectilinearMesh2D_stitched": [[26, 3, 1, "", "createHdf"], [26, 3, 1, "", "fromHdf"], [26, 5, 1, "", "nCells"], [26, 3, 1, "", "pcolor"], [26, 5, 1, "", "shape"], [26, 5, 1, "", "summary"], [26, 5, 1, "", "y_edges"]], "geobipy.src.classes.mesh.RectilinearMesh3D": [[27, 2, 1, "", "RectilinearMesh3D"]], "geobipy.src.classes.mesh.RectilinearMesh3D.RectilinearMesh3D": [[27, 3, 1, "", "cellIndices"], [27, 3, 1, "", "centres"], [27, 3, 1, "", "createHdf"], [27, 3, 1, "", "edges"], [27, 3, 1, "", "fromHdf"], [27, 5, 1, "", "nCells"], [27, 5, 1, "", "nNodes"], [27, 3, 1, "", "plot_grid"], [27, 3, 1, "", "pyvista_mesh"], [27, 3, 1, "", "ravelIndices"], [27, 5, 1, "", "shape"], [27, 5, 1, "", "summary"], [27, 3, 1, "", "unravelIndex"], [27, 3, 1, "", "writeHdf"], [27, 3, 1, "", "xRange"], [27, 5, 1, "", "x_centres"], [27, 5, 1, "", "x_edges"], [27, 5, 1, "", "y_centres"], [27, 5, 1, "", "y_edges"], [27, 3, 1, "", "zRange"]], "geobipy.src.classes.model": [[29, 0, 0, "-", "Model"]], "geobipy.src.classes.model.Model": [[29, 2, 1, "", "Model"]], "geobipy.src.classes.model.Model.Model": [[29, 3, 1, "", "fromHdf"], [29, 5, 1, "", "gradient"], [29, 3, 1, "", "gradient_probability"], [29, 3, 1, "", "local_inverse_hessian"], [29, 3, 1, "", "local_precision"], [29, 3, 1, "", "local_variance"], [29, 3, 1, "", "pad"], [29, 3, 1, "", "pcolor"], [29, 3, 1, "", "perturb"], [29, 3, 1, "", "probability"], [29, 3, 1, "", "proposal_probabilities"], [29, 3, 1, "", "set_priors"], [29, 3, 1, "", "set_proposals"], [29, 5, 1, "", "summary"], [29, 3, 1, "", "update_parameter_posterior"], [29, 3, 1, "", "update_posteriors"]], "geobipy.src.classes.pointcloud": [[31, 0, 0, "-", "Point"]], "geobipy.src.classes.pointcloud.Point": [[31, 2, 1, "", "Point"]], "geobipy.src.classes.pointcloud.Point.Point": [[31, 3, 1, "", "Bcast"], [31, 3, 1, "", "Scatterv"], [31, 3, 1, "", "append"], [31, 3, 1, "", "axis"], [31, 3, 1, "", "block_indices"], [31, 3, 1, "", "block_median"], [31, 3, 1, "", "block_median_indices"], [31, 5, 1, "", "bounds"], [31, 3, 1, "", "centred_grid_nodes"], [31, 3, 1, "", "createHdf"], [31, 3, 1, "", "fileInformation"], [31, 3, 1, "", "fromHdf"], [31, 3, 1, "", "interpolate"], [31, 3, 1, "", "map"], [31, 3, 1, "", "move"], [31, 5, 1, "", "nPoints"], [31, 3, 1, "", "nearest"], [31, 3, 1, "", "perturb"], [31, 3, 1, "", "plot"], [31, 5, 1, "", "probability"], [31, 3, 1, "", "read_csv"], [31, 3, 1, "", "scatter2D"], [31, 3, 1, "", "set_kdtree"], [31, 3, 1, "", "set_x_posterior"], [31, 3, 1, "", "set_y_posterior"], [31, 3, 1, "", "set_z_posterior"], [31, 5, 1, "", "summary"], [31, 3, 1, "", "toVTK"], [31, 3, 1, "", "vtkStructure"], [31, 3, 1, "", "writeHdf"], [31, 3, 1, "", "x_axis"]], "geobipy.src.classes.statistics": [[33, 0, 0, "-", "Distribution"], [34, 0, 0, "-", "GammaDistribution"], [35, 0, 0, "-", 
"Histogram"], [36, 0, 0, "-", "MvNormalDistribution"], [37, 0, 0, "-", "NormalDistribution"], [38, 0, 0, "-", "OrderStatistics"], [39, 0, 0, "-", "UniformDistribution"], [40, 0, 0, "-", "baseDistribution"]], "geobipy.src.classes.statistics.Distribution": [[33, 1, 1, "", "Distribution"]], "geobipy.src.classes.statistics.GammaDistribution": [[34, 2, 1, "", "Gamma"]], "geobipy.src.classes.statistics.GammaDistribution.Gamma": [[34, 3, 1, "", "pdf"]], "geobipy.src.classes.statistics.Histogram": [[35, 2, 1, "", "Histogram"]], "geobipy.src.classes.statistics.Histogram.Histogram": [[35, 3, 1, "", "credible_intervals"], [35, 3, 1, "", "credible_range"], [35, 3, 1, "", "fit_mixture_to_pdf_1d"], [35, 3, 1, "", "fromHdf"], [35, 3, 1, "", "marginalize"], [35, 3, 1, "", "mean"], [35, 3, 1, "", "median"], [35, 3, 1, "", "mode"], [35, 3, 1, "", "opacity"], [35, 3, 1, "", "opacity_level"], [35, 3, 1, "", "pcolor"], [35, 3, 1, "", "percentile"], [35, 3, 1, "", "plot"], [35, 3, 1, "", "sample"], [35, 3, 1, "", "transparency"]], "geobipy.src.classes.statistics.MvNormalDistribution": [[36, 2, 1, "", "MvNormal"]], "geobipy.src.classes.statistics.MvNormalDistribution.MvNormal": [[36, 3, 1, "", "bins"], [36, 5, 1, "", "ndim"], [36, 3, 1, "", "pad"], [36, 3, 1, "", "probability"]], "geobipy.src.classes.statistics.NormalDistribution": [[37, 2, 1, "", "Normal"]], "geobipy.src.classes.statistics.NormalDistribution.Normal": [[37, 3, 1, "", "bins"], [37, 3, 1, "", "cdf"], [37, 5, 1, "", "ndim"], [37, 3, 1, "", "probability"], [37, 3, 1, "", "rng"]], "geobipy.src.classes.statistics.OrderStatistics": [[38, 2, 1, "", "Order"]], "geobipy.src.classes.statistics.UniformDistribution": [[39, 2, 1, "", "Uniform"]], "geobipy.src.classes.statistics.UniformDistribution.Uniform": [[39, 3, 1, "", "bins"], [39, 3, 1, "", "cdf"], [39, 5, 1, "", "ndim"]], "geobipy.src.classes.statistics.baseDistribution": [[40, 2, 1, "", "baseDistribution"]], "geobipy.src.classes.statistics.baseDistribution.baseDistribution": [[40, 3, 1, "", "bins"], [40, 3, 1, "", "deepcopy"], [40, 5, 1, "", "moment"], [40, 5, 1, "", "ndim"]], "geobipy.src.classes.system": [[42, 0, 0, "-", "CircularLoop"], [43, 0, 0, "-", "EmLoop"], [44, 0, 0, "-", "FdemSystem"], [45, 0, 0, "-", "TdemSystem"]], "geobipy.src.classes.system.CircularLoop": [[42, 2, 1, "", "CircularLoop"]], "geobipy.src.classes.system.CircularLoop.CircularLoop": [[42, 3, 1, "", "Bcast"], [42, 3, 1, "", "append"], [42, 3, 1, "", "createHdf"], [42, 3, 1, "", "fromHdf"], [42, 5, 1, "", "summary"], [42, 3, 1, "", "writeHdf"]], "geobipy.src.classes.system.EmLoop": [[43, 2, 1, "", "EmLoop"]], "geobipy.src.classes.system.EmLoop.EmLoop": [[43, 3, 1, "", "append"], [43, 3, 1, "", "createHdf"], [43, 3, 1, "", "fromHdf"], [43, 3, 1, "", "perturb"], [43, 5, 1, "", "probability"], [43, 3, 1, "", "set_pitch_posterior"], [43, 3, 1, "", "set_roll_posterior"], [43, 3, 1, "", "set_yaw_posterior"], [43, 5, 1, "", "summary"], [43, 3, 1, "", "writeHdf"]], "geobipy.src.classes.system.FdemSystem": [[44, 2, 1, "", "FdemSystem"]], "geobipy.src.classes.system.FdemSystem.FdemSystem": [[44, 3, 1, "", "Bcast"], [44, 5, 1, "", "component_id"], [44, 3, 1, "", "fileInformation"], [44, 3, 1, "", "fromHdf"], [44, 3, 1, "", "read"], [44, 5, 1, "", "summary"], [44, 5, 1, "", "tensor_id"], [44, 3, 1, "", "toHdf"]], "geobipy.src.classes.system.TdemSystem": [[45, 2, 1, "", "TdemSystem"]], "geobipy.src.classes.system.TdemSystem.TdemSystem": [[45, 5, 1, "", "get_modellingTimes"], [45, 5, 1, "", "off_time"]]}, "objnames": {"0": ["py", "module", 
"Python module"], "1": ["py", "function", "Python function"], "2": ["py", "class", "Python class"], "3": ["py", "method", "Python method"], "4": ["py", "attribute", "Python attribute"], "5": ["py", "property", "Python property"]}, "objtypes": {"0": "py:module", "1": "py:function", "2": "py:class", "3": "py:method", "4": "py:attribute", "5": "py:property"}, "terms": {"": [1, 2, 6, 7, 15, 16, 17, 24, 25, 29, 36, 48, 49, 50, 51, 55, 56, 57, 66, 67, 68, 72, 81, 87, 90], "0": [2, 4, 6, 7, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 36, 37, 39, 42, 44, 49, 50, 51, 52, 54, 55, 56, 57, 59, 60, 62, 63, 65, 66, 67, 68, 70, 71, 72, 73, 75, 76, 77, 78, 80, 81, 82, 83, 85, 86, 87, 88, 89, 90, 92, 95, 97], "00": [51, 54, 59, 62, 65, 66, 67, 68, 70, 75, 80, 85, 92, 95, 97], "000": [65, 95, 97], "000000": 6, "000035": 6, "0000a6": 6, "0001": 13, "0003": 4, "0004901610032814771": 72, "00052266": 72, "00056777": 72, "00056795": 72, "00059446": 72, "00071041": 72, "0007661982574834428": 72, "00080071": 72, "0008600086897131668": 72, "001": [57, 81], "00104502": 72, "00120333": 72, "001283": 57, "00131732": 72, "001405": 57, "00167267": 72, "001754": 57, "001e09": 6, "002038": 57, "002178": 57, "002201": 57, "002460": 57, "00255047": 72, "00257118": 86, "002571182431510025": 86, "002587": 57, "002627": 57, "00269725": 72, "002775": 57, "00279718": 72, "0029654": 72, "003": [86, 92, 97], "003046": 57, "003095": 57, "003122": 57, "003161": 57, "003213": 57, "003247": 57, "003352": 57, "003426": 57, "003539": 57, "003659": 57, "00385047": 72, "00392961": 72, "004168": 57, "004477": 57, "004742": 57, "00489c": 6, "004d43": 6, "00507188": 72, "00511314": 72, "005154": 57, "005178": 57, "005693": 57, "005922638381206899": 72, "006": 66, "006775009348709323": 72, "006fa6": 6, "007383": 57, "007485002271267226": 72, "00844038": 72, "00846f": 6, "008507": 57, "00854703": 72, "00857136": 72, "0086ed": 6, "00870505": 72, "008941": 6, "00894549": 72, "0089a3": 6, "00908742": 72, "00924302": 72, "00925526": 72, "00948505": 72, "0098209": 72, "00983342": 72, "00999299": 72, "00a6aa": 6, "00c2a0": 6, "00e": 51, "00fcfd": 6, "00fecf": 6, "01": [49, 50, 51, 55, 56, 57, 59, 67, 68, 70, 75, 80, 81, 97], "01000167": 72, "01001518": 72, "01004315": 72, "01005285": 72, "01007022": 72, "01007536": 72, "01010101": 60, "01016321": 72, "01022279": 72, "01033699": 72, "01047616": 56, "01049917": 72, "01062572": 72, "01089737": 72, "0110357": 72, "0112021": 72, "01120778": 72, "01121804": 72, "011474": 57, "01147797": 72, "012": [89, 92, 97], "01258484": 72, "012666667": 51, "012810": 57, "013349": 6, "01354095": 72, "01448342": 50, "0145253": 72, "01627435": 72, "01667789": 72, "01809840e": 57, "01830738": 57, "01842322": 72, "019": 67, "01952426": 72, "01971922": 50, "0198301": 72, "01986773": 72, "01997577": 72, "01e": 51, "02": [49, 51, 54, 57, 59, 81, 92, 97], "02008789": 72, "02017019": 72, "02020202": 60, "02099504": 72, "02104953": 72, "02229478": 72, "02256309": 72, "02298923": 72, "022e": 51, "023": 68, "02310541": 72, "02326468": 72, "02409831": 50, "02438661": 72, "02483553": 72, "02489023e": 57, "02588039": 87, "026": 66, "026e": 51, "02807133": 72, "02e": 51, "03": [49, 50, 51, 54, 55, 56, 70, 85, 92, 97], "030000000000000027": 88, "03167228e": 57, "033e": 56, "03e": 51, "04": [51, 54, 57, 70, 80, 85, 97], "04188675e": 57, "04611609": 72, "049": 97, "04987135": 87, "04f757": 6, "05": [50, 55, 56, 57, 71, 72, 73, 92, 97], "050": [60, 62, 97], "051": [81, 85, 97], "05345525": 57, "05451995e": 57, 
"05490484": 72, "05626410e": 57, "06": [51, 56, 97], "063": [50, 54, 97], "06368326": 50, "06755182": 72, "06763163": 72, "06896877": 72, "07": [59, 80, 97], "07112381": 72, "07903887": 72, "08": 97, "08055856": 72, "08104576": 72, "08122468": 72, "082946": 72, "08385747e": 57, "08897776": 72, "08e": 51, "09": 57, "09121028": 72, "0915802": 72, "09162505": 72, "09166633": 72, "09217019": 72, "09383016e": 57, "09608009": 72, "09651811": 72, "09688295": 72, "097701": 55, "0985627": 72, "09878492": 72, "09883382": 72, "09924038": 72, "09943253": 72, "0aa6d8": 6, "0cbd66": 6, "0x10efdda80": 50, "0x143a91b20": 50, "0x144cb0c50": 50, "0x1462b7250": 50, "0x1462b72d0": 50, "0x1462b7350": 50, "0x1462b73d0": 50, "0x15468a450": 49, "0x15468ac50": 49, "0x15468ad50": 49, "0x154ab6dd0": 86, "0x154ab7a50": 86, "0x154b0fd50": 76, "0x154bb9a30": 83, "0x154bfd7d0": 87, "0x154bfe950": 87, "0x1563681d0": 49, "0x156368250": 49, "0x156368350": 49, "0x156369ed0": 49, "0x15e95dc10": 83, "0x17eab07d0": 72, "0x17eab0bd0": 72, "0x17eab31d0": 72, "0x17eab3d50": 72, "0x17eb0c470": 50, "0x17eb604d0": 49, "0x17eb61950": 49, "0x17ebbc1a0": 76, "0x17ec006d0": 72, "0x17ec00950": 72, "0x17ec01150": 49, "0x17ec01c50": 72, "0x17ec036d0": 72, "0x17ec7aea0": 50, "0x17eca7650": 50, "0x17ed24a40": 89, "0x17ed3c0e0": 50, "0x17ee3a0f0": 83, "0x17eeb34d0": 50, "0x17ef83e50": 76, "0x17f00d0d0": 72, "0x17f00d650": 72, "0x17f00e9d0": 72, "0x17f00ebd0": 72, "0x17f00fa50": 72, "0x17f088dd0": 88, "0x17f089a50": 87, "0x17f08a550": 88, "0x17f08a7d0": 87, "0x17f114d50": 72, "0x17f115b50": 72, "0x17f18acc0": 83, "0x17f1a32c0": 89, "0x17f21de50": 72, "0x17f2d60d0": 72, "0x17f387aa0": 76, "0x17f3b4850": 76, "0x17f487b60": 77, "0x17f4e05d0": 87, "0x17f629450": 86, "0x17f629d50": 86, "0x17f62aad0": 86, "0x17f652180": 87, "0x17f652360": 87, "0x17f679190": 60, "0x17f680a50": 87, "0x17f6819d0": 87, "0x17f6825d0": 87, "0x17f6bebd0": 88, "0x17f6bfad0": 88, "0x17f6df0b0": 77, "0x17f866cd0": 72, "0x17faea990": 83, "0x17faebb00": 77, "0x1808cd750": 76, "0x1808ced70": 76, "0x180970e10": 76, "0x1809733f0": 76, "0x180d19bb0": 77, "0x180d21350": 72, "0x180d216d0": 72, "0x180d2a5d0": 72, "0x180d2a750": 72, "0x180d2aad0": 72, "0x180d2ac50": 72, "0x181310930": 87, "0x1813134b0": 87, "0x181313b10": 87, "0x181bcaf00": 89, "0x181cf6b10": 89, "0x181f49a50": 72, "0x181fa8150": 72, "0x181fa8450": 72, "0x181fa87d0": 72, "0x181fa88d0": 72, "0x181fa9150": 72, "0x181fa95d0": 72, "0x182174450": 76, "0x18229cb50": 72, "0x18229d5d0": 72, "0x18229dbd0": 72, "0x18229e6d0": 72, "0x18229f350": 72, "0x1824970b0": 77, "0x182510410": 83, "0x182600c50": 72, "0x1828e1340": 83, "0x182aca390": 77, "0x182c1a750": 76, "0x182c2d4d0": 72, "0x182cb9c50": 72, "0x182cba8d0": 72, "0x182d6cb30": 83, "0x182f538f0": 89, "0x182fd5650": 72, "0x182fd6bd0": 72, "0x182ff9a00": 89, "0x1834e3950": 76, "1": [2, 6, 7, 14, 15, 17, 19, 20, 21, 22, 24, 29, 31, 33, 35, 37, 39, 44, 49, 50, 51, 52, 55, 56, 57, 60, 62, 65, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90], "10": [2, 6, 15, 17, 20, 24, 25, 26, 29, 35, 49, 50, 51, 52, 55, 56, 57, 60, 63, 71, 72, 73, 76, 77, 78, 81, 83, 86, 87, 88, 89], "100": [7, 13, 33, 49, 50, 87, 88, 89], "1000": [2, 76, 87, 88], "10000": [13, 88], "100000": 90, "1000000": 89, "1001": 81, "101": [87, 88], "102e": 51, "105": 89, "10544857": 57, "10560647": 76, "1056064716983": 76, "106e": 51, "108": 76, "11": [48, 49, 56, 57, 59, 76, 82, 87, 90], "11041966e": 57, "111": 6, "11154601": 72, "11213815": 72, "11272764": 72, "113": 68, "114288": 67, "115": 49, 
"11932536": 72, "11942951": 72, "12": [56, 57, 72, 77, 87, 92], "121": [55, 56, 57, 76, 77, 81, 82, 87, 89], "12121212": 89, "1213": 49, "1215391": 87, "122": [55, 56, 57, 76, 77, 81, 82, 87, 89], "123487": 49, "1246": 49, "125": [52, 54, 97], "126e": 51, "127e": 51, "128755": 49, "129550": [49, 55], "13": [51, 56, 57, 87], "13056782": 87, "131": [76, 77, 81, 87, 89], "13113089": 87, "13149571": 76, "132": [72, 76, 77, 81, 87, 89], "13296225": 72, "133": [72, 76, 77, 81, 87, 89], "13341751e": 57, "1337": 49, "1338": 49, "13463137e": 57, "135": 56, "1367": 55, "1375024404290368": 87, "13758034e": 57, "13e": 51, "14": [56, 87], "14104158": 87, "145": [49, 55], "146100583096709124601953385843316024947": [57, 66, 67, 68], "147": 76, "148": 76, "149": [70, 76], "15": [24, 29, 52, 55, 57, 68, 72, 73, 76, 77, 78, 83], "150": [76, 81], "1502": 55, "1516": 55, "15201495": 50, "156": 57, "157": 57, "157731937350919": 81, "15994185e": 57, "15e": 51, "16": [56, 57, 71, 72, 73, 76], "160": 71, "16206135": 72, "164874": 66, "16541198455442488": 87, "169": [67, 70, 97], "16e": 51, "17": [49, 57], "17254113": 50, "17319374": 87, "1776": [20, 44, 49, 55], "178": 55, "17e": 51, "18": [49, 52, 66, 67, 68, 72, 75, 97], "182e": 51, "18317234": 72, "18645190e": 57, "1875": 72, "19": [50, 51, 54, 55, 56, 67, 72, 73, 75, 97], "19008745": 76, "19632561": 50, "19656": 56, "19875285": 57, "19e": 51, "1b4400": 6, "1ce6ff": 6, "1d": [2, 6, 7, 15, 16, 19, 20, 24, 25, 26, 27, 29, 31, 55, 56, 57, 77, 80, 85, 92, 96, 97], "1dtest": 87, "1e": [55, 56, 57], "1e6e00": 6, "1m": [15, 16, 21], "2": [6, 7, 13, 14, 17, 19, 20, 24, 25, 27, 29, 31, 35, 49, 50, 51, 52, 55, 56, 57, 60, 63, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90], "20": [6, 56, 77, 78, 83, 88], "200": [49, 50, 55, 56, 57, 66, 87], "201625": 6, "2020202": 89, "20412212e": 57, "20880061e": 57, "20a": 6, "20digit": 6, "20elev": 6, "20from": 6, "20model": 6, "21": [51, 71, 75, 87, 90, 97], "210000": 51, "21045876958147633": 72, "211": [51, 77, 87, 88], "212": [51, 77, 82, 85, 87, 88, 97], "217": 55, "21717666705189165": 72, "21e": 51, "221": [87, 88], "222": [87, 88], "223": [87, 88], "22331766": 76, "2233176602046558": 76, "224": [87, 88], "225": 72, "225401": 52, "22774246": 87, "22889687797869024": 50, "228e": 51, "229e": 51, "23": 49, "231": [59, 78, 83], "232": [78, 83], "233": [78, 83], "23398822": 50, "234": [78, 83], "235": [78, 83], "236": [78, 83], "24": [49, 68, 87], "24296662e": 57, "24396239": 87, "24617681": 87, "25": [24, 29, 56, 57, 67, 76, 81], "250": 49, "255": 55, "256": 7, "257": [49, 54, 97], "25753935e": 57, "25910166e": 57, "25e": 51, "26": [49, 51, 56, 67, 72, 97], "260": [49, 55], "260e": 51, "261e": 51, "262e": 51, "266e": 51, "26714623": 87, "26727066e": 57, "269": 49, "27": 49, "271": [55, 59, 97], "27253219": 57, "27359586": 57, "27482725": 87, "27489718e": 57, "2750329304041186": 87, "28": [49, 54, 66, 87], "283": 49, "28316523e": 57, "28906551": 76, "28979426": 87, "29": [49, 66, 70, 97], "295": [51, 54, 97], "2d": [2, 6, 7, 19, 20, 21, 24, 25, 26, 27, 29, 31, 49, 50, 63, 75, 80, 81, 85, 92, 96, 97], "3": [2, 4, 6, 24, 25, 27, 31, 48, 49, 50, 51, 52, 55, 56, 57, 59, 70, 71, 72, 73, 75, 76, 77, 80, 81, 85, 86, 87, 88, 89, 90], "30": [6, 51, 55, 57, 66, 76, 81], "300": [2, 71, 72, 73, 87], "30000": 49, "300000": 51, "300018": 6, "30010": 49, "30020": 49, "30030": 49, "30100": 49, "30290861": 50, "306": [56, 59, 97], "30670": 49, "30e": 51, "31": [57, 90], "311": 77, "31159774": 87, "311e": 51, "312": 77, "312e": 51, 
"313": [71, 72, 73, 77], "31313131": 89, "31514467744": 56, "32": [67, 87], "320327": 56, "324e72": 6, "3263": 49, "328e": 51, "329e": 51, "331": 50, "332": 50, "333": 50, "333e": 56, "334": 50, "33418159e": 57, "3345": [49, 55], "335": 50, "33506224": 87, "336": 50, "337": 50, "3375": 72, "338": 50, "339": 50, "34": 57, "340": 51, "342864": 67, "34346631": 57, "34362d": 6, "34500474": 60, "34529634": 87, "350": 57, "358": 49, "35816931": 87, "36": [49, 67, 87], "3625": 72, "363": 56, "36389": 57, "36608642": 76, "367295829986474": [76, 81], "36799972": 87, "368412": 67, "37": [49, 67, 70, 97], "372101": 6, "37227056": 50, "3726": 49, "372e": 51, "376e": 51, "37700000000000006": 49, "378": [20, 44, 78, 80, 97], "38": [49, 68], "380": [49, 55], "38188466718060166": 87, "38188467": 87, "382e": 51, "386e": 51, "38760": 49, "39010": 49, "39020": 49, "39378825": 87, "395": 49, "398": 49, "3a2465": 6, "3b5dff": 6, "3b9700": 6, "3d": [2, 19, 20, 21, 25, 26, 27, 31, 42, 43, 54, 71, 72, 73, 77, 80, 82, 85, 92, 93, 97], "3x1": 50, "4": [2, 4, 13, 20, 36, 37, 49, 50, 51, 52, 54, 55, 56, 57, 60, 71, 72, 73, 76, 77, 86, 87, 89, 90], "40": [77, 87], "400": 50, "404e55": 6, "405": 55, "41020": [49, 55], "412": 55, "413e": 51, "414e": 51, "41585978e": 57, "416": 51, "42": 49, "422e": 51, "426e": 51, "435": [49, 55], "43897973": 50, "44444444": 89, "445969826454679": 87, "44663532": 76, "449": [66, 70, 97], "449825": 76, "4498250024555": 76, "45": [86, 87], "45286": 55, "452c2c": 6, "453": 49, "454": [86, 87], "45575508e": 57, "4560028": 49, "456648": 6, "456d75": 6, "4600678": 49, "462e": 51, "46362582": 57, "4638979": 49, "4639054": 49, "4639119": 49, "4639122": 49, "4639125": 49, "4661765": 49, "4661768": 49, "4661772": 49, "4661854": 49, "4661979": 49, "46698778": 87, "466e": 51, "468e": 51, "469e": 51, "47100177": 57, "47155933": 87, "47474747": 89, "475": 72, "48384681": 87, "488": 75, "49": [66, 87], "491520": 51, "492": 49, "498": [49, 67], "49920887e": 57, "4a3b53": 6, "4e": 56, "4fc601": 6, "4pi": 50, "5": [2, 15, 16, 24, 29, 33, 48, 49, 50, 51, 52, 55, 56, 57, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90, 92, 96], "50": [49, 50, 57, 71, 72, 73, 87], "500": 56, "5000": [66, 67, 68], "50505051": 89, "5082486": 50, "51": 87, "5125": 72, "51285518": 87, "516": 55, "521019": 68, "524": [71, 75, 97], "52525253": 89, "52720951": 57, "52761544": 50, "531": [68, 70, 97], "53101158": 57, "53816627": 87, "53823673": 50, "544066": 68, "5454886696688": 55, "549e79": 6, "55": 68, "550": [72, 73], "55503397": 57, "55717559": 50, "559817917094882": 81, "56": [68, 70, 97], "56073342": 50, "562e": 51, "563057": 68, "566e": 51, "567": 49, "56994112": 57, "57": 49, "57095942": 87, "575329": 6, "583e": 51, "584419": 49, "584494": 49, "584e": 51, "585": [76, 80, 97], "586122": 49, "586123": 49, "586852": 49, "590194": 49, "590219": 49, "593": 72, "595": 72, "597": 72, "5a0007": 6, "5b4534": 6, "5e": [55, 56, 57], "5e5": 56, "6": [20, 49, 51, 52, 55, 56, 57, 67, 71, 72, 73, 76, 77, 81, 86, 87], "60": [71, 72, 73], "600": [2, 72], "60007270e": 57, "601": 49, "602": 72, "604": 72, "606": 49, "61": 49, "61552555e": 57, "61615a": 6, "61e": 51, "62044639e": 57, "629": 92, "636375": 6, "6367a9": 6, "63ffac": 6, "64050649": 60, "641e": 51, "642e": 51, "643134": 56, "65": 72, "6500813217": 57, "652e": 51, "653": [57, 59, 97], "655": [49, 68], "656e": 51, "66": 76, "662822": 49, "66659": 49, "668372": 49, "6710": 49, "68": 49, "68630769": 87, "68e": 51, "6a3a4c": 6, "6b002c": 6, "6b7900": 6, "6f0062": 6, "7": [2, 
20, 44, 48, 49, 51, 55, 56, 57, 72, 86, 87], "70": 76, "70336": 49, "704": 49, "70419088": 87, "7041908829549965": 87, "71": 49, "710": 80, "712e": 51, "71365767403": 57, "716e": 51, "71717172": 89, "72418f": 6, "72508382": 87, "72940": 57, "72e": 51, "7309893437585234": 72, "733": 55, "7331520327": 56, "741": 54, "747": [77, 80, 97], "74815387e": 57, "749": 49, "74e": 51, "75": [56, 57], "754": [49, 50], "75575613": 50, "76": 72, "760": 67, "76214533": 87, "7625": 72, "76292177": 87, "765": 49, "767": [87, 92, 97], "76789170e": 57, "77": 72, "77177243": 60, "772600": 6, "774547726888374": 72, "778868401978407": 72, "78": 72, "788": [83, 85, 97], "788d66": 6, "78e": 51, "79": [66, 67, 68, 72], "7900d7": 6, "79138645e": 57, "79404353": 76, "796e": 51, "799": 49, "79969548": 57, "79e": 51, "7a4900": 6, "7a87a1": 6, "7b4f4b": 6, "8": [49, 51, 52, 55, 56, 71, 72, 73, 77, 81, 86, 87], "80": [72, 87], "800": 68, "80520": 49, "8077378262817626": 72, "809693": 6, "81498656": 87, "8171": [49, 55], "81945885548": 55, "8199": 49, "81e": 51, "82": [49, 51], "822": 49, "822e": 51, "823e": 51, "828623928755": 55, "8299999999999996": 88, "82e": 51, "83": 88, "830000000000002": 49, "83077926": 72, "83481902": 87, "835": 49, "83737910e": 57, "84": 49, "85": 88, "85558253": 87, "85590133": 76, "85725407": 87, "85985878": 87, "865e": 51, "8665682999": 56, "866e": 51, "872e": 51, "8747471": 87, "875": [55, 90, 92, 97], "87594468": 57, "876e": 51, "87713313e": 57, "88": 88, "885578": 6, "886f4c": 6, "88e": 51, "89": [49, 55], "8cd0ff": 6, "8fb0ff": 6, "9": [49, 51, 55, 56, 57, 72, 77, 81, 86, 87, 89], "90": [35, 89], "905e": 51, "9063110648103332": 50, "906e": 51, "91": [20, 44, 49, 55, 88], "91062364": 50, "91310127e": 57, "91e": 51, "922329": 6, "928": 49, "93": [20, 44], "932e": 51, "935268": 66, "936e": 51, "9375": 72, "938a81": 6, "93hastings_algorithm": 87, "94": [49, 67, 88], "95": [35, 71, 72, 73, 88, 89], "95502179": 87, "95655826e": 57, "96": 49, "965": [72, 75, 97], "9699999999999998": 88, "97": 88, "972": [88, 92, 97], "972e": 51, "976": 49, "976e": 51, "97979798": 60, "98": 49, "9805553827696698": 50, "98512505": 87, "98989899": 60, "9899649834962019": 50, "99": [36, 37, 39], "9916814356778278": 50, "99206549": 76, "993323": 66, "997": 87, "9979698382486921": 50, "997d87": 6, "998": 87, "999": [73, 75, 87, 97], "99999999999997": 76, "99adc0": 6, "9b9700": 6, "A": [1, 2, 4, 6, 7, 11, 15, 16, 19, 20, 21, 22, 24, 25, 26, 27, 35, 36, 42, 45, 49, 56, 63, 87, 95], "And": [49, 55, 60, 81, 87], "As": 87, "At": 48, "By": [4, 24, 71, 72, 73], "For": [2, 6, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 29, 31, 36, 37, 43, 44, 48, 49, 50, 55, 56, 57, 81], "If": [1, 2, 6, 7, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 43, 48, 56, 87], "In": [2, 14, 19, 20, 21, 22, 25, 27, 48, 49, 51, 55, 56, 57, 69, 81, 87, 93], "It": [35, 48, 77], "NOT": 20, "No": 24, "Not": 56, "ON": 48, "Of": [49, 87], "On": [2, 48, 56], "Or": [49, 51, 52, 55, 56, 57, 76, 81, 87], "The": [0, 1, 2, 4, 6, 7, 11, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 36, 37, 39, 43, 44, 45, 48, 49, 51, 52, 55, 56, 57, 63, 66, 67, 68, 69, 71, 72, 73, 77, 81, 86, 87, 88, 93, 96], "There": [25, 27, 48, 63, 69, 93, 96], "These": [15, 16, 48, 49, 63, 76, 81, 87], "To": [33, 48, 55, 76, 81], "With": [55, 56, 57], "_": [17, 19, 49, 51, 52, 55, 56, 57, 66, 67, 68, 71, 72, 73, 76, 77, 87, 88, 89, 90], "__class__": 2, "__main__": [71, 72, 73], "__name__": [2, 71, 72, 73], "_data": [15, 16], "_file": 56, "_hl": [1, 11], 
"_init_posterior_plot": 76, "_initialize_sequential_read": [49, 51, 55, 56, 57, 66, 67, 68], "_option": [66, 67, 68], "_read_record": [49, 51, 55, 56, 57], "a05837": 6, "a079bf": 6, "a1c299": 6, "a30059": 6, "a3c8c9": 6, "a4e804": 6, "a77500": 6, "aarhu": 20, "aarhusinv": 56, "ab": [50, 55, 56, 57, 87], "abc": [13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 36, 37, 38, 39, 40, 42, 43, 44, 45], "abcissa": 6, "abl": 48, "about": [48, 49, 55, 56, 57, 71, 72, 73, 87], "abov": [2, 14, 15, 16, 17, 24, 49, 51, 71, 72, 73], "abscissa": [25, 27], "absolut": [15, 16, 25, 26, 50], "abstract": [13, 40, 43], "acc": [66, 67, 68], "accept": [56, 57], "access": [48, 87], "accompani": [2, 19, 21, 22, 49, 51], "accord": [19, 21, 22, 49, 50, 51], "account": 4, "accumul": [29, 35], "acquisit": [14, 17, 19, 42, 44, 55], "across": 31, "activ": [13, 15, 17, 19, 20, 48, 55, 56, 57], "actual": [2, 6, 63], "ad": [44, 71, 72, 73], "adapt": 2, "add": [6, 19, 29, 44, 86, 87], "add_argu": [66, 67, 68], "add_artist": 50, "add_axi": [14, 15, 16, 17, 24, 25, 26, 27, 31, 42, 43, 55, 76, 77, 87, 88, 89], "add_gridspec": [71, 72, 73, 81], "add_subplot": [71, 72, 73], "addit": [6, 15, 16, 17, 19, 20, 21, 24, 25, 26, 31, 43, 49, 55, 56, 57], "additive_error": [15, 19, 22, 55, 56, 57], "additive_error_multipli": 16, "additive_error_prior": [55, 56], "additive_error_propos": [15, 16, 17], "additive_error_proposal_vari": 57, "additiveerr": [15, 16], "additiveerrorpropos": [15, 16, 17], "additiveprior": [55, 56], "additivepropos": [55, 56], "address": [49, 50, 72, 76, 86, 87, 88], "addressof": [15, 17], "addtovtk": 19, "aeerr": [15, 16, 17, 31, 43], "aem": [51, 56, 57], "affect": 87, "after": [2, 4, 21, 24, 49, 56], "again": 24, "against": [4, 6, 19, 24, 25, 27, 31, 87], "ahead": 48, "airborn": [51, 56, 57], "algorithm": 63, "alia": [20, 21, 22], "alias": 31, "all": [1, 2, 4, 6, 7, 15, 16, 17, 19, 20, 21, 24, 25, 26, 29, 31, 35, 36, 39, 43, 48, 49, 51, 52, 55, 63, 71, 72, 73, 86, 87, 89, 93, 97], "allevi": 31, "alloc": [1, 63, 87], "allow": [7, 24, 25, 27, 29, 55, 56, 57, 63, 77, 86, 87, 88, 89, 90], "along": [2, 6, 7, 19, 20, 21, 24, 25, 26, 27, 29, 31, 35, 49, 51, 52, 89, 90], "alpha": [6, 24, 25, 26, 29, 35, 87], "alphaarrai": 6, "alphacolour": 6, "alreadi": [1, 14, 15, 16, 17, 19, 21, 22, 24, 25, 27, 31, 42, 43, 48, 81], "also": [2, 19, 21, 22, 48, 49, 51, 63, 69, 71, 72, 73, 81, 87, 93], "alt": [19, 20, 21, 22, 49, 51], "altern": [19, 21, 22, 25, 49, 51], "altitud": [6, 19, 21, 22, 49, 51], "among": 2, "amongst": [19, 20, 31], "amount": [6, 7, 24, 25, 26, 29, 35, 87], "amplitud": 56, "an": [1, 2, 4, 6, 7, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 40, 42, 43, 48, 49, 52, 55, 56, 57, 63, 69, 71, 72, 73, 76, 77, 85, 87, 89, 90, 93, 97], "analysi": [75, 93, 97], "angl": 6, "ani": [1, 6, 7, 13, 14, 15, 16, 19, 21, 22, 24, 29, 31, 55, 87], "anim": [89, 90], "anoth": [24, 50, 87], "anyth": 7, "api": [63, 96], "appear": [1, 49], "append": [2, 19, 20, 21, 31, 42, 43, 49, 50, 66, 67, 68, 87, 89], "appli": [6, 14, 24, 25, 26, 29, 35, 87, 96], "applic": [38, 96], "approach": [2, 29, 87], "appropri": [44, 87], "ar": [1, 2, 4, 6, 7, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 36, 44, 48, 49, 51, 55, 56, 57, 63, 66, 67, 68, 69, 71, 72, 73, 76, 81, 87, 89, 93, 96], "arang": [2, 19, 20, 21, 31, 55, 63, 76, 77, 78, 82, 83, 86, 87, 88, 89], "arbitrari": 2, "area": [51, 87], "areaundercurv": 51, "arg": [6, 16, 19, 21, 22, 29, 33, 34, 66, 67, 68], "argpars": [66, 67, 68, 72, 73], 
"arguement": 14, "argument": [1, 6, 15, 16, 19, 20, 21, 24, 25, 26, 29, 31, 50, 55, 66, 67, 68], "argumentdefaultshelpformatt": [66, 67, 68], "argumentpars": [66, 67, 68], "around": [48, 81, 87], "arr": [1, 6, 25, 35, 76, 77], "arr2": [76, 77], "arrai": [1, 2, 6, 7, 13, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 35, 51, 52, 56, 57, 60, 63, 66, 67, 68, 72, 73, 76, 77, 86, 87], "array_lik": [2, 6, 7, 14, 15, 16, 17, 19, 24, 25, 26, 27, 29, 31, 35, 36, 39, 45], "arraylik": 19, "asarrai": [20, 49, 55, 87, 90], "ascii": 31, "assert": [66, 67, 68, 86, 87], "assign": [6, 14, 15, 16, 17, 19, 31, 43, 76, 81, 87], "associ": [14, 15, 16, 17, 19, 20, 31, 43], "assum": [7, 15, 16, 17, 19, 20, 21, 22, 25, 27, 31, 42, 43], "astr": 2, "attach": [1, 2, 7, 13, 14, 15, 16, 17, 19, 20, 21, 29, 31, 43, 55, 81, 86], "attribut": [31, 86, 87], "australia": [48, 56, 57], "auto": [2, 19, 20, 21, 55, 56, 57], "automat": [6, 31, 48, 89], "avail": [2, 24, 25, 48], "averag": [15, 16], "avoid": 48, "awai": [24, 25, 31], "awar": 2, "ax": [6, 7, 14, 19, 20, 21, 25, 26, 27, 31, 50, 51, 52, 71, 72, 73, 76, 81, 83, 87, 88, 89], "ax1": [50, 71, 72, 73, 87], "axi": [2, 6, 7, 14, 19, 24, 25, 26, 27, 29, 31, 35, 36, 49, 51, 52, 77, 81, 82, 87, 89, 90], "axis0": [25, 27], "axis1": [25, 27], "axisgg": 7, "azimuth": 6, "b": [7, 50, 87, 89, 90], "b05b6f": 6, "b4a8bd": 6, "b77b68": 6, "b79762": 6, "b903aa": 6, "ba0900": 6, "back": [24, 48, 63], "backend": 48, "backward": 7, "balanc": 2, "banner": 2, "bar": [6, 24, 25, 26, 29, 35, 87, 88], "base": [1, 2, 4, 6, 7, 13, 14, 15, 16, 17, 24, 25, 26, 29, 35, 40, 43, 48, 56, 57, 81, 87], "basedistribut": [15, 16, 17, 33, 36, 37, 38, 39, 41], "basefrequ": 51, "bayesian": 38, "bb": [71, 72, 73], "bc23ff": 6, "bcast": [2, 19, 20, 21, 31, 42, 44], "bcast_1int": 2, "bcast_list": 2, "bcasttyp": 2, "bec459": 6, "becaus": [48, 63, 87], "been": [1, 2, 6, 15, 16, 17, 19, 21, 22, 25, 27, 31, 42, 43, 87], "befor": [2, 4, 6, 15, 16, 17, 19, 21, 22, 24, 25, 27, 31, 35, 42, 43, 48, 56, 57, 63], "begin": [24, 51], "behav": [6, 24, 25, 26, 29, 35, 87], "behaviour": [15, 16], "being": [2, 66, 67, 68], "believ": 87, "below": [2, 48], "benefit": 2, "best": [13, 35, 55, 56, 57, 71, 72, 73, 81], "between": [6, 7, 13, 17, 19, 20, 24, 25, 27, 31, 33, 35, 48, 49, 50, 51, 55, 56, 89], "bheight": [19, 20, 21, 22], "bin": [6, 7, 24, 25, 26, 29, 35, 36, 37, 39, 40, 48, 60, 81, 87, 89, 90], "binari": [2, 31, 48, 87], "bindir": 48, "binned_statist": 25, "birth": [24, 29], "bisect": 13, "bit": 31, "blob": 6, "block": [31, 49, 50], "block_indic": 31, "block_median": [31, 50], "block_median_indic": 31, "boldsymbol": [24, 29], "bool": [4, 6, 7, 14, 15, 16, 17, 19, 20, 21, 24, 25, 26, 27, 29, 31, 35, 43], "boolean": [15, 16, 17, 31, 43], "both": [20, 24, 27, 45, 48, 49, 55, 56, 69, 93], "bottleneck": 63, "bottom": [35, 71, 72, 73, 89], "bound": [21, 24, 29, 31, 71, 72, 73], "boundari": [77, 78, 82, 83], "box": 31, "bracket": [7, 19, 20, 21, 22, 49, 51], "brew": 48, "broadcast": [2, 19, 20, 21, 31, 42, 44], "brodi": [48, 51, 56, 57], "brute": 13, "buffer": 2, "build": 48, "built": [7, 87], "butterworth": 56, "byte": 4, "bytes2read": 4, "c": [6, 24, 25, 26, 27, 29, 31, 35, 48, 49, 51, 52, 87, 90], "c0b9b2": 6, "c2ff99": 6, "c2ffed": 6, "c8a1a1": 6, "calcul": [25, 27], "calibr": [14, 15, 16, 17, 31, 43], "call": [2, 4, 7, 15, 16, 17, 19, 21, 22, 24, 25, 27, 31, 42, 43, 48], "callabl": 25, "can": [1, 2, 6, 15, 16, 21, 22, 24, 25, 27, 31, 48, 49, 50, 51, 55, 56, 57, 60, 63, 71, 72, 73, 76, 77, 78, 81, 82, 83, 87, 88, 89, 90], 
"cannot": [66, 67, 68], "capabililti": 63, "capabl": 48, "carlo": [38, 96], "carri": [13, 71, 72, 73, 87], "cartesian": 77, "case": [24, 25, 27, 48, 49, 69, 81, 87, 93], "cast": 2, "categor": 33, "categoricaldistribut": 33, "caus": 44, "caution": 1, "cb": [6, 50], "cb7e98": 6, "cc": [48, 86, 87], "cc0744": 6, "cd": 48, "cdf": [7, 37, 39, 88, 89], "ceil": 87, "cell": [24, 25, 26, 27, 29, 31, 72, 76, 77, 78, 81, 82, 83, 88], "cellar": 48, "cellindex": [24, 25], "cellindic": [25, 27], "center": 56, "centr": [24, 25, 27, 31, 35, 72, 76, 77, 78, 81, 82, 83, 88, 90], "centred_grid_nod": 31, "chain": [38, 96], "chang": [24, 29, 45, 76, 81, 87], "channel": [14, 15, 16, 17, 19, 20, 21, 22, 49, 51, 52], "channel_index": [19, 51, 52], "channel_nam": [13, 17, 19, 49], "channels_per_system": [13, 19], "charact": 7, "character": 77, "characterist": 55, "chart": 6, "check": [4, 7, 24, 25, 48, 49, 50, 66, 67, 68], "checkcommandargu": [66, 67, 68], "checkout": 49, "children": [36, 37, 39, 40], "choos": [33, 66, 67, 68, 71, 72, 73], "chosen": [19, 29], "chunk": [2, 19, 20, 21, 31], "circl": 50, "circular": 46, "circularloop": [42, 49, 55, 56], "clabel": 6, "class": [0, 1, 6, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 34, 35, 36, 37, 38, 39, 40, 42, 43, 44, 45, 54, 59, 62, 66, 67, 68, 71, 72, 73, 88, 89, 90, 92, 93, 96, 97], "classmethod": [14, 15, 16, 17, 19, 20, 21, 22, 24, 26, 27, 29, 31, 35, 42, 43, 44], "clean": 48, "clear": 2, "clim_scal": [71, 72, 73], "clip": [24, 25, 27, 31], "clone": 48, "close": [56, 89], "cloud": [25, 26, 27, 31, 54, 93, 97], "clough": 31, "cloughtoch": 50, "cmap": [6, 24, 49, 71, 72, 73, 81, 87, 89, 90], "cname": 6, "co": [14, 15, 16, 17, 19, 20, 21, 22, 25, 27, 31, 49, 50, 51, 87], "coastal_salt_wat": [66, 67, 68, 71, 72, 73], "code": [0, 2, 48, 49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90, 93, 96], "codebas": 96, "coil": [19, 21, 22, 44, 49, 51], "collect": [2, 6, 50, 77, 83, 89], "color": [6, 50, 89], "colorbar": [6, 50, 77, 81, 83, 87, 89], "colour": [6, 7, 24, 25, 26, 29, 31, 35, 49, 50, 71, 72, 73, 76, 87], "colourbar": [6, 87], "colourmap": [6, 24, 25, 26, 29, 35, 87], "colum": 49, "column": [4, 6, 19, 20, 21, 22, 44, 50, 56], "com": [6, 56, 57], "combin": [7, 89], "come": [87, 96], "comm": [2, 19, 31], "comm_world": [2, 19, 20, 31, 42], "comma": 4, "command": [6, 7, 66, 67, 68, 69, 93], "common": 63, "commun": [2, 19, 20, 31, 42, 48], "comon": [24, 29], "compat": 7, "complex": 7, "complex128": 7, "complic": 6, "compon": [7, 13, 15, 16, 17, 19, 21, 24, 29, 31, 43, 45], "component_id": 44, "compress": 87, "comput": [2, 7, 13, 14, 15, 17, 19, 24, 25, 29, 31, 36, 37, 55, 56, 57, 77], "compute_doi": [71, 72, 73], "concaten": [7, 15, 16], "concatent": 7, "conduct": [13, 55, 56, 57, 72, 81], "confid": [35, 71, 72, 73, 88], "configur": 48, "consider": 87, "consist": 56, "constant": [6, 7, 24, 36, 49, 55], "contain": [0, 2, 6, 7, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 27, 31, 35, 44, 48, 49, 51, 56, 71, 72, 73, 87], "containin": 14, "content": [1, 11, 50, 87], "contigu": 63, "convers": [56, 57], "convert": [4, 24], "coordin": [6, 31, 77], "copi": [20, 24, 29, 33, 48, 76, 81], "core": [0, 2, 8, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 36, 37, 38, 39, 40, 42, 43, 44, 45, 63, 96], "correct": [4, 7, 48], "correctli": [24, 48], "correspond": [24, 35], "cossin1": 7, "could": 87, "count": [4, 20, 25, 35, 89, 90], "coupl": [69, 93], "cours": [49, 87], "cover": 45, "cp": [51, 52, 60], 
"cpp": 48, "creat": [1, 2, 6, 7, 11, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 42, 43, 49, 50, 55, 63, 66, 67, 68, 71, 72, 73, 76, 81, 87, 88, 89, 90], "create_hdf": 63, "create_hdf5": [66, 67, 68], "create_synthetic_model": [71, 72, 73], "createhdf": [1, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 31, 42, 43, 49, 50, 51, 52, 55, 63, 76, 77, 78, 83, 87, 88, 89], "creation": 76, "credibl": [35, 89, 90], "credible_interv": [35, 88], "credible_interval_kwarg": 81, "credible_rang": [35, 89], "credit": [56, 57], "cross": [71, 72, 73], "csv": [31, 51, 52, 56, 57, 66, 67, 68], "ct": [31, 50], "cumsum": [76, 77, 87, 88], "cumul": [6, 7, 19, 20, 21, 25, 26, 27, 31, 39, 88], "current": [17, 24, 29, 56, 87, 96], "curvatur": [31, 50], "custom": [2, 4, 6, 7, 50, 87], "cutoff": 56, "cutofffrequ": 51, "cxx": 48, "cxxflag": 48, "cycl": [24, 29], "d": [17, 19, 20, 31, 33, 55, 60], "d157a0": 6, "d16100": 6, "d2": 20, "d790ff": 6, "da": [86, 87], "dai": 48, "data": [2, 3, 6, 8, 13, 14, 15, 16, 17, 20, 21, 22, 23, 25, 27, 29, 31, 38, 42, 43, 44, 48, 50, 54, 55, 56, 57, 63, 70, 71, 72, 73, 87, 96, 97], "data_class": 19, "data_filenam": [19, 21, 22, 66, 67, 68], "data_misfit": [17, 19, 55, 56, 57], "data_prior": 56, "data_typ": [66, 67, 68, 71, 72, 73], "dataarrai": [24, 35, 49, 60, 76, 87, 88, 92, 93, 97], "datafil": [20, 49, 51, 52, 55, 56, 57], "datafilenam": [15, 19, 20, 21, 22, 49], "datafold": [49, 50, 51, 52, 55, 56, 57], "datapoint": [12, 13, 14, 15, 16, 19, 20, 21, 29, 49, 51, 59, 96, 97], "dataset": [1, 12, 19, 20, 21, 22, 29, 54, 55, 56, 57, 63, 93, 97], "dataunit": 19, "datetim": [66, 67, 68], "db": 51, "ddefff": 6, "death": [24, 29], "debug": 2, "decim": [49, 50], "deepcopi": [14, 20, 40, 76, 81], "def": [66, 67, 68, 71, 72, 73], "default": [2, 4, 6, 7, 15, 16, 17, 19, 20, 21, 24, 25, 31, 66, 67, 68, 87], "defaultnam": 7, "defaultunit": 7, "defin": [2, 14, 17, 19, 20, 24, 25, 26, 27, 31, 34, 36, 37, 38, 39, 40, 42, 43, 44, 55, 56, 57, 77, 78, 82, 83, 87], "definit": 56, "del": [71, 72, 73], "delet": [4, 24, 66, 67, 68, 87], "delete_edg": 24, "deletefil": 4, "delimit": 4, "delta": [17, 19, 24], "deltad": [17, 19], "dem": 6, "dem_elev": [19, 20, 21, 22], "dem_np": [19, 21, 22], "denomin": [29, 38], "densiti": [7, 60, 76, 87, 88], "depend": [6, 29, 48], "deprec": [56, 57], "deprecationwarn": [56, 57], "depth": [24, 29, 35, 55, 71, 72, 73, 76, 77, 78, 81, 83, 87], "deriv": [29, 48, 96], "describ": [14, 19, 20, 21, 22, 24, 25, 26, 27, 29, 33, 44, 48, 49, 51, 56, 87], "descript": [19, 20, 21, 22, 24, 25, 27, 29, 31, 44, 49, 51, 56, 66, 67, 68], "descriptor": [55, 86, 87], "desir": 6, "dest": 2, "det": 7, "detail": [2, 87], "determin": [1, 2, 7, 25, 48, 87], "develop": 96, "deviat": [14, 17, 19, 49, 51, 55, 56, 60, 76], "diagon": 7, "dict": 6, "dictionari": 6, "dieter": [56, 57], "differ": [2, 7, 13, 17, 19, 24, 35, 60, 87], "digit": 4, "dim": [25, 36, 39], "dimens": [2, 6, 7, 24, 25, 26, 27, 29, 31, 36, 39, 56, 77, 81, 90], "direct": [25, 27, 56, 86, 87], "directori": [4, 48, 66, 67, 68, 71, 72, 73], "direxist": 4, "dirpath": 4, "discret": [36, 37, 39, 77], "disk": [4, 49, 55, 56, 57, 63], "displai": [19, 21, 25, 26, 27, 71, 72, 73, 89], "distanc": [19, 20, 21, 24, 25, 26, 27, 31, 50, 51, 76, 77, 87], "distibut": 24, "distort": 87, "distribut": [15, 16, 17, 19, 20, 24, 29, 31, 35, 36, 40, 41, 43, 48, 55, 56, 57, 62, 63, 76, 81, 96, 97], "distribution_class": 33, "distributiontyp": 33, "divid": 56, "do": [2, 6, 7, 19, 20, 21, 24, 25, 27, 48, 77, 87], "documentation_sourc": [56, 57], 
"doe": [7, 31, 96], "doi": [71, 72, 73], "domain": [14, 15, 16, 19, 20, 21, 22, 31, 46, 54, 59, 93, 96, 97], "don": 6, "done": 87, "dot": 7, "download": [48, 49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90, 93], "dp": 49, "dpi": [71, 72, 73], "draw": 87, "dt": 51, "dtm": [19, 20, 21, 22], "dtype": [2, 88], "dualmoment": 15, "due": [2, 48], "dum": 50, "duplic": 63, "dure": [29, 31, 48, 63], "dx": [31, 49, 50], "dy": [31, 49, 50], "dz": 31, "e": [1, 2, 4, 6, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 43, 44, 48, 49, 51, 56, 57, 66, 67, 68, 71, 72, 73, 87], "e2": 87, "e_": 24, "each": [1, 2, 6, 7, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 36, 42, 43, 44, 49, 51, 55, 56, 63, 71, 72, 73, 81, 87, 89, 90], "earli": [66, 67, 68], "earth": [55, 56, 57], "easi": [48, 87], "easier": 48, "easiest": 48, "easili": 87, "east": [14, 15, 16, 17, 19, 20, 21, 22, 49, 50, 51, 72, 76, 77, 78, 82, 83, 89], "edg": [6, 24, 25, 27, 36, 39, 55, 56, 57, 72, 76, 77, 81, 87, 88], "edge_prior": 24, "edgecolor": [50, 87], "edges_kwarg": 81, "edges_prior": 24, "edgesmax": [24, 25], "edgesmin": [24, 25], "eec3ff": 6, "effici": [88, 89, 90], "either": [1, 7, 14, 19, 20, 24, 25, 27, 31, 77, 87], "elaps": [66, 67, 68], "electro": [20, 21, 22], "electromagnet": [14, 44, 96], "element": [2, 6, 7, 19, 20, 25, 27, 56, 57, 87], "eleph": 87, "elev": [6, 13, 14, 15, 16, 17, 19, 20, 21, 22, 31, 42, 43, 49, 50, 51, 55, 56, 72], "elkhil": 51, "els": [2, 14, 17, 19, 20, 21, 35, 90], "em": [14, 15, 16, 17, 31, 38, 42, 43, 44, 49, 51, 52, 55, 56, 57], "emb": 77, "emdata": [14, 15, 20], "emdatapoint": [14, 15, 16, 17, 18], "emloop": [15, 16, 42, 46], "empti": [20, 25], "empymod": [56, 57], "en": 87, "enabl": [48, 63, 96], "encount": 56, "end": [2, 7, 24, 48, 49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90], "enough": [49, 50], "ensur": [56, 57], "entir": [1, 6, 24, 25, 26, 29, 35, 48], "entri": [1, 4, 6, 7, 15, 16, 17, 20, 24, 31, 35, 56], "env": 48, "environ": 48, "ep": 31, "equal": [2, 6, 7, 15, 16, 17, 19, 20, 24, 25, 26, 29, 31, 35, 49, 51, 52, 55, 56, 57, 76, 82], "equat": [24, 87], "erroff": [19, 21, 22], "error": [15, 16, 17, 19, 21, 22, 25, 31, 43, 49, 55, 56, 57, 66, 67, 68, 73], "errorbar": 14, "essenti": 87, "estim": [14, 17, 19, 21, 22, 29, 35, 49, 51, 55, 56, 89, 90], "estimateadditiveerror": 21, "eta": [66, 67, 68], "etc": [4, 6, 24, 31], "evalu": [15, 16, 17, 24, 29, 31, 36, 43, 55, 56, 57, 76, 81, 87], "event": 29, "everi": [2, 4], "everyon": [66, 67, 68], "ex07": 6, "exampl": [2, 6, 20, 25, 27, 33, 50, 52, 54, 55, 56, 57, 59, 60, 62, 63, 65, 66, 67, 68, 69, 70, 71, 72, 73, 75, 76, 77, 78, 80, 81, 82, 83, 85, 86, 88, 89, 90, 92, 95, 96, 97], "examples_jupyt": 93, "examples_python": 93, "except": [20, 21, 24, 66, 67, 68, 71, 72, 73], "exclud": 31, "execut": [54, 59, 62, 63, 65, 70, 75, 80, 85, 92, 95, 97], "exist": [2, 4, 19, 21, 22, 24, 29, 66, 67, 68], "exist_ok": [66, 67, 68], "exp": [7, 56], "expand": [24, 25, 76, 77], "expect": [24, 71, 72, 73], "expens": [71, 72, 73], "explicitli": 87, "explictli": 24, "expon": 7, "exponenti": [7, 87], "export": 49, "expreal": 7, "ext": 4, "extend": [86, 87], "extens": [4, 20, 36, 86, 87], "extra": [1, 19, 21, 22, 24, 25, 55, 63, 86, 87], "extract": [31, 56, 57], "f": [48, 49, 50, 51, 52, 55, 63, 76, 77, 78, 82, 83, 87, 88, 89], "factor": [7, 14, 29], "fad09f": 6, "fail": [7, 24, 66, 67, 68], "fall": 24, "fals": [6, 11, 15, 19, 21, 24, 25, 26, 27, 
29, 31, 35, 36, 37, 39, 49, 50, 60, 71, 72, 73, 76, 81, 87, 88, 89], "far": 24, "fashion": 2, "faster": [2, 31], "fd": [20, 55], "fd1": 49, "fd2": 49, "fd3": 49, "fd_one": 49, "fd_two": 49, "fda": 49, "fde8dc": 6, "fdem": [20, 49], "fdemdata": [23, 49, 55], "fdemdata_class": 20, "fdemdatapoint": [13, 18, 20, 21, 55], "fdemdatapoint_class": 14, "fdemsystem": [14, 20, 44, 49, 55], "fdemsystem1": 49, "fdemsystem2": [49, 55, 66], "fdemsystem_class": 44, "fdp": [49, 55], "fdp1": 55, "fdp2": 55, "feffe6": 6, "few": [48, 66, 67, 68], "ff2f80": 6, "ff34ff": 6, "ff4a46": 6, "ff8a9a": 6, "ff90c9": 6, "ff913f": 6, "ffaa92": 6, "ffb500": 6, "ffdbe5": 6, "fff69f": 6, "ffff00": 6, "fftw": 48, "fftw_dir": 48, "fid": [19, 20, 21, 22, 49, 51], "fiduci": [13, 14, 15, 16, 17, 19, 20, 21, 49, 55], "field": [49, 56, 57], "fig": [55, 71, 72, 73, 76, 81], "figsiz": [49, 51, 52, 71, 72, 73, 81, 89], "figur": [6, 14, 24, 25, 26, 29, 33, 35, 49, 50, 51, 52, 55, 56, 57, 60, 71, 72, 73, 76, 77, 78, 81, 82, 83, 87, 88, 89, 90], "file": [0, 1, 2, 4, 11, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 42, 43, 44, 45, 48, 50, 52, 54, 55, 57, 59, 62, 63, 65, 66, 67, 68, 69, 70, 71, 72, 73, 75, 76, 77, 78, 80, 82, 83, 85, 87, 88, 89, 92, 93, 95, 97], "file_path": [66, 67, 68], "fileexist": 4, "fileinform": [20, 21, 31, 44], "fileio": 3, "filenam": [1, 4, 20, 31, 44, 50, 66, 67, 68], "filesexist": 4, "fill": 50, "fill_valu": [76, 81], "fillvalu": [14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 31, 42, 43], "filter": [7, 31, 50, 56], "filterwarn": [66, 67, 68, 73], "final": [24, 29, 31, 48], "find": [1, 7, 24, 35, 48, 66, 67, 68], "find_best_halfspac": [13, 55, 56, 57], "findfirstlastnotvalu": 7, "findfirstnonzero": 7, "findlastnonzero": 7, "findnan": 7, "findnotnan": 7, "fine": 48, "first": [2, 6, 7, 19, 20, 21, 22, 24, 25, 26, 27, 29, 35, 44, 48, 49, 51, 56, 63, 69, 71, 72, 73, 77, 87, 93], "firstnonzero": 87, "fit": [13, 55, 56, 57], "fit_mixture_to_pdf_1d": 35, "fix": [56, 81], "flatten": [25, 27], "flexibl": 1, "flip": [6, 24, 25, 26, 29, 35, 76, 81, 87], "flipi": [6, 24, 25, 26, 29, 35, 55, 56, 57, 71, 72, 73, 76, 77, 81, 87], "flipx": [6, 24, 25, 26, 29, 35], "float": [4, 6, 7, 13, 14, 15, 16, 17, 20, 21, 24, 25, 26, 27, 29, 31, 35, 36, 37, 42, 56, 86, 87], "float64": [7, 13, 15, 16, 17, 19, 24, 27, 29, 31, 43, 49, 56, 57, 77, 86, 87, 88], "floor": [55, 56, 57], "fluff": 6, "flush": 2, "fm_dlogc": 57, "fname": [1, 4], "folder": [48, 49, 51, 56, 69, 71, 72, 73, 93], "follow": [19, 20, 21, 22, 24, 25, 48, 49, 51, 56, 77], "fontsiz": 6, "forc": [13, 76, 87], "format": [3, 19, 21, 22, 31, 52, 55, 57, 66, 67, 68, 71, 72, 73], "formatter_class": [66, 67, 68], "formul": 96, "fortran": 48, "forward": [7, 14, 15, 29, 51, 55, 56, 57, 96], "forwardmodel": 51, "four": 24, "fourier": 20, "fpic": 48, "frac": [24, 55, 56, 57, 72, 81, 86, 87], "fraction": [15, 16], "freq": [20, 44, 49], "frequenc": [14, 20, 46, 48, 54, 56, 59, 88, 93, 96, 97], "frequenciesperdecad": 51, "from": [1, 2, 4, 6, 7, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 42, 43, 44, 45, 48, 50, 51, 52, 54, 55, 56, 57, 59, 60, 62, 63, 65, 66, 67, 68, 70, 71, 72, 73, 75, 76, 77, 78, 80, 81, 82, 83, 85, 86, 87, 88, 89, 90, 92, 95, 97], "froma": 24, "fromhdf": [1, 14, 15, 16, 17, 19, 20, 21, 22, 24, 26, 27, 29, 31, 35, 42, 43, 44, 49, 50, 51, 52, 55, 63, 71, 72, 73, 76, 77, 78, 82, 83, 87, 88, 89], "front": [4, 56], "full": [49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90], "function": 
[0, 1, 3, 6, 7, 25, 39, 60, 63, 86, 87, 88], "further": [24, 25], "futur": [56, 57], "g": [1, 2, 4, 6, 15, 16, 19, 20, 21, 24, 25, 26, 29, 31, 35, 43, 44, 48, 56, 81, 86, 87, 89], "ga": [48, 51, 56, 57], "galleri": [49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90, 93, 97], "gamma": [33, 41], "gammadistribut": [33, 34], "gatdaem1d": 45, "gatdaem1d_python": 48, "gate": [15, 16, 21, 22, 51, 56], "gaussian": 7, "gca": 6, "gcc": 48, "gener": [6, 7, 19, 20, 21, 24, 29, 31, 33, 35, 36, 37, 45, 48, 49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90, 93], "generate_subplot": 6, "geobipi": [0, 1, 2, 4, 6, 7, 11, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 34, 35, 36, 37, 38, 39, 40, 42, 43, 44, 45, 47, 49, 50, 51, 52, 55, 56, 57, 60, 65, 69, 70, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90, 93, 97], "geobipt": 31, "geobipy_doc": [56, 57], "geobipy_kwarg": 6, "geometeri": 55, "geophys": 38, "geoscienc": [48, 56, 57], "geoscienceaustralia": [56, 57], "get": [2, 4, 7, 11, 13, 15, 17, 19, 20, 21, 24, 25, 27, 31, 35, 36, 39, 48, 49, 50, 51, 55, 56, 57, 60, 87, 88, 89, 96], "get_column_nam": 4, "get_modellingtim": 45, "get_posit": [71, 72, 73], "get_prng": [57, 66, 67, 68], "get_real_numbers_from_lin": 4, "getcwd": [66, 67, 68], "getfileextens": 4, "getfiles": 4, "getfrequ": [14, 20], "getmeasurementtyp": [14, 20], "getnam": 7, "getnameunit": 7, "getncolumn": 4, "getnlin": 4, "getsizeof": 11, "getunit": 7, "git": 48, "github": [6, 56, 57], "give": [2, 19, 20], "given": [2, 4, 6, 7, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 36, 37, 39, 43, 49, 51, 55, 56, 57, 76, 81, 87], "glacial": [66, 67, 68, 71, 72, 73], "global": [2, 24, 25, 27, 29], "go": [48, 49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90], "grab": 60, "gradient": [24, 29, 76, 81], "gradient_prior": 29, "gradient_prob": 29, "gradientprior": 29, "graph": 87, "gray_r": [81, 89, 90], "great": 48, "greater": [24, 29], "grei": [71, 72, 73], "grid": [6, 24, 25, 26, 27, 29, 31, 35, 49, 50, 76, 77, 78, 81, 82, 83, 87, 88], "gridspec": [6, 76, 89], "ground": [14, 15, 16, 17, 19, 21, 22, 49, 51, 56, 57], "group": [1, 11, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 31, 42, 43], "group1": 1, "group1a": 1, "groupnam": 1, "grp": [14, 15, 16, 17, 19, 20, 21, 22, 24, 26, 27, 29, 31, 35, 42, 43, 44], "gs0": [71, 72, 73], "gz": 48, "h": [66, 67, 68, 88, 89, 90], "h1": [88, 89], "h1d": 88, "h2": [88, 89], "h2d": 89, "h3": 88, "h3d": 90, "h3d_read": 89, "h4": 88, "h5": [49, 50, 51, 52, 55, 63, 71, 72, 73, 76, 77, 78, 82, 83, 87, 88, 89], "h5obj": [1, 11, 44], "h5py": [1, 11, 49, 50, 51, 52, 55, 56, 57, 63, 76, 77, 78, 82, 83, 87, 88, 89], "h_": 24, "ha": [1, 2, 15, 16, 17, 19, 21, 22, 24, 25, 27, 31, 35, 42, 43, 48, 63, 87], "had": 24, "half": [13, 49, 50, 55, 56, 57, 90], "halfspac": [13, 29, 55, 56, 57, 85, 93, 97], "halfspacevalu": [29, 76, 81], "handl": [1, 4, 19, 20, 21, 31, 36, 48, 56, 60, 63], "handler": [71, 72, 73], "has_posterior": [49, 50, 72, 76, 87, 88], "hassames": 25, "have": [1, 2, 4, 6, 7, 15, 16, 17, 24, 25, 26, 27, 29, 31, 35, 43, 48, 49, 50, 55, 81, 87, 96], "hdf": [3, 11, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 42, 43, 44, 48, 63, 66, 67, 68, 96], "hdf5": [1, 65, 66, 67, 68, 71, 72, 73, 76, 87, 88, 89, 90, 93, 97], "hdf5_dir": 48, "hdf5_mpi": 48, "hdfname": 24, "hdfread": 1, "hdfwrite": 1, "header": [4, 19, 20, 21, 
22, 44, 50], "height": [14, 15, 16, 17, 21, 22, 31, 43, 49, 50, 51, 55, 56, 57, 83], "heightpropos": [15, 16, 17], "heirarch": 3, "hello": 2, "helloworld": 2, "help": [50, 56, 57, 66, 67, 68], "helper": 7, "here": [24, 48, 49, 51, 56, 63, 76, 87, 96], "hessian": 29, "hex": 6, "hh": [71, 72, 73], "hi": [48, 56, 57], "high": [35, 56], "higher": [35, 87], "highest": [1, 71, 72, 73], "highlight": 87, "hillshad": 6, "hist": [6, 87], "histogram": [6, 7, 24, 25, 26, 29, 41, 81, 92, 93, 97], "histogram1d": 35, "histogramequ": 7, "hitmap": [24, 29, 35, 81], "hline": 6, "hmin": 29, "hobj": 1, "holder": [36, 37, 39, 40], "homebrew": 48, "honour": 2, "horizont": [6, 19, 21, 22, 25, 26, 27, 77], "horiztont": 77, "how": [48, 49, 71, 72, 73, 87], "howev": [31, 87], "hspace": [71, 72, 73, 89], "hstack": [57, 88], "http": [6, 56, 57, 87], "hyperbol": 7, "hz": [55, 56], "i": [0, 1, 2, 4, 6, 7, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 36, 43, 44, 48, 49, 51, 55, 56, 57, 63, 66, 67, 68, 69, 71, 72, 73, 76, 77, 81, 87, 89, 90, 93, 96], "i_": 49, "i_129550": 49, "i_380": 49, "ice_over_salt_wat": [66, 67, 68, 71, 72, 73], "id": [6, 19, 20, 21, 22, 87], "idea": 31, "ident": 25, "identif": [49, 51], "ieee": [49, 50], "ignor": [6, 19, 21, 22, 49, 50], "illustr": 2, "imag": [6, 29, 35, 48, 87], "imaginari": [7, 49], "immedi": 2, "implement": [48, 96], "import": [1, 2, 20, 33, 48, 49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90], "impos": 29, "in_bound": [24, 25], "in_phas": 49, "inact": 19, "includ": [2, 15, 16, 17, 20, 24, 25, 27, 29, 31, 43, 48, 87], "incorpor": [56, 57], "increas": 81, "increment": [13, 31], "index": [1, 2, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 42, 43, 44, 49, 50, 51, 52, 55, 63, 66, 67, 68, 76, 77, 87, 88, 89], "indic": [2, 4, 6, 7, 13, 15, 17, 19, 20, 21, 22, 24, 25, 27, 31, 50, 76], "indici": 7, "individu": [6, 15, 16, 17, 24, 25, 26, 29, 31, 35, 43, 56], "inf": [31, 55, 56, 57, 81], "infer": [63, 66, 67, 68, 75, 81, 97], "inferecexd": [71, 72, 73], "inference2d": [71, 72, 73], "inference3d": [66, 67, 68], "inference_1d": [70, 97], "inference_2d": [75, 97], "infinit": [85, 93, 97], "info": 48, "inform": [19, 20, 21, 22, 24, 25, 44, 49, 55, 56, 57, 63, 71, 72, 73, 96], "inherr": 31, "initi": [14, 15, 16, 17, 45, 55, 56, 60, 71, 72, 73], "inphas": [14, 15, 16, 49], "inphaseerr": 49, "input": [2, 24, 29, 63, 66, 67, 68, 87], "inputfil": [66, 67, 68], "ins": 7, "insert": [24, 25, 87], "insert_edg": 24, "insid": [1, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 31, 42, 43], "instal": [31, 47, 96], "instanti": [2, 20, 24, 25, 27, 33, 36, 43, 49, 55, 63, 71, 72, 73, 76, 77, 88, 89, 90], "instead": [1, 6, 24, 25, 27, 48, 49], "instruct": [48, 51], "int": [2, 4, 6, 7, 13, 14, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 36, 37, 39, 42, 66, 67, 68], "int2str": 4, "integ": [2, 4, 7, 25, 26, 27, 56, 86, 87], "interactv": 6, "interest": [71, 72, 73], "interfac": [24, 48, 71, 72, 73, 76, 81], "interleav": 7, "intern": [24, 35, 38], "internaledg": 87, "interpol": [3, 19, 24, 31, 49, 50], "interpolated_pc3d": 50, "intersect": 77, "interv": [6, 24, 25, 35, 77, 89], "intervalstatist": [25, 77], "inv": 7, "invalid_v": 7, "invers": [7, 29, 38, 48], "invert": [7, 70, 93, 97], "inverv": 35, "io": [48, 56, 57], "iplotact": 15, "ipynb": [6, 49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90], "irecv": 2, "irecv_1int": 2, "irecvfromleft": 2, "irecvfromright": 
2, "irregularbin": 88, "irregularli": 88, "isend": 2, "isend_1int": 2, "isendtoleft": 2, "isendtoright": 2, "isfil": [66, 67, 68], "isfileextens": 4, "isinst": [86, 87], "isint": 7, "isintorslic": 7, "islink": [66, 67, 68], "isnumpi": 7, "item": [1, 2, 6, 15, 16, 19, 48, 76, 86, 87], "iter": [1, 87], "ith": [19, 20, 21, 24, 29], "its": [4, 19, 21, 22, 24, 48, 49, 51, 76, 81, 87, 89], "itself": 87, "ix": 15, "ixi": 25, "j": [55, 56, 57], "jet": [49, 71, 72, 73, 87], "job": [66, 67, 68], "join": [50, 51, 52, 55, 56, 57, 66, 67, 68], "journal": 38, "jump": 24, "jupyt": [49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90, 93], "just": [48, 87], "juxtapos": 31, "k": [24, 29, 31, 50, 66, 67, 68, 87], "k_": 24, "kb": 4, "keep": [6, 66, 67, 68, 71, 72, 73], "kei": [1, 76], "keyword": [6, 14, 19, 20, 21, 24, 25, 26, 31, 71, 72, 73, 87], "know": [48, 87], "kwarg": [1, 2, 6, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 36, 37, 38, 42, 43, 66, 67, 68, 71, 72, 73], "l": [48, 49], "l_": [17, 19], "label": [6, 24, 56, 87], "laptop": 48, "larg": [1, 2, 7, 63], "larger": [2, 48], "laser": [19, 20, 21, 22], "last": [2, 6, 7, 24, 25, 26, 29, 35, 48, 49, 51], "lastnonzero": 87, "late": 21, "later": [63, 71, 72, 73], "latex": 7, "layer": [24, 29, 55, 56, 57, 76, 81], "ldflag": 48, "lead": 4, "least": [19, 21, 22], "left": [2, 24, 25, 89], "legend": [6, 19, 20, 21, 89], "legend_s": 87, "length": [2, 6, 14, 15, 16, 17, 19, 29, 56], "leq": 24, "less": [2, 35, 87], "let": [48, 71, 72, 73, 87], "level": [14, 17, 49, 51], "leverag": 63, "lfftw3": 48, "lib": 48, "like": [2, 6, 7, 24, 25, 26, 27, 29, 35, 48, 56, 57, 87], "likelihood": [17, 55, 56, 57], "limit": [6, 24, 25, 26, 29, 35], "linalg": 7, "line": [4, 6, 14, 15, 16, 19, 20, 21, 22, 24, 25, 26, 27, 31, 44, 50, 51, 52, 56, 60, 66, 67, 68, 69, 71, 72, 73, 77, 87, 93], "line2d": 60, "line_indic": 77, "linear": [6, 24, 25, 26, 29, 35, 56, 77], "linearsegmentedcolormap": 6, "linearspac": [56, 81], "linenumb": [13, 14, 15, 16, 17, 19, 49, 55, 56], "linewidth": [49, 71, 72, 73, 77, 87], "link": [27, 48], "linspac": [33, 55, 57, 81, 87, 88, 89, 90], "list": [1, 2, 4, 6, 15, 16, 17, 19, 21, 22, 31, 44, 48, 76], "listdir": [66, 67, 68], "littl": 48, "ll": [71, 72, 73], "ln": [15, 16], "load": [2, 96], "loadbalance1d_shrinkingarrai": 2, "loadbalance3d_shrinkingarrai": 2, "loadbalance_shrinkingarrai": 2, "local": [27, 29, 48], "local_inverse_hessian": 29, "local_precis": 29, "local_vari": 29, "locat": [1, 6, 7, 15, 20, 24, 25, 27, 31, 35, 42, 44, 48, 49, 50, 51, 52, 55, 56, 81, 87], "log": [6, 7, 15, 16, 17, 24, 25, 26, 27, 29, 35, 36, 37, 39, 45, 49, 51, 52, 55, 56, 57, 60, 71, 72, 73, 81, 87], "log10": [13, 71, 72, 73, 87], "log2": 87, "logarithm": [7, 15, 16, 88], "logdet": 7, "logic": 19, "lognorm": 81, "logspac": [57, 76, 77, 88], "logx": 81, "long": 2, "longer": [71, 72, 73], "look": [49, 50], "loop": [14, 15, 16, 19, 20, 21, 22, 43, 44, 46, 49, 51, 55, 56], "looparea": 51, "loopoffset": 45, "lot": [2, 50, 87], "low": [6, 35, 56, 71, 72, 73], "lower": [35, 71, 72, 73, 87], "lowpassfilt": 51, "lti": 7, "lu": 7, "ly": 31, "m": [29, 31, 48, 49, 50, 51, 55, 56, 57, 66, 67, 68, 72, 76, 77, 78, 81, 82, 83, 87, 88, 89], "mac": 48, "machin": 48, "made": [2, 20, 25, 27, 33], "magnet": [20, 21, 22, 56], "mai": [1, 2, 6, 19, 21, 22, 24, 25, 26, 31, 48, 66, 67, 68, 87], "mainli": 96, "maintain": [4, 86, 87], "make": [6, 48, 66, 67, 68, 71, 72, 73, 76, 81], "make_colourmap": 6, "makefil": 48, 
"malinverno2002parsimoni": 38, "manag": 48, "manner": 24, "map": [6, 21, 31, 35, 49, 50, 89, 90], "map_to_pdf": 24, "mapchannel": [21, 49], "mapdata": [19, 49], "mappredicteddata": 19, "mapstd": 19, "margin": [35, 89, 90], "marker": 87, "markers": [6, 31, 87], "markov": [38, 96], "mask": [6, 24, 25, 26, 29, 31, 35, 49, 50, 76, 77, 87], "mask_below_doi": [71, 72, 73], "mask_cel": [24, 25, 76, 77], "master": [2, 6, 20], "match": 4, "math": 29, "mathbf": [17, 19], "matplotlib": [6, 14, 19, 20, 21, 24, 25, 26, 29, 31, 33, 35, 49, 50, 51, 52, 55, 56, 57, 60, 71, 72, 73, 76, 77, 78, 81, 82, 83, 87, 88, 89, 90], "matrix": [7, 14, 15, 29, 55, 56, 57], "max": [13, 24, 25, 31, 39, 49, 50, 55, 56, 57, 71, 72, 73, 76, 86, 87, 88], "max_cel": [24, 26, 29, 76, 81], "max_edg": [24, 29, 76, 81], "maxconduct": 13, "maximum": [4, 13, 24, 25, 29, 76, 81], "mb": [4, 54, 59, 62, 65, 70, 75, 80, 85, 92, 95, 97], "mc": [31, 50], "mcmc": 24, "me": [71, 72, 73], "mean": [24, 25, 35, 36, 37, 49, 50, 55, 56, 57, 60, 72, 77, 87, 89, 90], "mean_paramet": 72, "measrement": 45, "measur": [14, 15, 16, 19, 20, 21, 22, 44, 45, 49, 51, 56, 96], "measurementtim": 45, "med": 35, "median": [25, 31, 35, 49, 50, 88, 89, 90], "mem": [54, 59, 62, 65, 70, 75, 80, 85, 92, 95, 97], "member": 19, "memori": [1, 11, 24, 29, 63, 71, 72, 73, 87], "merg": 7, "mergecomplex": 7, "mesg": 24, "mesh": [8, 24, 25, 26, 27, 29, 31, 35, 50, 55, 57, 71, 72, 80, 81, 82, 83, 87, 88, 89, 90, 96, 97], "mesh2": 83, "mesh3d": 83, "mesh3d_read": 83, "meshgrid": [77, 78, 82, 83, 90], "messag": 7, "metadata": [14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 31, 42, 43], "method": [24, 31, 50, 63, 77, 87, 96], "metropoli": 87, "might": [2, 48, 55], "min": [13, 24, 25, 31, 39, 49, 50, 55, 56, 57, 71, 72, 73, 76, 86, 87, 88], "min_edg": [24, 29, 76, 81], "min_width": [24, 29], "minconduct": 13, "minimum": [13, 24, 25, 29, 31, 48, 50, 76, 81], "minimumratio": 29, "minut": [49, 50, 51, 52, 55, 56, 57, 60, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90], "misfit": [13, 17, 19, 55, 56, 57], "mixtur": 35, "mkdir": [48, 66, 67, 68], "mm": 87, "mod": [14, 55, 56, 57, 81, 82], "mod0": 81, "mod2": 82, "mod3": 82, "mode": [6, 35, 49, 50], "model": [8, 13, 14, 15, 24, 31, 35, 51, 55, 56, 57, 66, 67, 68, 71, 72, 73, 85, 96, 97], "model1": 83, "model1d": [24, 29], "model2": 83, "model2d": 82, "model3": 83, "model3d": [82, 83], "model3d_re1": 83, "model3d_re2": 83, "model3d_re3": 83, "model4": 83, "model_chang": 15, "model_class": 29, "model_typ": [66, 67, 68, 71, 72, 73], "modellingloopradiu": 51, "modifi": [38, 48, 56], "modul": [2, 4, 14, 19, 20, 24, 25, 26, 27, 29, 33, 34, 36, 37, 38, 39, 44, 48, 96], "moment": [15, 20, 40, 42, 43, 44, 49, 55], "mont": [38, 96], "more": [2, 14, 15, 16, 17, 25, 35, 55, 56, 57, 71, 72, 73, 96], "move": 31, "mp4": [89, 90], "mpi": [3, 19, 20, 21, 31, 42, 44, 63], "mpi4pi": [2, 19, 20, 31, 42], "mpicc": 48, "mpich2": 48, "much": 87, "multipl": [1, 6, 7, 14, 15, 24, 25, 26, 29, 35, 63, 77], "multipli": [7, 15, 16, 35, 55, 56, 57], "multiseg": 77, "multivari": [15, 16, 17, 36, 55, 56, 57], "must": [1, 2, 15, 16, 17, 20, 24, 25, 31, 43, 48, 49, 56], "mvlognorm": [55, 56, 57], "mvnormal": [33, 41, 55, 56, 57, 60, 87], "mvnormaldistribut": [33, 36], "mychunk": [2, 31], "mympi": 2, "mynam": [1, 11, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 31, 42, 43], "myobject": [11, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 36, 37, 38, 39, 40, 42, 43, 44, 45], "mystart": 31, "n": [2, 4, 6, 7, 19, 20, 21, 22, 24, 
31, 36, 37, 49, 51, 83, 87, 95], "n3d": 83, "n_cell": 76, "n_cells_prior": 24, "n_compon": [51, 52, 56, 57, 67, 68, 72, 73], "n_frequenc": 44, "n_markov_chain": [66, 67, 68], "n_sampl": 35, "nabla_": [24, 29], "nactivedata": 20, "naddress": 87, "name": [1, 2, 4, 6, 7, 11, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 31, 42, 43, 44, 49, 50, 51, 52, 55, 56, 57, 63, 72, 76, 77, 86, 87, 88, 89], "nan": [7, 13, 17, 19, 20, 24, 25, 31, 77], "natur": 7, "navig": 48, "nbin": [6, 7, 24, 25, 26, 29, 35, 36, 37, 39], "nbyte": 4, "nc": 52, "ncell": [24, 25, 26, 27, 55, 76, 77, 78, 81, 83], "nchannel": [17, 19, 21], "nchannelspersystem": [17, 19, 21], "nchunk": 2, "ncol": 81, "ndarrai": [1, 2, 4, 7, 25, 27, 37, 86, 87], "ndim": [2, 7, 31, 36, 37, 39, 40, 50, 56, 57], "nearest": [31, 50], "neat": 87, "necessari": [1, 48], "nedg": [25, 27], "need": [0, 31, 48, 49, 55, 56, 57, 63, 66, 67, 68, 69, 71, 72, 73, 87, 93, 96], "neg": [7, 24, 25, 27], "negat": [24, 25, 27], "neighbour": 31, "neither": [20, 21], "never": [24, 87], "new": [15, 16, 17, 24, 25, 29, 31, 43, 48, 56, 57, 76, 81, 88], "newadditiveerror": [15, 16, 17], "newaxi": 77, "newcalibr": [15, 16, 17], "newheight": [15, 16, 17], "newlin": [2, 24], "newrelativeerror": [15, 16, 17], "newton": 29, "newtown": 29, "next": 48, "nfok": [56, 57], "nfrequenc": [20, 49], "nhas_posterior": 87, "nheader": 4, "nice": [6, 11], "ninc": 13, "nlayer": 81, "nmax": 87, "nmin": 87, "nname": 87, "nnode": [25, 27], "nocolorbar": [6, 24, 25, 26, 29, 35, 81], "node": [25, 27, 31, 49, 50], "nois": [55, 56, 57], "noisi": 50, "nolabel": [6, 24], "nolegend": [19, 21], "non": [6, 7, 19, 24, 25, 26, 29, 35], "none": [1, 2, 4, 6, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 36, 37, 39, 42, 43, 44, 45, 51, 55, 66, 67, 68, 72, 76, 78, 83, 88], "nonlinear": 38, "norm": [17, 19], "normal": [24, 25, 27, 33, 34, 36, 41, 55, 56, 57, 88], "normaldistribut": [33, 37, 87], "north": [14, 15, 16, 17, 19, 20, 21, 22, 49, 50, 51, 77, 78, 82, 83], "note": [15, 16, 17, 19, 20, 21, 22, 31, 43, 48, 55, 56, 57, 89], "notebook": [49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90, 93], "noth": 24, "notic": 87, "now": [48, 49, 55, 71, 72, 73, 81], "np": [2, 20, 33, 49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90], "npoint": [19, 20, 21, 22, 31, 50], "npointsperlin": 19, "nrepeat": [1, 63], "nrow": 81, "nsampl": [13, 35], "nshape": 87, "nstd": [36, 37], "nsxfystem": 22, "nsystem": [21, 22], "nsystemsx1": [21, 22], "nthi": 83, "ntime": [21, 22, 51, 52, 56, 57, 67, 68, 72, 73], "number": [2, 4, 6, 7, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 36, 37, 39, 49, 51, 56, 72, 76, 81, 87, 88, 89, 90], "numberofabsiccainhankeltransformevalu": 51, "numberofturn": 51, "numberofwindow": 51, "numer": [24, 25, 29], "numpi": [1, 2, 4, 6, 7, 19, 20, 21, 24, 27, 29, 31, 33, 36, 37, 48, 49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90], "nvalu": 87, "nwindow": [19, 21, 22], "o": [11, 48, 50, 51, 52, 55, 56, 57, 66, 67, 68], "o3": 48, "ob": [17, 19], "obj": 48, "objdir": 48, "object": [1, 2, 6, 7, 10, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 42, 43, 44, 48, 50, 60, 63, 77, 83, 89], "observ": [13, 14, 15, 16, 17, 19, 20, 29, 42, 44, 49, 55, 56, 57], "obtain": [2, 4, 7, 19, 21, 25, 31, 35, 48, 50, 51, 52, 55, 56, 57, 87], "occur": 87, "off": [6, 19, 21, 22, 24, 25, 26, 29, 35, 51, 56], "off_tim": 
[15, 21, 45, 51, 56], "off_time_error": 51, "offerr": [19, 21, 22], "offset": [20, 44, 49, 55, 56], "offtim": 45, "offtimefilt": 45, "often": [55, 56, 57], "omit": [19, 21, 22], "onc": [2, 48, 49], "one": [1, 14, 15, 16, 17, 19, 24, 25, 27, 31, 48, 49, 55, 56, 81, 87], "ones": [14, 17, 19, 87], "onli": [1, 2, 15, 16, 17, 20, 24, 25, 27, 29, 31, 36, 48, 63, 71, 72, 73, 76, 81, 87], "onto": 29, "opac": [6, 24, 25, 26, 29, 35, 87, 89, 90], "opacity_level": 35, "opaqu": [35, 71, 72, 73], "open": 1, "openmpi": 48, "oper": [4, 56, 57, 87], "option": [1, 2, 4, 6, 7, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 36, 39, 42, 43, 48, 69, 87, 93], "options_fil": [66, 67, 68], "order": [2, 4, 24, 25, 27, 33, 41, 51, 55, 56, 57, 63, 76], "ordered_print": 2, "orderstatist": [33, 38], "ordin": [6, 14, 15, 16, 17, 19, 20, 21, 22, 25, 26, 27, 31, 49, 50, 51, 87], "org": 87, "orient": [14, 19, 20, 21, 22, 42, 43, 44, 48, 49, 55], "origin": [24, 25, 35], "other": [1, 2, 6, 19, 20, 21, 24, 25, 31, 42, 43, 49, 69, 81, 87, 93], "other_pc": 50, "otherwis": [24, 29], "our": [51, 55, 56, 57], "out": [1, 2, 4, 6, 7, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 36, 37, 42, 43, 45, 48, 71, 72, 73], "out_valu": [24, 25], "outermost": [24, 25, 27], "output": [2, 11, 24, 25, 27, 63, 66, 67, 68, 69, 93], "output_directori": [66, 67, 68], "outputtyp": 51, "outsid": 96, "over": [1, 13, 19, 25, 31, 77], "overflow": 7, "overlai": [35, 55, 56, 89], "overlain": 89, "overrid": [6, 24], "overshot": 31, "own": [48, 87], "p": [7, 24, 31, 50, 71, 72, 73, 76, 77, 87], "packag": [1, 48, 56, 57, 96], "pad": [4, 24, 29, 36, 76], "page": 51, "pair": [44, 49], "par": [29, 55, 56, 57, 81], "parallel": [1, 2, 25, 27, 36, 56, 63, 77, 87, 96], "parallel_infer": [71, 72, 73], "paramet": [1, 2, 4, 6, 7, 11, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 36, 37, 39, 42, 43, 45, 55, 56, 57, 71, 72, 73, 76, 81], "parameter_fil": [66, 67, 68], "parameter_kwarg": 81, "parameterlimit": 29, "parameterprior": 29, "parameterpropos": [24, 29], "parent": [14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 31, 42, 43, 66, 67, 68], "pars": 4, "parse_arg": [66, 67, 68], "parser": [66, 67, 68], "parsestr": 4, "part": [48, 87], "pass": [7, 29, 49, 56, 71, 72, 73], "pastel": 87, "path": [1, 4, 14, 31, 48, 50, 51, 52, 55, 56, 57, 66, 67, 68, 71, 72, 73], "pathcollect": 50, "pathlib": [51, 66, 67, 68], "pattern": 4, "paus": 6, "pb": 4, "pc3d": 50, "pc3d1": 50, "pcg64dxsm": [55, 56, 57, 60, 71, 72, 73, 76, 81, 87], "pcmesh": 6, "pcolor": [6, 21, 24, 25, 26, 29, 35, 51, 52, 55, 56, 57, 71, 72, 73, 76, 77, 81, 82, 83, 87, 88, 89, 90], "pcolor_1d": 6, "pcolor_as_bar": 6, "pcolormesh": [6, 24, 25, 26, 27], "pcsub": 50, "pdf": [24, 34, 60, 88, 89], "peak": 35, "peakcurr": 51, "per": [6, 15, 19, 20, 35, 56], "percent": [7, 35, 71, 72, 73, 88, 89], "percentag": [15, 16, 35], "percentil": [7, 35], "perform": [31, 50, 55, 56, 57, 77, 87], "perhap": 63, "pertain": 56, "perturb": [15, 16, 17, 24, 29, 31, 43, 55, 56, 57, 87], "perturbedmodel": 29, "pgeobipi": 96, "phase": [14, 20, 55], "pi": [50, 51], "pickl": 2, "piecewis": [6, 24, 56], "piecewise_constant_interpol": 24, "pip": 48, "pitch": [19, 21, 22, 42, 43, 49, 51, 55, 57], "pixel": [6, 24, 25, 26, 29, 35, 87], "place": [24, 36, 37, 39, 40], "plane": [25, 27, 56, 77], "pleas": [48, 51], "plot": [3, 13, 14, 15, 16, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 48, 49, 50, 51, 52, 55, 56, 57, 60, 71, 72, 73, 76, 77, 78, 81, 82, 83, 88, 89, 90, 96], 
"plot_2d_summari": [71, 72, 73], "plot_best_model": [71, 72, 73], "plot_burned_in": [71, 72, 73], "plot_channel_satur": [71, 72, 73], "plot_confid": [71, 72, 73], "plot_data": [19, 20, 21, 22, 49, 51, 52], "plot_data_elev": [71, 72, 73], "plot_dataarrai": [86, 92, 97], "plot_distribut": [60, 62, 97], "plot_elev": [71, 72, 73], "plot_entropi": [71, 72, 73], "plot_frequency_dataset": [49, 54, 97], "plot_grid": [24, 25, 27, 76, 77, 78, 81], "plot_halfspace_respons": [13, 55, 56, 57], "plot_histogram_1d": [88, 92, 97], "plot_histogram_2d": [89, 92, 97], "plot_histogram_3d": [90, 92, 97], "plot_inference_1d_resolv": [66, 70, 97], "plot_inference_1d_skytem": [67, 70, 97], "plot_inference_1d_tempest": [68, 70, 97], "plot_inference_2d_resolv": [71, 75, 97], "plot_inference_2d_skytem": [72, 75, 97], "plot_inference_2d_tempest": [73, 75, 97], "plot_interfac": [71, 72, 73], "plot_k_lay": [71, 72, 73], "plot_mean_model": [71, 72, 73], "plot_mode_model": [71, 72, 73], "plot_model_1d": [81, 85, 97], "plot_model_2d": [82, 85, 97], "plot_model_3d": [83, 85, 97], "plot_percentil": [71, 72, 73], "plot_pointcloud3d": [50, 54, 97], "plot_posterior": [55, 56, 76, 81], "plot_predict": [14, 19, 21, 22, 55, 56, 57], "plot_predicted_secondary_field": 57, "plot_rectilinear_mesh_1d": [76, 80, 97], "plot_rectilinear_mesh_2d": [77, 80, 97], "plot_rectilinear_mesh_3d": [78, 80, 97], "plot_relative_to": 25, "plot_resolve_datapoint": [55, 59, 97], "plot_secondary_field": 57, "plot_skytem_datapoint": [56, 59, 97], "plot_skytem_dataset": [51, 54, 97], "plot_statarrai": [87, 92, 97], "plot_tempest_datapoint": [57, 59, 97], "plot_tempest_dataset": [52, 54, 97], "plotcredibleinterv": [81, 88, 89], "plotdataresidu": [56, 57], "plotlin": [20, 49, 51, 52], "plotmean": [88, 89], "plotmedian": [88, 89], "plt": [6, 25, 26, 27, 33, 49, 50, 51, 52, 55, 56, 57, 60, 71, 72, 73, 76, 77, 78, 81, 82, 83, 87, 88, 89, 90], "plu": [25, 26], "pmf": [88, 89], "pnear": 50, "png": [71, 72, 73], "point": [6, 13, 14, 15, 16, 17, 19, 20, 21, 22, 25, 26, 27, 29, 32, 42, 43, 45, 48, 49, 51, 52, 54, 55, 56, 57, 66, 67, 68, 71, 72, 73, 87, 93, 97], "pointcloud": [8, 13, 14, 15, 16, 17, 19, 20, 21, 22, 31, 42, 43, 50], "pointcloud3d": [19, 20, 21, 31, 42, 43], "pointdata": 31, "pointer": 87, "polar": 56, "posit": [49, 55, 56, 88], "possibl": [24, 29], "possibli": [49, 50], "posterior": [13, 14, 15, 16, 17, 29, 55, 56, 57, 75, 76, 81, 93, 96, 97], "potenti": 87, "power": 7, "ppm": [17, 49], "ppropos": 81, "pre": [17, 19, 49, 50], "prealloc": 1, "prebuilt": 48, "preced": [7, 19], "precomput": [71, 72, 73], "predict": [13, 14, 15, 16, 17, 19, 21, 49, 55, 56, 57], "predicted_primary_field": [15, 21], "predicted_secondary_field": [15, 21], "predicteddata": [13, 14, 15, 16, 17, 19, 55, 57], "prefix": 48, "prepar": [49, 51, 55, 56, 57], "prepend": 87, "present": 14, "pretti": 6, "prevent": 24, "previou": [24, 49, 50], "primari": 57, "primary_field": [15, 21, 57], "print": [2, 15, 17, 42, 43, 49, 50, 51, 55, 56, 57, 63, 66, 67, 68, 71, 72, 73, 76, 81, 86, 87, 88], "prior": [15, 16, 17, 24, 29, 31, 43, 55, 56, 57, 76, 81], "prior_prob": 87, "priorss": 56, "prng": [24, 29, 33, 36, 37, 39, 40, 55, 56, 57, 60, 66, 67, 68, 71, 72, 73, 76, 81, 87], "probabl": [15, 16, 17, 24, 29, 31, 33, 36, 37, 43, 60, 71, 72, 73, 76, 81, 87], "problem": 38, "procedur": [1, 7, 24, 34, 36, 37, 39], "process": [7, 24, 49, 50, 71, 72, 73, 81], "prod_": 24, "produc": [66, 67, 68, 71, 72, 73, 87], "product": [2, 7, 48], "profil": [13, 29], "program": [48, 63, 87], "prop": 19, "properti": 
[13, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 36, 37, 39, 40, 42, 43, 44, 45, 49, 71, 72, 73, 96], "propos": [15, 16, 17, 24, 29, 31, 43, 55, 56, 57, 76, 81], "proposal_prob": 29, "propsal": [15, 16, 17, 31, 43], "provid": [1, 6, 29, 48, 71, 72, 73, 77], "pseudo": 33, "pseudocolour": [6, 24, 25, 26], "pull": [55, 56, 57], "purpos": 2, "pv": [78, 82, 83], "pwheel": 29, "py": [48, 49, 50, 51, 52, 54, 55, 56, 57, 59, 60, 62, 63, 65, 66, 67, 68, 69, 70, 71, 72, 73, 75, 76, 77, 78, 80, 81, 82, 83, 85, 86, 87, 88, 89, 90, 92, 93, 97], "pygmt": 31, "pyplot": [6, 14, 24, 25, 26, 33, 49, 50, 51, 52, 55, 56, 57, 60, 71, 72, 73, 76, 77, 78, 81, 82, 83, 87, 88, 89, 90], "python": [0, 6, 49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 69, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90, 93], "pyvista": 27, "pyvista_mesh": [27, 78, 83, 89], "pyvista_plott": [78, 82, 83], "pyvtk": [19, 31], "q": [20, 29, 49], "q_": 49, "q_129550": 49, "q_380": 49, "quad": 24, "quadmesh": [6, 50, 77, 83, 89], "quadrat": 13, "quadratur": [14, 15, 16, 20, 49, 55], "quadratureerr": 49, "quick": [50, 55, 56, 57, 87], "quickli": 87, "quit": 31, "r": [25, 26, 27, 49, 50, 51, 52, 55, 63, 76, 77, 78, 82, 83, 87, 88, 89], "r2d": [19, 20, 21, 31, 49], "r3d": [19, 20, 21, 31, 49], "r_": [49, 55, 56, 57], "radiu": [31, 42, 49, 50, 55], "rais": [2, 15, 16, 17, 20, 21, 24, 31, 43], "ran": 48, "rand": 50, "randn": [49, 63, 76, 77, 86, 87, 88, 89, 90], "random": [24, 29, 33, 36, 37, 49, 50, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 77, 86, 87, 88, 89, 90], "randomli": 81, "randomst": [24, 29, 33, 36], "rang": [24, 27, 35, 36, 37, 39, 55, 56, 57, 71, 72, 73, 76, 77, 81, 87, 89, 90], "rank": [2, 19, 20, 31], "rankprint": 2, "ratio": [29, 35], "ravel": [25, 27], "ravelindic": [25, 27], "re": 7, "read": [1, 2, 4, 14, 15, 16, 17, 19, 20, 21, 22, 24, 26, 27, 29, 31, 35, 42, 43, 44, 45, 48, 50, 55, 56, 57, 63, 66, 67, 68, 71, 72, 73, 76, 87], "read_al": 1, "read_csv": [19, 20, 21, 22, 31, 49, 50, 51, 52, 55], "read_groups_with_tag": 1, "read_hdf": 63, "read_item": 1, "read_netcdf": [22, 52], "read_text": 51, "readaarhusfil": 20, "readabl": 31, "readhdf": 1, "readi": [56, 57], "readkeyfromfil": 1, "real": [7, 49], "realiz": [29, 36, 37], "realli": 87, "reason": [66, 67, 68], "receiv": [2, 15, 16, 19, 21, 22, 31, 44, 49, 51, 55, 56, 57], "receiver_loop": [15, 16], "receiver_pitch_prior": 57, "receiver_pitch_propos": 57, "receiver_x_prior": 57, "receiver_x_propos": 57, "receiver_z_prior": 57, "receiver_z_propos": 57, "receiverloop": 45, "reciev": [20, 43, 44, 51], "reciproc": [6, 7, 17, 19, 24, 35, 81], "reciprocatei": 6, "reciprocatex": [6, 24], "recommend": [24, 29, 48], "record": [55, 56, 57, 81], "rectangular": 56, "rectilinear": [24, 25, 26, 27, 50, 80, 85, 93, 97], "rectilinearmesh": 24, "rectilinearmesh1d": [25, 27, 28, 55, 56, 57, 72, 76, 77, 81, 87, 88], "rectilinearmesh1d_class": 24, "rectilinearmesh2d": [26, 27, 28, 72, 76, 77, 82, 89], "rectilinearmesh2d_class": [25, 26, 27], "rectilinearmesh2d_stitch": [28, 76], "rectilinearmesh3d": [28, 77, 78, 83, 90], "rectilinearmeshnd": 24, "recv": 2, "reerr": [15, 16, 17, 31, 43], "ref": [49, 55, 56, 57], "refer": [1, 76, 81, 87], "referenc": 25, "reflect": 56, "region": [71, 72, 73], "regist": 6, "regularli": [45, 88], "reject": [56, 57], "rel": [2, 15, 16, 17, 24, 25, 26, 31, 36, 42, 43, 49, 55, 56, 57, 87], "relative_error": [15, 19, 22, 55, 56, 57], "relative_error_prior": [55, 56, 57], "relative_error_propos": [15, 16, 17, 57], "relative_prior": 57, "relative_propos": 
57, "relative_to": [24, 25, 26, 27, 72, 88], "relative_tocentr": 27, "relative_toedg": 27, "relativeerr": [15, 16], "relativeerrorpropos": [15, 16, 17], "relativeprior": [55, 56], "relativepropos": [55, 56], "remain": [66, 67, 68], "remaind": 2, "remap": [24, 25, 29, 81], "remapped_model": 29, "remappedmodel": 29, "rememb": 81, "remov": [6, 24, 48, 76, 81], "repeat": [76, 77, 78, 83], "replac": 48, "repons": 13, "repositori": [48, 49, 51, 56, 57], "repres": [7, 14, 19, 21, 22, 25, 49, 51, 87], "represent": 7, "reprodic": 24, "request": [15, 16, 17, 31, 33, 35, 43], "requir": [1, 19, 20, 21, 22, 31, 36, 48, 87], "resampl": [77, 82], "residu": [17, 19, 57], "resistive_bas": [66, 67, 68, 71, 72, 73], "resistive_dolomit": [66, 67, 68, 71, 72, 73], "resiz": 87, "resolv": [49, 55, 67, 68, 70, 75, 93, 97], "resolve1": [49, 50], "resolve2": [49, 55], "resolve_glaci": 66, "resolve_opt": 66, "respect": [24, 49], "respons": [55, 56, 57], "result": [7, 24, 31, 48, 49, 50, 87, 96], "results_2d": [71, 72, 73], "retriev": 77, "return": [1, 2, 4, 6, 7, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 36, 37, 39, 42, 43, 45, 48, 50, 66, 67, 68, 87], "revers": [24, 29], "rgb": 6, "right": [2, 24, 25], "rm": [76, 77, 78, 82], "rm0": 76, "rm1": [76, 78], "rm1d": 76, "rm2": [76, 77, 78], "rm2d": 77, "rm3": [77, 78], "rm3d": 78, "rm3d_re1": 78, "rm3d_re2": 78, "rm3d_re3": 78, "rm3d_read": 78, "rm4": [77, 78], "rm_mask": [76, 77], "rmom": [20, 44, 49], "rng": [37, 60], "roll": [19, 21, 22, 42, 43, 49, 51, 55], "root": [2, 19, 20, 21, 31, 42, 44], "ror": [20, 44, 49], "rosenbrock": 7, "ross": [48, 51, 56, 57], "round": [7, 49, 50], "routin": [0, 2, 6, 24, 25, 26, 29, 35, 50, 63, 87, 96], "row": [6, 20, 44, 49], "royalosyin": 6, "run": [48, 49, 50, 51, 52, 55, 56, 57, 60, 69, 70, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90, 93, 97], "runtimewarn": 56, "rx": [20, 44, 49], "rx_pitch": 51, "rx_roll": 51, "rx_yaw": 51, "rxpitch": [19, 21, 22, 51], "rxroll": [19, 21, 22, 51], "rxyaw": [19, 21, 22, 51], "ry": [20, 44, 49], "rz": [20, 44, 49], "s_": [87, 90], "sai": 48, "saline_clai": [66, 67, 68, 71, 72, 73], "same": [2, 6, 15, 16, 24, 25, 26, 27, 29, 35, 48, 87], "sampl": [35, 36, 37, 60, 87, 96], "satisfi": 87, "satur": [71, 72, 73], "save": [31, 50, 78, 83, 88, 89, 90], "savefig": [71, 72, 73], "sc": 50, "scalar": [2, 6, 7, 15, 16, 24, 25, 26, 27, 29, 35, 36, 56, 57], "scale": [6, 24, 25, 26, 29, 35, 63, 87], "scatter": [2, 6, 31, 49, 52, 87], "scatter2d": [6, 31, 49, 50, 51, 52], "scatterplot": 6, "scatterv": [2, 19, 20, 21, 31], "scatterv_list": 2, "scatterv_numpi": 2, "scipi": [25, 36], "screen": 50, "script": [49, 50, 51, 52, 55, 56, 57, 60, 66, 67, 68, 69, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90, 93], "sea": [14, 17], "seaborn": 87, "search": [13, 50, 55, 56, 57], "searchradiu": 50, "second": [6, 24, 25, 27, 29, 49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90], "secondari": 57, "secondary_field": [15, 21, 51, 57], "secondaryfieldnormalis": 51, "section": [2, 71, 72, 73, 96], "see": [2, 25, 27, 31, 48, 49, 51, 55, 56, 57, 96], "seed": [55, 56, 57, 60, 66, 67, 68, 71, 72, 73, 76, 81, 87], "seem": 2, "seen": 48, "segment": [6, 56], "select": 14, "self": [2, 6, 7, 15, 16, 19, 20, 21, 24, 25, 26, 27, 29, 31, 51, 52, 56, 57, 67, 68, 72, 73], "semicolon": 56, "semilogx": 14, "send": 2, "sens": 87, "sensit": [14, 15, 55, 56, 57], "sensitivity_matrix": 57, "sensitivti": [14, 15], "sent": [2, 19, 20, 31], "separ": [2, 31, 
49, 56], "seq": 6, "sequenc": [6, 21, 22, 31, 37], "serial": 96, "set": [0, 1, 6, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 34, 35, 42, 43, 51, 52, 55, 56, 57, 76, 81, 87, 88], "set_additive_error_posterior": [16, 17], "set_kdtre": [31, 50], "set_pitch_posterior": 43, "set_posit": [71, 72, 73], "set_posterior": [15, 16, 17, 55, 56, 57, 76, 81], "set_prior": [24, 29, 55, 56, 57, 76, 81], "set_propos": [15, 16, 17, 24, 29, 55, 56, 57, 76, 81], "set_relative_error_posterior": [16, 17], "set_roll_posterior": 43, "set_titl": [71, 72, 73], "set_vis": 89, "set_x_posterior": 31, "set_y_posterior": 31, "set_yaw_posterior": 43, "set_z_posterior": 31, "setalphaperpcolormeshpixel": 6, "setpropos": 24, "setup": [24, 29, 48], "sgradient": 29, "sh": 48, "shape": [2, 6, 25, 26, 27, 29, 35, 49, 50, 57, 63, 72, 76, 77, 86, 87, 88], "sharei": [50, 71, 72, 73, 81, 87, 89], "sharex": [50, 71, 72, 73, 87, 89], "shell": 48, "should": [6, 14, 15, 16, 19, 20, 21, 22, 43, 48, 49, 50, 51, 87], "show": [6, 33, 49, 50, 51, 52, 55, 56, 57, 71, 72, 73, 76, 77, 78, 81, 82, 83, 87, 88, 89, 90], "show_edg": 27, "show_grid": 27, "shown": [69, 93], "shrink": 2, "sibson": 50, "side": [2, 24, 48, 56], "sigma": [24, 29], "sigma_": 24, "signal": 7, "similar": [69, 93], "similarli": [63, 87], "simpl": [6, 7, 24, 25, 27, 87], "simplest": 87, "simpli": [48, 81, 96], "simultan": 87, "sin": [50, 77, 78, 82, 83, 90], "sinc": [24, 25, 27, 36, 48, 81, 87], "singl": [1, 2, 4, 14, 17, 19, 20, 21, 22, 25, 49, 50, 56, 57, 77, 81, 87], "size": [2, 4, 6, 11, 17, 19, 20, 21, 22, 24, 25, 29, 31, 35, 36, 37, 50, 77, 86, 87], "sizelegend": [6, 87], "skip": [4, 50], "skip_posterior": [24, 26, 27, 29, 87], "skytem": [54, 59, 70, 75, 93, 97], "skytem_512": [66, 68], "skytem_glaci": 67, "skytem_opt": [67, 69, 93], "skytem_saline_clai": [51, 56], "skytemhighmo": 51, "skytemhm": [51, 56, 67], "skytemlm": [51, 56, 67], "slice": [1, 6, 7, 49, 77, 90], "slow": [2, 36], "slower": 2, "smaller": 31, "smooth": [7, 31], "so": [1, 2, 6, 7, 13, 24, 25, 26, 29, 35, 48, 49, 51, 55, 56, 57, 63, 71, 72, 73, 87], "solut": 24, "solvabl": [55, 56, 57], "solve_additive_error": 57, "solve_gradi": [29, 81], "solve_valu": [29, 81], "some": [2, 49, 50, 55, 56, 57, 63, 71, 72, 73, 77, 78, 82, 83, 88, 89, 90], "someth": 48, "sought": 7, "sound": [15, 16, 21, 22, 56], "sourc": [0, 2, 48, 49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90, 93], "sourcetyp": 56, "space": [13, 24, 31, 35, 45, 55, 56, 57, 63, 87], "span": 45, "spar": 29, "spatial": 50, "special": 7, "specif": [2, 19, 20, 21, 33, 38, 49, 51, 52], "specifi": [1, 2, 6, 7, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 31, 35, 43, 49, 51, 52, 56, 63, 76, 77, 78, 81, 82, 83, 87], "specifyin": [25, 27], "speed": [6, 86, 87], "sphinx": [49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 87, 88, 89, 90, 93], "sphinx_gallery_thumbnail_numb": 89, "spine": 89, "split": [0, 2, 4, 7, 63], "splitcomplex": 7, "sqrt": [25, 27, 51, 77, 78, 82, 83, 90], "squar": [17, 19, 49, 51], "squareloop": 56, "src": [1, 2, 4, 6, 7, 11, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 34, 35, 36, 37, 38, 39, 40, 42, 43, 44, 45, 48, 50, 56, 87, 88], "srcdir": 48, "stack": [6, 87], "stackedareaplot": 87, "stackplot2d": 6, "stand": 87, "standard": [6, 14, 17, 19, 24, 25, 26, 27, 29, 35, 48, 49, 51, 55, 56, 60], "standpoint": 87, "start": [2, 19, 20, 21, 31, 48, 49, 56, 66, 67, 68, 71, 72, 73, 96], "stat": 25, "statarrai": [2, 6, 10, 11, 
14, 15, 16, 17, 19, 21, 22, 24, 25, 26, 27, 31, 36, 42, 43, 49, 50, 51, 55, 56, 57, 63, 72, 76, 77, 78, 81, 82, 83, 86, 88, 89, 90, 92, 93, 97], "statarrayor": 19, "state": [24, 36, 81], "static": 21, "stationnumb": 56, "statist": [8, 24, 25, 33, 34, 35, 36, 37, 39, 40, 55, 60, 76, 77, 86, 87, 92, 96, 97], "std": [13, 14, 15, 16, 17, 19, 20, 21, 29, 48, 49, 55, 56, 57], "stdout": 2, "step": [6, 24, 49, 50, 63], "still": 87, "stitch": 26, "stm": [49, 51, 52, 55, 56, 57, 66, 67, 68], "stochasit": 81, "stochast": 29, "store": [49, 55, 56, 57, 63, 66, 67, 68, 71, 72, 73], "str": [1, 2, 4, 6, 7, 11, 14, 15, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 42, 45, 66, 67, 68], "str_to_raw": 7, "straight": [77, 78, 82, 83], "stream": 2, "strictli": 4, "string": [1, 2, 4, 7, 25, 56], "structur": [20, 24, 29, 31, 44, 49, 81], "structure_onli": 29, "style": 87, "sub": 1, "subclass": [15, 16, 17, 33], "subject": [49, 50], "subplot": [6, 50, 51, 55, 56, 57, 71, 72, 73, 76, 77, 78, 81, 82, 83, 87, 88, 89, 90], "subplotspec": 6, "subsequ": [20, 44, 49], "subset": 31, "subsurfac": 96, "subtract": 4, "subtyp": 7, "successfulli": 1, "suffic": 48, "suffix": 48, "suitabl": [25, 26, 27], "sum": [6, 15, 16, 17, 19, 25, 51], "summari": [15, 17, 19, 21, 24, 25, 26, 27, 29, 31, 42, 43, 44, 49, 50, 72, 76, 86, 87, 88], "summaryplot": 87, "summat": 24, "sun": 6, "supplementari": [49, 50, 51, 52, 55, 56, 57, 66, 67, 68], "suppli": [49, 50], "suptitl": [57, 71, 72, 73, 89, 90], "sure": [48, 66, 67, 68], "surfac": [49, 50], "surround": 7, "sx": 57, "sy": [2, 66, 67, 68], "system": [4, 8, 14, 15, 16, 17, 19, 20, 21, 22, 42, 43, 52, 55, 56, 57, 63, 66, 67, 68], "system_filenam": [22, 45, 52, 66, 67, 68], "systemfil": [20, 49, 51, 52, 55, 56, 57], "systemfilenam": [19, 21, 22, 45], "sz": 57, "t": [6, 15, 16, 20, 44, 87], "t0": [66, 67, 68], "tag": 1, "take": [2, 4, 6, 7, 19, 24, 25, 26, 29, 31, 35, 49, 50, 56, 66, 67, 68, 71, 72, 73, 87, 88, 89, 90], "taken": [6, 31], "tangent": 7, "tanh": 7, "tar": 48, "td": [51, 52], "td3": [51, 52], "tdaemsystem": 45, "tdem": [51, 52], "tdemdata": [22, 23, 51, 56], "tdemdatapoint": [13, 16, 18, 21, 56], "tdemsystem": [15, 16, 45, 56], "tdemsystem_gaaem": 45, "tdp": [51, 52, 56, 57], "tell": 2, "tem": 56, "temdatapoint": 57, "tempest": [16, 54, 59, 66, 67, 70, 75, 93, 97], "tempest_datapoint": 22, "tempest_glaci": 68, "tempest_opt": 68, "tempest_saline_clai": [52, 57], "tempestdata": [23, 52, 57], "tensor_id": 44, "test": [7, 24, 48, 50, 76, 77, 78, 81, 83, 86, 87, 89, 90], "text": [25, 52, 56, 57], "th": [1, 44], "than": [2, 15, 16, 17, 24, 25, 29, 55, 56, 57, 87], "thank": [56, 57], "thei": [24, 25, 27, 71, 72, 73, 87], "them": [1, 31, 48, 71, 72, 73], "therefor": 87, "thi": [1, 2, 4, 7, 13, 15, 16, 19, 20, 21, 22, 24, 25, 27, 29, 31, 35, 36, 39, 43, 48, 49, 51, 55, 56, 57, 66, 67, 68, 69, 71, 72, 73, 77, 78, 81, 82, 83, 87, 88, 89, 90, 93, 96], "thick": [24, 29], "thing": 48, "think": 48, "third": [2, 77], "thk": 81, "those": [0, 1, 48, 50, 55, 60, 76, 81, 87], "thread": 48, "three": [2, 6, 77], "through": [1, 2, 29, 87], "thu": 24, "tight_layout": [55, 56, 57, 87], "time": [2, 15, 16, 19, 21, 22, 46, 49, 50, 52, 55, 57, 60, 66, 67, 68, 71, 72, 73, 76, 77, 78, 82, 83, 86, 87, 88, 89, 90, 96], "timedelta": [66, 67, 68], "titl": [2, 14, 15, 52, 55, 56, 57, 89], "tmom": [20, 44, 49], "tmp": [76, 87], "to_vtk": [49, 50, 82, 90], "tocher": 31, "togeth": [7, 19, 20, 21, 31, 42, 43, 49, 50], "tohdf": [11, 44, 63, 76, 77, 78, 82, 83, 87, 88, 89], "too": [20, 87], "top": [1, 6, 24, 25, 
48, 71, 72, 73], "topo": [19, 21, 22], "tor": [20, 44, 49], "total": [4, 49, 50, 51, 52, 54, 55, 56, 57, 59, 60, 62, 65, 66, 67, 68, 70, 71, 72, 73, 75, 76, 77, 78, 80, 81, 82, 83, 85, 86, 87, 88, 89, 90, 92, 95, 97], "tovtk": 31, "track": [66, 67, 68], "tradit": 48, "tran": 6, "transform": 6, "transmitt": [15, 16, 19, 20, 21, 22, 43, 44, 49, 51, 55, 56], "transmitter_loop": [15, 16], "transmitterloop": 45, "transpar": [6, 35, 71, 72, 73, 89, 90], "transpos": [6, 55, 57, 76, 77, 81, 88, 89], "tree": 31, "tri": [7, 24], "triangul": 50, "trick": 87, "trickier": 48, "trim": [6, 7, 24, 25, 26, 27, 29, 35, 87, 88], "trim_by_percentil": 7, "true": [2, 6, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 35, 42, 43, 52, 55, 56, 57, 66, 67, 68, 71, 72, 73, 76, 77, 81, 87, 88, 89], "true_model": [71, 72, 73], "truncat": 7, "try": [48, 66, 67, 68, 71, 72, 73], "tune": 29, "tupl": [25, 27, 29, 86, 87, 90], "turn": [6, 24, 25, 26, 29, 35], "two": [0, 2, 7, 25, 27, 44, 48, 49, 77, 87, 96], "tx": [20, 44, 49, 51], "tx_pitch": 51, "tx_roll": 51, "tx_yaw": 51, "txpitch": [19, 21, 22, 51], "txroll": [19, 21, 22, 51], "txrx_dx": 51, "txrx_dy": 51, "txrx_dz": 51, "txt": [49, 50, 51, 55], "txyaw": [19, 21, 22, 51], "ty": [20, 44, 49], "type": [0, 1, 2, 4, 6, 7, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 36, 37, 39, 42, 43, 45, 48, 51, 56, 66, 67, 68, 72, 87, 96], "typeerror": [2, 15, 16, 17, 24, 31, 43, 86, 87], "typic": [25, 27, 87], "tz": [20, 44], "u": [55, 56, 57, 77, 86, 87], "uncertainti": [14, 17, 19], "under": [19, 21, 22, 51], "underlai": [71, 72, 73], "uniform": [24, 33, 35, 41, 55, 56, 57, 60, 76, 87], "uniformdistribut": [33, 39, 87], "uninform": 24, "uniqu": [19, 21, 22, 49, 51], "unit": [7, 17, 19, 31, 63, 76, 77, 86, 87, 88, 89], "univari": [15, 16, 17, 37, 87], "unless": 24, "unlink": [66, 67, 68], "unpack": [15, 16], "unperturb": 24, "unpickl": 2, "unraveled_coord": [25, 27], "unravelindex": [25, 27], "untar": 48, "unus": [49, 50], "unweight": 25, "up": [2, 35], "upcast": [24, 25, 26, 27], "updat": [13, 14, 15, 16, 24, 29, 81, 87, 88, 89, 90], "update_parameter_posterior": 29, "update_posterior": [13, 14, 15, 29, 55, 56, 57, 76, 81, 87], "updatesensit": 14, "upper": [15, 16, 17, 19, 21, 22, 25, 27, 31, 35, 42, 43], "uppercas": 2, "upto": [6, 7, 24, 29], "us": [0, 1, 2, 4, 6, 7, 15, 16, 17, 19, 20, 21, 24, 25, 26, 27, 29, 31, 33, 35, 36, 42, 43, 44, 48, 50, 51, 55, 65, 66, 67, 68, 69, 71, 72, 73, 76, 81, 87, 88, 93, 97], "use_vari": [71, 72, 73], "user": [2, 25, 56, 57, 66, 67, 68, 87], "user_paramet": [66, 67, 68], "usevari": [71, 72, 73], "usr": 48, "usual": [49, 55], "util": 3, "utm": 56, "v": [13, 87], "v0": [15, 16], "valu": [2, 4, 6, 7, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 29, 31, 35, 39, 49, 50, 55, 56, 57, 71, 72, 73, 76, 77, 81, 82, 83, 86, 87, 88], "value_mean": 81, "valueerror": [15, 16, 31], "values2": 77, "values_prior": 29, "variabl": [1, 2, 6, 7, 87, 89, 90], "varianc": [24, 29, 36, 37, 55, 56, 57, 87], "vector": [7, 15, 16, 50], "verbos": [15, 16, 17, 31, 43], "veri": 48, "version": [25, 27, 96], "vertic": [6, 7, 14, 15, 16, 25, 26, 27, 31, 56, 76, 81], "via": [24, 29, 69, 93], "vline": 6, "vmax": [71, 72, 73], "vmin": [71, 72, 73], "vtk": [19, 27, 31, 49, 50, 78, 82, 83, 89, 90], "vtkdata": [19, 31], "vtkstructur": [19, 31], "w": [17, 19, 49, 50, 51, 52, 55, 63, 76, 77, 78, 82, 83, 87, 88, 89], "wa": [1, 6, 29, 31, 48, 51], "wai": [1, 2, 24, 25, 27, 49, 50, 69, 77, 90, 93], "wall": 48, "want": [4, 48], "warn": [7, 49, 50, 66, 67, 68, 
73], "wash": [71, 72, 73], "waveform": [45, 56], "waveformcurr": 51, "waveformdigitisingfrequ": 51, "waveformtim": 45, "wc": 4, "wccount": 4, "we": [24, 48, 49, 50, 51, 55, 56, 57, 60, 63, 71, 72, 73, 76, 77, 78, 81, 82, 83, 87, 88, 89, 90, 96], "weight": [7, 25, 87], "well": [15, 16, 17, 19, 21, 22, 24, 25, 27, 29, 31, 42, 43, 48], "were": [49, 50], "werthmul": [56, 57], "what": [2, 6, 19, 21, 22, 48, 49, 51, 56], "whatev": 87, "when": [6, 15, 16, 17, 24, 29, 31, 43, 48, 71, 72, 73, 87], "where": [15, 16, 17, 19, 20, 24, 44, 48, 66, 67, 68, 87], "whether": [1, 4, 7, 19, 24, 25, 48, 87], "which": [1, 2, 6, 7, 14, 19, 20, 24, 25, 27, 31, 35, 48, 49, 56, 87], "while": [2, 55, 77], "white": [6, 87], "whose": [25, 27, 31, 77], "width": [24, 25, 27, 76, 81], "wiki": 87, "wikipedia": 87, "window": [15, 19, 21, 22, 45, 51], "windowtim": 51, "windowweightingschem": 51, "wise": 7, "wish": [1, 4, 48, 87], "with_error_bar": [14, 15], "within": [1, 25, 31, 48, 65, 93, 97], "without": [4, 7, 48], "withposterior": [11, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 31, 42, 43, 55, 76, 87], "won": 87, "wont": [24, 25, 27], "work": [6, 13, 48, 87], "workbench": 20, "world": [2, 19, 20, 21, 31, 42, 44], "worri": 48, "would": [2, 24, 25, 27, 48, 56, 57], "wrap": [2, 6, 24, 25, 27, 48, 87], "wrap_clabel": [71, 72, 73], "wrap_ylabel": [71, 72, 73], "wrapper": [3, 41, 48], "write": [1, 11, 15, 16, 17, 19, 21, 22, 25, 27, 31, 42, 43, 44, 48, 49, 50, 63, 69, 76, 87, 93], "write_csv": 52, "write_hdf": 63, "write_nd": 1, "writehdf": [1, 15, 16, 17, 19, 21, 22, 25, 27, 31, 42, 43, 49, 50, 51, 52, 55, 63, 76, 77, 78, 83, 87, 88, 89], "written": [1, 2, 48, 63, 87], "wrote": 87, "wspace": 89, "ww": [71, 72, 73], "x": [2, 6, 7, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 34, 35, 36, 37, 39, 42, 43, 44, 49, 50, 51, 55, 56, 57, 63, 67, 72, 76, 77, 78, 82, 83, 87, 88, 89, 90], "x_axi": 31, "x_centr": [25, 27, 77], "x_distanc": [25, 77], "x_edg": [25, 27, 77, 78, 82, 83, 89, 90], "x_grid": 31, "x_indic": [25, 77], "x_log": 77, "x_re": [78, 83, 90], "x_relative_to": [77, 78, 83, 89, 90], "xaxi": [19, 20, 21, 22, 25, 26, 27, 31, 49, 51, 52], "xlabel": [50, 71, 72, 73, 76, 77, 83, 88, 89], "xoutputsc": 51, "xrang": 27, "xscale": [6, 24, 25, 26, 29, 35, 51, 56, 57, 81], "xtick": 89, "xutm": 56, "xx": [44, 77, 78, 82, 83, 90], "xyz": [19, 20, 50], "y": [2, 6, 7, 13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 33, 35, 42, 43, 44, 49, 50, 51, 55, 56, 63, 72, 76, 77, 78, 82, 83, 87, 89, 90], "y_centr": [25, 27, 77], "y_distanc": [25, 77], "y_edg": [25, 26, 27, 71, 77, 78, 82, 83, 89, 90], "y_grid": 31, "y_indic": 25, "y_log": 77, "y_re": [78, 83, 90], "y_relative_to": [77, 78, 83, 89, 90], "yaw": [19, 21, 22, 42, 43, 49, 51, 55], "yaxi": [21, 25], "ylabel": [50, 71, 72, 73, 76, 77, 83, 88, 89], "ylim": [71, 72, 73], "you": [1, 4, 6, 24, 25, 26, 27, 31, 48, 49, 50, 56, 57, 69, 71, 72, 73, 87, 93], "your": [48, 49, 56, 57], "youtputsc": 51, "yscale": [6, 24, 25, 26, 29, 35, 56], "ytick": 89, "yutm": 56, "yy": [77, 78, 82, 83, 90], "z": [13, 14, 15, 16, 17, 19, 20, 21, 22, 24, 25, 26, 27, 29, 31, 42, 43, 44, 45, 49, 50, 51, 55, 56, 57, 77, 78, 83, 90], "z_": 24, "z_centr": [25, 27], "z_edg": [25, 27, 78, 83, 90], "z_indic": 77, "z_prior": [55, 56], "z_propos": [55, 56], "z_re": [78, 83, 90], "z_relative_to": [78, 83, 90], "zaxi": [25, 26], "zero": [4, 6, 7, 14, 17, 19, 24, 25, 26, 29, 31, 35, 56, 77], "zip": [7, 49, 50, 51, 52, 55, 56, 57, 60, 63, 66, 67, 68, 71, 72, 73, 76, 77, 78, 81, 82, 83, 86, 
87, 88, 89, 90, 93], "zoutputsc": 51, "zprior": 55, "zrang": 27, "zxvf": 48}, "titles": ["API", "Heirarchical Data Format (HDF)", "MPI wrapper functions", "Core routines needed for GeoBIPy", "fileIO", "Interpolation", "plotting", "utilities", "Classes used in GeoBIPy", "StatArray", "Core classes", "Core object class", "Data classes", "EmDataPoint", "FdemDataPoint", "TdemDataPoint", "Tempest_datapoint", "DataPoint", "Datapoint classes", "Data", "FdemData", "TdemData", "TempestData", "Dataset classes", "RectilinearMesh1D", "RectilinearMesh2D", "RectilinearMesh2D_stitched", "RectilinearMesh3D", "Mesh classes", "Model", "Model classes", "Point", "Pointcloud classes", "Distribution Wrapper", "Gamma Distribution", "Histogram", "MvNormal", "Normal distribution", "Order Statistics", "Uniform distribution", "baseDistribution", "Statistics classes", "Circular Loop", "EmLoop", "Frequency domain system", "Time domain system", "System classes", "Getting Started", "Installing GeoBIPy", "Frequency domain dataset", "3D Point Cloud class", "Skytem dataset", "Tempest dataset", "Data", "Computation times", "Frequency domain datapoint", "Skytem Datapoint Class", "Tempest Datapoint Class", "Datapoints", "Computation times", "Distribution Class", "Distributions", "Computation times", "Using HDF5 within GeoBIPy", "HDF 5", "Computation times", "Running GeoBIPy to invert Resolve data", "Running GeoBIPy to invert Skytem data", "Running GeoBIPy to invert Tempest data", "1D Inference", "Computation times", "2D Posterior analysis of Resolve inference", "2D Posterior analysis of Skytem inference", "2D Posterior analysis of Tempest inference", "2D Inference", "Computation times", "1D Rectilinear Mesh", "2D Rectilinear Mesh", "3D Rectilinear Mesh", "Meshes", "Computation times", "1D Model with an infinite halfspace", "2D Rectilinear Model", "3D Rectilinear Model", "Models", "Computation times", "DataArray Class", "StatArray Class", "Histogram 1D", "Histogram 2D", "Histogram 3D", "Statistics", "Computation times", "Examples", "Examples", "Computation times", "Welcome to GeoBIPy: Geophysical Bayesian Inference in Python", "Computation times"], "titleterms": {"1d": [69, 76, 81, 87, 88, 93], "2d": [71, 72, 73, 74, 77, 82, 87, 89, 93], "3d": [50, 78, 83, 90], "5": [64, 93], "The": 76, "an": 81, "analysi": [71, 72, 73], "api": 0, "ar": 88, "attach": [56, 57, 87], "ax": 77, "basedistribut": 40, "basic": [76, 87], "bayesian": 96, "bin": 88, "bind": 48, "captur": 87, "circular": 42, "class": [8, 10, 11, 12, 18, 23, 28, 30, 32, 41, 46, 50, 56, 57, 60, 86, 87], "cloud": 50, "column": [49, 51], "compil": 48, "comput": [54, 59, 62, 65, 70, 75, 80, 85, 92, 95, 97], "core": [3, 10, 11], "data": [1, 12, 19, 49, 51, 52, 53, 66, 67, 68, 93], "dataarrai": 86, "datapoint": [17, 18, 55, 56, 57, 58, 93], "dataset": [23, 49, 51, 52], "defin": 49, "descriptor": [56, 57], "distribut": [33, 34, 37, 39, 60, 61, 87, 93], "domain": [44, 45, 48, 49, 51, 55, 56, 57], "emdatapoint": 13, "emloop": 43, "entri": 88, "equal": 87, "exampl": [49, 51, 87, 93, 94], "fdemdata": 20, "fdemdatapoint": 14, "file": [49, 51, 56], "fileio": 4, "format": [1, 49, 51, 56], "forward": 48, "frequenc": [44, 49, 55], "from": 49, "function": 2, "gamma": 34, "gatdaem1d": 48, "geobipi": [3, 8, 48, 63, 66, 67, 68, 96], "geophys": 96, "get": 47, "h5py": 48, "half": 81, "halfspac": 81, "hdf": [1, 64, 93], "hdf5": [48, 63], "header": [49, 51], "heirarch": 1, "histogram": [35, 87, 88, 89, 90], "infer": [69, 71, 72, 73, 74, 93, 96], "infinit": 81, "instal": 48, "instanti": [81, 87], 
"intern": 88, "interpol": 5, "invert": [66, 67, 68], "irregular": 88, "librari": 48, "line": 49, "linear": 88, "log": [76, 77, 88], "loop": 42, "manipul": 87, "mesh": [28, 76, 77, 78, 79, 93], "model": [29, 30, 48, 76, 81, 82, 83, 84, 93], "mpi": [2, 48], "mpi4pi": 48, "multipl": 81, "multivari": [60, 87], "mvnormal": 36, "need": 3, "new": 87, "normal": [37, 60, 87], "object": 11, "obtain": 49, "option": [49, 51], "order": 38, "parallel": 48, "perturb": [76, 81], "plot": [6, 87], "point": [31, 50], "pointcloud": 32, "posterior": [71, 72, 73, 87], "prerequisit": 48, "prior": 87, "propos": 87, "python": [48, 96], "random": [76, 81], "read": [49, 51, 52], "rectilinear": [76, 77, 78, 82, 83], "rectilinearmesh1d": 24, "rectilinearmesh2d": 25, "rectilinearmesh2d_stitch": 26, "rectilinearmesh3d": 27, "regular": 88, "relative_to": [76, 77], "requir": [49, 51], "resolv": [66, 71], "routin": 3, "run": [66, 67, 68], "serial": 48, "set": 49, "share": 48, "skytem": [51, 56, 67, 72], "space": [76, 77, 81, 88], "start": 47, "statarrai": [9, 87], "statist": [38, 41, 56, 57, 91, 93], "system": [44, 45, 46, 49, 51], "tdemdata": 21, "tdemdatapoint": 15, "tempest": [52, 57, 68, 73], "tempest_datapoint": 16, "tempestdata": 22, "time": [45, 48, 51, 54, 56, 59, 62, 65, 70, 75, 80, 81, 85, 92, 95, 97], "uniform": 39, "univari": 60, "us": [8, 49, 56, 57, 63, 96], "util": 7, "version": 48, "welcom": 96, "within": 63, "wrapper": [2, 33], "yeti": 96}}) \ No newline at end of file diff --git a/docs/sg_execution_times.html b/docs/sg_execution_times.html new file mode 100644 index 00000000..068d552b --- /dev/null +++ b/docs/sg_execution_times.html @@ -0,0 +1,243 @@ + + + + + + + Computation times — GeoBIPy 2.3.1 documentation + + + + + + + + + + + + + + + + + + + + + + +
Computation times

06:08.049 total execution time for 26 files from all galleries:

Example | Time | Mem (MB)
Running GeoBIPy to invert Tempest data (examples/Inference_1D/plot_inference_1d_tempest.py) | 01:56.531 | 0.0
Running GeoBIPy to invert Skytem data (examples/Inference_1D/plot_inference_1d_skytem.py) | 01:37.169 | 0.0
Running GeoBIPy to invert Resolve data (examples/Inference_1D/plot_inference_1d_resolve.py) | 00:29.449 | 0.0
2D Posterior analysis of Resolve inference (examples/Inference_2D/plot_inference_2d_resolve.py) | 00:21.524 | 0.0
2D Posterior analysis of Tempest inference (examples/Inference_2D/plot_inference_2d_tempest.py) | 00:19.999 | 0.0
3D Point Cloud class (examples/Data/plot_pointcloud3d.py) | 00:19.063 | 0.0
2D Posterior analysis of Skytem inference (examples/Inference_2D/plot_inference_2d_skytem.py) | 00:18.965 | 0.0
Frequency domain datapoint (examples/Datapoints/plot_resolve_datapoint.py) | 00:07.271 | 0.0
Histogram 2D (examples/Statistics/plot_histogram_2d.py) | 00:05.012 | 0.0
1D Rectilinear Mesh (examples/Meshes/plot_rectilinear_mesh_1d.py) | 00:04.585 | 0.0
Tempest dataset (examples/Data/plot_tempest_dataset.py) | 00:04.125 | 0.0
Histogram 3D (examples/Statistics/plot_histogram_3d.py) | 00:03.875 | 0.0
Frequency domain dataset (examples/Data/plot_frequency_dataset.py) | 00:03.257 | 0.0
1D Model with an infinite halfspace (examples/Models/plot_model_1d.py) | 00:03.051 | 0.0
StatArray Class (examples/Statistics/plot_StatArray.py) | 00:02.767 | 0.0
Skytem Datapoint Class (examples/Datapoints/plot_skytem_datapoint.py) | 00:02.306 | 0.0
Skytem dataset (examples/Data/plot_skytem_dataset.py) | 00:02.295 | 0.0
2D Rectilinear Mesh (examples/Meshes/plot_rectilinear_mesh_2d.py) | 00:01.747 | 0.0
Tempest Datapoint Class (examples/Datapoints/plot_tempest_datapoint.py) | 00:01.653 | 0.0
3D Rectilinear Mesh (examples/Meshes/plot_rectilinear_mesh_3d.py) | 00:01.378 | 0.0
Histogram 1D (examples/Statistics/plot_histogram_1d.py) | 00:00.972 | 0.0
3D Rectilinear Model (examples/Models/plot_model_3d.py) | 00:00.788 | 0.0
2D Rectilinear Model (examples/Models/plot_model_2d.py) | 00:00.212 | 0.0
Distribution Class (examples/Distributions/plot_distributions.py) | 00:00.050 | 0.0
DataArray Class (examples/Statistics/plot_DataArray.py) | 00:00.003 | 0.0
Using HDF5 within GeoBIPy (examples/HDF5/hdf5.py) | 00:00.000 | 0.0