forked from gamer-project/gamer
Commit: Merge branch 'psidm' of https://github.com/hyschive/gamer-fork into spectral_interpolation_bugfix
Showing 35 changed files with 4,772 additions and 7 deletions.
388 changes: 388 additions & 0 deletions
example/test_problem/ELBDM/DiskHeating/DensTableExample
Large diffs are not rendered by default.
1,001 changes: 1,001 additions & 0 deletions
example/test_problem/ELBDM/DiskHeating/DispTableExample
Large diffs are not rendered by default.
12 changes: 12 additions & 0 deletions
example/test_problem/ELBDM/DiskHeating/Input__Flag_NParPatch
@@ -0,0 +1,12 @@
# Level   Number_of_particles_per_patch
   0      200
   1      200
   2      200
   3      200
   4      200
   5      200
   6      200
   7      200
   8      200
   9      200
  10      200
@@ -0,0 +1,5 @@
# Level   Density
   0      1.0e+3
   1      7.0e+3
   2      1.0e+5
   3      4.0e+5
318 changes: 318 additions & 0 deletions
example/test_problem/ELBDM/DiskHeating/Input__Parameter
Large diffs are not rendered by default.
@@ -0,0 +1,31 @@
# center of the halo (only useful when AddFixedHalo=1 or (AddParWhenRestart=1 and AddParWhenRestartByFile=0))
CenX                      7.9770907e-02      # center of the halo
CenY                      7.9854590e-02      # center of the halo
CenZ                      8.0858787e-02      # center of the halo

# parameters for background fixed halo (disabled in this example)
AddFixedHalo              0                  # add a fixed halo [0] ## must set OPT__INIT=1 and OPT__FREEZE_FLUID=1 ##
HaloUseTable              0                  # use density table for the fixed halo [0] ## AddFixedHalo=1 ONLY ##

# parameters for analytical function ---------- HaloUseTable=0 ONLY -----------------------------------------
# soliton parameters
#m_22                                        # m_22 of the analytical halo [0.4]
#CoreRadius                                  # core radius (in kpc) [1.0]
# alpha-beta-gamma density profile parameters
#Rho_0                                       # halo rho_0 (in 1.0e+10 Msun*kpc^-3) [1.0]
#Rs                                          # halo Rs (in kpc) [1.0]
#Alpha                                       # dimensionless [1.0]
#Beta                                        # dimensionless [1.0]
#Gamma                                       # dimensionless [1.0]

# density profile table parameters ------------ HaloUseTable=1 ONLY ---------------------------------------------
DensTableFile             DensTableExample   # density table name, radius should be in kpc and density should be in g/cm^3

# parameters for new disk (disabled in this example)
AddParWhenRestart         0                  # add a new disk to an existing snapshot [0] ## must set OPT__INIT=2 and OPT__RESTART_RESET=1 ##
AddParWhenRestartByFile   0                  # 0=add a thin disk (must enable SUPPORT_GSL), 1=add new disk via PAR_IC [1] ## AddParWhenRestart=1 ONLY ##
AddParWhenRestartNPar     10000000           # total particles of new disk [0] ## AddParWhenRestart=1 ONLY ##
NewDisk_RSeed             1002               # random seed for setting thin disk particle position and velocity [1002] ## AddParWhenRestartByFile=0 ONLY ##
Disk_Mass                 0.00139812         # thin disk total mass (code unit) [1.0] ## AddParWhenRestartByFile=0 ONLY ##
Disk_R                    0.0020865          # thin disk scale radius (code unit) [1.0] ## AddParWhenRestartByFile=0 ONLY ##
DispTableFile             DispTableExample   # velocity dispersion table filename, radius should be in kpc and dispersion should be in km/s ## AddParWhenRestartByFile=0 ONLY ##
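For reference, the sketch below shows how a density table in the expected format (radius in kpc, density in g/cm^3) could be generated from the analytical alpha-beta-gamma parameters listed above. It is not part of this commit: the output filename is hypothetical, and the profile convention is an assumption (the common form rho(r) = Rho_0 / [ (r/Rs)^Gamma * (1 + (r/Rs)^Alpha)^((Beta-Gamma)/Alpha) ]).

```python
# Hypothetical helper: write a two-column table (radius [kpc], density [g/cm^3])
# from an assumed alpha-beta-gamma profile convention.
import numpy as np

Msun_g   = 1.989e33                     # solar mass in g
kpc_cm   = 3.0857e21                    # kpc in cm
Rho_unit = 1.0e10*Msun_g/kpc_cm**3      # 1.0e+10 Msun/kpc^3 expressed in g/cm^3

def abg_profile(r_kpc, Rho_0=1.0, Rs=1.0, Alpha=1.0, Beta=1.0, Gamma=1.0):
    # density in g/cm^3 for Rho_0 in 1.0e+10 Msun/kpc^3 and Rs in kpc
    x = r_kpc/Rs
    return Rho_0*Rho_unit / ( x**Gamma * (1.0 + x**Alpha)**((Beta - Gamma)/Alpha) )

r_kpc = np.logspace(-2, 2, 388)                   # sample radii in kpc
rho   = abg_profile(r_kpc, Rho_0=1.0, Rs=1.0)     # density in g/cm^3
np.savetxt('DensTable_sketch', np.column_stack([r_kpc, rho]))
```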
9 changes: 9 additions & 0 deletions
example/test_problem/ELBDM/DiskHeating/Input__UM_IC_RefineRegion
@@ -0,0 +1,9 @@
# For loading an AMR initial condition from a file --> OPT__INIT=3 && OPT__UM_IC_NLEVEL>1
#
# dLv       : target AMR level = OPT__UM_IC_LEVEL + dLv
# NP_Skip_xL: number of patches on the parent level to be skipped in the x direction
#             from the left edge of the parent refinement region
# ==================================================================================
# dLv   NP_Skip_xL   NP_Skip_xR   NP_Skip_yL   NP_Skip_yR   NP_Skip_zL   NP_Skip_zR
    1            7            7            7            7            7            7
    2           25           25           25           25           25           25
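To illustrate how these entries shrink each nested refinement region, here is a minimal sketch. The base patch count is a made-up number, and the "two child patches per parent patch per direction" rule is an assumption about how GAMER nests the regions; only the skip counts come from the table above.

```python
# Illustration only: size (in patches per side) of each nested refinement region,
# assuming each NP_Skip entry removes parent-level patches from the corresponding
# edge and each parent patch maps to two child patches per direction.
NP_parent = 128                   # assumed patches per side of the OPT__UM_IC_LEVEL region
skips     = [(7, 7), (25, 25)]    # (NP_Skip_L, NP_Skip_R) for dLv = 1, 2 (same in x, y, z here)

for dLv, (skipL, skipR) in enumerate(skips, start=1):
    NP_child  = 2*(NP_parent - skipL - skipR)    # patches per side on level OPT__UM_IC_LEVEL + dLv
    print("dLv=%d: %d patches per side" % (dLv, NP_child))
    NP_parent = NP_child
```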
@@ -0,0 +1,117 @@
Compilation flags:
========================================
Enable : MODEL=ELBDM, GRAVITY, PARTICLE, STORE_PAR_ACC, SUPPORT_HDF5,
         SUPPORT_GSL (optional, only useful for thin disk)

Disable : COMOVING

Quick start:
========================================
0. Copy "generate_make.sh" to the directory "src", execute "sh generate_make.sh" to generate "Makefile",
   and then execute "make clean" and "make -j 4" to generate the executable "gamer"

1. Download the initial conditions for the m_22=0.4, M_h=7e10 Msun halo and stellar disk with "sh download_ic.sh"

2. Ensure PAR_INIT = 1 and OPT__INIT = 3 in Input__Parameter

3. The default END_T is 2.5e-1 (about 3.5 Gyr), as in Yang et al. 2023, and the default OUTPUT_DT is 1.0e-2 (about 0.14 Gyr)

4. To switch to the high-resolution run, execute "ln -sf ic_files/PAR_IC_0.4_M7 PAR_IC",
   set PAR_NPAR=80000000 and MAX_LEVEL=3, and change all values in Input__Flag_NParPatch to 800

General initial condition setup:
========================================
1. Disk

   a. Generate the disk with the modified GALIC (https://github.com/HsunYeong/GALIC.git)
      The snapshots have the filenames snap_XXX.hdf5

   b. Set the filename and units in get_par_ic.py to match the GALIC set-up

   c. Set the center in get_par_ic.py to the location of the soliton

   d. Execute get_par_ic.py; it will generate PAR_IC (see the sketch after this section)

2. Halo

   a. If the data is a binary UM_IC file

      * Set OPT__INIT = 3 and PAR_INIT = 1
      * Input__UM_IC_RefineRegion is required

   b. If the data is a GAMER snapshot

      * Execute "ln -s Data_XXXXXX RESTART" to create a soft link
      * Set OPT__INIT = 2 and PAR_INIT = 2 in Input__Parameter
      * Turn on AddParWhenRestart and AddParWhenRestartByFile in Input__TestProb
      * Set AddParWhenRestartNPar in Input__TestProb
      * Turn on OPT__RESTART_RESET in Input__Parameter
      # It is recommended to turn off AddParWhenRestart, AddParWhenRestartByFile, and OPT__RESTART_RESET right after the simulation starts

   c. The code for FDM halo reconstruction is available at https://github.com/calab-ntu/psidm-halo-reconstruction

3. Thin disk (optional)

   a. Execute "ln -s Data_XXXXXX RESTART" to create a soft link for restart

   b. Set OPT__INIT = 2 and PAR_INIT = 2 and turn on OPT__RESTART_RESET in Input__Parameter

   c. Turn on AddParWhenRestart in Input__TestProb

   d. Set AddParWhenRestartNPar, Disk_Mass, Disk_R, and DispTableFile in Input__TestProb

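As referenced in step 1.d, below is a minimal sketch of writing a PAR_IC file. It assumes an attribute-major binary layout (all masses, then all x positions, and so on) in single precision; the exact attribute order, whether ParType is included, and the floating-point precision must match the GAMER configuration, so treat this as an illustration rather than a substitute for get_par_ic.py.

```python
import numpy as np

# All values below are placeholders; real values come from the GALIC snapshot.
NPar = 10000                                             # e.g. AddParWhenRestartNPar / PAR_NPAR
mass = np.full(NPar, 1.0e-9, dtype=np.float32)           # particle mass (code units)
pos  = (np.random.rand(3, NPar)*0.1).astype(np.float32)  # x, y, z       (code units)
vel  = np.zeros((3, NPar), dtype=np.float32)             # vx, vy, vz    (code units)

with open('PAR_IC', 'wb') as f:                          # attribute-major layout (assumed)
    mass.tofile(f)
    for d in range(3): pos[d].tofile(f)
    for d in range(3): vel[d].tofile(f)
```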
Analysis scripts:
========================================
1. particle_proj.py, plot_halo_slice.py:

   * Plot the projection of disk particles or a halo density slice
   * Output files: particle_proj_*.png, Data_*_Slice_x_Dens.png

2. plot_halo_density.py, plot_halo_potential.py:

   * Compute and plot shell-averaged halo density or gravitational potential profiles
   * Output files: Data_*_1d-Profile_radius_Dens.png, Halo_Dens_Data_*.npy,
                   Data_*_1d-Profile_radius_Pote.png, Halo_Pote_Data_*.npy

3. data_disk.py:

   * Compute the disk properties (rotation speed, velocity dispersion, surface density, scale height, etc.)
   * Output files: Data_Disk_*.npy

4. data_halo.py:

   * Compute the halo properties (enclosed mass, velocity dispersion, etc.)
   * Required files: Halo_Dens_*.npy (generated by "plot_halo_density.py") and Halo_Pote_*.npy (by "plot_halo_potential.py")
   * Output files: Data_Halo_*.npy

5. vel_distribution.py:

   * Compute the velocity distribution in 2-kpc-wide radial bins centered on R = 4, 6, 8, 10 kpc (see the sketch after this list)
   * Output files: Vel_data_*.npz, vel_*.png

6. get_heating.py:

   * Compute the theoretical heating rate of the stellar disk as a function of radius
   * Required files: Data_Disk_*.npy (generated by "data_disk.py") and Data_Halo_*.npy (by "data_halo.py")
   * Output files: Heating_*.npz

7. get_heating_rate_theory.py:

   * Get the time-averaged theoretical heating rate
   * Required files: Heating_*.npz (generated by "get_heating.py")
   * Output: printed plain text

8. get_heating_rate_simulation.py:

   * Compute ensemble- and time-averaged disk heating rates measured from the simulation data
   * Required files: Vel_data_*.npz (generated by "vel_distribution.py")
   * Output: sigma_z_sqr.png and printed plain text

9. plot_data_example.py:

   * An example script to plot the angle-averaged disk rotation curve and shell-averaged density profile
   * Required files: Data_Disk_*.npy (generated by "data_disk.py") and/or Data_Halo_*.npy (by "data_halo.py")
   * Output files: Rotation_Curve.png, Halo_Density_Profile.png
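For orientation, here is a minimal sketch of the kind of measurement described in item 5 (and used by item 8): the vertical velocity dispersion of disk particles in 2-kpc-wide radial bins centered on R = 4, 6, 8, 10 kpc. The HDF5 field names and the Record__Center handling mirror data_disk.py below; the snapshot index is arbitrary, and the output format and unit handling of the actual scripts differ.

```python
import h5py
import numpy as np

kpc = 3.08568e21   # cm
idx = 25           # e.g. Data_000025 (arbitrary example index)

with h5py.File('../../Data_%06d'%idx, 'r') as f:
    Unit_L = f['Info']['InputPara']['Unit_L']
    Unit_V = f['Info']['InputPara']['Unit_V']
    step   = f['Info/KeyInfo']['Step']
    ptype  = np.array(f['Particle/ParType'])
    disk   = (ptype == 2)                                    # disk particles only
    x      = np.array(f['Particle/ParPosX'])[disk]*Unit_L
    y      = np.array(f['Particle/ParPosY'])[disk]*Unit_L
    vz     = np.array(f['Particle/ParVelZ'])[disk]*Unit_V

cen = np.atleast_2d(np.loadtxt('../../Record__Center', skiprows=1))[step]*Unit_L
R   = np.sqrt((x - cen[3])**2 + (y - cen[4])**2)/kpc         # cylindrical radius in kpc

for R0 in (4.0, 6.0, 8.0, 10.0):
    sel = np.abs(R - R0) < 1.0                               # 2-kpc-wide bin
    print('R = %4.1f kpc: sigma_z = %.3e cm/s' % (R0, np.std(vz[sel])))
```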
172 changes: 172 additions & 0 deletions
example/test_problem/ELBDM/DiskHeating/analysis_script/disk/data_disk.py
@@ -0,0 +1,172 @@
import h5py
import math
import numpy as np
import argparse
import sys


# -------------------------------------------------------------------------------------------------------------------------
# user-specified parameters
Div_disk = 500             # total number of data points sampled per disk rotation curve
ratio    = 0.76159415595   # tanh(1)


#-------------------------------------------------------------------------------------------------------------------------
# load the command-line parameters
parser = argparse.ArgumentParser( description='Get disk properties' )

parser.add_argument( '-s', action='store', required=True,  type=int, dest='idx_start',
                     help='first data index' )
parser.add_argument( '-e', action='store', required=True,  type=int, dest='idx_end',
                     help='last data index' )
parser.add_argument( '-d', action='store', required=False, type=int, dest='didx',
                     help='delta data index [%(default)d]', default=1 )

args = parser.parse_args()

idx_start = args.idx_start
idx_end   = args.idx_end
didx      = args.didx

# print the command-line parameters
print( '\nCommand-line arguments:' )
print( '-------------------------------------------------------------------' )
for t in range( len(sys.argv) ):
    print( str(sys.argv[t]) )
print( '' )
print( '-------------------------------------------------------------------\n' )


#-----------------------------------------------------------------------------------------------------------------
# predefined functions
def SearchIndex(x, A, N):
    # binary search: return the index where x would be inserted into the sorted array A of length N
    # (i.e. roughly the number of elements smaller than x)
    i = 0
    j = N - 1
    while(i <= j):
        mid = int(i + (j - i)/2)
        if(A[mid] == x):
            i = mid
            break
        elif(A[mid] > x):
            j = mid - 1
        else: i = mid + 1
    return i

# load the units and the recorded center from the first snapshot
f = h5py.File('../../Data_%06d'%idx_start, 'r')
Unit_L = f['Info']['InputPara']['Unit_L']
Unit_T = f['Info']['InputPara']['Unit_T']*(3.16887646e-14)   # convert seconds to Myr
Unit_V = f['Info']['InputPara']['Unit_V']
Unit_M = f['Info']['InputPara']['Unit_M']
Center = np.loadtxt('../../Record__Center', skiprows=1, dtype=float)
Center = Center * Unit_L
f.close()
if Center.ndim == 1:
    Center = Center.reshape(1,len(Center))   # reshape the array if there is only one row


#-----------------------------------------------------------------------------------------------------------------
# analyze simulation data and generate processed output files
for idx in range(idx_start, idx_end+1, didx):
    print('loading file Data_%06d ...'%idx)
    with h5py.File('../../Data_%06d'%idx, 'r') as f:
        disk_mass = np.array(f['Particle/ParMass'], dtype = np.float64) * Unit_M
        disk_posx = np.array(f['Particle/ParPosX'], dtype = np.float64) * Unit_L
        disk_posy = np.array(f['Particle/ParPosY'], dtype = np.float64) * Unit_L
        disk_posz = np.array(f['Particle/ParPosZ'], dtype = np.float64) * Unit_L
        disk_velx = np.array(f['Particle/ParVelX'], dtype = np.float64) * Unit_V
        disk_vely = np.array(f['Particle/ParVelY'], dtype = np.float64) * Unit_V
        disk_velz = np.array(f['Particle/ParVelZ'], dtype = np.float64) * Unit_V
        disk_type = np.array(f['Particle/ParType'], dtype = np.int32)
        current_step = f['Info/KeyInfo']['Step']
        time = f['Info/KeyInfo']['Time'][0]*Unit_T

    # particle filter: disk particles have ParType=2
    disk_index = (disk_type==2)
    disk_mass = disk_mass[disk_index]
    disk_posx = disk_posx[disk_index]
    disk_posy = disk_posy[disk_index]
    disk_posz = disk_posz[disk_index]
    disk_velx = disk_velx[disk_index]
    disk_vely = disk_vely[disk_index]
    disk_velz = disk_velz[disk_index]
    disk_size = np.size(disk_mass)
    print("number of disk particles = %d"%disk_size)

    # center-of-mass velocity of the disk
    VCM = [ np.sum(disk_mass*disk_velx)/ np.sum(disk_mass),
            np.sum(disk_mass*disk_vely)/ np.sum(disk_mass),
            np.sum(disk_mass*disk_velz)/ np.sum(disk_mass) ]

    # recenter positions on the center recorded in Record__Center and velocities on the disk center-of-mass velocity
    center = Center[current_step,3:6]
    disk_posx = disk_posx - center[0]
    disk_posy = disk_posy - center[1]
    disk_posz = disk_posz - center[2]
    disk_velx = disk_velx - VCM[0]
    disk_vely = disk_vely - VCM[1]
    disk_velz = disk_velz - VCM[2]

    # compute angular momentum
    disk_pos = np.zeros((disk_size, 3))
    disk_vel = np.zeros((disk_size, 3))
    disk_pos[:,0] = disk_posx
    disk_pos[:,1] = disk_posy
    disk_pos[:,2] = disk_posz
    disk_vel[:,0] = disk_velx
    disk_vel[:,1] = disk_vely
    disk_vel[:,2] = disk_velz
    disk_L = np.cross(disk_pos, disk_vel)
    tot_L = np.array([np.sum(disk_L[:,0]),np.sum(disk_L[:,1]),np.sum(disk_L[:,2])])
    tot_L = tot_L/(tot_L[0]**2+tot_L[1]**2+tot_L[2]**2)**0.5

    # rotate the coordinates so that the total disk angular momentum aligns with the z axis
    # (Rodrigues' rotation formula: R = I + K + K^2/(1+c), with K the cross-product matrix of tot_L x z)
    vec = np.cross(tot_L, np.array([0,0,1]))
    c = tot_L[2]
    s = 1-c**2
    matric = np.array([ [0, -vec[2], vec[1]],
                        [vec[2], 0, -vec[0]],
                        [-vec[1], vec[0], 0] ])
    R = np.identity(3) + matric + np.matmul(matric, matric)/(1+c)
    disk_pos_new = np.matmul(R, np.transpose(disk_pos))
    disk_vel_new = np.matmul(R, np.transpose(disk_vel))
    disk_posx = disk_pos_new[0,:]
    disk_posy = disk_pos_new[1,:]
    disk_posz = disk_pos_new[2,:]
    disk_velx = disk_vel_new[0,:]
    disk_vely = disk_vel_new[1,:]
    disk_velz = disk_vel_new[2,:]

    # cylindrical radius and velocity components
    disk_r = (disk_posx**2 + disk_posy**2)**0.5
    disk_velr = (disk_posx*disk_velx + disk_posy*disk_vely)/disk_r
    disk_velp = (disk_posx*disk_vely - disk_posy*disk_velx)/disk_r
    disk_sortR = np.sort(disk_r)
    disk_indexR = np.argsort(disk_r)
    Data = np.zeros((8,Div_disk))
    # radial binning: Div_disk bins of width dr out to 15 kpc (lengths are in cm)
    r = 0
    disk_num = 0
    dr = 15.0*3.08568e+21/Div_disk
    for j in range(Div_disk):
        disk_num_pre = disk_num
        Data[0,j] = r + 0.5 * dr
        disk_num = SearchIndex( r+dr, disk_sortR, disk_size )
        mean_vr  = np.mean( disk_velr[ disk_indexR[ disk_num_pre:disk_num ] ] )
        mean_vp  = np.mean( disk_velp[ disk_indexR[ disk_num_pre:disk_num ] ] )
        mean_vz  = np.mean( disk_velz[ disk_indexR[ disk_num_pre:disk_num ] ] )
        mean_vp2 = np.mean( disk_velp[ disk_indexR[ disk_num_pre:disk_num ] ]**2 )
        Data[1,j] = mean_vp                                                                             # rotation curve
        Data[2,j] = (np.mean(( disk_velr[ disk_indexR[ disk_num_pre:disk_num ] ] - mean_vr )**2))**0.5  # sigma_r
        Data[3,j] = (np.mean(( disk_velz[ disk_indexR[ disk_num_pre:disk_num ] ] - mean_vz )**2))**0.5  # sigma_z
        mass = disk_mass[0]*( disk_num-disk_num_pre )
        mass_total = disk_mass[0]*disk_num
        area = (math.pi *((r + dr)**2 - r**2))
        Data[4,j] = mass/area                                                                           # surface density Sigma
        Data[5,j] = mean_vp2                                                                            # sigma_phi^2
        Data[6,j] = mass_total                                                                          # enclosed mass
        # get scale height: interpolate the |z| below which a fraction ratio = tanh(1) of the particles in this bin lies
        CMZ = np.average(disk_posz[ disk_indexR[ disk_num_pre:disk_num ] ])
        disk_z = np.abs( disk_posz[ disk_indexR[ disk_num_pre:disk_num ] ] - CMZ )
        sortZ = np.sort(disk_z)
        target_index = ratio*len(disk_z)
        index_plus  = int(target_index)
        index_minus = int(target_index - 1)
        target_z = sortZ[index_minus] + (target_index - index_minus)*(sortZ[index_plus] - sortZ[index_minus])
        Data[7,j] = target_z                                                                            # scale height

        r = r + dr

    np.save('Data_Disk_%06d'%idx, Data)
    print(' ')
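Typical usage, inferred from the argparse options above (the index range is only an example): run the script from its own directory, since it reads ../../Data_* and ../../Record__Center, with "python data_disk.py -s 0 -e 25 -d 1" to process Data_000000 through Data_000025 and write one Data_Disk_XXXXXX.npy file per snapshot.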