diff --git a/models/xyce/testing/regression/bjt_iv/device_netlists/npn.spice b/models/xyce/testing/regression/bjt_iv/device_netlists/npn.spice
index 7bac45c3..4e6ae228 100644
--- a/models/xyce/testing/regression/bjt_iv/device_netlists/npn.spice
+++ b/models/xyce/testing/regression/bjt_iv/device_netlists/npn.spice
@@ -18,9 +18,9 @@ xq1 c b 0 0 {{device}}
 *****************
 .DC Vcp 0 6 0.1 Ib 1u 9u 2u
 .STEP TEMP {{temp}} -60 200
-.print DC FORMAT=CSV file=npn/simulated_IcVc/{{i}}_simulated_{{device}}.csv {-I(Vcp)}
+.print DC FORMAT=CSV file=mos_iv_reg/npn/simulated/t{{temp}}_simulated_{{device}}.csv {-I(Vcp)}

-.include "../../../../../design.xyce"
-.lib "../../../../../sm141064.xyce" bjt_typical
+.include "../../../../../../design.xyce"
+.lib "../../../../../../sm141064.xyce" bjt_typical

 .end
diff --git a/models/xyce/testing/regression/bjt_iv/device_netlists/pnp.spice b/models/xyce/testing/regression/bjt_iv/device_netlists/pnp.spice
index 76d695f8..9ba61bfb 100644
--- a/models/xyce/testing/regression/bjt_iv/device_netlists/pnp.spice
+++ b/models/xyce/testing/regression/bjt_iv/device_netlists/pnp.spice
@@ -18,9 +18,9 @@ xq1 c b 0 {{device}}
 *****************
 .DC Vcp 0 -3 -0.1 Ib -1u -9u -2u
 .STEP TEMP {{temp}} -60 200
-.print DC FORMAT=CSV file=pnp/simulated_IcVc/{{i}}_simulated_{{device}}.csv {I(Vcp)}
+.print DC FORMAT=CSV file=mos_iv_reg/pnp/simulated/t{{temp}}_simulated_{{device}}.csv {I(Vcp)}

-.include "../../../../../design.xyce"
-.lib "../../../../../sm141064.xyce" bjt_typical
+.include "../../../../../../design.xyce"
+.lib "../../../../../../sm141064.xyce" bjt_typical

 .end
diff --git a/models/xyce/testing/regression/bjt_iv/models_regression.py b/models/xyce/testing/regression/bjt_iv/models_regression.py
index 6abd245c..ee8b9a04 100644
--- a/models/xyce/testing/regression/bjt_iv/models_regression.py
+++ b/models/xyce/testing/regression/bjt_iv/models_regression.py
@@ -16,8 +16,19 @@ import concurrent.futures
 import shutil
 import warnings
+import multiprocessing as mp
+import glob

+PASS_THRESH = 2.0
 warnings.simplefilter(action="ignore", category=FutureWarning)
+pd.options.mode.chained_assignment = None  # default='warn'

+MOS = [
+    "ibp=1.000E-06",
+    "ibp=3.000E-06",
+    "ibp=5.000E-06",
+    "ibp=7.000E-06",
+    "ibp=9.000E-06",
+]


 def call_simulator(file_name):
@@ -25,14 +36,15 @@
     Args:
         file_name (str): Netlist file name.
""" - os.system(f"Xyce -hspice-ext all {file_name} -l {file_name}.log") + os.system(f"Xyce -hspice-ext all {file_name} -l {file_name}.log 2>/dev/null") -def ext_measured(device, vc, step, Id_sim, list_devices, ib): +def ext_measured(dirpath, device, vc, step, list_devices, ib): # Get dimensions used for each device - dimensions = pd.read_csv(f"{device}/{device}.csv", usecols=["corners"]) + dimensions = pd.read_csv(f"{dirpath}/{device}.csv", usecols=["corners"]) loops = dimensions["corners"].count() + all_dfs = [] # Extracting measured values for each Device for i in range(loops): @@ -55,7 +67,7 @@ def ext_measured(device, vc, step, Id_sim, list_devices, ib): f"{ib}{step[3]}", f"{ib}{step[4]}", ] - df_measured = pd.read_csv(f"{device}/{device}.csv", usecols=col_list) + df_measured = pd.read_csv(f"{dirpath}/{device}.csv", usecols=col_list) df_measured.columns = [ f"{vc}", f"{ib}{step[0]}", @@ -64,10 +76,6 @@ def ext_measured(device, vc, step, Id_sim, list_devices, ib): f"{ib}{step[3]}", f"{ib}{step[4]}", ] - df_measured.to_csv( - f"{device}/measured_{Id_sim}/{i}_measured_{list_devices[k]}.csv", - index=False, - ) else: if device == "pnp": vc = temp_vc @@ -80,7 +88,7 @@ def ext_measured(device, vc, step, Id_sim, list_devices, ib): f"{ib}{step[3]}.{i}", f"{ib}{step[4]}.{i}", ] - df_measured = pd.read_csv(f"{device}/{device}.csv", usecols=col_list) + df_measured = pd.read_csv(f"{dirpath}/{device}.csv", usecols=col_list) df_measured.columns = [ f"{vc}", f"{ib}{step[0]}", @@ -89,175 +97,169 @@ def ext_measured(device, vc, step, Id_sim, list_devices, ib): f"{ib}{step[3]}", f"{ib}{step[4]}", ] - df_measured.to_csv( - f"{device}/measured_{Id_sim}/{i}_measured_{list_devices[k]}.csv", - index=False, - ) - + all_dfs.append(df_measured) + dfs = pd.concat(all_dfs, axis=1) + dfs.drop_duplicates(inplace=True) + return dfs -def ext_simulated(device, vc, step, sweep, Id_sim, list_devices, ib): - # Get dimensions used for each device - dimensions = pd.read_csv(f"{device}/{device}.csv", usecols=["corners"]) - loops = dimensions["corners"].count() - temp_range = int(loops / 4) - netlist_tmp = f"./device_netlists/{device}.spice" - for i in range(loops): - if i in range(0, temp_range): - temp = 25 - elif i in range(temp_range, 2 * temp_range): - temp = -40 - elif i in range(2 * temp_range, 3 * temp_range): - temp = 125 +def run_sim(dirpath, device, list_devices, temp=25): + """Run simulation at specific information and corner + Args: + dirpath(str): path to the file where we write data + device(str): the device instance will be simulated + id_rds(str): select id or rds + temp(float): a specific temp for simulation + width(float): a specific width for simulation + length(float): a specific length for simulation + + Returns: + info(dict): results are stored in, + and passed to the run_sims function to extract data + """ + netlist_tmp = f"device_netlists/{device}.spice" + + info = {} + info["device"] = device + info["temp"] = temp + info["dev"] = list_devices + + temp_str = temp + list_devices_str = list_devices + + s = f"{list_devices_str}netlist_t{temp_str}.spice" + netlist_path = f"{dirpath}/{device}_netlists/{s}" + s = f"t{temp}_simulated_{list_devices_str}.csv" + result_path = f"{dirpath}/simulated/{s}" + os.makedirs(f"{dirpath}/simulated", exist_ok=True) + + with open(netlist_tmp) as f: + tmpl = Template(f.read()) + os.makedirs(f"{dirpath}/{device}_netlists", exist_ok=True) + with open(netlist_path, "w") as netlist: + netlist.write(tmpl.render(device=list_devices_str, temp=temp_str)) + + # Running ngspice for 
each netlist + try: + call_simulator(netlist_path) + + if os.path.exists(result_path): + bjt_iv = result_path else: - temp = 175 + bjt_iv = "None" - k = i - if i >= len(list_devices): - while k >= len(list_devices): - k = k - len(list_devices) + except Exception: + bjt_iv = "None" - with open(netlist_tmp) as f: - tmpl = Template(f.read()) - os.makedirs(f"{device}/{device}_netlists_{Id_sim}", exist_ok=True) - with open( - f"{device}/{device}_netlists_{Id_sim}/{i}_{device}_netlist_{list_devices[k]}.spice", - "w", - ) as netlist: - netlist.write(tmpl.render(device=list_devices[k], i=i, temp=temp)) - netlist_path = f"{device}/{device}_netlists_{Id_sim}/{i}_{device}_netlist_{list_devices[k]}.spice" - - # Running Xyce for each netlist - with concurrent.futures.ProcessPoolExecutor( - max_workers=workers_count - ) as executor: - executor.submit(call_simulator, netlist_path) - - # Writing simulated data - df_simulated = pd.read_csv( - f"{device}/simulated_{Id_sim}/{i}_simulated_{list_devices[k]}.csv", - header=0, - ) + info["bjt_iv_simulated"] = bjt_iv - # empty array to append in it shaped (sweep, number of trials + 1) - new_array = np.empty((sweep, 1 + int(df_simulated.shape[0] / sweep))) - new_array[:, 0] = df_simulated.iloc[:sweep, 0] - times = int(df_simulated.shape[0] / sweep) + return info - for j in range(times): - new_array[:, (j + 1)] = df_simulated.iloc[ - j * sweep : (j + 1) * sweep, 0 - ] - # Writing final simulated data - df_simulated = pd.DataFrame(new_array) - df_simulated.to_csv( - f"{device}/simulated_{Id_sim}/{i}_simulated_{list_devices[k]}.csv", - index=False, - ) - df_simulated.columns = [ - f"{vc}", - f"{ib}{step[0]}", - f"{ib}{step[1]}", - f"{ib}{step[2]}", - f"{ib}{step[3]}", - f"{ib}{step[4]}", - ] - df_simulated.to_csv( - f"{device}/simulated_{Id_sim}/{i}_simulated_{list_devices[k]}.csv", - index=False, +def run_sims(dirpath, list_devices, device, num_workers=mp.cpu_count()): + """passing netlists to run_sim function + and storing the results csv files into dataframes + + Args: + df(pd.DataFrame): dataframe passed from the ext_measured function + dirpath(str): the path to the file where we write data + id_rds(str): select id or rds + num_workers=mp.cpu_count() (int): num of cpu used + device(str): name of the device + Returns: + df(pd.DataFrame): dataframe contains simulated results + """ + df1 = pd.read_csv(f"{dirpath}/{device}.csv", usecols=["corners"]) + loops = (df1["corners"]).count() + temp_range = int(loops / 4) + df = pd.DataFrame() + df["dev"] = df1["corners"].dropna() + df["dev"][0:temp_range] = list_devices + df["dev"][temp_range : 2 * temp_range] = list_devices + df["dev"][2 * temp_range : 3 * temp_range] = list_devices + df["dev"][3 * temp_range : 4 * temp_range] = list_devices + df["temp"] = 25 + df["temp"][temp_range : 2 * temp_range] = -40 + df["temp"][2 * temp_range : 3 * temp_range] = 125 + df["temp"][3 * temp_range :] = -175 + + results = [] + with concurrent.futures.ThreadPoolExecutor(max_workers=num_workers) as executor: + futures_list = [] + for j, row in df.iterrows(): + futures_list.append( + executor.submit( + run_sim, + dirpath, + device, + row["dev"], + row["temp"], + ) ) + for future in concurrent.futures.as_completed(futures_list): + try: + data = future.result() + results.append(data) + except Exception as exc: + print("Test case generated an exception: %s" % (exc)) + sf = glob.glob(f"{dirpath}/simulated/*.csv") + + # sweeping on all generated cvs files + for i in range(len(sf)): + sdf = pd.read_csv( + sf[i], + header=None, + delimiter=r"\s+", 
+        )
+        sweep = int(sdf[0].count() / len(MOS))
+        new_array = np.empty((sweep, 1 + int(sdf.shape[0] / sweep)))

-def error_cal(device, vc, step, Id_sim, list_devices, ib):
+        new_array[:, 0] = sdf.iloc[1 : sweep + 1, 0]
+        times = int(sdf.shape[0] / sweep)

-    df_final = pd.DataFrame()
-    # Get dimensions used for each device
-    dimensions = pd.read_csv(f"{device}/{device}.csv", usecols=["corners"])
-    loops = dimensions["corners"].count()
-    temp_range = int(loops / 4)
-    for i in range(loops):
-        if i in range(0, temp_range):
-            temp = 25
-        elif i in range(temp_range, 2 * temp_range):
-            temp = -40
-        elif i in range(2 * temp_range, 3 * temp_range):
-            temp = 125
-        else:
-            temp = 175
+        for j in range(times):
+            new_array[:, (j + 1)] = sdf.iloc[(j * sweep) + 1 : ((j + 1) * sweep) + 1, 0]

-        k = i
-        if i >= len(list_devices):
-            while k >= len(list_devices):
-                k = k - len(list_devices)
-
-        measured = pd.read_csv(
-            f"{device}/measured_{Id_sim}/{i}_measured_{list_devices[k]}.csv"
-        )
-        simulated = pd.read_csv(
-            f"{device}/simulated_{Id_sim}/{i}_simulated_{list_devices[k]}.csv"
+        # Writing final simulated data
+        sdf = pd.DataFrame(new_array)
+        sdf.rename(
+            columns={1: "ibp1", 2: "ibp2", 3: "ibp3", 4: "ibp4", 5: "ibp5"},
+            inplace=True,
         )
+        sdf.to_csv(sf[i], index=False)

-        error_1 = round(
-            100
-            * abs(
-                (abs(measured.iloc[0:, 1]) - abs(simulated.iloc[0:, 1]))
-                / abs(measured.iloc[:, 1])
-            ),
-            6,
-        )
-        error_2 = round(
-            100
-            * abs(
-                (abs(measured.iloc[0:, 2]) - abs(simulated.iloc[0:, 2]))
-                / abs(measured.iloc[:, 2])
-            ),
-            6,
-        )
-        error_3 = round(
-            100
-            * abs(
-                (abs(measured.iloc[0:, 3]) - abs(simulated.iloc[0:, 3]))
-                / abs(measured.iloc[:, 3])
-            ),
-            6,
-        )
-        error_4 = round(
-            100
-            * abs(
-                (abs(measured.iloc[0:, 4]) - abs(simulated.iloc[0:, 4]))
-                / abs(measured.iloc[:, 4])
-            ),
-            6,
-        )
-        error_5 = round(
-            100
-            * abs(
-                (abs(measured.iloc[0:, 5]) - abs(simulated.iloc[0:, 5]))
-                / abs(measured.iloc[:, 5])
-            ),
-            6,
-        )
+    df1 = pd.DataFrame(results)

-        df_error = pd.DataFrame(
-            data=[measured.iloc[:, 0], error_1, error_2, error_3, error_4, error_5]
-        ).transpose()
-        df_error.to_csv(
-            f"{device}/error_{Id_sim}/{i}_{device}_error_{list_devices[k]}.csv",
-            index=False,
-        )
+    return df
+
+
+def error_cal(
+    sim_df: pd.DataFrame, meas_df: pd.DataFrame, device: str, step, ib, vc
+) -> pd.DataFrame:
+    """Calculate the error between measured and simulated data.
+
+    Args:
+        sim_df(pd.DataFrame): dataframe describing the simulation runs
+        meas_df(pd.DataFrame): dataframe holding the measured data
+        device(str): name of the device group (npn or pnp)
+        step(list): ib step values used as column suffixes
+        ib(str): ib column prefix in the measured data
+        vc(str): vc column name in the measured data

-        # Mean error
-        mean_error = (
-            df_error[f"{ib}{step[0]}"].mean()
-            + df_error[f"{ib}{step[1]}"].mean()
-            + df_error[f"{ib}{step[2]}"].mean()
-            + df_error[f"{ib}{step[3]}"].mean()
-            + df_error[f"{ib}{step[4]}"].mean()
-        ) / 6
-        # Max error
-        max_error = (
-            df_error[
+    """
+    merged_dfs = list()
+    meas_df.to_csv(
+        f"mos_iv_reg/{device}/{device}_measured.csv", index=False, header=True
+    )
+    meas_df = pd.read_csv(f"mos_iv_reg/{device}/{device}_measured.csv")
+    for i in range(len(sim_df)):
+        t = sim_df["temp"].iloc[i]
+        dev = sim_df["dev"].iloc[i]
+        sim_path = f"mos_iv_reg/{device}/simulated/t{t}_simulated_{dev}.csv"
+
+        simulated_data = pd.read_csv(sim_path)
+        if i == 0:
+            measured_data = meas_df[
                 [
                     f"{ib}{step[0]}",
                     f"{ib}{step[1]}",
@@ -265,41 +267,88 @@
f"{ib}{step[3]}", f"{ib}{step[4]}", ] - ] - .max() - .max() - ) - # Max error location - max_index = max((df_error == max_error).idxmax()) - max_location_ib = (df_error == max_error).idxmax(axis=1)[max_index] - if i == 0: - if device == "pnp": - temp_vc = vc - vc = "-vc " + ].copy() + + measured_data.rename( + columns={ + f"{ib}{step[0]}": "m_ibp1", + f"{ib}{step[1]}": "m_ibp2", + f"{ib}{step[2]}": "m_ibp3", + f"{ib}{step[3]}": "m_ibp4", + f"{ib}{step[4]}": "m_ibp5", + }, + inplace=True, + ) else: - if device == "pnp": - vc = temp_vc - max_location_vc = df_error[f"{vc}"][max_index] - - df_final_ = { - "Run no.": f"{i}", - "Temp": f"{temp}", - "Device name": f"{device}", - "device": f"{list_devices[k]}", - "Simulated_Val": f"{Id_sim}", - "Mean error%": f'{"{:.2f}".format(mean_error)}', - "Max error%": f'{"{:.2f}".format(max_error)} @ {max_location_ib} & Vc (V) = {max_location_vc}', - } - df_final = df_final.append(df_final_, ignore_index=True) - - # Max mean error - print(df_final) - df_final.to_csv(f"{device}/Final_report_{Id_sim}.csv", index=False) - out_report = pd.read_csv(f"{device}/Final_report_{Id_sim}.csv") - print("\n", f"Max. mean error = {out_report['Mean error%'].max()}%") - print( - "=====================================================================================================================================================" - ) + measured_data = meas_df[ + [ + f"{ib}{step[0]}.{i}", + f"{ib}{step[1]}.{i}", + f"{ib}{step[2]}.{i}", + f"{ib}{step[3]}.{i}", + f"{ib}{step[4]}.{i}", + ] + ].copy() + + measured_data.rename( + columns={ + f"{ib}{step[0]}.{i}": "m_ibp1", + f"{ib}{step[1]}.{i}": "m_ibp2", + f"{ib}{step[2]}.{i}": "m_ibp3", + f"{ib}{step[3]}.{i}": "m_ibp4", + f"{ib}{step[4]}.{i}": "m_ibp5", + }, + inplace=True, + ) + measured_data["vcp"] = meas_df[f"{vc}"] + simulated_data["vcp"] = meas_df[f"{vc}"] + simulated_data["device"] = sim_df["dev"].iloc[i] + measured_data["device"] = sim_df["dev"].iloc[i] + simulated_data["temp"] = sim_df["temp"].iloc[i] + measured_data["temp"] = sim_df["temp"].iloc[i] + result_data = simulated_data.merge(measured_data, how="left") + + result_data["step1_error"] = ( + np.abs(result_data["ibp1"] - result_data["m_ibp1"]) + * 100.0 + / (result_data["m_ibp1"]) + ) + result_data["step2_error"] = ( + np.abs(result_data["ibp2"] - result_data["m_ibp2"]) + * 100.0 + / (result_data["m_ibp2"]) + ) + result_data["step3_error"] = ( + np.abs(result_data["ibp3"] - result_data["m_ibp3"]) + * 100.0 + / (result_data["m_ibp3"]) + ) + result_data["step4_error"] = ( + np.abs(result_data["ibp4"] - result_data["m_ibp4"]) + * 100.0 + / (result_data["m_ibp4"]) + ) + result_data["step5_error"] = ( + np.abs(result_data["ibp5"] - result_data["m_ibp5"]) + * 100.0 + / (result_data["m_ibp5"]) + ) + result_data["error"] = ( + np.abs( + result_data["step1_error"] + + result_data["step2_error"] + + result_data["step3_error"] + + result_data["step4_error"] + + result_data["step5_error"] + ) + / 5 + ) + + merged_dfs.append(result_data) + merged_out = pd.concat(merged_dfs) + merged_out.fillna(0, inplace=True) + merged_out.to_csv(f"mos_iv_reg/{device}/error_analysis.csv", index=False) + return merged_out def main(): @@ -325,33 +374,79 @@ def main(): vc = ["vcp ", "-vc (A)"] ib = ["ibp =", "ib =-"] Id_sim = "IcVc" - sweep = [61, 31] step = ["1.000E-06", "3.000E-06", "5.000E-06", "7.000E-06", "9.000E-06"] - for i, device in enumerate(devices): # Folder structure of measured values - dirpath = f"{device}" + dirpath = f"mos_iv_reg/{device}" if os.path.exists(dirpath) and 
os.path.isdir(dirpath): shutil.rmtree(dirpath) - os.makedirs(f"{device}/measured_{Id_sim}", exist_ok=False) + os.makedirs(f"{dirpath}", exist_ok=False) # From xlsx to csv read_file = pd.read_excel( f"../../180MCU_SPICE_DATA/BJT/bjt_{device}_icvc_f.nl_out.xlsx" ) - read_file.to_csv(f"{device}/{device}.csv", index=False, header=True) + read_file.to_csv(f"{dirpath}/{device}.csv", index=False, header=True) # Folder structure of simulated values - os.makedirs(f"{device}/simulated_{Id_sim}", exist_ok=False) - os.makedirs(f"{device}/error_{Id_sim}", exist_ok=False) + os.makedirs(f"{dirpath}/simulated", exist_ok=False) # =========== Simulate ============== - ext_measured(device, vc[i], step, Id_sim, list_devices[i], ib[i]) - - ext_simulated(device, vc[i], step, sweep[i], Id_sim, list_devices[i], ib[i]) + df = ext_measured(dirpath, device, vc[i], step, list_devices[i], ib[i]) + sims = run_sims(dirpath, list_devices[i], device, num_workers=mp.cpu_count()) # ============ Results ============= - error_cal(device, vc[i], step, Id_sim, list_devices[i], ib[i]) + merged_all = error_cal(sims, df, device, step, ib[i], vc[i]) + + for dev in list_devices[i]: + min_error_total = float() + max_error_total = float() + error_total = float() + number_of_existance = int() + + # number of rows in the final excel sheet + num_rows = merged_all["device"].count() + + for n in range(num_rows): + if dev == merged_all["device"].iloc[n]: + number_of_existance += 1 + error_total += merged_all["error"].iloc[n] + if merged_all["error"].iloc[n] > max_error_total: + max_error_total = merged_all["error"].iloc[n] + elif merged_all["error"].iloc[n] < min_error_total: + min_error_total = merged_all["error"].iloc[n] + + mean_error_total = error_total / number_of_existance + + # Making sure that min, max, mean errors are not > 100% + if min_error_total > 100: + min_error_total = 100 + + if max_error_total > 100: + max_error_total = 100 + + if mean_error_total > 100: + mean_error_total = 100 + + # printing min, max, mean errors to the consol + print( + "# Device {} min error: {:.2f}".format(dev, min_error_total), + ", max error: {:.2f}, mean error {:.2f}".format( + max_error_total, mean_error_total + ), + ) + + if max_error_total < PASS_THRESH: + print("# Device {} has passed regression.".format(dev)) + else: + print( + "# Device {} has failed regression. Needs more analysis.".format( + dev + ) + ) + print("\n\n") + + print("\n\n") # ================================================================