
Code in Python - equation fitting with 3 independent variables for kc

The script below fits the model kc = a + b*ln(T2) + c*ln(D) + d*ln(P) + e*ln(T2)*ln(D) + f*ln(D)*ln(P) + g*ln(T2)*ln(P) + h*ln(T2)*ln(D)*ln(P) by minimizing the sum of squared residuals with scipy.optimize.minimize; the inputs are log-transformed when the data dictionary is built, while kc itself is left untransformed.

import numpy as np
import pandas as pd
from scipy.optimize import minimize
import matplotlib.pyplot as plt

# x data: the three independent variables, log-transformed
data = {
'T2': np.log([0.4,0.2,0.2,0.4,0.6,0.8,1,0.2,0.4,0.6,0.8,1,0.2,0.4,0.6,0.8,1,0.2,0.4,0.6,0.8,1,0.4,0.6,0.8,1,0.2,0.2,0.4,
    0.6,0.8,1,0.2,0.4,0.6,0.8,1,0.2,0.4,0.6,0.8,1,0.2,0.4,0.6,0.8,1,0.4,0.6,0.8,1,0.2,0.2,0.4,0.6,0.8,1,0.2,0.4,0.6,
    0.8,1,0.2,0.4,0.6,0.8,1,0.2,0.4,0.8,1,0.4,0.6,0.8,1,0.2,0.2,0.4,0.6,0.8,1,0.2,0.4,0.6,0.8,1,0.2,0.4,0.6,0.8,1,
    0.4,0.8,1,0.4,0.6,0.8,1,0.2,0.2,0.4,0.6,0.8,1,0.2,0.4,0.6,0.8,1,0.4,0.6,0.8,1,0.4,0.6,0.8,1,0.4,0.6,0.8,1,0.2,0.2,
    0.4,0.6,0.8,1,0.2,0.4,0.6,0.8,1,0.4,0.6,0.8,1,0.6,0.8,1,0.4,0.6,0.8,1,0.2,0.2,0.4,0.6,0.8,1,0.4,0.6,0.8,1,0.4,
    0.6,0.8,1,0.6,0.8,1,0.4,0.6,0.8,1,0.2,0.2,0.4,0.6,0.8,1,0.4,0.6,0.8,1,0.6,0.8,1,0.8,1]),
'D': np.log([0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.2,0.4,0.4,0.4,0.4,
    0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.4,0.6,0.6,0.6,0.6,0.6,0.6,0.6,
    0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.6,0.8,0.8,0.8,0.8,0.8,0.8,0.8,0.8,0.8,0.8,0.8,
    0.8,0.8,0.8,0.8,0.8,0.8,0.8,0.8,0.8,0.8,0.8,0.8,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1.2,1.2,1.2,1.2,1.2,
    1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.2,1.4,1.4,1.4,1.4,1.4,1.4,1.4,1.4,1.4,1.4,1.4,
    1.4,1.4,1.4,1.4,1.4,1.4,1.4,1.4,1.4,1.4,1.6,1.6,1.6,1.6,1.6,1.6,1.6,1.6,1.6,1.6,1.6,1.6,1.6,1.6,1.6,1.6,1.6,1.6,1.6]),
'P': np.log([0.2,0.2,0.4,0.4,0.4,0.4,0.4,0.6,0.6,0.6,0.6,0.6,0.8,0.8,0.8,0.8,0.8,1,1,1,1,1,0.2,0.2,0.2,0.2,0.2,0.4,0.4,
    0.4,0.4,0.4,0.6,0.6,0.6,0.6,0.6,0.8,0.8,0.8,0.8,0.8,1,1,1,1,1,0.2,0.2,0.2,0.2,0.2,0.4,0.4,0.4,0.4,0.4,0.6,0.6,
    0.6,0.6,0.6,0.8,0.8,0.8,0.8,0.8,1,1,1,1,0.2,0.2,0.2,0.2,0.2,0.4,0.4,0.4,0.4,0.4,0.6,0.6,0.6,0.6,0.6,0.8,0.8,0.8,
    0.8,0.8,1,1,1,0.2,0.2,0.2,0.2,0.2,0.4,0.4,0.4,0.4,0.4,0.6,0.6,0.6,0.6,0.6,0.8,0.8,0.8,0.8,1,1,1,1,0.2,0.2,0.2,0.2,
    0.2,0.4,0.4,0.4,0.4,0.4,0.6,0.6,0.6,0.6,0.6,0.8,0.8,0.8,0.8,1,1,1,0.2,0.2,0.2,0.2,0.2,0.4,0.4,0.4,0.4,0.4,0.6,
    0.6,0.6,0.6,0.8,0.8,0.8,0.8,1,1,1,0.2,0.2,0.2,0.2,0.2,0.4,0.4,0.4,0.4,0.4,0.6,0.6,0.6,0.6,0.8,0.8,0.8,1,1])}

# y data: measured kc (the dependent variable)
data['y'] = [1.026,1.0335,0.5676,0.7491,0.817,0.8606,0.9013,0.3343,0.5389,0.6243,0.6961,0.7471,0.236,0.3938,0.4819,
    0.5536,0.6119,0.1791,0.2952,0.3886,0.4463,0.508,0.8582,0.9141,0.9755,1.0103,0.7996,0.4383,0.6496,
    0.6945,0.7641,0.8126,0.2648,0.4675,0.5316,0.6033,0.6611,0.1873,0.3215,0.4256,0.4832,0.5456,0.1413,
    0.2441,0.3336,0.3994,0.4509,0.7108,0.7685,0.8354,0.8809,0.6554,0.3649,0.5443,0.6083,0.6625,0.7198,
    0.2265,0.3994,0.4851,0.5293,0.5927,0.1577,0.2794,0.3879,0.4482,0.488,0.1213,0.2139,0.3671,0.4111,
    0.6207,0.6642,0.7162,0.7727,0.5788,0.3189,0.4983,0.5351,0.5947,0.642,0.1981,0.3586,0.4401,0.4897,
    0.5292,0.1379,0.253,0.3506,0.4115,0.4579,0.191,0.3339,0.3916,0.5439,0.5943,0.6365,0.683,0.5259,0.2838,
    0.452,0.4949,0.5306,0.5863,0.176,0.3192,0.4167,0.4603,0.4945,0.2276,0.3211,0.3934,0.423,0.1758,
    0.2438,0.3074,0.3666,0.5014,0.5287,0.5796,0.619,0.4873,0.2561,0.4274,0.4647,0.4952,0.5444,0.1603,
    0.2943,0.3986,0.4254,0.4676,0.2108,0.297,0.3689,0.40555,0.2249,0.2855,0.3458,0.4694,0.4924,0.5371,
    0.5712,0.4452,0.2336,0.4081,0.4289,0.4676,0.4971,0.2734,0.3842,0.4078,0.4462,0.1933,0.2768,0.3483,
    0.3914,0.213,0.267,0.32,0.4322,0.4643,0.4898,0.5341,0.4228,0.215,0.3925,0.4106,0.4456,0.4718,0.2557,
    0.3624,0.3934,0.4167,0.2595,0.3307,0.3798,0.2511,0.3053]

df = pd.DataFrame(data)

# Define the function to optimize: linear in the log-inputs plus all interaction terms
def function_to_optimize(params, x):
    a, b, c, d, e, f, g, h = params
    return (a + b * x['T2'] + c * x['D'] + d * x['P'] + e * x['T2'] * x['D']
            + f * x['D'] * x['P'] + g * x['T2'] * x['P']
            + h * x['T2'] * x['D'] * x['P'])

# Define the objective function for optimization: sum of squared residuals
def objective(params):
    return np.sum((df['y'] - function_to_optimize(params, df))**2)

# Initial parameter guesses
initial_params = [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]

# Perform optimization
optimized_params = minimize(objective, initial_params).x

# Create a new column with the optimized predictions
df['y_pred'] = function_to_optimize(optimized_params, df)

x = df[['T2', 'D', 'P']]
y = df['y']
y_pred = df['y_pred']

print("Optimized Parameters:", optimized_params)

# Coefficient of determination
R2 = 1 - np.sum((y - y_pred)**2) / np.sum((y - np.mean(y))**2)
print("R2:", R2)

# Adjusted R2, with p taken as the 3 independent variables
# (counting all seven regressor terms instead would give a slightly lower value)
p = 3
n = y.shape[0]
aR2 = 1 - (1 - R2) * ((n - 1) / (n - p - 1))
print("aR2:", aR2)

Optimized Parameters:
[ 0.38406921  0.21329102 -0.08125439 -0.19273429 -0.01645878  0.16028854
  0.06219568  0.01578287]
R2: 0.9828485643646812
aR2: 0.9825545397537899
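
As a cross-check on the minimize-based fit, the same model can be passed to scipy.optimize.curve_fit, which additionally returns the parameter covariance matrix, and the matplotlib import above can be put to use with a parity plot of measured versus predicted kc. A minimal sketch, assuming df, np, and plt from the script above; the helper name model is introduced here only for illustration:

from scipy.optimize import curve_fit

def model(X, a, b, c, d, e, f, g, h):
    # X is a tuple of the three log-transformed input columns
    t2, dd, pp = X
    return (a + b * t2 + c * dd + d * pp + e * t2 * dd
            + f * dd * pp + g * t2 * pp + h * t2 * dd * pp)

popt, pcov = curve_fit(model, (df['T2'], df['D'], df['P']), df['y'], p0=[1.0] * 8)
perr = np.sqrt(np.diag(pcov))  # one-sigma uncertainty of each parameter
print("curve_fit parameters:", popt)
print("standard errors:", perr)

# Parity plot: points close to the 45-degree line indicate a good fit
plt.scatter(df['y'], df['y_pred'])
plt.plot([0, 1.1], [0, 1.1])
plt.xlabel('measured kc')
plt.ylabel('predicted kc')
plt.show()

The parameters returned by curve_fit should agree with optimized_params above, since both routines minimize the same sum of squared residuals.
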
Code in Python for equation fitting using linear regression (least squares) with 3 independent variables

This script fits the purely linear model y = b0 + b1*T2 + b2*D + b3*P (no log transform and no interaction terms) to each response in turn (kc below; ti and td are selected by swapping the commented lines), first with scikit-learn and then with statsmodels, which also prints the full OLS summary.

import pandas as pd
from sklearn import linear_model
import statsmodels.api as sm

# Full data set: three inputs (T2, D, P) and three responses (kc, ti, td)
data = {'T2': [0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2,
    0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4,
    0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4, 0.6, 0.6, 0.6, 0.6,
    0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6,
    0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.6, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8,
    0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 0.8, 1, 1,
    1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1
    ],
'D': [0.2, 0.2, 0.2, 0.2, 0.2, 0.4, 0.4, 0.4, 0.4, 0.4, 0.6, 0.6, 0.6, 0.6, 0.6, 0.8, 0.8, 0.8, 0.8, 1, 1, 1, 1.2,
    1.2, 1.2, 1.4, 1.4, 1.6, 1.6, 0.2, 0.2, 0.2, 0.2, 0.2, 0.4, 0.4, 0.4, 0.4, 0.4, 0.6, 0.6, 0.6, 0.6, 0.6, 0.8, 0.8, 0.8,
    0.8, 0.8, 1, 1, 1, 1, 1, 1.2, 1.2, 1.2, 1.2, 1.4, 1.4, 1.4, 1.4, 1.6, 1.6, 1.6, 0.2, 0.2, 0.2, 0.2, 0.4, 0.4, 0.4, 0.4,
    0.4, 0.6, 0.6, 0.6, 0.6, 0.8, 0.8, 0.8, 0.8, 1, 1, 1, 1, 1, 1.2, 1.2, 1.2, 1.2, 1.2, 1.4, 1.4, 1.4, 1.4, 1.4, 1.6, 1.6,
    1.6, 1.6, 0.2, 0.2, 0.2, 0.2, 0.4, 0.4, 0.4, 0.4, 0.4, 0.6, 0.6, 0.6, 0.6, 0.6, 0.8, 0.8, 0.8, 0.8, 0.8, 1, 1, 1, 1, 1,
    1.2, 1.2, 1.2, 1.2, 1.2, 1.4, 1.4, 1.4, 1.4, 1.4, 1.6, 1.6, 1.6, 1.6, 1.6, 0.2, 0.2, 0.2, 0.2, 0.4, 0.4, 0.4, 0.4, 0.4,
    0.6, 0.6, 0.6, 0.6, 0.6, 0.8, 0.8, 0.8, 0.8, 0.8, 1, 1, 1, 1, 1, 1.2, 1.2, 1.2, 1.2, 1.2, 1.4, 1.4, 1.4, 1.4, 1.4, 1.6,
    1.6, 1.6, 1.6, 1.6
    ],
'P': [0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 0.2, 0.4, 0.6, 0.2,
    0.4, 0.6, 0.2, 0.4, 0.2, 0.4, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1,
    0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 0.2, 0.4, 0.6, 0.8, 0.2, 0.4, 0.6, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1,
    0.2, 0.4, 0.6, 0.8, 0.2, 0.4, 0.6, 0.8, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4,
    0.6, 0.8, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1,
    0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4,
    0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6, 0.8, 1, 0.2, 0.4, 0.6,
    0.8, 1
    ],
'kc': [1.0335, 0.5676, 0.3343, 0.236, 0.1791, 0.7996, 0.4383, 0.2648, 0.1873, 0.1413, 0.6554, 0.3649,
    0.2265, 0.1577, 0.1213, 0.5788, 0.3189, 0.1981, 0.1379, 0.5259, 0.2838, 0.176, 0.4873, 0.2561, 0.1603,
    0.4452, 0.2336, 0.4228, 0.215, 1.026, 0.7491, 0.5389, 0.3938, 0.2952, 0.8582, 0.6496, 0.4675, 0.3215,
    0.2441, 0.7108, 0.5443, 0.3994, 0.2794, 0.2139, 0.6207, 0.4983, 0.3586, 0.253, 0.191, 0.5439, 0.452,
    0.3192, 0.2276, 0.1758, 0.5014, 0.4274, 0.2943, 0.2108, 0.4694, 0.4081, 0.2734, 0.1933, 0.4322, 0.3925,
    0.2557, 0.817, 0.6243, 0.4819, 0.3886, 0.9141, 0.6945, 0.5316, 0.4256, 0.3336, 0.7685, 0.6083, 0.4851,
    0.3879, 0.6642, 0.5351, 0.4401, 0.3506, 0.5943, 0.4949, 0.4167, 0.3211, 0.2438, 0.5287, 0.4647, 0.3986,
    0.297, 0.2249, 0.4924, 0.4289, 0.3842, 0.2768, 0.213, 0.4643, 0.4106, 0.3624, 0.2595, 0.8606, 0.6961,
    0.5536, 0.4463, 0.9755, 0.7641, 0.6033, 0.4832, 0.3994, 0.8354, 0.6625, 0.5293, 0.4482, 0.3671, 0.7162,
    0.5947, 0.4897, 0.4115, 0.3339, 0.6365, 0.5306, 0.4603, 0.3934, 0.3074, 0.5796, 0.4952, 0.4254, 0.3689,
    0.2855, 0.5371, 0.4676, 0.4078, 0.3483, 0.267, 0.4898, 0.4456, 0.3934, 0.3307, 0.2511, 0.9013, 0.7471,
    0.6119, 0.508, 1.0103, 0.8126, 0.6611, 0.5456, 0.4509, 0.8809, 0.7198, 0.5927, 0.488, 0.4111, 0.7727,
    0.642, 0.5292, 0.4579, 0.3916, 0.683, 0.5863, 0.4945, 0.423, 0.3666, 0.619, 0.5444, 0.4676, 0.40555,
    0.3458, 0.5712, 0.4971, 0.4462, 0.3914, 0.32, 0.5341, 0.4718, 0.4167, 0.3798, 0.3053
    ],
'ti': [0.8479, 1.0192, 1.1391, 1.229, 1.2992, 1.1297, 1.3097, 1.4436, 1.5474, 1.6306, 1.3834, 1.5673,
    1.7105, 1.8247, 1.9181, 1.621, 1.8066, 1.9563, 2.0787, 1.8479, 2.034, 2.1885, 2.067, 2.253, 2.4111,
    2.2801, 2.4658, 2.4885, 2.6738, 1.0192, 1.2192, 1.3626, 1.4718, 1.5585, 1.3097, 1.5063, 1.656, 1.774,
    1.8699, 1.5673, 1.7615, 1.916, 2.0413, 2.1449, 1.8066, 1.9989, 2.157, 2.2882, 2.3985, 2.034, 2.2247,
    2.3856, 2.5217, 2.6378, 2.253, 2.4423, 2.6055, 2.7457, 2.4658, 2.6539, 2.8189, 2.9627, 2.6738, 2.8609,
    3.0274, 1.3626, 1.5254, 1.6511, 1.7518, 1.4436, 1.656, 1.8201, 1.9512, 2.0856, 1.7105, 1.916, 2.0861,
    2.2175, 1.9563, 2.157, 2.324, 2.464, 2.1885, 2.3856, 2.5537, 2.6973, 2.8208, 2.4111, 2.6055, 2.7746,
    2.9212, 3.0489, 2.6267, 2.8189, 2.9889, 3.1382, 3.2696, 2.8369, 3.0274, 3.1981, 3.3498, 1.4718, 1.6511,
    1.7909, 1.9041, 1.5474, 1.774, 1.9512, 2.094, 2.212, 1.8247, 2.0413, 2.2175, 2.3633, 2.4861, 2.0787,
    2.2882, 2.464, 2.6125, 2.7395, 2.3173, 2.5217, 2.6973, 2.8483, 2.979, 2.5453, 2.7457, 2.9212, 3.0744,
    3.2086, 2.7654, 2.9627, 3.1382, 3.2933, 3.4305, 2.9794, 3.1743, 3.3498, 3.5066, 3.6467, 1.5585, 1.7518,
    1.9041, 2.0281, 1.6306, 1.8699, 2.0586, 2.212, 2.3397, 1.9181, 2.1449, 2.331, 2.4861, 2.6174, 2.1804,
    2.3985, 2.5828, 2.7395, 2.8742, 2.4261, 2.6378, 2.8208, 2.979, 3.1168, 2.6602, 2.8669, 3.0489, 3.2086,
    3.3491, 2.8855, 3.0884, 3.2696, 3.4305, 3.5736, 3.1042, 3.3039, 3.4846, 3.6467, 3.792
    ],
'td': [0.212, 0.2548, 0.2848, 0.3072, 0.3248, 0.2824, 0.3274, 0.3609, 0.3869, 0.4076, 0.3458, 0.3918,
    0.4276, 0.4562, 0.4795, 0.4053, 0.4516, 0.4891, 0.5197, 0.462, 0.5085, 0.5471, 0.5167, 0.5633, 0.6028,
    0.57, 0.6165, 0.6221, 0.6685, 0.2548, 0.3048, 0.3406, 0.368, 0.3896, 0.3274, 0.3766, 0.414, 0.4435,
    0.4675, 0.3918, 0.4404, 0.479, 0.5103, 0.5362, 0.4516, 0.4997, 0.5392, 0.572, 0.5996, 0.5085, 0.5562,
    0.5964, 0.6304, 0.6595, 0.5633, 0.6106, 0.6514, 0.6864, 0.6165, 0.6635, 0.7047, 0.7407, 0.6685, 0.7152,
    0.7568, 0.3406, 0.3813, 0.4128, 0.438, 0.3609, 0.414, 0.455, 0.4878, 0.5147, 0.4276, 0.479, 0.5201,
    0.5544, 0.4891, 0.5392, 0.581, 0.616, 0.5471, 0.5964, 0.6384, 0.6743, 0.7052, 0.6028, 0.6514, 0.6936,
    0.7303, 0.7622, 0.6567, 0.7047, 0.7472, 0.7845, 0.8174, 0.7092, 0.7568, 0.7995, 0.8384, 0.368, 0.4128,
    0.4477, 0.476, 0.3869, 0.4435, 0.4878, 0.5235, 0.553, 0.4562, 0.5103, 0.5544, 0.5908, 0.6215, 0.5194,
    0.572, 0.616, 0.6531, 0.6849, 0.5793, 0.6304, 0.6743, 0.7121, 0.7448, 0.6363, 0.6864, 0.7303, 0.7686,
    0.8021, 0.6913, 0.7407, 0.7845, 0.8233, 0.8576, 0.7449, 0.7936, 0.8374, 0.8767, 0.9117, 0.3896, 0.438,
    0.476, 0.507, 0.4076, 0.4675, 0.5147, 0.553, 0.5849, 0.4795, 0.5362, 0.5827, 0.6215, 0.6543, 0.5451,
    0.5996, 0.6457, 0.6849, 0.7186, 0.6065, 0.6595, 0.7052, 0.7448, 0.7792, 0.665, 0.7167, 0.7622, 0.8021,
    0.8373, 0.7214, 0.7721, 0.8174, 0.8576, 0.8934, 0.7761, 0.826, 0.8711, 0.9117, 0.948
    ]}

df = pd.DataFrame(data)

x = df[['T2', 'D', 'P']]
y = df['kc']
# y = df['ti']
# y = df['td']

# with sklearn
regr = linear_model.LinearRegression()
regr.fit(x, y)
print('Intercept: \n', regr.intercept_)
print('Coefficients: \n', regr.coef_)

# with statsmodels
x = sm.add_constant(x)  # adding a constant (intercept) column
model = sm.OLS(y, x).fit()
predictions = model.predict(x)
print_model = model.summary()
print(print_model)
Result for kc:

Intercept:
0.7576872673027482
Coefficients:
[ 0.3498239 -0.22827162 -0.54305587]
OLS Regression Results
==============================================================================
Dep. Variable: kc R-squared: 0.871
Model: OLS Adj. R-squared: 0.869
Method: Least Squares F-statistic: 394.7
Date: Mon, 14 Aug 2023 Prob (F-statistic): 1.27e-77
Time: 10:50:52 Log-Likelihood: 224.67
No. Observations: 179 AIC: -441.3
Df Residuals: 175 BIC: -428.6
Df Model: 3
Covariance Type: nonrobust
==============================================================================
coef std err t P>|t| [0.025 0.975]
------------------------------------------------------------------------------
const 0.7577 0.019 39.863 0.000 0.720 0.795
T2 0.3498 0.019 18.270 0.000 0.312 0.388
D -0.2283 0.012 -19.311 0.000 -0.252 -0.205
P -0.5431 0.019 -28.041 0.000 -0.581 -0.505
==============================================================================
Omnibus: 57.000 Durbin-Watson: 1.316
Prob(Omnibus): 0.000 Jarque-Bera (JB): 193.807
Skew: 1.229 Prob(JB): 8.23e-43
Kurtosis: 7.466 Cond. No. 7.74
==============================================================================

Notes:
[1] Standard Errors assume that the covariance matrix of the errors is
correctly specified.
Result for ti:

Intercept:
0.3332283178130291
Coefficients:
[0.76892957 1.21395741 0.78836996]
OLS Regression Results
==============================================================================
Dep. Variable: ti R-squared: 0.994
Model: OLS Adj. R-squared: 0.994
Method: Least Squares F-statistic: 9761.
Date: Mon, 14 Aug 2023 Prob (F-statistic): 1.71e-194
Time: 10:55:01 Log-Likelihood: 286.27
No. Observations: 179 AIC: -564.5
Df Residuals: 175 BIC: -551.8
Df Model: 3
Covariance Type: nonrobust
==============================================================================
coef std err t P>|t| [0.025 0.975]
------------------------------------------------------------------------------
const 0.3332 0.013 24.733 0.000 0.307 0.360
T2 0.7689 0.014 56.655 0.000 0.742 0.796
D 1.2140 0.008 144.886 0.000 1.197 1.230
P 0.7884 0.014 57.431 0.000 0.761 0.815
==============================================================================
Omnibus: 28.513 Durbin-Watson: 0.781
Prob(Omnibus): 0.000 Jarque-Bera (JB): 42.319
Skew: -0.884 Prob(JB): 6.47e-10
Kurtosis: 4.595 Cond. No. 7.74
==============================================================================

Notes:
[1] Standard Errors assume that the covariance matrix of the errors is
correctly specified.
Result for td:

Intercept:
0.08326171721234155
Coefficients:
[0.19224002 0.30359663 0.19692109]
OLS Regression Results
==============================================================================
Dep. Variable: td R-squared: 0.994
Model: OLS Adj. R-squared: 0.994
Method: Least Squares F-statistic: 9800.
Date: Mon, 14 Aug 2023 Prob (F-statistic): 1.22e-194
Time: 10:55:38 Log-Likelihood: 534.74
No. Observations: 179 AIC: -1061.
Df Residuals: 175 BIC: -1049.
Df Model: 3
Covariance Type: nonrobust
==============================================================================
coef std err t P>|t| [0.025 0.975]
------------------------------------------------------------------------------
const 0.0833 0.003 24.764 0.000 0.077 0.090
T2 0.1922 0.003 56.758 0.000 0.186 0.199
D 0.3036 0.002 145.194 0.000 0.299 0.308
P 0.1969 0.003 57.483 0.000 0.190 0.204
==============================================================================
Omnibus: 28.163 Durbin-Watson: 0.792
Prob(Omnibus): 0.000 Jarque-Bera (JB): 41.460
Skew: -0.879 Prob(JB): 9.93e-10
Kurtosis: 4.571 Cond. No. 7.74
==============================================================================

Notes:
[1] Standard Errors assume that the covariance matrix of the errors is
correctly specified.
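
Either fitted object can then be used to predict a response at an unseen operating point, and the adjusted R-squared shown in the summaries is also available programmatically. A minimal sketch, assuming regr, model, pd, and sm from the script above; the operating point and the names new_point and new_x are hypothetical, chosen only for illustration:

# Hypothetical operating point inside the fitted ranges
# (T2 and P were varied over 0.2-1 and D over 0.2-1.6 in the data)
new_point = pd.DataFrame({'T2': [0.5], 'D': [1.0], 'P': [0.5]})

# sklearn prediction
print('sklearn kc:', regr.predict(new_point))

# statsmodels prediction; has_constant='add' forces the intercept column,
# since add_constant would otherwise skip it for a single-row frame
new_x = sm.add_constant(new_point, has_constant='add')
print('statsmodels kc:', model.predict(new_x))

# Adjusted R-squared, as reported in the summary table
print('adjusted R2:', model.rsquared_adj)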
