Abstract: Given sampled orbit positions (time, x, y, z) read from an orbit file, compute the sample means, estimate the regression coefficients of each coordinate against time, and use the fitted regression equations to predict the position at later times.
Data
Each record in 轨道文件.txt gives a time value followed by the x, y, and z coordinates at that time:

300,21182.88,-7044.56,14639.48
600,21707.87,-6930.28,13906.68
900,22207.04,-6828.65,13147.66
1200,22679.16,-6738.66,12363.84
1500,23123.06,-6659.23,11556.71
1800,23537.69,-6589.21,10727.78
2100,23922.07,-6527.40,9878.61
2400,24275.33,-6472.54,9010.81
2700,24596.67,-6423.32,8126.00
3000,24885.42,-6378.40,7225.86
3300,25141.01,-6336.41,6312.08
3600,25362.96,-6295.93,5386.38
3900,25550.92,-6255.54,4450.51

Problem

Using the samples above, fit a linear regression of each coordinate (x, y, z) against time and predict the position at t = 4200, 4500, and 4800.

Solution
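The code below fits each coordinate with the closed-form ordinary least-squares estimators for simple linear regression. Written for x(t), with the same form applying to y and z:

\[
\hat{x}(t) = \beta_0 + \beta_1 t, \qquad
\beta_1 = \frac{\sum_i (t_i - \bar{t})(x_i - \bar{x})}{\sum_i (t_i - \bar{t})^2}, \qquad
\beta_0 = \bar{x} - \beta_1 \bar{t},
\]

where \(\bar{t}\) and \(\bar{x}\) are the sample means. These are exactly the quantities computed by xpj(), ypj(), sse(), ssx(), getbeta1(), and getbeta0() in the code.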
def read_m(path):
    """Read the orbit file: one record per line, comma-separated as time, x, y, z."""
    m = []          # all records
    xlist = []      # x values
    ylist = []      # y values
    zlist = []      # z values
    time_list = []  # time values
    with open(path, "r") as f:
        for line in f.readlines():
            fields = line.replace(" ", "").split(",")
            record = [float(v) for v in fields]
            m.append(record)
            time_list.append(record[0])
            xlist.append(record[1])
            ylist.append(record[2])
            zlist.append(record[3])
    return {
        "alldata": m,
        "time": time_list,
        "x": xlist,
        "y": ylist,
        "z": zlist,
    }


# Module-level data used by the estimator functions:
# XXX holds the independent variable (time), YYY the dependent variable.
XXX = None
YYY = None


def xpj():
    """Mean of X."""
    total = 0
    for i in range(len(XXX)):
        total += XXX[i]
    return total / len(XXX)


def ypj():
    """Mean of Y."""
    total = 0
    for i in range(len(YYY)):
        total += YYY[i]
    return total / len(YYY)


def sse():
    """Sum of cross products: sum((x_i - x_mean) * (y_i - y_mean))."""
    total = 0
    xa = xpj()
    ya = ypj()
    for i in range(len(XXX)):
        total += (XXX[i] - xa) * (YYY[i] - ya)
    return total


def ssx():
    """Sum of squares of X about its mean: sum((x_i - x_mean) ** 2)."""
    total = 0
    xa = xpj()
    for i in range(len(XXX)):
        total += (XXX[i] - xa) * (XXX[i] - xa)
    return total


def getbeta1():
    """Slope estimate: beta1 = sse() / ssx()."""
    return sse() / ssx()


def getbeta0():
    """Intercept estimate: beta0 = y_mean - beta1 * x_mean."""
    return ypj() - getbeta1() * xpj()


def huiguixishu(x, y):
    """Regression coefficients [beta0, beta1] of y on x."""
    global XXX
    global YYY
    XXX = x
    YYY = y
    beta1 = getbeta1()
    beta0 = getbeta0()
    return [beta0, beta1]


def predic(x, beta0, beta1):
    """Predicted value of the fitted line beta0 + beta1 * x at x."""
    return beta0 + beta1 * x


if __name__ == "__main__":
    d = read_m("轨道文件.txt")
    tm = d["time"]
    x = d["x"]
    y = d["y"]
    z = d["z"]
    print("======== Regression coefficients ========")
    a = huiguixishu(tm, x)  # x(t) fit
    b = huiguixishu(tm, y)  # y(t) fit
    c = huiguixishu(tm, z)  # z(t) fit
    print(a)
    print(b)
    print(c)
    print("======== Predictions ========")
    guji_time = [4200, 4500, 4800]
    beta0_list = [a[0], b[0], c[0]]
    beta1_list = [a[1], b[1], c[1]]
    for i in range(len(guji_time)):
        px = predic(guji_time[i], beta0_list[0], beta1_list[0])
        py = predic(guji_time[i], beta0_list[1], beta1_list[1])
        pz = predic(guji_time[i], beta0_list[2], beta1_list[2])
        print(guji_time[i], format(px, "0.3f"), format(py, "0.3f"), format(pz, "0.3f"))
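As a quick sanity check on the hand-rolled estimators, the same slopes and intercepts can be reproduced with numpy's degree-1 polynomial fit. This is a minimal sketch, not part of the original solution; it assumes numpy is installed and that 轨道文件.txt is the comma-separated file shown in the Data section.

import numpy as np

# Load the comma-separated orbit file: columns are time, x, y, z.
data = np.loadtxt("轨道文件.txt", delimiter=",")
t = data[:, 0]
for name, col in zip(("x", "y", "z"), (1, 2, 3)):
    # polyfit with degree 1 returns [slope, intercept],
    # which should match [beta1, beta0] from huiguixishu().
    slope, intercept = np.polyfit(t, data[:, col], 1)
    print(name, intercept, slope)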
Results

======== Regression coefficients ========
[21146.959615384614, 1.2183738095238088]
[-7019.398461538461, 0.21143040293040288]
[15712.87576923077, -2.8401093406593407]
======== Predictions ========
4200 26264.130 -6131.391 3784.417
4500 26629.642 -6067.962 2932.384
4800 26995.154 -6004.533 2080.351
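For example, substituting t = 4200 into the fitted x(t) line reproduces the first predicted value:

\[
\hat{x}(4200) = 21146.9596 + 1.2183738 \times 4200 \approx 26264.130
\]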