import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn import linear_model
from mpl_toolkits.mplot3d import axes3d
import seaborn as sns
# Load the advertising dataset and split it into train/test partitions.
frame = pd.read_csv('Advertising.csv')
data = np.array(frame.values)  # DataFrame -> plain ndarray copy

# First 150 rows for training: features are columns 1-2, target is column 4
# (presumably TV/radio spend and sales — confirm against the CSV header).
x_train, y_train = data[:150, 1:3], data[:150, 4]

# Remaining 50 rows held out for evaluation.
x_test, y_test = data[150:200, 1:3], data[150:200, 4]

# Fit an ordinary least-squares regression of the target on the two features.
regr = linear_model.LinearRegression()
regr.fit(x_train, y_train)

# Print the fitted parameters. NOTE: scikit-learn stores fitted attributes
# with a trailing underscore (coef_ / intercept_); the bare names do not
# exist and raise AttributeError.
print('Coefficients: \n', regr.coef_)   # one slope per feature column
print('Intercept:', regr.intercept_)

# Mean squared error on the held-out set — the residuals must be squared
# before averaging (the original averaged the raw residuals).
print('Residual sum of squares: %.2f'
      % np.mean((regr.predict(x_test) - y_test) ** 2))
# Explained variance (R^2) on the test set; 1.0 is a perfect prediction.
print('variance score: %.2f' % regr.score(x_test, y_test))

# Build a line through feature space to visualize the fitted model
# (np.linspace defaults to 50 samples for both axes, so the arrays align).
x_line = np.linspace(0, 300)
y_line = np.linspace(0, 50)
# Evaluate the fitted plane along that line. Use the trained model's
# coefficients instead of hard-coded constants so the plot always matches
# the model actually fitted above. (The original line was also a syntax
# error: '0.04699836x_line' is missing the '*' operators.)
z_line = regr.coef_[0] * x_line + regr.coef_[1] * y_line + regr.intercept_

# 3-D scatter of the raw data (features on x/y, target on z) plus the
# regression line in blue.
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')  # create 3-D drawing axes
ax.scatter(data[:, 1], data[:, 2], data[:, 4], c='red')
ax.plot(x_line, y_line, z_line, c='blue')
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')  # label the response axis as well
plt.show()