반응형
Notice
Recent Posts
Recent Comments
Link
관리 메뉴

bro's coding

sklearn.LinearRegression(선형회귀) 본문

[AI]/python.sklearn

sklearn.LinearRegression(선형회귀)

givemebro 2020. 4. 7. 10:39
반응형

회귀 분석
세 가지 데이터를 넣어서 나머지 한 개의 데이터를 예측

# Split the array column-wise: first three columns are the inputs,
# the fourth column is the value we want to predict.
X = data[:, :3]
y = data[:, 3]

 

from sklearn.linear_model import LinearRegression

 

# Fit an ordinary least-squares linear model on the whole data set,
# then report R^2 (coefficient of determination) on that same data.
model = LinearRegression()
model.fit(X, y)
model.score(X, y)

0.9380481344518986

# Predict on the training inputs and display targets next to predictions.
pred_y = model.predict(X)
y, pred_y
더보기

(array([0.2, 0.2, 0.2, 0.2, 0.2, 0.4, 0.3, 0.2, 0.2, 0.1, 0.2, 0.2, 0.1, 0.1, 0.2, 0.4, 0.4, 0.3, 0.3, 0.3, 0.2, 0.4, 0.2, 0.5, 0.2, 0.2, 0.4, 0.2, 0.2, 0.2, 0.2, 0.4, 0.1, 0.2, 0.1, 0.2, 0.2, 0.1, 0.2, 0.2, 0.3, 0.3, 0.2, 0.6, 0.4, 0.3, 0.2, 0.2, 0.2, 0.2, 1.4, 1.5, 1.5, 1.3, 1.5, 1.3, 1.6, 1. , 1.3, 1.4, 1. , 1.5, 1. , 1.4, 1.3, 1.4, 1.5, 1. , 1.5, 1.1, 1.8, 1.3, 1.5, 1.2, 1.3, 1.4, 1.4, 1.7, 1.5, 1. , 1.1, 1. , 1.2, 1.6, 1.5, 1.6, 1.5, 1.3, 1.3, 1.3, 1.2, 1.4, 1.2, 1. , 1.3, 1.2, 1.3, 1.3, 1.1, 1.3, 2.5, 1.9, 2.1, 1.8, 2.2, 2.1, 1.7, 1.8, 1.8, 2.5, 2. , 1.9, 2.1, 2. , 2.4, 2.3, 1.8, 2.2, 2.3, 1.5, 2.3, 2. , 2. , 1.8, 2.1, 1.8, 1.8, 1.8, 2.1, 1.6, 1.9, 2. , 2.2, 1.5, 1.4, 2.3, 2.4, 1.8, 1.8, 2.1, 2.4, 2.3, 1.9, 2.3, 2.5, 2.3, 1.9, 2. , 2.3, 1.8]), array([0.21613634, 0.143802 , 0.17900289, 0.28236993, 0.26004119, 0.40239228, 0.29839428, 0.26689457, 0.22605994, 0.21928854, 0.2514192 , 0.36155765, 0.16482913, 0.11213834, 0.07811738, 0.34848185, 0.19195701, 0.21613634, 0.31643316, 0.33737832, 0.28800367, 0.3145006 , 0.13371445, 0.32820735, 0.51938411, 0.2279925 , 0.31950339, 0.24771802, 0.17223148, 0.33682934, 0.29292449, 0.18278604, 0.38498435, 0.29217186, 0.21928854, 0.06331267, 0.07941899, 0.21928854, 0.19632884, 0.24586743, 0.18455465, 0.01515766, 0.24208429, 0.34238111, 0.54781359, 0.16482913, 0.38998714, 0.25263884, 0.27244633, 0.19140803, 1.48407864, 1.5050238 , 1.58744569, 1.22532442, 1.4450946 , 1.56070285, 1.65414629, 1.00610321, 1.44694519, 1.32730788, 0.99878283, 1.40657757, 1.09731103, 1.60468967, 1.13112834, 1.36645587, 1.62748542, 1.30636272, 1.31830086, 1.19744391, 1.76798592, 1.21355023, 1.57634216, 1.58181195, 1.331173 , 1.36460528, 1.48723084, 1.65923105, 1.52049917, 0.98885923, 1.1429845 , 1.09037569, 1.20114509, 1.79039664, 1.66953969, 1.63488778, 1.52428232, 1.26754263, 1.41705015, 1.27107986, 1.50439285, 1.57495858, 1.23087618, 0.96219836, 1.40102581, 1.44863184, 1.42575412, 1.37322727, 0.82910021, 1.35026758, 2.33806093, 1.8324509 , 
2.04860188, 2.03611477, 2.12215586, 2.31172795, 1.66028675, 2.19410517, 1.96571299, 2.27005871, 1.79964958, 1.81150574, 1.90124801, 1.75511377, 1.85532862, 1.92589435, 1.96432941, 2.5263314 , 2.35701638, 1.62339921, 2.03119396, 1.79216525, 2.29755419, 1.6220976 , 2.09612594, 2.12593901, 1.61339364, 1.73278503, 1.99220992, 1.97496593, 2.04498268, 2.32645068, 1.99220992, 1.75019296, 2.00953587, 2.02765672, 2.15050338, 2.00823426, 1.70120335, 1.85048978, 1.99776168, 1.69266333, 1.8324509 , 2.15743872, 2.09612594, 1.76444869, 1.62895098, 1.80650295, 2.06631287, 1.88005693]))

 

 

# Overlay the model's predictions (red) on the true targets (blue).
plt.plot(pred_y, c='r')
plt.plot(y, c='b')

# Predicted-vs-true scatter: marker size scales with the residual,
# color encodes the last data column; the green dotted line is the
# ideal y = x diagonal a perfect model would sit on.
plt.scatter(y, pred_y, s=(y - pred_y) * 200, c=data[:, -1], alpha=0.5)
plt.colorbar()
plt.plot([0, 3], [0, 3], 'g:')

# Top row of a 2x3 grid: each input feature against the predictions.
for col in range(3):
    plt.subplot(2, 3, col + 1)
    plt.scatter(data[:, col], pred_y, c=data[:, -1], alpha=0.5)
    plt.yticks(rotation=60)
# Bottom row: the same features against the true targets, for comparison.
for col in range(3):
    plt.subplot(2, 3, col + 4)
    plt.scatter(data[:, col], y, c=data[:, -1], alpha=0.5)
    plt.yticks(rotation=60)

#기울기 : coef_ (각각의 차원에 대한 기울기)
#y절편 : intercept_

# ex:
	#coef의 결과 : a, b, c
    #intercept_의 결과 : d

# data: X, Y, Z
# 예측값 = aX + bY + cZ + d

# 각 데이터의 기울기( 가중치 ) : a, b, c
# y절편(intercept_) : d
    

 

 

# Per-feature slopes (weights) of the fitted hyperplane, one per input column.
model.coef_

array([-0.21027133, 0.22877721, 0.52608818])

 

# Intercept (bias) term of the fitted linear model.
model.intercept_

-0.2487235860244541

반응형
Comments