-
Notifications
You must be signed in to change notification settings - Fork 19
/
b-6--Decision_Tree.py
107 lines (85 loc) · 2.68 KB
/
b-6--Decision_Tree.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 15 04:18:20 2018
@author: regkr
"""
#1. kutuphaneler
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Data loading: read the salary dataset.
# NOTE(review): columns are presumably (title, education level, salary) —
# verify against maaslar.csv.
veriler = pd.read_csv('maaslar.csv')
x = veriler.iloc[:,1:2]  # feature column kept 2-D (DataFrame) for sklearn
y = veriler.iloc[:,2:]   # target column(s)
# Plain numpy arrays used by all the regressors below.
X = x.values
Y = y.values
# --- Linear regression: fit a straight line to level vs. salary ---
from sklearn.linear_model import LinearRegression
lin_reg = LinearRegression()
lin_reg.fit(X, Y)
# Visualize the linear fit against the raw data.
plt.scatter(X, Y, color='red')
# Use X consistently (the original mixed DataFrame x and array X here).
plt.plot(X, lin_reg.predict(X), color='blue')
plt.show()
# --- Polynomial regression: compare degree-2 and degree-4 fits ---
from sklearn.preprocessing import PolynomialFeatures
# Fit and plot one model per degree. After the loop, poly_reg / lin_reg2
# hold the degree-4 model, which the prediction section below relies on
# (the original duplicated this whole section, re-importing
# PolynomialFeatures and overwriting the degree-2 objects).
for degree in (2, 4):
    poly_reg = PolynomialFeatures(degree=degree)
    x_poly = poly_reg.fit_transform(X)
    print(x_poly)
    lin_reg2 = LinearRegression()
    lin_reg2.fit(x_poly, Y)  # Y (array) for consistency with the other fits
    plt.scatter(X, Y, color='red')
    plt.plot(X, lin_reg2.predict(poly_reg.fit_transform(X)), color='blue')
    plt.show()
# --- Predictions ---
# sklearn requires a 2-D input (one row, one feature column); the original
# passed bare scalars, which raises ValueError on sklearn >= 0.20.
print(lin_reg.predict([[11]]))
print(lin_reg.predict([[6.6]]))
print(lin_reg2.predict(poly_reg.fit_transform([[11]])))
print(lin_reg2.predict(poly_reg.fit_transform([[6.6]])))
# --- Feature scaling + support vector regression (RBF kernel) ---
# SVR is sensitive to feature scale, so both X and Y are standardized.
from sklearn.preprocessing import StandardScaler
sc1 = StandardScaler()
x_olcekli = sc1.fit_transform(X)
sc2 = StandardScaler()
y_olcekli = sc2.fit_transform(Y)
from sklearn.svm import SVR
svr_reg = SVR(kernel='rbf')
# SVR expects a 1-D target; ravel() avoids the DataConversionWarning the
# original triggered by passing the 2-D scaled array.
svr_reg.fit(x_olcekli, y_olcekli.ravel())
plt.scatter(x_olcekli, y_olcekli, color='red')
plt.plot(x_olcekli, svr_reg.predict(x_olcekli), color='blue')
plt.show()
# The model was trained on SCALED inputs, so queries must be scaled too.
# (The original passed raw scalars: that crashes on modern sklearn AND
# fed unscaled values to a model fitted on standardized data.)
print(svr_reg.predict(sc1.transform([[11]])))
print(svr_reg.predict(sc1.transform([[6.6]])))
# --- Decision tree regression ---
from sklearn.tree import DecisionTreeRegressor
r_dt = DecisionTreeRegressor(random_state=0)
r_dt.fit(X, Y)
# Small offsets that stay inside the same tree leaves as X itself.
Z = X + 0.5
K = X - 0.4
plt.scatter(X, Y, color="red")
plt.plot(X, r_dt.predict(X), color="blue")
plt.plot(X, r_dt.predict(Z), color="green")  # all three lines coincide
plt.plot(X, r_dt.predict(K), color="black")  # so the colors stack on top of each other
plt.show()  # added: render this figure like every other section does
# 2-D input required by sklearn (the original passed bare scalars).
print(r_dt.predict([[11]]))
print(r_dt.predict([[6.6]]))
# All three curves overlap because a decision tree maps every input within
# a leaf's interval to the same constant; X, X+0.5 and X-0.4 land in the
# same leaves. With K = X - 1 some points cross into neighbouring leaves,
# which the (intentionally disabled) demo below illustrates:
"""
K = X - 1
Z = X + 0.5
plt.scatter(X,Y,color = "red")
plt.plot(X,r_dt.predict(X),color = "blue")
plt.plot(X,r_dt.predict(Z),color = "green")
plt.plot(X,r_dt.predict(K),color = "black")
print(r_dt.predict([[11]]))
print(r_dt.predict([[6.6]]))
"""