import seaborn as sns
import matplotlib.pyplot as plt

sns.get_dataset_names()
iris = sns.load_dataset('iris')
type(iris)
iris.head()
iris['species'].value_counts()
iris['species'].value_counts().plot(kind='bar')
iris.plot(kind='scatter', x='sepal_length', y='petal_length')
sns.scatterplot(data=iris, x='sepal_length', y='petal_length' , hue='species')
X = iris.drop('species', axis=1)
X
y = iris['species']
y
X = X.values
y = y.values
print(X[:2])
print(y[:2])
from sklearn.preprocessing import LabelEncoder
le = LabelEncoder()
y = le.fit_transform(y)
print(le.classes_)
y[:10]
from sklearn.tree import DecisionTreeClassifier
dt = DecisionTreeClassifier()
dt.fit(X, y)
dt.score(X, y)
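dt.score(X, y) evaluates on the same rows the tree was fit on, so the accuracy is optimistic. A minimal sketch of a fairer check with a held-out test set (the 0.2 test fraction and random_state are arbitrary choices, not from the original session):
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier

# hold back 20% of the rows for evaluation
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
dt_holdout = DecisionTreeClassifier()
dt_holdout.fit(X_train, y_train)
print(dt_holdout.score(X_train, y_train))  # training accuracy (often 1.0 for an unpruned tree)
print(dt_holdout.score(X_test, y_test))    # held-out accuracy, a fairer estimate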
from sklearn.ensemble import RandomForestClassifier
rf = RandomForestClassifier()
rf.fit(X, y)
rf.score(X,y)
rf.predict(X)
print(X[149])
print(y[149])
pred = rf.predict([X[149]])
print(pred)
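The prediction comes back as the encoded integer class. Since the same LabelEncoder is still in scope, a quick sketch of decoding it back to the species name:
# map the integer prediction back to the original species label
print(le.inverse_transform(pred))
print(le.inverse_transform(rf.predict(X))[:5])  # first few decoded predictions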
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
model = Sequential()
model.add(Dense(6, activation='relu', input_shape=(4,)))
model.add(Dense(3, activation='softmax'))
model.compile(loss='sparse_categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
which loss to use:
binary classification (two classes): binary_crossentropy
multi-class classification with one-hot labels: categorical_crossentropy
multi-class classification with integer labels (as here, after LabelEncoder): sparse_categorical_crossentropy
regression: mean squared error (or similar), not a cross-entropy loss
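To make the two multi-class options concrete, a small sketch comparing integer labels with one-hot labels (the layer sizes simply mirror the model above):
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.utils import to_categorical

# Option 1: integer labels (0, 1, 2) with sparse_categorical_crossentropy, which the model above uses.
# Option 2: one-hot labels with categorical_crossentropy, shown below.
y_onehot = to_categorical(y, num_classes=3)   # shape (150, 3)

model_onehot = Sequential([
    Dense(6, activation='relu', input_shape=(4,)),
    Dense(3, activation='softmax'),
])
model_onehot.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model_onehot.fit(X, y_onehot, epochs=10, batch_size=8, verbose=0)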
history = model.fit(X, y, epochs=10, batch_size=8)   # short first run
history = model.fit(X, y, epochs=50, batch_size=8)   # continues from the current weights; history keeps only this run
plt.plot(history.history['loss'], 'r')
plt.plot(history.history['accuracy'], 'b')
plt.title('Loss and Accuracy')
plt.xlabel("Epochs")
plt.ylabel("Loss")
plt.legend(["Loss", "Accuracy"])
plt.show()
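The softmax layer outputs one probability per class, so turning a prediction back into a class (and a species name) needs argmax; a brief sketch:
import numpy as np

probs = model.predict(X[:3])           # 3 samples, one probability per class each
pred_idx = np.argmax(probs, axis=1)    # most probable class per sample
print(pred_idx)
print(le.inverse_transform(pred_idx))  # decode back to species names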