Simple Classification Model
In [18]:
from river import metrics, datasets, compose, preprocessing
from deep_river.classification import Classifier
from torch import nn
from tqdm import tqdm
In [19]:
dataset = datasets.Phishing()
metric = metrics.Accuracy()


# A small feed-forward net; deep-river instantiates it with n_features
# once the first example arrives.
class MyModule(nn.Module):
    def __init__(self, n_features):
        super().__init__()
        self.dense0 = nn.Linear(n_features, 5)
        self.nonlin = nn.ReLU()
        self.dense1 = nn.Linear(5, 2)
        self.softmax = nn.Softmax(dim=-1)

    def forward(self, X, **kwargs):
        X = self.nonlin(self.dense0(X))
        X = self.nonlin(self.dense1(X))
        X = self.softmax(X)
        return X


# Scale the incoming features, then feed them to the wrapped PyTorch module.
model_pipeline = compose.Pipeline(
    preprocessing.StandardScaler(),
    Classifier(
        module=MyModule, loss_fn="binary_cross_entropy", optimizer_fn="adam"
    ),
)
model_pipeline
Out[19]:
StandardScaler (
  with_std=True
)
Classifier (
  module=None
  loss_fn="binary_cross_entropy"
  optimizer_fn=<class 'torch.optim.adam.Adam'>
  lr=0.001
  output_is_logit=True
  is_class_incremental=False
  device="cpu"
  seed=42
)
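For readers who prefer river's `|` shorthand, the same pipeline can be written more compactly. The sketch below is equivalent to the `compose.Pipeline` call above; the name `same_pipeline` is only illustrative.

# A sketch of the same pipeline using river's | operator; both forms
# build an equivalent compose.Pipeline.
same_pipeline = preprocessing.StandardScaler() | Classifier(
    module=MyModule, loss_fn="binary_cross_entropy", optimizer_fn="adam"
)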
In [20]:
for x, y in tqdm(dataset.take(5000)):
    y_pred = model_pipeline.predict_one(x)  # make a prediction
    metric.update(y, y_pred)  # update the metric
    model_pipeline.learn_one(x, y)  # make the model learn

print(f"Accuracy: {metric.get()}")
1250it [00:00, 1367.42it/s]
Accuracy: 0.6728
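The manual loop above implements the prequential (test-then-train) protocol by hand. As a sketch, the same evaluation can be run with river's `evaluate.progressive_val_score` helper; note that it keeps training whatever model it is given, so a fresh pipeline is built here (`fresh_pipeline` is an illustrative name) to keep the comparison with the loop meaningful.

from river import evaluate

# A sketch of the same test-then-train evaluation using river's helper.
fresh_pipeline = compose.Pipeline(
    preprocessing.StandardScaler(),
    Classifier(module=MyModule, loss_fn="binary_cross_entropy", optimizer_fn="adam"),
)
print(evaluate.progressive_val_score(datasets.Phishing(), fresh_pipeline, metrics.Accuracy()))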