Adversarial domain adaptation methods.
This example illustrates adversarial domain adaptation methods on a simple image classification task, adapting handwritten digits from MNIST (source) to USPS (target).
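The core adversarial idea is to learn features that a domain discriminator cannot tell apart, which DANN-style methods implement with a gradient reversal layer: the forward pass is the identity, while the backward pass flips (and scales) the gradient so the feature extractor is pushed against the discriminator's objective. The snippet below is a minimal, self-contained sketch of that operation in plain PyTorch; it is illustrative only and is not skada's internal implementation.

import torch

class GradientReversal(torch.autograd.Function):
    """Identity in the forward pass, reversed gradient in the backward pass."""

    @staticmethod
    def forward(ctx, x, alpha):
        ctx.alpha = alpha
        return x.view_as(x)

    @staticmethod
    def backward(ctx, grad_output):
        # Flip and scale the gradient flowing back into the feature extractor
        return -ctx.alpha * grad_output, None

# Example use: reversed_features = GradientReversal.apply(features, 0.01)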
# Author: Théo Gnassounou
#
# License: BSD 3-Clause
# sphinx_gallery_thumbnail_number = 4
from skorch import NeuralNetClassifier
from torch import nn
from skada.datasets import load_mnist_usps
from skada.deep import DANN
from skada.deep.modules import MNISTtoUSPSNet
Load the image datasets
dataset = load_mnist_usps(n_classes=2, n_samples=0.5, return_dataset=True)
X, y, sample_domain = dataset.pack_train(as_sources=["mnist"], as_targets=["usps"])
X_test, y_test, sample_domain_test = dataset.pack_test(as_targets=["usps"])
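The packed training set mixes source and target samples. As a quick optional check, assuming the skada convention that positive sample_domain values mark source samples and negative values mark target samples (the same convention used below to select the source subset), you can inspect the packed arrays:

# Optional sanity check on the packed arrays (shapes and domain split)
print(X.shape, y.shape, sample_domain.shape)
print("number of source samples:", int((sample_domain > 0).sum()))
print("number of target samples:", int((sample_domain < 0).sum()))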
Train a classic model on the source data only
model = NeuralNetClassifier(
    MNISTtoUSPSNet(),
    criterion=nn.CrossEntropyLoss(),
    batch_size=128,
    max_epochs=5,
    train_split=False,
    lr=1e-2,
)
# Fit on the source-domain samples only (positive sample_domain values)
model.fit(X[sample_domain > 0], y[sample_domain > 0])
# Evaluate on the target-domain test set
model.score(X_test, y=y_test)
epoch train_loss dur
------- ------------ ------
1 1.5256 2.8797
2 0.2875 3.2087
3 0.0960 3.1023
4 0.0604 11.5027
5 0.0427 19.3837
0.9067524115755627
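The source-only model already reaches roughly 0.91 accuracy on the USPS test set. As a purely hypothetical extra check (not part of the original example), its accuracy on the source samples it was trained on can be computed to gauge the drop caused by the domain shift:

# Hypothetical check: accuracy on the (already seen) source training samples
print(model.score(X[sample_domain > 0], y=y[sample_domain > 0]))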
Train a DANN model
model = DANN(
    MNISTtoUSPSNet(),
    layer_name="fc1",  # layer whose output feeds the domain discriminator
    batch_size=128,
    max_epochs=5,
    train_split=False,
    reg=0.01,  # weight of the adversarial domain loss
    num_features=128,  # dimension of the features taken at layer_name
    lr=1e-2,
)
# Fit on source and target jointly; sample_domain identifies each sample's domain
model.fit(X, y, sample_domain=sample_domain)
# Score on the target test set
model.score(X_test, y_test, sample_domain=sample_domain_test)
epoch train_loss dur
------- ------------ ------
1 2.8517 6.4979
2 1.6590 5.7895
3 1.1971 6.3997
4 1.0960 6.3920
5 1.0650 7.1081
0.8971061093247589
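With only five epochs and two classes, DANN lands around 0.90 on the target test set here, close to the source-only baseline. The reg parameter weights the adversarial domain loss against the classification loss; a simple, hypothetical way to probe its effect is to refit with a few values and compare target scores (this sweep is not part of the original example):

# Hypothetical sweep over the adversarial trade-off
for reg in (0.001, 0.01, 0.1):
    dann = DANN(
        MNISTtoUSPSNet(),
        layer_name="fc1",
        batch_size=128,
        max_epochs=5,
        train_split=False,
        reg=reg,
        num_features=128,
        lr=1e-2,
    )
    dann.fit(X, y, sample_domain=sample_domain)
    print(reg, dann.score(X_test, y_test, sample_domain=sample_domain_test))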
Total running time of the script: 1 minute 16.086 seconds