import torch
import torch.nn as nn
from torch.optim import SGD, Adam, RMSprop
import torch.utils.data as data_utils
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
import plotly.graph_objects as go
# Load dataset
X, y = load_breast_cancer(return_X_y=True)

# Split the data into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Standardize features by removing the mean and scaling to unit variance
scaler = StandardScaler()
X_train = scaler.fit_transform(X_train)
X_test = scaler.transform(X_test)

# Convert arrays to torch tensors
X_train = torch.tensor(X_train, dtype=torch.float32)
X_test = torch.tensor(X_test, dtype=torch.float32)
y_train = torch.tensor(y_train, dtype=torch.float32)
y_test = torch.tensor(y_test, dtype=torch.float32)
class LogisticRegressionModel(nn.Module):
    """Logistic regression as a single linear layer followed by a sigmoid."""

    def __init__(self, n_features):
        super().__init__()
        self.linear = nn.Linear(n_features, 1)

    def forward(self, x):
        # Returns a probability in (0, 1) for each input row
        return torch.sigmoid(self.linear(x))
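# Quick sanity check (illustrative only, not part of the comparison below):
# the model computes p(y=1 | x) = sigmoid(w.x + b), mapping a (batch, n_features)
# input to a (batch, 1) tensor of probabilities.
_probe = LogisticRegressionModel(X_train.shape[1])
print(_probe(X_train[:5]).shape)   # torch.Size([5, 1])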
def train_model(optimizer, model, X_train, y_train, epochs=100):
    """Run full-batch gradient descent and return the per-epoch training loss."""
    criterion = nn.BCELoss()
    train_losses = []

    for epoch in range(epochs):
        model.train()
        optimizer.zero_grad()                         # clear gradients from the previous step
        outputs = model(X_train)                      # forward pass on the full training set
        loss = criterion(outputs.squeeze(), y_train)
        loss.backward()                               # backpropagate
        optimizer.step()                              # apply the optimizer's update rule
        train_losses.append(loss.item())

    return train_losses
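# A mini-batch variant of the same loop (a sketch, not used in the comparison
# below; it shows one way the torch.utils.data import above could be applied).
def train_model_minibatch(optimizer, model, X_train, y_train, epochs=100, batch_size=64):
    criterion = nn.BCELoss()
    loader = data_utils.DataLoader(
        data_utils.TensorDataset(X_train, y_train),
        batch_size=batch_size, shuffle=True,
    )
    train_losses = []
    for epoch in range(epochs):
        model.train()
        epoch_loss = 0.0
        for xb, yb in loader:
            optimizer.zero_grad()
            loss = criterion(model(xb).squeeze(-1), yb)
            loss.backward()
            optimizer.step()
            epoch_loss += loss.item() * xb.size(0)
        train_losses.append(epoch_loss / len(loader.dataset))   # average loss per sample
    return train_losses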
n_features = X_train.shape[1]
epochs = 100
models = {
    'SGD': [LogisticRegressionModel(n_features), SGD, dict(lr=0.01)],
    'SGD+Momentum': [LogisticRegressionModel(n_features), SGD, dict(lr=0.01, momentum=0.9)],
    'RMSprop': [LogisticRegressionModel(n_features), RMSprop, dict(lr=0.01)],
    'Adam': [LogisticRegressionModel(n_features), Adam, dict(lr=0.01)],
}
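# Optional (a sketch, not part of the original run): copy one reference
# initialization into every model so the loss curves differ only by optimizer,
# not by the random starting weights.
reference_state = LogisticRegressionModel(n_features).state_dict()
for entry in models.values():
    entry[0].load_state_dict(reference_state)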
losses = {}

# Train one model per optimizer and record its loss curve
for name, (model, opt_class, opt_kwargs) in models.items():
    optimizer = opt_class(model.parameters(), **opt_kwargs)
    losses[name] = train_model(optimizer, model, X_train, y_train, epochs)
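# Held-out evaluation (a sketch; X_test / y_test are prepared above but not used
# in the original comparison). Accuracy at a 0.5 decision threshold.
for name, (model, _, _) in models.items():
    model.eval()
    with torch.no_grad():
        preds = (model(X_test).squeeze(-1) >= 0.5).float()
    accuracy = (preds == y_test).float().mean().item()
    print(f'{name}: test accuracy = {accuracy:.3f}')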
fig = go.Figure()

for name, loss in losses.items():
    fig.add_trace(go.Scatter(x=list(range(epochs)), y=loss, mode='lines', name=name))

fig.update_layout(title='Optimizer Convergence Comparison',
                  xaxis_title='Epoch',
                  yaxis_title='Training loss (BCE)',
                  legend_title='Optimizers')
fig.show()
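# Optionally, a logarithmic y-axis makes late-epoch differences easier to compare:
# fig.update_yaxes(type='log')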

An interactive version of the convergence figure is available here:
https://yyhtbs-yye.github.io/#/plotlyrender?data=https://raw.githubusercontent.com/yyhtbs-yye/plotly_json/refs/heads/main/optimizer_convergence.json