import torch

def f(x):
    """Evaluate the test objective (9x - 5)^2 at *x*.

    Works on plain numbers and on torch tensors alike; its unique
    minimum is at x = 5/9 where the value is 0.
    """
    inner = 9 * x - 5
    return inner * inner
    
def gradient_method(func, x_start, max_iter, learning_rate, tol=1e-6):
    """Minimize a scalar function of one variable by plain gradient descent.

    Args:
        func: Callable mapping a 1-element tensor (requires_grad=True) to a
            scalar loss tensor.
        x_start: Initial guess (int or float; cast to float so autograd works).
        max_iter: Maximum number of descent steps.
        learning_rate: Step size multiplied onto the gradient.
        tol: Stop early once |gradient| falls below this threshold.

    Returns:
        Tuple ``(x, f(x))`` as Python floats at the final iterate.
    """
    # float() guards against an integer x_start: integer tensors cannot
    # have requires_grad=True, so the original call would raise.
    x = torch.tensor([float(x_start)], requires_grad=True)
    for _ in range(max_iter):
        loss = func(x)
        grad = torch.autograd.grad(loss, x)[0]
        # Extract the scalar explicitly for the convergence test.
        if grad.abs().item() < tol:
            break
        # Step, then detach so the next iteration starts a fresh autograd
        # graph — avoids rebuilding the tensor from .item() every loop.
        x = (x - learning_rate * grad).detach().requires_grad_(True)
    return x.item(), func(x).item()

# Example usage: start from x = 0 and take up to 100 small descent steps.
grad_min_x, grad_min_val = gradient_method(f, 0.0, 100, 0.001)
print(f"Gradient Method Minimum: x = {grad_min_x} f(x) = {grad_min_val}")