def __init__(self, nu=2.5, ard_num_dims=None, batch_size=1, active_dims=None, log_lengthscale_prior=None, eps=1e-6):
    # Only the half-integer smoothness values with closed-form kernels are supported
    if nu not in {0.5, 1.5, 2.5}:
        raise RuntimeError("nu expected to be 0.5, 1.5, or 2.5")
    super(MaternKernel, self).__init__(
        has_lengthscale=True,
        ard_num_dims=ard_num_dims,
        batch_size=batch_size,
        active_dims=active_dims,
        log_lengthscale_prior=log_lengthscale_prior,
        eps=eps,
    )
    self.nu = nu
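# Sketch (not in the original snippet): the check above makes construction fail
# fast for unsupported smoothness values, e.g.
#   MaternKernel(nu=1.5)  # ok
#   MaternKernel(nu=2.0)  # raises RuntimeError("nu expected to be 0.5, 1.5, or 2.5")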
def forward(self, x1, x2, **params):
    # Center the inputs before scaling; subtracting a shared mean improves the
    # numerical stability of the pairwise distance computation
    mean = x1.contiguous().view(-1, 1, x1.size(-1)).mean(0, keepdim=True)
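    # The rest of forward is truncated in this snippet. A sketch of the standard
    # half-integer Matern computation (assumed from the closed forms for
    # nu in {0.5, 1.5, 2.5}, not taken verbatim from the original):
    #
    #     x1_ = (x1 - mean).div(self.lengthscale)
    #     x2_ = (x2 - mean).div(self.lengthscale)
    #     distance = (x1_.unsqueeze(-2) - x2_.unsqueeze(-3)).norm(2, dim=-1)
    #     exp_component = torch.exp(-math.sqrt(2 * self.nu) * distance)
    #     if self.nu == 0.5:
    #         constant_component = 1
    #     elif self.nu == 1.5:
    #         constant_component = (math.sqrt(3) * distance).add(1)
    #     elif self.nu == 2.5:
    #         constant_component = (math.sqrt(5) * distance).add(1).add(5.0 / 3.0 * distance ** 2)
    #     return constant_component * exp_component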
After Change
>>> # Non-batch: single lengthscale shared by all data
>>> covar_module = gpytorch.kernels.ScaleKernel(gpytorch.kernels.MaternKernel(nu=0.5))
>>> # Batch: different lengthscale for each batch
>>> covar_module = gpytorch.kernels.ScaleKernel(gpytorch.kernels.MaternKernel(nu=0.5, batch_size=2))
>>> x = torch.randn(2, 10, 5)  # 2 batches of 10 points each (feature dim assumed)
>>> covar = covar_module(x)  # Output: LazyVariable of size (2 x 10 x 10)

def __init__(