Source File: /bayeso/covariance.py (100.0% covered)

#
# author: Jungtaek Kim (jtkim@postech.ac.kr)
# last updated: July 8, 2021
#
"""It defines covariance functions and their associated functions.

Derivatives of covariance functions with respect to hyperparameters are described
in `these notes <http://jungtaek.github.io/notes/derivatives_kernels.pdf>`_.
"""

import numpy as np
import scipy.spatial.distance as scisd
import scipy.linalg

from bayeso import constants
from bayeso.utils import utils_covariance
from bayeso.utils import utils_common


@utils_common.validate_types
def choose_fun_cov(str_cov: str) -> constants.TYPING_CALLABLE:
    """
    It chooses a covariance function.

    :param str_cov: the name of covariance function.
    :type str_cov: str.

    :returns: covariance function.
    :rtype: callable

    :raises: AssertionError

    """

    assert isinstance(str_cov, str)

    if str_cov in ('eq', 'se'):
        fun_cov = cov_se
    elif str_cov == 'matern32':
        fun_cov = cov_matern32
    elif str_cov == 'matern52':
        fun_cov = cov_matern52
    else:
        raise NotImplementedError('choose_fun_cov: allowed str_cov condition,\
            but it is not implemented.')
    return fun_cov

@utils_common.validate_types
def choose_fun_grad_cov(str_cov: str) -> constants.TYPING_CALLABLE:
    """
    It chooses a function for computing gradients of covariance function.

    :param str_cov: the name of covariance function.
    :type str_cov: str.

    :returns: function for computing gradients of covariance function.
    :rtype: callable

    :raises: AssertionError

    """

    assert isinstance(str_cov, str)

    if str_cov in ('eq', 'se'):
        fun_grad_cov = grad_cov_se
    elif str_cov == 'matern32':
        fun_grad_cov = grad_cov_matern32
    elif str_cov == 'matern52':
        fun_grad_cov = grad_cov_matern52
    else:
        raise NotImplementedError('choose_fun_grad_cov: allowed str_cov condition,\
            but it is not implemented.')
    return fun_grad_cov

@utils_common.validate_types
def get_kernel_inverse(X_train: np.ndarray, hyps: dict, str_cov: str,
    fix_noise: bool=constants.FIX_GP_NOISE,
    use_gradient: bool=False,
    debug: bool=False
) -> constants.TYPING_TUPLE_THREE_ARRAYS:
    """
    This function computes a kernel inverse without any matrix decomposition techniques.

    :param X_train: inputs. Shape: (n, d) or (n, m, d).
    :type X_train: numpy.ndarray
    :param hyps: dictionary of hyperparameters for Gaussian process.
    :type hyps: dict.
    :param str_cov: the name of covariance function.
    :type str_cov: str.
    :param fix_noise: flag for fixing the noise.
    :type fix_noise: bool., optional
    :param use_gradient: flag for computing and returning gradients of
        negative log marginal likelihood.
    :type use_gradient: bool., optional
    :param debug: flag for printing log messages.
    :type debug: bool., optional

    :returns: a tuple of kernel matrix over `X_train`, kernel matrix
        inverse, and gradients of kernel matrix. If `use_gradient` is False,
        gradients of kernel matrix would be None.
    :rtype: tuple of (numpy.ndarray, numpy.ndarray, numpy.ndarray)

    :raises: AssertionError

    """

    assert isinstance(X_train, np.ndarray)
    assert isinstance(hyps, dict)
    assert isinstance(str_cov, str)
    assert isinstance(use_gradient, bool)
    assert isinstance(fix_noise, bool)
    assert isinstance(debug, bool)
    utils_covariance.check_str_cov('get_kernel_inverse', str_cov, X_train.shape)

    cov_X_X = cov_main(str_cov, X_train, X_train, hyps, True) \
        + hyps['noise']**2 * np.eye(X_train.shape[0])
    # Symmetrize to remove numerical asymmetry before inverting.
    cov_X_X = (cov_X_X + cov_X_X.T) / 2.0
    inv_cov_X_X = np.linalg.inv(cov_X_X)

    if use_gradient:
        grad_cov_X_X = grad_cov_main(str_cov, X_train, X_train,
            hyps, fix_noise, same_X_Xp=True)
    else:
        grad_cov_X_X = None

    return cov_X_X, inv_cov_X_X, grad_cov_X_X
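
# A minimal usage sketch of get_kernel_inverse, kept as a comment so that
# importing this module stays side-effect free. The inputs and hyperparameter
# values below are hypothetical, and 'se' is assumed to be an allowed
# covariance name, as choose_fun_cov above suggests.
#
#     X_train = np.array([[0.0], [0.5], [1.0]])
#     hyps = {'signal': 1.0, 'noise': 0.1, 'lengthscales': np.array([1.0])}
#     cov_X_X, inv_cov_X_X, grad_cov_X_X = get_kernel_inverse(X_train, hyps, 'se')
#     # cov_X_X and inv_cov_X_X have shape (3, 3); grad_cov_X_X is None
#     # because use_gradient defaults to False.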

@utils_common.validate_types
def get_kernel_cholesky(X_train: np.ndarray, hyps: dict, str_cov: str,
    fix_noise: bool=constants.FIX_GP_NOISE,
    use_gradient: bool=False,
    debug: bool=False
) -> constants.TYPING_TUPLE_THREE_ARRAYS:
    """
    This function computes a kernel matrix and factorizes it by Cholesky decomposition.

    :param X_train: inputs. Shape: (n, d) or (n, m, d).
    :type X_train: numpy.ndarray
    :param hyps: dictionary of hyperparameters for Gaussian process.
    :type hyps: dict.
    :param str_cov: the name of covariance function.
    :type str_cov: str.
    :param fix_noise: flag for fixing the noise.
    :type fix_noise: bool., optional
    :param use_gradient: flag for computing and returning gradients of
        negative log marginal likelihood.
    :type use_gradient: bool., optional
    :param debug: flag for printing log messages.
    :type debug: bool., optional

    :returns: a tuple of kernel matrix over `X_train`, lower matrix computed
        by Cholesky decomposition, and gradients of kernel matrix. If
        `use_gradient` is False, gradients of kernel matrix would be None.
    :rtype: tuple of (numpy.ndarray, numpy.ndarray, numpy.ndarray)

    :raises: AssertionError, ValueError

    """

    assert isinstance(X_train, np.ndarray)
    assert isinstance(hyps, dict)
    assert isinstance(str_cov, str)
    assert isinstance(fix_noise, bool)
    assert isinstance(use_gradient, bool)
    assert isinstance(debug, bool)
    utils_covariance.check_str_cov('get_kernel_cholesky', str_cov, X_train.shape)

    cov_X_X = cov_main(str_cov, X_train, X_train, hyps, True) \
        + hyps['noise']**2 * np.eye(X_train.shape[0])
    cov_X_X = (cov_X_X + cov_X_X.T) / 2.0

    lower = None

    # Retry the factorization with increasing diagonal jitter until
    # scipy.linalg.cholesky succeeds.
    for jitter_cov in [0.0, 1e-4, 1e-2, 1e-1, 1e0, 1e1, 1e2]:
        try:
            cov_X_X_ = cov_X_X + jitter_cov * np.eye(X_train.shape[0])
            lower = scipy.linalg.cholesky(cov_X_X_, lower=True)

            # TODO: check this.
            cov_X_X = cov_X_X_

            break
        except np.linalg.LinAlgError: # pragma: no cover
            pass

    if lower is None: # pragma: no cover
        raise ValueError('jitter_cov is not large enough.')

    if use_gradient:
        grad_cov_X_X = grad_cov_main(str_cov, X_train, X_train,
            hyps, fix_noise, same_X_Xp=True)
    else:
        grad_cov_X_X = None
    return cov_X_X, lower, grad_cov_X_X
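
# A sketch of how the lower factor returned by get_kernel_cholesky can stand
# in for an explicit inverse: a system K alpha = y is solved with two
# triangular solves. The data and hyperparameters below are hypothetical.
#
#     X_train = np.array([[0.0], [0.5], [1.0]])
#     y_train = np.array([0.1, 0.4, 0.9])
#     hyps = {'signal': 1.0, 'noise': 0.1, 'lengthscales': np.array([1.0])}
#     cov_X_X, lower, _ = get_kernel_cholesky(X_train, hyps, 'se')
#     tmp = scipy.linalg.solve_triangular(lower, y_train, lower=True)
#     alpha = scipy.linalg.solve_triangular(lower.T, tmp, lower=False)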

@utils_common.validate_types
def cov_se(X: np.ndarray, Xp: np.ndarray, lengthscales: constants.TYPING_UNION_ARRAY_FLOAT,
    signal: float
) -> np.ndarray:
    """
    It computes squared exponential kernel over `X` and `Xp`, where
    `lengthscales` and `signal` are given.

    :param X: inputs. Shape: (n, d).
    :type X: numpy.ndarray
    :param Xp: other inputs. Shape: (m, d).
    :type Xp: numpy.ndarray
    :param lengthscales: length scales. Shape: (d, ) or ().
    :type lengthscales: numpy.ndarray, or float
    :param signal: coefficient for signal.
    :type signal: float

    :returns: kernel values over `X` and `Xp`. Shape: (n, m).
    :rtype: numpy.ndarray

    :raises: AssertionError

    """

    assert isinstance(X, np.ndarray)
    assert isinstance(Xp, np.ndarray)
    assert isinstance(lengthscales, (np.ndarray, float))
    assert isinstance(signal, float)
    assert len(X.shape) == 2
    assert len(Xp.shape) == 2
    if isinstance(lengthscales, np.ndarray):
        assert X.shape[1] == Xp.shape[1] == lengthscales.shape[0]
    else:
        assert X.shape[1] == Xp.shape[1]
    dist = scisd.cdist(X / lengthscales, Xp / lengthscales, metric='euclidean')
    cov_X_Xp = signal**2 * np.exp(-0.5 * dist**2)
    return cov_X_Xp
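
# A small illustration of cov_se on a toy one-dimensional input; the values
# below are hypothetical.
#
#     X = np.array([[0.0], [1.0]])
#     cov = cov_se(X, X, np.array([1.0]), 1.0)
#     # cov has shape (2, 2): the diagonal equals signal**2 = 1.0 and the
#     # off-diagonal entries equal np.exp(-0.5) for unit distance.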

@utils_common.validate_types
def grad_cov_se(cov_X_Xp: np.ndarray, X: np.ndarray, Xp: np.ndarray, hyps: dict,
    num_hyps: int, fix_noise: bool
) -> np.ndarray:
    """
    It computes gradients of squared exponential kernel over `X` and `Xp`,
    where `hyps` is given.

    :param cov_X_Xp: covariance matrix. Shape: (n, m).
    :type cov_X_Xp: numpy.ndarray
    :param X: inputs. Shape: (n, d).
    :type X: numpy.ndarray
    :param Xp: other inputs. Shape: (m, d).
    :type Xp: numpy.ndarray
    :param hyps: dictionary of hyperparameters for covariance function.
    :type hyps: dict.
    :param num_hyps: the number of hyperparameters == l.
    :type num_hyps: int.
    :param fix_noise: flag for fixing the noise.
    :type fix_noise: bool.

    :returns: gradient matrix over hyperparameters. Shape: (n, m, l).
    :rtype: numpy.ndarray

    :raises: AssertionError

    """

    assert isinstance(cov_X_Xp, np.ndarray)
    assert isinstance(X, np.ndarray)
    assert isinstance(Xp, np.ndarray)
    assert isinstance(hyps, dict)
    assert isinstance(num_hyps, int)
    assert isinstance(fix_noise, bool)

    num_X = X.shape[0]
    num_Xp = Xp.shape[0]

    grad_cov_X_Xp = np.zeros((num_X, num_Xp, num_hyps))
    dist = scisd.cdist(X / hyps['lengthscales'], Xp / hyps['lengthscales'], metric='euclidean')

    if fix_noise:
        ind_next = 0
    else:
        grad_cov_X_Xp[:, :, 0] += 2.0 * hyps['noise'] * np.eye(num_X, M=num_Xp)
        ind_next = 1

    grad_cov_X_Xp[:, :, ind_next] += 2.0 * cov_X_Xp / hyps['signal']

    if isinstance(hyps['lengthscales'], np.ndarray) and len(hyps['lengthscales'].shape) == 1:
        for ind_ in range(0, hyps['lengthscales'].shape[0]):
            grad_cov_X_Xp[:, :, ind_next+ind_+1] += cov_X_Xp \
                * scisd.cdist(X[:, ind_][..., np.newaxis],
                Xp[:, ind_][..., np.newaxis], metric='euclidean')**2 \
                * hyps['lengthscales'][ind_]**(-3)
    else:
        grad_cov_X_Xp[:, :, ind_next+1] += cov_X_Xp * dist**2 * hyps['lengthscales']**(-1)

    return grad_cov_X_Xp
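
# In grad_cov_se above, and likewise in grad_cov_matern32 and grad_cov_matern52
# below, the last axis of the returned array indexes hyperparameters in the
# order: noise (only when fix_noise is False), then signal, then one entry per
# lengthscale (a single entry when lengthscales is an isotropic float).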

@utils_common.validate_types
def cov_matern32(X: np.ndarray, Xp: np.ndarray, lengthscales: constants.TYPING_UNION_ARRAY_FLOAT,
    signal: float
) -> np.ndarray:
    """
    It computes Matern 3/2 kernel over `X` and `Xp`, where `lengthscales` and `signal` are given.

    :param X: inputs. Shape: (n, d).
    :type X: numpy.ndarray
    :param Xp: other inputs. Shape: (m, d).
    :type Xp: numpy.ndarray
    :param lengthscales: length scales. Shape: (d, ) or ().
    :type lengthscales: numpy.ndarray, or float
    :param signal: coefficient for signal.
    :type signal: float

    :returns: kernel values over `X` and `Xp`. Shape: (n, m).
    :rtype: numpy.ndarray

    :raises: AssertionError

    """

    assert isinstance(X, np.ndarray)
    assert isinstance(Xp, np.ndarray)
    assert isinstance(lengthscales, (np.ndarray, float))
    assert len(X.shape) == 2
    assert len(Xp.shape) == 2
    if isinstance(lengthscales, np.ndarray):
        assert X.shape[1] == Xp.shape[1] == lengthscales.shape[0]
    else:
        assert X.shape[1] == Xp.shape[1]
    assert isinstance(signal, float)

    dist = scisd.cdist(X / lengthscales, Xp / lengthscales, metric='euclidean')
    cov_ = signal**2 * (1.0 + np.sqrt(3.0) * dist) * np.exp(-1.0 * np.sqrt(3.0) * dist)
    return cov_

@utils_common.validate_types
def grad_cov_matern32(cov_X_Xp: np.ndarray, X: np.ndarray, Xp: np.ndarray, hyps: dict,
    num_hyps: int, fix_noise: bool
) -> np.ndarray:
    """
    It computes gradients of Matern 3/2 kernel over `X` and `Xp`, where `hyps` is given.

    :param cov_X_Xp: covariance matrix. Shape: (n, m).
    :type cov_X_Xp: numpy.ndarray
    :param X: inputs. Shape: (n, d).
    :type X: numpy.ndarray
    :param Xp: other inputs. Shape: (m, d).
    :type Xp: numpy.ndarray
    :param hyps: dictionary of hyperparameters for covariance function.
    :type hyps: dict.
    :param num_hyps: the number of hyperparameters == l.
    :type num_hyps: int.
    :param fix_noise: flag for fixing the noise.
    :type fix_noise: bool.

    :returns: gradient matrix over hyperparameters. Shape: (n, m, l).
    :rtype: numpy.ndarray

    :raises: AssertionError

    """

    assert isinstance(cov_X_Xp, np.ndarray)
    assert isinstance(X, np.ndarray)
    assert isinstance(Xp, np.ndarray)
    assert isinstance(hyps, dict)
    assert isinstance(num_hyps, int)
    assert isinstance(fix_noise, bool)

    num_X = X.shape[0]
    num_Xp = Xp.shape[0]

    grad_cov_X_Xp = np.zeros((num_X, num_Xp, num_hyps))
    dist = scisd.cdist(X / hyps['lengthscales'], Xp / hyps['lengthscales'], metric='euclidean')

    if fix_noise:
        ind_next = 0
    else:
        grad_cov_X_Xp[:, :, 0] += 2.0 * hyps['noise'] * np.eye(num_X, M=num_Xp)
        ind_next = 1

    grad_cov_X_Xp[:, :, ind_next] += 2.0 * cov_X_Xp / hyps['signal']

    term_pre = 3.0 * hyps['signal']**2 * np.exp(-np.sqrt(3) * dist)

    if isinstance(hyps['lengthscales'], np.ndarray) and len(hyps['lengthscales'].shape) == 1:
        for ind_ in range(0, hyps['lengthscales'].shape[0]):
            grad_cov_X_Xp[:, :, ind_next+ind_+1] += term_pre \
                * scisd.cdist(X[:, ind_][..., np.newaxis],
                Xp[:, ind_][..., np.newaxis], metric='euclidean')**2 \
                * hyps['lengthscales'][ind_]**(-3)
    else:
        grad_cov_X_Xp[:, :, ind_next+1] += term_pre * dist**2 * hyps['lengthscales']**(-1)

    return grad_cov_X_Xp

@utils_common.validate_types
def cov_matern52(X: np.ndarray, Xp: np.ndarray, lengthscales: constants.TYPING_UNION_ARRAY_FLOAT,
    signal: float
) -> np.ndarray:
    """
    It computes Matern 5/2 kernel over `X` and `Xp`, where `lengthscales`
    and `signal` are given.

    :param X: inputs. Shape: (n, d).
    :type X: numpy.ndarray
    :param Xp: other inputs. Shape: (m, d).
    :type Xp: numpy.ndarray
    :param lengthscales: length scales. Shape: (d, ) or ().
    :type lengthscales: numpy.ndarray, or float
    :param signal: coefficient for signal.
    :type signal: float

    :returns: kernel values over `X` and `Xp`. Shape: (n, m).
    :rtype: numpy.ndarray

    :raises: AssertionError

    """

    assert isinstance(X, np.ndarray)
    assert isinstance(Xp, np.ndarray)
    assert isinstance(lengthscales, (np.ndarray, float))
    assert len(X.shape) == 2
    assert len(Xp.shape) == 2
    if isinstance(lengthscales, np.ndarray):
        assert X.shape[1] == Xp.shape[1] == lengthscales.shape[0]
    else:
        assert X.shape[1] == Xp.shape[1]
    assert isinstance(signal, float)

    dist = scisd.cdist(X / lengthscales, Xp / lengthscales, metric='euclidean')
    cov_X_Xp = signal**2 * (1.0 + np.sqrt(5.0) * dist + 5.0 / 3.0 * dist**2) \
        * np.exp(-1.0 * np.sqrt(5.0) * dist)
    return cov_X_Xp
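
# cov_se, cov_matern32, and cov_matern52 share the signature
# (X, Xp, lengthscales, signal), which is what allows choose_fun_cov above to
# return any of them interchangeably for use in cov_set and cov_main below.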

@utils_common.validate_types
def grad_cov_matern52(cov_X_Xp: np.ndarray, X: np.ndarray, Xp: np.ndarray, hyps: dict,
    num_hyps: int, fix_noise: bool
) -> np.ndarray:
    """
    It computes gradients of Matern 5/2 kernel over `X` and `Xp`, where `hyps` is given.

    :param cov_X_Xp: covariance matrix. Shape: (n, m).
    :type cov_X_Xp: numpy.ndarray
    :param X: inputs. Shape: (n, d).
    :type X: numpy.ndarray
    :param Xp: other inputs. Shape: (m, d).
    :type Xp: numpy.ndarray
    :param hyps: dictionary of hyperparameters for covariance function.
    :type hyps: dict.
    :param num_hyps: the number of hyperparameters == l.
    :type num_hyps: int.
    :param fix_noise: flag for fixing the noise.
    :type fix_noise: bool.

    :returns: gradient matrix over hyperparameters. Shape: (n, m, l).
    :rtype: numpy.ndarray

    :raises: AssertionError

    """

    assert isinstance(cov_X_Xp, np.ndarray)
    assert isinstance(X, np.ndarray)
    assert isinstance(Xp, np.ndarray)
    assert isinstance(hyps, dict)
    assert isinstance(num_hyps, int)
    assert isinstance(fix_noise, bool)

    num_X = X.shape[0]
    num_Xp = Xp.shape[0]

    grad_cov_X_Xp = np.zeros((num_X, num_Xp, num_hyps))
    dist = scisd.cdist(X / hyps['lengthscales'], Xp / hyps['lengthscales'], metric='euclidean')

    if fix_noise:
        ind_next = 0
    else:
        grad_cov_X_Xp[:, :, 0] += 2.0 * hyps['noise'] * np.eye(num_X, M=num_Xp)
        ind_next = 1

    grad_cov_X_Xp[:, :, ind_next] += 2.0 * cov_X_Xp / hyps['signal']

    term_pre = 5.0 / 3.0 * hyps['signal']**2 * (1.0 + np.sqrt(5) * dist) \
        * np.exp(-np.sqrt(5) * dist)

    if isinstance(hyps['lengthscales'], np.ndarray) and len(hyps['lengthscales'].shape) == 1:
        for ind_ in range(0, hyps['lengthscales'].shape[0]):
            grad_cov_X_Xp[:, :, ind_next+ind_+1] += term_pre \
                * scisd.cdist(X[:, ind_][..., np.newaxis],
                Xp[:, ind_][..., np.newaxis], metric='euclidean')**2 \
                * hyps['lengthscales'][ind_]**(-3)
    else:
        grad_cov_X_Xp[:, :, ind_next+1] += term_pre * hyps['lengthscales']**(-1) * dist**2

    return grad_cov_X_Xp

@utils_common.validate_types
def cov_set(str_cov: str, X: np.ndarray, Xp: np.ndarray,
    lengthscales: constants.TYPING_UNION_ARRAY_FLOAT, signal: float
) -> np.ndarray:
    """
    It computes a set kernel value over two sets `X` and `Xp`, i.e., the average
    of pairwise kernel values, where `lengthscales` and `signal` are given.

    :param str_cov: the name of covariance function.
    :type str_cov: str.
    :param X: a set of inputs. Shape: (n, d).
    :type X: numpy.ndarray
    :param Xp: another set of inputs. Shape: (m, d).
    :type Xp: numpy.ndarray
    :param lengthscales: length scales. Shape: (d, ) or ().
    :type lengthscales: numpy.ndarray, or float
    :param signal: coefficient for signal.
    :type signal: float

    :returns: set kernel value over `X` and `Xp`, averaged over all pairs.
    :rtype: float

    :raises: AssertionError

    """

    assert isinstance(str_cov, str)
    assert isinstance(X, np.ndarray)
    assert isinstance(Xp, np.ndarray)
    assert isinstance(lengthscales, (np.ndarray, float))
    assert isinstance(signal, float)
    assert len(X.shape) == 2
    assert len(Xp.shape) == 2
    if isinstance(lengthscales, np.ndarray):
        assert X.shape[1] == Xp.shape[1] == lengthscales.shape[0]
    else:
        assert X.shape[1] == Xp.shape[1]
    assert str_cov in constants.ALLOWED_COV_BASE
    num_X = X.shape[0]
    num_Xp = Xp.shape[0]

    fun_cov = choose_fun_cov(str_cov)
    cov_X_Xp = fun_cov(X, Xp, lengthscales, signal)
    cov_X_Xp = np.sum(cov_X_Xp)

    cov_X_Xp /= num_X * num_Xp

    return cov_X_Xp
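
# An illustrative sketch of cov_set, which reduces two sets of points to a
# single scalar by averaging the base kernel over all pairs. The values are
# hypothetical, and 'se' is assumed to be in constants.ALLOWED_COV_BASE,
# consistent with choose_fun_cov above.
#
#     X_set = np.array([[0.0], [1.0]])
#     Xp_set = np.array([[0.5], [1.5], [2.5]])
#     value = cov_set('se', X_set, Xp_set, np.array([1.0]), 1.0)
#     # value is the mean of the 2 x 3 matrix cov_se(X_set, Xp_set, ...).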

@utils_common.validate_types
def cov_main(str_cov: str, X: np.ndarray, Xp: np.ndarray, hyps: dict, same_X_Xp: bool,
    jitter: float=constants.JITTER_COV
) -> np.ndarray:
    """
    It computes kernel matrix over `X` and `Xp`, where `hyps` is given.

    :param str_cov: the name of covariance function.
    :type str_cov: str.
    :param X: inputs. Shape: (n, d) or (n, m, d).
    :type X: numpy.ndarray
    :param Xp: other inputs. Shape: (m, d) or (l, m, d).
    :type Xp: numpy.ndarray
    :param hyps: dictionary of hyperparameters for covariance function.
    :type hyps: dict.
    :param same_X_Xp: flag indicating whether `X` and `Xp` are the same.
    :type same_X_Xp: bool.
    :param jitter: jitter for diagonal entries.
    :type jitter: float, optional

    :returns: kernel matrix over `X` and `Xp`. Shape: (n, m) or (n, l).
    :rtype: numpy.ndarray

    :raises: AssertionError, ValueError

    """

    assert isinstance(str_cov, str)
    assert isinstance(X, np.ndarray)
    assert isinstance(Xp, np.ndarray)
    assert isinstance(hyps, dict)
    assert isinstance(same_X_Xp, bool)
    assert isinstance(jitter, float)
    assert str_cov in constants.ALLOWED_COV

    num_X = X.shape[0]
    num_Xp = Xp.shape[0]

    cov_X_Xp = np.zeros((num_X, num_Xp))
    if same_X_Xp:
        assert num_X == num_Xp
        cov_X_Xp += np.eye(num_X) * jitter

    if str_cov in constants.ALLOWED_COV_BASE:
        assert len(X.shape) == 2
        assert len(Xp.shape) == 2
        dim_X = X.shape[1]
        dim_Xp = Xp.shape[1]
        assert dim_X == dim_Xp

        hyps = utils_covariance.validate_hyps_dict(hyps, str_cov, dim_X)

        fun_cov = choose_fun_cov(str_cov)
        cov_X_Xp += fun_cov(X, Xp, hyps['lengthscales'], hyps['signal'])

        assert cov_X_Xp.shape == (num_X, num_Xp)
    elif str_cov in constants.ALLOWED_COV_SET:
        # For set kernels, the base covariance name follows the underscore in str_cov.
        list_str_cov = str_cov.split('_')
        str_cov = list_str_cov[1]

        assert len(X.shape) == 3
        assert len(Xp.shape) == 3

        dim_X = X.shape[2]
        dim_Xp = Xp.shape[2]

        assert dim_X == dim_Xp

        hyps = utils_covariance.validate_hyps_dict(hyps, str_cov, dim_X)

        if not same_X_Xp:
            for ind_X in range(0, num_X):
                for ind_Xp in range(0, num_Xp):
                    cov_X_Xp[ind_X, ind_Xp] += cov_set(str_cov, X[ind_X], Xp[ind_Xp],
                        hyps['lengthscales'], hyps['signal'])
        else:
            # X and Xp are the same, so compute the upper triangle and mirror it.
            for ind_X in range(0, num_X):
                for ind_Xp in range(ind_X, num_Xp):
                    cov_X_Xp[ind_X, ind_Xp] += cov_set(str_cov, X[ind_X], Xp[ind_Xp],
                        hyps['lengthscales'], hyps['signal'])
                    if ind_X < ind_Xp:
                        cov_X_Xp[ind_Xp, ind_X] = cov_X_Xp[ind_X, ind_Xp]
    else:
        raise NotImplementedError('cov_main: allowed str_cov, but it is not implemented.')

    return cov_X_Xp
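
# An illustrative sketch of cov_main building a full kernel matrix; the values
# are hypothetical, 'se' is assumed to be in constants.ALLOWED_COV, and
# validate_hyps_dict may check or fill in further entries of hyps.
#
#     X = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])
#     hyps = {'signal': 1.0, 'noise': 0.1, 'lengthscales': np.array([1.0, 1.0])}
#     K = cov_main('se', X, X, hyps, True)
#     # K has shape (3, 3) and carries constants.JITTER_COV on its diagonal
#     # because same_X_Xp is True.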

@utils_common.validate_types
def grad_cov_main(str_cov: str, X: np.ndarray, Xp: np.ndarray, hyps: dict, fix_noise: bool,
    same_X_Xp: bool=True,
    jitter: float=constants.JITTER_COV,
) -> np.ndarray:
    """
    It computes gradients of kernel matrix over hyperparameters, where `hyps` is given.

    :param str_cov: the name of covariance function.
    :type str_cov: str.
    :param X: inputs. Shape: (n, d).
    :type X: numpy.ndarray
    :param Xp: other inputs. Shape: (m, d).
    :type Xp: numpy.ndarray
    :param hyps: dictionary of hyperparameters for covariance function.
    :type hyps: dict.
    :param fix_noise: flag for fixing the noise.
    :type fix_noise: bool.
    :param same_X_Xp: flag indicating whether `X` and `Xp` are the same.
    :type same_X_Xp: bool., optional
    :param jitter: jitter for diagonal entries.
    :type jitter: float, optional

    :returns: gradient matrix over hyperparameters. Shape: (n, m, l) where
        l is the number of hyperparameters.
    :rtype: numpy.ndarray

    :raises: AssertionError

    """

    assert isinstance(str_cov, str)
    assert isinstance(X, np.ndarray)
    assert isinstance(Xp, np.ndarray)
    assert isinstance(hyps, dict)
    assert isinstance(fix_noise, bool)
    assert isinstance(same_X_Xp, bool)
    assert isinstance(jitter, float)
    assert str_cov in constants.ALLOWED_COV
    # TODO: X and Xp should be same?
    assert same_X_Xp

    dim_X = X.shape[1]

    if isinstance(hyps['lengthscales'], np.ndarray):
        num_hyps = dim_X + 1
    else:
        num_hyps = 2

    if not fix_noise:
        num_hyps += 1

    cov_X_Xp = cov_main(str_cov, X, Xp, hyps, same_X_Xp, jitter=jitter)

    fun_grad_cov = choose_fun_grad_cov(str_cov)
    grad_cov_X_Xp = fun_grad_cov(cov_X_Xp, X, Xp, hyps, num_hyps, fix_noise)

    return grad_cov_X_Xp
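
# An illustrative sketch of grad_cov_main; the values below are hypothetical.
#
#     X = np.array([[0.0], [1.0]])
#     hyps = {'signal': 1.0, 'noise': 0.1, 'lengthscales': np.array([1.0])}
#     grads = grad_cov_main('se', X, X, hyps, fix_noise=False)
#     # grads has shape (2, 2, 3): one slice for the noise, one for the signal,
#     # and one for the single lengthscale, matching num_hyps computed above.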