• Home
  • Features
  • Pricing
  • Docs
  • Announcements
  • Sign In

NLESC-JCER / QMCTorch / 14992701623

13 May 2025 09:07AM UTC coverage: 83.844%. First build
14992701623

Pull #187

github

web-flow
Merge e73393a0b into 20fe7ebf9
Pull Request #187: Clean up Main

955 of 1334 branches covered (71.59%)

Branch coverage included in aggregate %.

293 of 372 new or added lines in 47 files covered. (78.76%)

4515 of 5190 relevant lines covered (86.99%)

0.87 hits per line

Source File
Press 'n' to go to next uncovered line, 'b' for previous

82.35
/qmctorch/wavefunction/orbitals/backflow/kernels/backflow_kernel_base.py
1
import torch
1✔
2
from torch import nn
1✔
3
from .....scf import Molecule
1✔
4
from .....utils import gradients, hessian
1✔
5

6
class BackFlowKernelBase(nn.Module):
    """Base class for backflow kernels.

    A backflow kernel is a scalar function :math:`f(r_{ij})` of the
    distance :math:`r_{ij}` between electrons :math:`i` and :math:`j`,
    used in the backflow transformation

    .. math::
        q_i = r_i + \\sum_{j\\neq i} f(r_{ij}) (r_i-r_j)

    Subclasses must implement :meth:`_backflow_kernel`; first and second
    derivatives are then obtained automatically via autodiff.
    """

    def __init__(self, mol: Molecule, cuda: bool):
        """Store the electron count and select the compute device.

        Args:
            mol (Molecule): molecule object; only ``mol.nelec`` is read here
            cuda (bool): if True, use the CUDA device
        """
        super().__init__()
        self.nelec = mol.nelec
        self.cuda = cuda
        self.device = torch.device("cpu")
        if self.cuda:
            self.device = torch.device("cuda")

    def forward(self, ree: torch.Tensor, derivative: int = 0) -> torch.Tensor:
        """Compute the kernel values or one of its derivatives.

        Args:
            ree (torch.Tensor): e-e distances, Nbatch x Nelec x Nelec
            derivative (int): derivative required: 0, 1 or 2

        Returns:
            torch.Tensor: f(r) (or its requested derivative),
                Nbatch x Nelec x Nelec

        Raises:
            ValueError: if ``derivative`` is not 0, 1 or 2
        """
        if derivative == 0:
            return self._backflow_kernel(ree)
        elif derivative == 1:
            return self._backflow_kernel_derivative(ree)
        elif derivative == 2:
            return self._backflow_kernel_second_derivative(ree)
        else:
            raise ValueError("derivative of the kernel must be 0, 1 or 2")

    def _backflow_kernel(self, ree: torch.Tensor) -> torch.Tensor:
        """Evaluate the kernel f(ree); must be implemented by subclasses.

        Args:
            ree (torch.Tensor): e-e distances, Nbatch x Nelec x Nelec

        Returns:
            torch.Tensor: f(ree), Nbatch x Nelec x Nelec

        Raises:
            NotImplementedError: always, in this base class
        """
        raise NotImplementedError("Please implement the backflow kernel")

    def _backflow_kernel_derivative(self, ree: torch.Tensor) -> torch.Tensor:
        """Compute the first derivative of the kernel via autodiff.

        Args:
            ree (torch.Tensor): e-e distances, Nbatch x Nelec x Nelec

        Returns:
            torch.Tensor: df/dr evaluated at ree, Nbatch x Nelec x Nelec
        """
        # autodiff needs ree in the graph even under an outer no_grad()
        if not ree.requires_grad:
            ree.requires_grad = True

        with torch.enable_grad():
            kernel_val = self._backflow_kernel(ree)
            return gradients(kernel_val, ree)

    def _backflow_kernel_second_derivative(self, ree: torch.Tensor) -> torch.Tensor:
        """Compute the second derivative of the kernel via autodiff.

        Args:
            ree (torch.Tensor): e-e distances, Nbatch x Nelec x Nelec

        Returns:
            torch.Tensor: d2f/dr2 evaluated at ree, Nbatch x Nelec x Nelec
        """
        # autodiff needs ree in the graph even under an outer no_grad()
        if not ree.requires_grad:
            ree.requires_grad = True

        with torch.enable_grad():
            kernel_val = self._backflow_kernel(ree)
            hess_val, _ = hessian(kernel_val, ree)

        # if the kernel is linear in ree, autodiff reports no second
        # derivative (None); the correct value is then identically zero
        if hess_val is None:
            hess_val = torch.zeros_like(ree)

        return hess_val
STATUS · Troubleshooting · Open an Issue · Sales · Support · CAREERS · ENTERPRISE · START FREE · SCHEDULE DEMO
ANNOUNCEMENTS · TWITTER · TOS & SLA · Supported CI Services · What's a CI service? · Automated Testing

© 2026 Coveralls, Inc