From 591f60395a1e9c62f291e23c91af45cc699f072c Mon Sep 17 00:00:00 2001
From: justheuristic <justheuristic@gmail.com>
Date: Sun, 18 Sep 2022 00:52:53 +0300
Subject: Add memory-efficient backward

---
 bitsandbytes/autograd/_functions.py | 1 -
 1 file changed, 1 deletion(-)

(limited to 'bitsandbytes/autograd/_functions.py')

diff --git a/bitsandbytes/autograd/_functions.py b/bitsandbytes/autograd/_functions.py
index 6674a82..daf9ba0 100644
--- a/bitsandbytes/autograd/_functions.py
+++ b/bitsandbytes/autograd/_functions.py
@@ -381,7 +381,6 @@ class MatMul8bitLt(torch.autograd.Function):
                 grad_A = F.mm_dequant(gradA32, SgradA32, SCgrad, state.SCBt).view(ctx.grad_shape).to(ctx.dtype_A)
 
             elif state.CB is not None:
-                raise NotImplementedError("WIP")
                 CB = state.CB.to(ctx.dtype_B)
                 CB.mul_(state.SCB.unsqueeze(1).div_(127.0).to(CB.dtype))
                 grad_A = torch.matmul(grad_output, CB).view(ctx.grad_shape).to(ctx.dtype_A)
-- 
cgit v1.2.3