
Something is weird about this gamma correction: the exponent flips from 1/2.2 (linear → gamma encode) to 2.2 (gamma → linear decode), so this commit reverses the direction of the conversion.
@6d3bc384c91701c7b1448a43c6ca1b9b73bfb23e
--- model/Generator.py
+++ model/Generator.py
@@ -35,8 +35,8 @@
         return ret

     def binary_diff_mask(self, clean, dirty, thresold=0.1):
-        clean = torch.pow(clean, 1/2.2)
-        dirty = torch.pow(dirty, 1/2.2)
+        clean = torch.pow(clean, 2.2)
+        dirty = torch.pow(dirty, 2.2)
         diff = torch.abs(clean - dirty)
         diff = torch.sum(diff, dim=1)
         # this line is certainly cause problem for quantization
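For context, a minimal self-contained sketch of the post-commit behavior (written as a free function; the masking step after the quoted hunk is an assumption based on the function name, the `thresold` parameter spelled as in the source, and the quantization comment):

```python
import torch

def binary_diff_mask(clean, dirty, thresold=0.1):
    # After this commit the exponent is 2.2 (gamma -> linear decode);
    # before it was 1/2.2 (linear -> gamma encode). The two are inverses,
    # so the flip changes how many pixels end up above the threshold.
    clean = torch.pow(clean, 2.2)
    dirty = torch.pow(dirty, 2.2)
    diff = torch.abs(clean - dirty)
    diff = torch.sum(diff, dim=1)  # collapse channels -> per-pixel difference
    # Assumed tail (not shown in the hunk): a hard binary mask, which is
    # the non-differentiable step the quantization comment likely refers to.
    return (diff > thresold).float()

# The exponent direction matters: for a mid-gray input,
x = torch.tensor(0.5)
print(torch.pow(x, 1 / 2.2))  # ~0.73 -- gamma-encode brightens mid-tones
print(torch.pow(x, 2.2))      # ~0.22 -- gamma-decode darkens mid-tones
```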