
Something is weird about this gamma correction: pow(x, 0.45) is roughly the inverse of pow(x, 2.2), so if clean and dirty are sRGB-encoded this encodes them a second time instead of linearizing them, which is the opposite of what the new comment says it does.
@6f62307973748e5c59fbcbb38089b2bb94e45e7c
--- model/Generator.py
+++ model/Generator.py
@@ -35,12 +35,12 @@
         return ret

     def binary_diff_mask(self, clean, dirty, thresold=0.1):
-        clean = torch.pow(clean, 2.2)
-        dirty = torch.pow(dirty, 2.2)
+        # this parts corrects gamma, and always remember, sRGB values are not in linear scale with lights intensity
+        clean = torch.pow(clean, 0.45)
+        dirty = torch.pow(dirty, 0.45)
         diff = torch.abs(clean - dirty)
         diff = torch.sum(diff, dim=1)
-        # this line is certainly cause problem for quantization
-        # like, hardcoding it, what could go wrong?
+
         bin_diff = (diff > thresold).to(clean.dtype)

         return bin_diff
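If the intent is to compare the two images in linear light, sRGB inputs need an exponent of about 2.2 (or the exact sRGB EOTF), not 0.45. Here is a minimal sketch of that reading, assuming clean and dirty are sRGB-encoded tensors in [0, 1] with channels on dim 1; the srgb_to_linear helper is mine, not from this repo, and the rest mirrors the existing method (parameter name thresold kept as in the diff):

import torch

def srgb_to_linear(x: torch.Tensor) -> torch.Tensor:
    # Exact sRGB EOTF; torch.pow(x, 2.2) is a common approximation of this.
    return torch.where(x <= 0.04045, x / 12.92, ((x + 0.055) / 1.055) ** 2.4)

def binary_diff_mask(clean: torch.Tensor, dirty: torch.Tensor, thresold: float = 0.1) -> torch.Tensor:
    # Decode sRGB to linear light before differencing.
    clean_lin = srgb_to_linear(clean)
    dirty_lin = srgb_to_linear(dirty)

    diff = torch.abs(clean_lin - dirty_lin)  # per-channel absolute difference
    diff = torch.sum(diff, dim=1)            # sum over the channel dimension

    # Threshold the summed difference into a binary mask in the input dtype.
    return (diff > thresold).to(clean.dtype)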