Bug fix: GPU OOM error (limit to 2 inference worker processes for the GTX 1070)
@7fea956f167116ebbee23d0058a28ee304db4595
--- run_image_anal_backend.sh
+++ run_image_anal_backend.sh
... | ... | @@ -28,8 +28,8 @@ |
28 | 28 |
pids+=($!) |
29 | 29 |
|
30 | 30 |
#python test.py |
31 |
-python inference_endpoint.py |
|
32 |
-#gunicorn --workers=6 inference_endpoint:app |
|
31 |
+#python inference_endpoint.py |
|
32 |
+gunicorn --workers=2 inference_endpoint:app --bind localhost:12345 |
|
33 | 33 |
pids+=($!) |
34 | 34 |
|
35 | 35 |
#python postprocess_draft.py |
--- yoloseg/inference_gpu_.py
+++ yoloseg/inference_gpu_.py
... | ... | @@ -28,6 +28,7 @@ |
28 | 28 |
|
29 | 29 |
def run_inference(self, input_image): |
30 | 30 |
model_input = input_image |
31 |
+ # print(input_image) |
|
31 | 32 |
if self.letter_box_for_square and self.model_shape[0] == self.model_shape[1]: |
32 | 33 |
model_input = self.format_to_square(model_input) |
33 | 34 |
|
Add a comment
Delete comment
Once you delete this comment, you won't be able to recover it. Are you sure you want to delete this comment?