Use the GPU to run the document detection model
@@ -1,6 +1,6 @@
 x-env:
   &template
-  image: fcb_photo_review:1.13.2
+  image: fcb_photo_review:1.13.3
   restart: always
 
 services:
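Note: bumping the image tag alone does not give the container GPU access; the host still has to expose an NVIDIA device to the service. That part is not shown in this commit, but under the Compose spec a GPU reservation usually looks like the sketch below (the service name photo_review and the single-GPU reservation are assumptions, not taken from this repo):

services:
  photo_review:                 # hypothetical service name, not from this commit
    image: fcb_photo_review:1.13.3
    restart: always
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia    # request one NVIDIA GPU via the Compose spec
              count: 1
              capabilities: [gpu]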
@@ -33,7 +33,7 @@ def detect_image(img_path):
     onnx_file = "model/object_det_model/ppyoloe_plus_crn_x_80e_coco_w_nms.onnx"
     infer_cfg = "model/object_det_model/infer_cfg.yml"
     # load predictor
-    predictor = InferenceSession(onnx_file)
+    predictor = InferenceSession(onnx_file, providers=["CUDAExecutionProvider"])
     # load infer config
     infer_config = PredictConfig(infer_cfg)
 
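The functional change is the providers argument to onnxruntime's InferenceSession, which pins inference to the CUDA execution provider. A minimal sketch of the same call with an explicit CPU fallback, assuming onnxruntime-gpu is installed (the availability check and the fallback are not part of this commit):

import onnxruntime as ort

onnx_file = "model/object_det_model/ppyoloe_plus_crn_x_80e_coco_w_nms.onnx"

# Prefer CUDA when the GPU provider is available, otherwise fall back to CPU.
if "CUDAExecutionProvider" in ort.get_available_providers():
    providers = ["CUDAExecutionProvider", "CPUExecutionProvider"]
else:
    providers = ["CPUExecutionProvider"]

predictor = ort.InferenceSession(onnx_file, providers=providers)
print(predictor.get_providers())  # shows which providers the session actually uses

Passing only CUDAExecutionProvider, as the diff does, expresses a hard preference for GPU inference; the sketch above merely adds a CPU fallback for hosts without a working CUDA setup.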