updated inference device selection for sub-process yolo script
FabianPlum committed Oct 18, 2022
1 parent f8d2d3c commit 89ec587
Showing 2 changed files with 13 additions and 21 deletions.
darknet/darknet_cpu.py: 1 addition & 0 deletions
@@ -10,6 +10,7 @@

 from ctypes import *
 import math
+import numpy
 import random
 import os

yolo_tracker.py: 12 additions & 21 deletions
Expand Up @@ -7,7 +7,6 @@
import os
import time
import argparse
import tensorflow as tf
from operator import itemgetter

np.random.seed(0)
@@ -105,25 +104,6 @@ def __init__(self, net_cfg, net_weight, net_names, net_data, video_path,
                  frame_start=0, frame_end=-1, continue_tracking=False,
                  detection_min_size=50, detection_constant_size=100, detection_enforce_constant_size=False):

-        """
-        Check which compute device is selected and set it as active
-        """
-        """
-        if inference_device is not None:
-            self.devices = [inference_device]
-        else:
-            self.devices = getInferenceDevices()
-        setInferenceDevive(self.devices[-1])
-        # load darknet with compiled DLLs for windows for either GPU or CPU inference from respective path
-        if self.devices[-1].split("_")[0] == "GPU":
-            from darknet import darknet as darknet
-        else:
-            from darknet import darknet_cpu as darknet
-        """
-        from darknet import darknet as darknet
-        ###
-
         # now we can load the captured video file and display it
         self.video_path = video_path
         self.cap = cv2.VideoCapture(self.video_path)
@@ -196,7 +176,18 @@ def __init__(self, net_cfg, net_weight, net_names, net_data, video_path,
     def run_inference(self, export_video=False, write_csv=False, write_h5=False, write_pkl=False,
                       inference_device=None):

-        from darknet import darknet as darknet
+        """
+        Check which compute device is selected and set it as active
+        """
+
+        if inference_device is not None:
+            # load darknet with compiled DLLs for windows for either GPU or CPU inference from respective path
+            if inference_device.split("_")[0] == "CPU":
+                from darknet import darknet_cpu as darknet
+            else:
+                # use GPU inference by default
+                from darknet import darknet as darknet
+
         self.network, self.class_names, self.class_colours = darknet.load_network(self.net_cfg,
                                                                                    self.net_data,
                                                                                    self.net_weight,
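
For reference, a minimal usage sketch of the new selection logic. The tracker class name, the file paths, and the assumption that these keyword arguments are enough to construct the tracker are all placeholders and not part of this commit; only the run_inference() signature and the split("_") prefix check are taken from the diff above. A "CPU" prefix loads the darknet_cpu wrapper, any other non-None device string falls back to the GPU build.

# hypothetical usage sketch; "YoloTracker" and all paths are assumed placeholders
from yolo_tracker import YoloTracker

tracker = YoloTracker(net_cfg="cfg/yolov4-custom.cfg",
                      net_weight="weights/yolov4-custom_best.weights",
                      net_names="data/obj.names",
                      net_data="data/obj.data",
                      video_path="example_recording.mp4")

# the prefix before "_" decides which darknet build run_inference() imports
tracker.run_inference(write_csv=True, inference_device="CPU_0")  # "CPU" prefix loads darknet_cpu (CPU DLLs)
tracker.run_inference(write_csv=True, inference_device="GPU_0")  # any other prefix uses the GPU darknet build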
