jetson-inference
jetson-inference copied to clipboard
Question about net.GetNetworkFPS()
I'm using detectnet.py for many things and doing a lot of processing using OpenCV. Anyway, does this function return the FPS for the whole script or just the network?
` while True: # capture the next image img = input.Capture()
detections = net.Detect(img, overlay='box,labels,conf')
# Convert from Cuda to BGR numpy array
img_array = jetson.utils.cudaToNumpy(img)
#print('shape of the image', img_array.shape)
# Object Detection and Distance Estimation
#X, Y, _ = img_array.shape
sep1 = int(img_array.shape[0]*0.333)
sep2 = int(img_array.shape[0]*0.666)
Data_arr = []
#For loop start
for detection in detections:
#xmax = round(detection.Right)
#xmin = round(detection.Left)
#ymin = round(detection.Top)
#ymax = round(detection.Bottom)
#Detected labels
if detection.ClassID == 1:
label = 'P'
elif detection.ClassID == 2:
label = 'C'
#End detected labels
#Position Handling
location = position_handling(detection, sep1, sep2)
#End Position Handling
#Handle distance for both human and car because different sizes
#Distance handling
distance = distance_estimation(detection, img_array.shape[0], img_array.shape[1])
#End Distance Handling
#For UART
Data = "{}{}{}".format(str(distance), location, label)
# convert to RGB
# rgb_array = cv2.cvtColor(img_array, cv2.COLOR_BGR2RGB)
# cv2.rectangle(rgb_array,
# (int(detection.Left), int(detection.Bottom)), (int(detection.Right), int(detection.Top)),
# (255, 0, 0), 4)
cv2.putText(img_array, location,
(int(detection.Right) - 20, int(detection.Top) + 40),
cv2.FONT_HERSHEY_SIMPLEX,
1,
(255, 0, 0), 2)
# cv2.putText(rgb_array, str(label),
# (int(detection.Left) + 20, int(detection.Top) + 40),
# cv2.FONT_HERSHEY_SIMPLEX,
# 1,
# (0, 255, 0), 2)
cv2.putText(img_array, str(distance),
(int(detection.Left) + 20, int(detection.Bottom) - 40),
cv2.FONT_HERSHEY_SIMPLEX,
1, (0, 0, 255), 2)
Data_arr.append(Data)
#Lane Detection Part
img_array = lane_follower.follow_lane(img_array)
if lane_follower.curr_steering_angle >= 84 and lane_follower.curr_steering_angle <= 95:
lane_follower.dir.append('F')
elif lane_follower.curr_steering_angle >= 64 and lane_follower.curr_steering_angle <= 83:
lane_follower.dir.append('LF')
elif lane_follower.curr_steering_angle >= 45 and lane_follower.curr_steering_angle <= 63:
lane_follower.dir.append('L')
elif lane_follower.curr_steering_angle >= 96 and lane_follower.curr_steering_angle <= 115:
lane_follower.dir.append('RF')
elif lane_follower.curr_steering_angle >= 116 and lane_follower.curr_steering_angle <= 135:
lane_follower.dir.append('R')
if len(lane_follower.dir) == 20:
direction = max(set(lane_follower.dir), key=lane_follower.dir.count)
lane_follower.dir = []
cv2.putText(img_array, str(lane_follower.curr_steering_angle) + " " + direction, (50,50), cv2.FONT_HERSHEY_SIMPLEX , 1 , (255, 0, 0) , 2 , cv2.LINE_AA )
#End Lane Detection
#FPS
#UART Writing
# try:
# # Send a simple header
# serial_port.write("#".encode())
# while True:
# Data = Data.encode()
# #print(data)
# serial_port.write(Data)
# #serial_port.write("Kosomk\r\n".encode())
# # if we get a carriage return, add a line feed too
# # \r is a carriage return; \n is a line feed
# # This is to help the tty program on the other end
# # Windows is \r\n for carriage return, line feed
# # Macintosh and Linux use \n
# serial_port.write("# \r\n".encode())
# break
# except KeyboardInterrupt:
# print("Exiting Program")
# except Exception as exception_error:
# # print("Error occurred. Exiting Program")
# # print("Error: " + str(exception_error))
# continue
# time.sleep(0.05)
#UART DONE
#convert back to BGR
## rgb_array
#Calculating Frames
#End Calcularting
#For Loop Ends
#END FPS
# composite new image onto the original
# render the image
#new_img = cv2.cvtColor(rgb_array, cv2.COLOR_RGB2BGR)
img = jetson.utils.cudaFromNumpy(img_array)
output.Render(img)
print("{:.0f} FPS".format(net.GetNetworkFPS()))
# update the title bar
#output.SetStatus("{:s} | Network {:.0f} FPS".format(opt.network, net.GetNetworkFPS()))
#time.sleep(0.5)
# print out performance info
#net.PrintProfilerTimes()
# exit on input/output EOS
if not input.IsStreaming() or not output.IsStreaming():
# serial_port.close()
break`
I added all of the previous code, and to my surprise, it got me 40+ FPS with a custom-trained MobileNet V1 from your repo.
net.GetNetworkFPS()
returns the FPS for just the DNN network.
videoOutput.GetFrameRate()
would return the frame rate that images are rendered to the display.
net.GetNetworkFPS()
returns the FPS for just the DNN network.
videoOutput.GetFrameRate()
would return the frame rate that images are rendered to the display.
when I replaced
print("{:.0f} FPS".format(net.GetNetworkFPS()))
with
print("{:.0f} FPS".format(output.GetFrameRate()))
it gave me a steady 30 FPS even though the video freezes sometimes. I put it right after output.Render(img)
before the end of the for loop
Did you solve your issue?
I think the reason you are getting a steady 30 FPS is that GetFrameRate()
gives you the FPS of your video, not the FPS of inference.