# test.py — quick visualization of the CULane train_diffs.npz array.
# import torch
# import torchvision
# import onnx
# import onnxruntime
# # Load your PyTorch model
# # model = torchvision.models.resnet18(pretrained=True)
# # model.eval()
# model = dict(test_cfg=dict(conf_threshold=0.43))
# # Define example input tensor
# example_input = torch.randn(1, 3, 224, 224) # Adjust the shape as per your model's input requirements
# # Export the model to ONNX format
# onnx_file_path = "clrernet.onnx"
# torch.onnx.export(model, example_input, onnx_file_path, export_params=True, opset_version=11, input_names=['input'], output_names=['output'])
# # Load the ONNX model
# onnx_model = onnx.load(onnx_file_path)
# # Create an ONNX Runtime Inference Session
# ort_session = onnxruntime.InferenceSession(onnx_file_path)
# # Run inference with ONNX Runtime
# ort_inputs = {ort_session.get_inputs()[0].name: example_input.numpy()}
# ort_outs = ort_session.run(None, ort_inputs)
# # Process the output as per your requirements
# print("Output shape:", ort_outs[0].shape)
import numpy as np

# Default location of the precomputed training-diff archive.
_DEFAULT_NPZ_PATH = '/home/sami/Desktop/Code/CLRerNet/dataset2/culane/list/train_diffs.npz'


def load_diff_array(npz_path=_DEFAULT_NPZ_PATH):
    """Load and return the 'data' array from a train-diffs .npz archive.

    Args:
        npz_path: Path to the ``.npz`` file (defaults to the CULane
            train_diffs archive used by this project).

    Returns:
        The array stored under the ``'data'`` key.

    Raises:
        FileNotFoundError: If ``npz_path`` does not exist.
        KeyError: If the archive has no ``'data'`` entry.
    """
    # NOTE: mmap_mode is ignored by np.load for .npz archives (the original
    # passed mmap_mode='r' to no effect), so it is dropped here.
    # SECURITY: allow_pickle=True executes arbitrary code on malicious
    # archives — only load .npz files from trusted sources.
    with np.load(npz_path, allow_pickle=True) as archive:
        return archive['data']


if __name__ == "__main__":
    # Import matplotlib lazily so the loader is usable without a display
    # backend installed.
    import matplotlib.pyplot as plt

    plt.imshow(load_diff_array())
    plt.show()