inference_iterative.py

import os
import numpy as np
import onnx
import onnxruntime as ort

# The directory of your input and output data
input_data_dir = 'input_data'
output_data_dir = 'output_data'
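# Not in the original script: create the output directory up front so the
# saving step at the end of the loop cannot fail on a missing folder
# (assumes the process has write permission here).
os.makedirs(output_data_dir, exist_ok=True)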
# Load the ONNX graphs (only needed if you want to inspect or validate them;
# the inference sessions below read the .onnx files directly from disk)
model_24 = onnx.load('pangu_weather_24.onnx')
model_6 = onnx.load('pangu_weather_6.onnx')

# Set the behaviour of onnxruntime
options = ort.SessionOptions()
options.enable_cpu_mem_arena = False
options.enable_mem_pattern = False
options.enable_mem_reuse = False
# Increase this number for faster inference at the cost of more memory
options.intra_op_num_threads = 1

# Set the behaviour of the CUDA provider
cuda_provider_options = {'arena_extend_strategy': 'kSameAsRequested'}

# Initialize onnxruntime sessions for the Pangu-Weather models
ort_session_24 = ort.InferenceSession('pangu_weather_24.onnx', sess_options=options, providers=[('CUDAExecutionProvider', cuda_provider_options)])
ort_session_6 = ort.InferenceSession('pangu_weather_6.onnx', sess_options=options, providers=[('CUDAExecutionProvider', cuda_provider_options)])
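# A hedged alternative, not part of the original script: if you are running the
# plain CPU build of onnxruntime (no CUDAExecutionProvider available), the same
# sessions can be created with the CPU provider instead, e.g.:
# ort_session_24 = ort.InferenceSession('pangu_weather_24.onnx', sess_options=options, providers=['CPUExecutionProvider'])
# ort_session_6 = ort.InferenceSession('pangu_weather_6.onnx', sess_options=options, providers=['CPUExecutionProvider'])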
# Load the upper-air numpy array
input = np.load(os.path.join(input_data_dir, 'input_upper.npy')).astype(np.float32)
# Load the surface numpy array
input_surface = np.load(os.path.join(input_data_dir, 'input_surface.npy')).astype(np.float32)
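# Sanity check added for illustration (not in the original script): the
# Pangu-Weather data description lists the upper-air array as (5, 13, 721, 1440)
# -- variables Z, Q, T, U, V on 13 pressure levels over a 0.25-degree lat/lon
# grid -- and the surface array as (4, 721, 1440) for MSLP, U10, V10, T2M.
# Adjust or drop this check if your data layout differs.
assert input.shape == (5, 13, 721, 1440), f'unexpected upper-air shape {input.shape}'
assert input_surface.shape == (4, 721, 1440), f'unexpected surface shape {input_surface.shape}'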
# Run the inference session: 28 autoregressive steps of 6 hours each (a 7-day
# forecast). Every 4th step (24 h, 48 h, ...) is produced by the 24-hour model
# chained on its own previous 24-hour output; the remaining steps use the
# 6-hour model, chained on the latest available state.
input_24, input_surface_24 = input, input_surface
for i in range(28):
    if (i + 1) % 4 == 0:
        output, output_surface = ort_session_24.run(None, {'input': input_24, 'input_surface': input_surface_24})
        input_24, input_surface_24 = output, output_surface
    else:
        output, output_surface = ort_session_6.run(None, {'input': input, 'input_surface': input_surface})
    input, input_surface = output, output_surface
    # You can save the results here
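    # A minimal saving sketch (not in the original script): write each step's
    # output to output_data_dir, naming files by forecast lead time in hours.
    # The filename pattern is an assumption; use whatever convention you need.
    lead_time = (i + 1) * 6
    np.save(os.path.join(output_data_dir, f'output_upper_{lead_time}h.npy'), output)
    np.save(os.path.join(output_data_dir, f'output_surface_{lead_time}h.npy'), output_surface)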