Open3

Dwarf-Tensorflow の実行環境とエクスポート

PINTO
# Pull the prebuilt Dwarf-Tensorflow execution-environment image.
docker pull pinto0309/dwarf_tensorflow_exec_env:latest

# Start the container with GPU access and the current directory mounted
# as /workdir. "$(pwd)" replaces the deprecated backtick form and is
# quoted so the mount still works when the path contains spaces.
docker run --rm -it --gpus all \
  -v "$(pwd)":/workdir \
  pinto0309/dwarf_tensorflow_exec_env:latest

# Inside the container: sanity-check inference on both datasets (CPU only).
python3 run_test.py --cfg configurations/sceneflow.json --cuda-off
python3 run_test.py --cfg configurations/kitti.json --cuda-off
PINTO
# Start the openvino2tensorflow conversion container; the current
# directory is mounted so exported models land on the host.
# "$(pwd)" replaces the deprecated backticks and is quoted to survive
# paths containing spaces.
docker run --gpus all -it --rm \
  -v "$(pwd)":/home/user/workdir \
  ghcr.io/pinto0309/openvino2tensorflow:latest

# Export parameters: dataset name plus input height/width, used below to
# build the saved_model directory name (saved_model_${DATASET}_${H}x${W}).
DATASET=sceneflow
H=192
W=320

# Convert the TensorFlow saved_model into multiple deployment formats:
# float32 / dynamic-range / weight / float16 / integer-quantized tflite,
# plus TensorFlow.js and CoreML. The normalization formula is applied to
# the uint8 calibration data during full-integer quantization.
# The directory expansion is quoted so the command stays a single
# argument even if DATASET ever contains unexpected characters.
saved_model_to_tflite \
  --saved_model_dir_path "saved_model_${DATASET}_${H}x${W}" \
  --output_no_quant_float32_tflite \
  --output_dynamic_range_quant_tflite \
  --output_weight_quant_tflite \
  --output_float16_quant_tflite \
  --output_integer_quant_tflite \
  --output_integer_quant_type 'uint8' \
  --string_formulas_for_normalization 'data / 255.0' \
  --output_tfjs \
  --output_coreml

# Hoist the repeated saved_model path into one quoted variable so the
# four usages below cannot drift apart.
MODEL_DIR="saved_model_${DATASET}_${H}x${W}"

# Export ONNX at opset 11; the four stereo/temporal inputs are converted
# to NCHW layout for ONNX-runtime consumers.
python -m tf2onnx.convert \
  --opset 11 \
  --inputs-as-nchw left_t0,left_t1,right_t0,right_t1 \
  --saved-model "${MODEL_DIR}" \
  --output "${MODEL_DIR}/model_float32.onnx"

# Simplify the exported graph in place.
onnxsim "${MODEL_DIR}/model_float32.onnx" "${MODEL_DIR}/model_float32.onnx"

# Collect the tflite artifacts alongside the other exports, then remove
# the now-empty staging directory.
mv tflite_from_saved_model/* "${MODEL_DIR}"
rm -rf tflite_from_saved_model