diff --git a/SConstruct b/SConstruct index 14c4336eb6d9a6..b25a3d3e6cb283 100644 --- a/SConstruct +++ b/SConstruct @@ -16,8 +16,11 @@ is_tbp = os.path.isfile('/data/tinkla_buddy_pro') if platform.system() == "Darwin": arch = "Darwin" +webcam = bool(ARGUMENTS.get("use_webcam", 0)) + if arch == "aarch64": if is_tbp: + webcam=True lenv = { "LD_LIBRARY_PATH": '/usr/lib', "PATH": os.environ['PATH'], @@ -47,28 +50,32 @@ if arch == "aarch64": ] if is_tbp: - cflags = ["-mcpu=cortex-a57"] - cxxflags = ["-mcpu=cortex-a57"] + cflags = [] + cxxflags = [] cpppath = [ - "#phonelibs/opencl/include", - "/data/op_rk3399_setup/support_files/include", - "/data/op_rk3399_setup/external/snpe/include", + "/usr/local/include", + "/usr/local/include/opencv4", + "/usr/include/khronos-api", + "/usr/include", + "#phonelibs/snpe/include", + "/usr/include/aarch64-linux-gnu", ] libpath = [ - "/data/op_rk3399_setup/external/snpe/lib/lib", + "/usr/local/lib", + "/usr/lib/aarch64-linux-gnu", "/usr/lib", + "/data/op_rk3399_setup/external/snpe/lib/lib", "/data/data/com.termux/files/usr/lib", - "/system/vendor/lib64", - "/system/comma/usr/lib", "#phonelibs/nanovg", "/data/op_rk3399_setup/support_files/lib", ] rpath = ["/system/vendor/lib64", + "/usr/local/lib", + "/usr/lib/aarch64-linux-gnu", + "/usr/lib", "/data/op_rk3399_setup/external/snpe/lib/lib", "/data/op_rk3399_setup/support_files/lib", - "external/tensorflow/lib", "cereal", - "/usr/lib", "selfdrive/common", ] else: @@ -152,7 +159,7 @@ env = Environment( "#phonelibs/json11", "#phonelibs/eigen", "#phonelibs/curl/include", - "#phonelibs/opencv/include", + #"#phonelibs/opencv/include", "#phonelibs/libgralloc/include", "#phonelibs/android_frameworks_native/include", "#phonelibs/android_hardware_libhardware/include", @@ -211,8 +218,8 @@ def abspath(x): #zmq = 'zmq' # still needed for apks -zmq = FindFile("libzmq.a", libpath) -Export('env', 'arch', 'zmq', 'SHARED', 'is_tbp') +zmq = FindFile("libzmq.so", libpath) +Export('env', 'arch', 'zmq', 
'SHARED', 'webcam', 'is_tbp') # cereal and messaging are shared with the system SConscript(['cereal/SConscript']) diff --git a/models/models/dmonitoring_model.keras b/models/models/dmonitoring_model.keras new file mode 100644 index 00000000000000..5fc41d6ff111e7 Binary files /dev/null and b/models/models/dmonitoring_model.keras differ diff --git a/models/models/dmonitoring_model_q.dlc b/models/models/dmonitoring_model_q.dlc new file mode 100644 index 00000000000000..9dbe09274da203 Binary files /dev/null and b/models/models/dmonitoring_model_q.dlc differ diff --git a/models/models/driving_bigmodel.dlc b/models/models/driving_bigmodel.dlc new file mode 100644 index 00000000000000..95c6ec232d36c1 Binary files /dev/null and b/models/models/driving_bigmodel.dlc differ diff --git a/models/models/supercombo.dlc b/models/models/supercombo.dlc new file mode 100644 index 00000000000000..ea6d0a068d8439 Binary files /dev/null and b/models/models/supercombo.dlc differ diff --git a/models/models/supercombo.keras b/models/models/supercombo.keras new file mode 100644 index 00000000000000..ebbeab58696bbc Binary files /dev/null and b/models/models/supercombo.keras differ diff --git a/selfdrive/camerad/SConscript b/selfdrive/camerad/SConscript index 5cfe57103890fb..e4778fdd47f985 100644 --- a/selfdrive/camerad/SConscript +++ b/selfdrive/camerad/SConscript @@ -1,17 +1,33 @@ -Import('env', 'arch', 'is_tbp', 'messaging', 'common', 'gpucommon', 'visionipc', 'cereal') +Import('env', 'arch', 'messaging', 'is_tbp', 'common', 'gpucommon', 'visionipc', 'cereal', 'webcam') libs = ['m', 'pthread', common, 'jpeg', 'json', cereal, 'OpenCL', messaging, 'czmq', 'zmq', 'capnp', 'kj', 'capnp_c', visionipc, gpucommon] +if is_tbp: + arch = "aarch64_TBP" if arch == "aarch64": - if is_tbp: + libs += ['gsl', 'CB', 'adreno_utils', 'EGL', 'GLESv3', 'cutils', 'ui'] + cameras = ['cameras/camera_qcom.c'] +elif arch == "larch64": + libs += [] + cameras = ['cameras/camera_qcom2.c'] +elif arch == "aarch64_TBP": + 
libs += ['opencv_core', 'opencv_highgui', 'opencv_video', 'opencv_imgproc', 'opencv_videoio'] + cameras = ['cameras/camera_webcam.cc'] + env = env.Clone() + env.Append(CXXFLAGS = '-DWEBCAM') + env.Append(CFLAGS = '-DWEBCAM') + #env.Append(CPPPATH = '/usr/local/include/opencv4') +else: + if webcam: + libs += ['opencv_core', 'opencv_highgui', 'opencv_imgproc', 'opencv_videoio'] + cameras = ['cameras/camera_webcam.cc'] + env = env.Clone() + env.Append(CXXFLAGS = '-DWEBCAM') + env.Append(CFLAGS = '-DWEBCAM') + env.Append(CPPPATH = '/usr/local/include/opencv4') + else: libs += [] cameras = ['cameras/camera_frame_stream.cc'] - else: - libs += ['gsl', 'CB', 'adreno_utils', 'EGL', 'GLESv3', 'cutils', 'ui'] - cameras = ['cameras/camera_qcom.c'] -else: - libs += [] - cameras = ['cameras/camera_frame_stream.cc'] env.SharedLibrary('snapshot/visionipc', ["#selfdrive/common/visionipc.c", "#selfdrive/common/ipc.c"]) diff --git a/selfdrive/camerad/__init__.py b/selfdrive/camerad/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/selfdrive/camerad/cameras/camera_common.h b/selfdrive/camerad/cameras/camera_common.h index 187952e22dc26e..1bd8ac6c775899 100644 --- a/selfdrive/camerad/cameras/camera_common.h +++ b/selfdrive/camerad/cameras/camera_common.h @@ -10,7 +10,10 @@ #define CAMERA_ID_OV8865 3 #define CAMERA_ID_IMX298_FLIPPED 4 #define CAMERA_ID_OV10640 5 -#define CAMERA_ID_MAX 6 +#define CAMERA_ID_LGC920 6 +#define CAMERA_ID_LGC615 7 +#define CAMERA_ID_AR0231 8 +#define CAMERA_ID_MAX 9 #ifdef __cplusplus extern "C" { diff --git a/selfdrive/camerad/cameras/camera_frame_stream.cc b/selfdrive/camerad/cameras/camera_frame_stream.cc index 1a9f8a931f8d16..0061fe6185c8c2 100644 --- a/selfdrive/camerad/cameras/camera_frame_stream.cc +++ b/selfdrive/camerad/cameras/camera_frame_stream.cc @@ -115,7 +115,7 @@ CameraInfo cameras_supported[CAMERA_ID_MAX] = { .frame_width = 1632, .frame_height = 1224, .frame_stride = 2040, // seems right - .bayer = false, + 
.bayer = true, .bayer_flip = 3, .hdr = false }, diff --git a/selfdrive/camerad/cameras/camera_qcom2.c b/selfdrive/camerad/cameras/camera_qcom2.c new file mode 100644 index 00000000000000..75fa803a53be61 --- /dev/null +++ b/selfdrive/camerad/cameras/camera_qcom2.c @@ -0,0 +1,946 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "common/util.h" +#include "common/swaglog.h" +#include "camera_qcom2.h" + +#include "media/cam_defs.h" +#include "media/cam_isp.h" +#include "media/cam_isp_ife.h" +#include "media/cam_sensor_cmn_header.h" +#include "media/cam_sensor.h" +#include "media/cam_sync.h" + +#include "sensor2_i2c.h" + +#define FRAME_WIDTH 1928 +#define FRAME_HEIGHT 1208 +//#define FRAME_STRIDE 1936 // for 8 bit output +#define FRAME_STRIDE 2416 // for 10 bit output + +static void hexdump(uint8_t *data, int len) { + for (int i = 0; i < len; i++) { + if (i!=0&&i%0x10==0) printf("\n"); + printf("%02X ", data[i]); + } + printf("\n"); +} + + +extern volatile sig_atomic_t do_exit; + +CameraInfo cameras_supported[CAMERA_ID_MAX] = { + [CAMERA_ID_AR0231] = { + .frame_width = FRAME_WIDTH, + .frame_height = FRAME_HEIGHT, + .frame_stride = FRAME_STRIDE, + .bayer = true, + .bayer_flip = 0, + .hdr = false + }, +}; + +// ************** low level camera helpers **************** + +int cam_control(int fd, int op_code, void *handle, int size) { + struct cam_control camcontrol = {0}; + camcontrol.op_code = op_code; + camcontrol.handle = (uint64_t)handle; + if (size == 0) { camcontrol.size = 8; + camcontrol.handle_type = CAM_HANDLE_MEM_HANDLE; + } else { + camcontrol.size = size; + camcontrol.handle_type = CAM_HANDLE_USER_POINTER; + } + + int ret = ioctl(fd, VIDIOC_CAM_CONTROL, &camcontrol); + if (ret == -1) { + perror("wat"); + } + return ret; +} + +int device_control(int fd, int op_code, int session_handle, int dev_handle) { + // start stop and release are all the same + static struct cam_release_dev_cmd 
release_dev_cmd; + release_dev_cmd.session_handle = session_handle; + release_dev_cmd.dev_handle = dev_handle; + return cam_control(fd, op_code, &release_dev_cmd, sizeof(release_dev_cmd)); +} + +void *alloc_w_mmu_hdl(int video0_fd, int len, int align, int flags, uint32_t *handle, int mmu_hdl, int mmu_hdl2) { + int ret; + + static struct cam_mem_mgr_alloc_cmd mem_mgr_alloc_cmd = {0}; + mem_mgr_alloc_cmd.len = len; + mem_mgr_alloc_cmd.align = align; + mem_mgr_alloc_cmd.flags = flags; + mem_mgr_alloc_cmd.num_hdl = 0; + if (mmu_hdl != 0) { + mem_mgr_alloc_cmd.mmu_hdls[0] = mmu_hdl; + mem_mgr_alloc_cmd.num_hdl++; + } + if (mmu_hdl2 != 0) { + mem_mgr_alloc_cmd.mmu_hdls[1] = mmu_hdl2; + mem_mgr_alloc_cmd.num_hdl++; + } + + cam_control(video0_fd, CAM_REQ_MGR_ALLOC_BUF, &mem_mgr_alloc_cmd, sizeof(mem_mgr_alloc_cmd)); + *handle = mem_mgr_alloc_cmd.out.buf_handle; + + void *ptr = NULL; + if (mem_mgr_alloc_cmd.out.fd > 0) { + ptr = mmap(NULL, len, PROT_READ | PROT_WRITE, MAP_SHARED, mem_mgr_alloc_cmd.out.fd, 0); + assert(ptr != MAP_FAILED); + } + + LOGD("alloced: %x %d %llx mapped %p", mem_mgr_alloc_cmd.out.buf_handle, mem_mgr_alloc_cmd.out.fd, mem_mgr_alloc_cmd.out.vaddr, ptr); + + return ptr; +} + +void *alloc(int video0_fd, int len, int align, int flags, uint32_t *handle) { + return alloc_w_mmu_hdl(video0_fd, len, align, flags, handle, 0, 0); +} + +void release(int video0_fd, uint32_t handle) { + int ret; + static struct cam_mem_mgr_release_cmd mem_mgr_release_cmd = {0}; + mem_mgr_release_cmd.buf_handle = handle; + + ret = cam_control(video0_fd, CAM_REQ_MGR_RELEASE_BUF, &mem_mgr_release_cmd, sizeof(mem_mgr_release_cmd)); + assert(ret == 0); +} + + +void release_fd(int video0_fd, uint32_t handle) { + // handle to fd + close(handle>>16); + release(video0_fd, handle); +} + +// ************** high level camera helpers **************** + +void sensors_poke(struct CameraState *s, int request_id) { + uint32_t cam_packet_handle = 0; + int size = sizeof(struct cam_packet); + struct 
cam_packet *pkt = alloc(s->video0_fd, size, 8, + CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle); + pkt->num_cmd_buf = 1; + pkt->kmd_cmd_buf_index = -1; + pkt->header.size = size; + pkt->header.op_code = 0x7f; + pkt->header.request_id = request_id; + struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; + + static struct cam_config_dev_cmd config_dev_cmd = {}; + config_dev_cmd.session_handle = s->session_handle; + config_dev_cmd.dev_handle = s->sensor_dev_handle; + config_dev_cmd.offset = 0; + config_dev_cmd.packet_handle = cam_packet_handle; + + int ret = cam_control(s->sensor_fd, CAM_CONFIG_DEV, &config_dev_cmd, sizeof(config_dev_cmd)); + assert(ret == 0); + + release_fd(s->video0_fd, cam_packet_handle); +} + +void sensors_i2c(struct CameraState *s, struct i2c_random_wr_payload* dat, int len, int op_code) { + LOGD("sensors_i2c: %d", len); + uint32_t cam_packet_handle = 0; + int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*1; + struct cam_packet *pkt = alloc(s->video0_fd, size, 8, + CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle); + pkt->num_cmd_buf = 1; + pkt->kmd_cmd_buf_index = -1; + pkt->header.size = size; + pkt->header.op_code = op_code; + struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; + + buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_cmd_i2c_random_wr) + (len-1)*sizeof(struct i2c_random_wr_payload); + buf_desc[0].type = CAM_CMD_BUF_I2C; + struct cam_cmd_power *power = alloc(s->video0_fd, buf_desc[0].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &buf_desc[0].mem_handle); + struct cam_cmd_i2c_random_wr *i2c_random_wr = (void*)power; + i2c_random_wr->header.count = len; + i2c_random_wr->header.op_code = 1; + i2c_random_wr->header.cmd_type = CAMERA_SENSOR_CMD_TYPE_I2C_RNDM_WR; + i2c_random_wr->header.data_type = 
CAMERA_SENSOR_I2C_TYPE_WORD; + i2c_random_wr->header.addr_type = CAMERA_SENSOR_I2C_TYPE_WORD; + memcpy(i2c_random_wr->random_wr_payload, dat, len*sizeof(struct i2c_random_wr_payload)); + + static struct cam_config_dev_cmd config_dev_cmd = {}; + config_dev_cmd.session_handle = s->session_handle; + config_dev_cmd.dev_handle = s->sensor_dev_handle; + config_dev_cmd.offset = 0; + config_dev_cmd.packet_handle = cam_packet_handle; + + int ret = cam_control(s->sensor_fd, CAM_CONFIG_DEV, &config_dev_cmd, sizeof(config_dev_cmd)); + assert(ret == 0); + + release_fd(s->video0_fd, buf_desc[0].mem_handle); + release_fd(s->video0_fd, cam_packet_handle); +} + +void sensors_init(int video0_fd, int sensor_fd, int camera_num) { + uint32_t cam_packet_handle = 0; + int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2; + struct cam_packet *pkt = alloc(video0_fd, size, 8, + CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle); + pkt->num_cmd_buf = 2; + pkt->kmd_cmd_buf_index = -1; + pkt->header.op_code = 0x1000003; + pkt->header.size = size; + struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; + + buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_cmd_i2c_info) + sizeof(struct cam_cmd_probe); + buf_desc[0].type = CAM_CMD_BUF_LEGACY; + struct cam_cmd_i2c_info *i2c_info = alloc(video0_fd, buf_desc[0].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &buf_desc[0].mem_handle); + struct cam_cmd_probe *probe = (struct cam_cmd_probe *)((uint8_t *)i2c_info) + sizeof(struct cam_cmd_i2c_info); + + switch (camera_num) { + case 0: + // port 0 + i2c_info->slave_addr = 0x20; + probe->camera_id = 0; + break; + case 1: + // port 1 + i2c_info->slave_addr = 0x30; + probe->camera_id = 1; + break; + case 2: + // port 2 + i2c_info->slave_addr = 0x20; + probe->camera_id = 2; + break; + } + + // 0(I2C_STANDARD_MODE) = 100khz, 1(I2C_FAST_MODE) = 400khz + 
//i2c_info->i2c_freq_mode = I2C_STANDARD_MODE; + i2c_info->i2c_freq_mode = I2C_FAST_MODE; + i2c_info->cmd_type = CAMERA_SENSOR_CMD_TYPE_I2C_INFO; + + probe->data_type = CAMERA_SENSOR_I2C_TYPE_WORD; + probe->addr_type = CAMERA_SENSOR_I2C_TYPE_WORD; + probe->op_code = 3; // don't care? + probe->cmd_type = CAMERA_SENSOR_CMD_TYPE_PROBE; + probe->reg_addr = 0x3000; //0x300a; //0x300b; + probe->expected_data = 0x354; //0x7750; //0x885a; + probe->data_mask = 0; + + //buf_desc[1].size = buf_desc[1].length = 148; + buf_desc[1].size = buf_desc[1].length = 196; + buf_desc[1].type = CAM_CMD_BUF_I2C; + struct cam_cmd_power *power = alloc(video0_fd, buf_desc[1].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &buf_desc[1].mem_handle); + memset(power, 0, buf_desc[1].size); + struct cam_cmd_unconditional_wait *unconditional_wait; + + void *ptr = power; + // 7750 + /*power->count = 2; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP; + power->power_settings[0].power_seq_type = 2; + power->power_settings[1].power_seq_type = 8; + power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));*/ + + // 885a + power->count = 4; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP; + power->power_settings[0].power_seq_type = 3; // clock?? 
+ power->power_settings[1].power_seq_type = 1; // analog + power->power_settings[2].power_seq_type = 2; // digital + power->power_settings[3].power_seq_type = 8; // reset low + power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)); + + unconditional_wait = (void*)power; + unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT; + unconditional_wait->delay = 5; + unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND; + power = (void*)power + sizeof(struct cam_cmd_unconditional_wait); + + // set clock + power->count = 1; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP; + power->power_settings[0].power_seq_type = 0; + power->power_settings[0].config_val_low = 24000000; //Hz + power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)); + + unconditional_wait = (void*)power; + unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT; + unconditional_wait->delay = 10; // ms + unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND; + power = (void*)power + sizeof(struct cam_cmd_unconditional_wait); + + // 8,1 is this reset? 
+ power->count = 1; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP; + power->power_settings[0].power_seq_type = 8; + power->power_settings[0].config_val_low = 1; + power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)); + + unconditional_wait = (void*)power; + unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT; + unconditional_wait->delay = 100; // ms + unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND; + power = (void*)power + sizeof(struct cam_cmd_unconditional_wait); + + // probe happens here + + // disable clock + power->count = 1; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; + power->power_settings[0].power_seq_type = 0; + power->power_settings[0].config_val_low = 0; + power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)); + + unconditional_wait = (void*)power; + unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT; + unconditional_wait->delay = 1; + unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND; + power = (void*)power + sizeof(struct cam_cmd_unconditional_wait); + + // reset high + power->count = 1; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; + power->power_settings[0].power_seq_type = 8; + power->power_settings[0].config_val_low = 1; + power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)); + + unconditional_wait = (void*)power; + unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT; + unconditional_wait->delay = 1; + unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND; + power = (void*)power + sizeof(struct cam_cmd_unconditional_wait); + + // reset low + power->count = 1; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; + power->power_settings[0].power_seq_type = 8; + power->power_settings[0].config_val_low = 0; + power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)); + + 
unconditional_wait = (void*)power; + unconditional_wait->cmd_type = CAMERA_SENSOR_CMD_TYPE_WAIT; + unconditional_wait->delay = 1; + unconditional_wait->op_code = CAMERA_SENSOR_WAIT_OP_SW_UCND; + power = (void*)power + sizeof(struct cam_cmd_unconditional_wait); + + // 7750 + /*power->count = 1; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; + power->power_settings[0].power_seq_type = 2; + power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings));*/ + + // 885a + power->count = 3; + power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_DOWN; + power->power_settings[0].power_seq_type = 2; + power->power_settings[1].power_seq_type = 1; + power->power_settings[2].power_seq_type = 3; + power = (void*)power + (sizeof(struct cam_cmd_power) + (power->count-1)*sizeof(struct cam_power_settings)); + + LOGD("probing the sensor"); + int ret = cam_control(sensor_fd, CAM_SENSOR_PROBE_CMD, (void *)cam_packet_handle, 0); + assert(ret == 0); + + release_fd(video0_fd, buf_desc[0].mem_handle); + release_fd(video0_fd, buf_desc[1].mem_handle); + release_fd(video0_fd, cam_packet_handle); +} + +void config_isp(struct CameraState *s, int io_mem_handle, int fence, int request_id, int buf0_mem_handle, int buf0_offset) { + uint32_t cam_packet_handle = 0; + int size = sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*2; + if (io_mem_handle != 0) { + size += sizeof(struct cam_buf_io_cfg); + } + struct cam_packet *pkt = alloc(s->video0_fd, size, 8, + CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle); + pkt->num_cmd_buf = 2; + pkt->kmd_cmd_buf_index = 0; + + if (io_mem_handle != 0) { + pkt->io_configs_offset = sizeof(struct cam_cmd_buf_desc)*2; + pkt->num_io_configs = 1; + } + + if (io_mem_handle != 0) { + pkt->header.op_code = 0xf000001; + pkt->header.request_id = request_id; + } else { + pkt->header.op_code = 0xf000000; + } + pkt->header.size = size; + struct cam_cmd_buf_desc *buf_desc = 
(struct cam_cmd_buf_desc *)&pkt->payload; + struct cam_buf_io_cfg *io_cfg = (void*)&pkt->payload + pkt->io_configs_offset; + + // TODO: support MMU + buf_desc[0].size = 65624; + buf_desc[0].length = 0; + buf_desc[0].type = CAM_CMD_BUF_DIRECT; + buf_desc[0].meta_data = 3; + buf_desc[0].mem_handle = buf0_mem_handle; + buf_desc[0].offset = buf0_offset; + + buf_desc[1].size = 324; + if (io_mem_handle != 0) { + buf_desc[1].length = 228; // 0 works here too + buf_desc[1].offset = 0x60; + } else { + buf_desc[1].length = 324; + } + buf_desc[1].type = CAM_CMD_BUF_GENERIC; + buf_desc[1].meta_data = CAM_ISP_PACKET_META_GENERIC_BLOB_COMMON; + uint32_t *buf2 = alloc(s->video0_fd, buf_desc[1].size, 0x20, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &buf_desc[1].mem_handle); + + // cam_isp_packet_generic_blob_handler + uint32_t tmp[] = { + // size is 0x20, type is 0(CAM_ISP_GENERIC_BLOB_TYPE_HFR_CONFIG) + 0x2000, + 0x1, 0x0, CAM_ISP_IFE_OUT_RES_RDI_0, 0x1, 0x0, 0x1, 0x0, 0x0, // 1 port, CAM_ISP_IFE_OUT_RES_RDI_0 + // size is 0x38, type is 1(CAM_ISP_GENERIC_BLOB_TYPE_CLOCK_CONFIG), clocks + 0x3801, + 0x1, 0x4, // Dual mode, 4 RDI wires + 0x18148d00, 0x0, 0x18148d00, 0x0, 0x18148d00, 0x0, // rdi clock + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, // junk? 
+ // offset 0x60 + // size is 0xe0, type is 2(CAM_ISP_GENERIC_BLOB_TYPE_BW_CONFIG), bandwidth + 0xe002, + 0x1, 0x4, // 4 RDI + 0x0, 0x0, 0x1ad27480, 0x0, 0x1ad27480, 0x0, // left_pix_vote + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, // right_pix_vote + 0x0, 0x0, 0x6ee11c0, 0x2, 0x6ee11c0, 0x2, // rdi_vote + 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, + 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0}; + memcpy(buf2, tmp, sizeof(tmp)); + + if (io_mem_handle != 0) { + io_cfg[0].mem_handle[0] = io_mem_handle; + io_cfg[0].planes[0] = (struct cam_plane_cfg){ + .width = FRAME_WIDTH, + .height = FRAME_HEIGHT, + .plane_stride = FRAME_STRIDE, + .slice_height = FRAME_HEIGHT, + .meta_stride = 0x0, + .meta_size = 0x0, + .meta_offset = 0x0, + .packer_config = 0x0, + .mode_config = 0x0, + .tile_config = 0x0, + .h_init = 0x0, + .v_init = 0x0, + }; + io_cfg[0].format = 0x3; + io_cfg[0].color_pattern = 0x5; + io_cfg[0].bpp = 0xc; + io_cfg[0].resource_type = CAM_ISP_IFE_OUT_RES_RDI_0; + io_cfg[0].fence = fence; + io_cfg[0].direction = 0x2; + io_cfg[0].subsample_pattern = 0x1; + io_cfg[0].framedrop_pattern = 0x1; + } + + static struct cam_config_dev_cmd config_dev_cmd = {}; + config_dev_cmd.session_handle = s->session_handle; + config_dev_cmd.dev_handle = s->isp_dev_handle; + config_dev_cmd.offset = 0; + config_dev_cmd.packet_handle = cam_packet_handle; + + int ret = cam_control(s->isp_fd, CAM_CONFIG_DEV, &config_dev_cmd, sizeof(config_dev_cmd)); + if (ret != 0) { + printf("ISP CONFIG FAILED\n"); + } + + release_fd(s->video0_fd, buf_desc[1].mem_handle); + //release(s->video0_fd, buf_desc[0].mem_handle); + release_fd(s->video0_fd, cam_packet_handle); +} + +void enqueue_buffer(struct CameraState *s, int i) { + int ret; + int request_id = (++s->sched_request_id); + bool first = true; + + if (s->buf_handle[i]) { + first = false; + release(s->video0_fd, s->buf_handle[i]); + + // destroy old output fence + 
static struct cam_sync_info sync_destroy = {0}; + strcpy(sync_destroy.name, "NodeOutputPortFence"); + sync_destroy.sync_obj = s->sync_objs[i]; + ret = cam_control(s->video1_fd, CAM_SYNC_DESTROY, &sync_destroy, sizeof(sync_destroy)); + LOGD("fence destroy: %d %d", ret, sync_destroy.sync_obj); + } + + // new request_ids + s->request_ids[i] = request_id; + + // do stuff + static struct cam_req_mgr_sched_request req_mgr_sched_request = {0}; + req_mgr_sched_request.session_hdl = s->session_handle; + req_mgr_sched_request.link_hdl = s->link_handle; + req_mgr_sched_request.req_id = request_id; + ret = cam_control(s->video0_fd, CAM_REQ_MGR_SCHED_REQ, &req_mgr_sched_request, sizeof(req_mgr_sched_request)); + LOGD("sched req: %d %d", ret, request_id); + + // create output fence + static struct cam_sync_info sync_create = {0}; + strcpy(sync_create.name, "NodeOutputPortFence"); + ret = cam_control(s->video1_fd, CAM_SYNC_CREATE, &sync_create, sizeof(sync_create)); + LOGD("fence req: %d %d", ret, sync_create.sync_obj); + s->sync_objs[i] = sync_create.sync_obj; + + // configure ISP to put the image in place + static struct cam_mem_mgr_map_cmd mem_mgr_map_cmd = {0}; + mem_mgr_map_cmd.mmu_hdls[0] = s->device_iommu; + mem_mgr_map_cmd.num_hdl = 1; + mem_mgr_map_cmd.flags = 1; + mem_mgr_map_cmd.fd = s->bufs[i].fd; + ret = cam_control(s->video0_fd, CAM_REQ_MGR_MAP_BUF, &mem_mgr_map_cmd, sizeof(mem_mgr_map_cmd)); + LOGD("map buf req: (fd: %d) 0x%x %d", s->bufs[i].fd, mem_mgr_map_cmd.out.buf_handle, ret); + s->buf_handle[i] = mem_mgr_map_cmd.out.buf_handle; + + // poke sensor + sensors_poke(s, request_id); + LOGD("Poked sensor"); + + // push the buffer + config_isp(s, s->buf_handle[i], s->sync_objs[i], request_id, s->buf0_handle, 65632*(i+1)); +} + + +// ******************* camera ******************* + +static void camera_release_buffer(void* cookie, int i) { + int ret; + CameraState *s = cookie; + enqueue_buffer(s, i); +} + +static void camera_init(CameraState *s, int camera_id, int 
camera_num, unsigned int fps) { + LOGD("camera init %d", camera_num); + + // TODO: this is copied code from camera_webcam + assert(camera_id < ARRAYSIZE(cameras_supported)); + s->ci = cameras_supported[camera_id]; + assert(s->ci.frame_width != 0); + + s->camera_num = camera_num; + s->frame_size = s->ci.frame_height * s->ci.frame_stride; + + tbuffer_init2(&s->camera_tb, FRAME_BUF_COUNT, "frame", camera_release_buffer, s); + + s->transform = (mat3){{ + 1.0, 0.0, 0.0, + 0.0, 1.0, 0.0, + 0.0, 0.0, 1.0, + }}; + s->digital_gain = 1.0; +} + +static void camera_open(CameraState *s, VisionBuf* b) { + int ret; + s->bufs = b; + + // /dev/v4l-subdev10 is sensor, 11, 12, 13 are the other sensors + switch (s->camera_num) { + case 0: + s->sensor_fd = open("/dev/v4l-subdev10", O_RDWR | O_NONBLOCK); + break; + case 1: + s->sensor_fd = open("/dev/v4l-subdev11", O_RDWR | O_NONBLOCK); + break; + case 2: + s->sensor_fd = open("/dev/v4l-subdev12", O_RDWR | O_NONBLOCK); + break; + } + assert(s->sensor_fd >= 0); + LOGD("opened sensor"); + + // also at /dev/v4l-subdev3, 4, 5, 6 + switch (s->camera_num) { + case 0: + s->csiphy_fd = open("/dev/v4l-subdev3", O_RDWR | O_NONBLOCK); + break; + case 1: + s->csiphy_fd = open("/dev/v4l-subdev4", O_RDWR | O_NONBLOCK); + break; + case 2: + s->csiphy_fd = open("/dev/v4l-subdev5", O_RDWR | O_NONBLOCK); + break; + } + assert(s->csiphy_fd >= 0); + LOGD("opened csiphy"); + + // probe the sensor + LOGD("-- Probing sensor %d", s->camera_num); + sensors_init(s->video0_fd, s->sensor_fd, s->camera_num); + + memset(&s->req_mgr_session_info, 0, sizeof(s->req_mgr_session_info)); + ret = cam_control(s->video0_fd, CAM_REQ_MGR_CREATE_SESSION, &s->req_mgr_session_info, sizeof(s->req_mgr_session_info)); + LOGD("get session: %d 0x%X", ret, s->req_mgr_session_info.session_hdl); + s->session_handle = s->req_mgr_session_info.session_hdl; + + // access the sensor + LOGD("-- Accessing sensor"); + static struct cam_acquire_dev_cmd acquire_dev_cmd = {0}; + 
acquire_dev_cmd.session_handle = s->session_handle; + acquire_dev_cmd.handle_type = CAM_HANDLE_USER_POINTER; + ret = cam_control(s->sensor_fd, CAM_ACQUIRE_DEV, &acquire_dev_cmd, sizeof(acquire_dev_cmd)); + LOGD("acquire sensor dev: %d", ret); + s->sensor_dev_handle = acquire_dev_cmd.dev_handle; + + static struct cam_isp_resource isp_resource = {0}; + + acquire_dev_cmd.session_handle = s->session_handle; + acquire_dev_cmd.handle_type = CAM_HANDLE_USER_POINTER; + acquire_dev_cmd.num_resources = 1; + acquire_dev_cmd.resource_hdl = (uint64_t)&isp_resource; + + isp_resource.resource_id = CAM_ISP_RES_ID_PORT; + isp_resource.length = sizeof(struct cam_isp_in_port_info) + sizeof(struct cam_isp_out_port_info)*(1-1); + isp_resource.handle_type = CAM_HANDLE_USER_POINTER; + + struct cam_isp_in_port_info *in_port_info = malloc(isp_resource.length); + isp_resource.res_hdl = (uint64_t)in_port_info; + + switch (s->camera_num) { + case 0: + in_port_info->res_type = CAM_ISP_IFE_IN_RES_PHY_0; + break; + case 1: + in_port_info->res_type = CAM_ISP_IFE_IN_RES_PHY_1; + break; + case 2: + in_port_info->res_type = CAM_ISP_IFE_IN_RES_PHY_2; + break; + } + + in_port_info->lane_type = CAM_ISP_LANE_TYPE_DPHY; + in_port_info->lane_num = 4; + in_port_info->lane_cfg = 0x3210; + + in_port_info->vc = 0x0; + //in_port_info->dt = 0x2C; //CSI_RAW12 + //in_port_info->format = CAM_FORMAT_MIPI_RAW_12; + + in_port_info->dt = 0x2B; //CSI_RAW10 + in_port_info->format = CAM_FORMAT_MIPI_RAW_10; + + in_port_info->test_pattern = 0x2; // 0x3? 
+ in_port_info->usage_type = 0x0; + + in_port_info->left_start = 0x0; + in_port_info->left_stop = FRAME_WIDTH - 1; + in_port_info->left_width = FRAME_WIDTH; + + in_port_info->right_start = 0x0; + in_port_info->right_stop = FRAME_WIDTH - 1; + in_port_info->right_width = FRAME_WIDTH; + + in_port_info->line_start = 0x0; + in_port_info->line_stop = FRAME_HEIGHT - 1; + in_port_info->height = FRAME_HEIGHT; + + in_port_info->pixel_clk = 0x0; + in_port_info->batch_size = 0x0; + in_port_info->dsp_mode = 0x0; + in_port_info->hbi_cnt = 0x0; + in_port_info->custom_csid = 0x0; + + in_port_info->num_out_res = 0x1; + in_port_info->data[0] = (struct cam_isp_out_port_info){ + .res_type = CAM_ISP_IFE_OUT_RES_RDI_0, + //.format = CAM_FORMAT_MIPI_RAW_12, + .format = CAM_FORMAT_MIPI_RAW_10, + .width = FRAME_WIDTH, + .height = FRAME_HEIGHT, + .comp_grp_id = 0x0, .split_point = 0x0, .secure_mode = 0x0, + }; + + ret = cam_control(s->isp_fd, CAM_ACQUIRE_DEV, &acquire_dev_cmd, sizeof(acquire_dev_cmd)); + LOGD("acquire isp dev: %d", ret); + free(in_port_info); + s->isp_dev_handle = acquire_dev_cmd.dev_handle; + + static struct cam_csiphy_acquire_dev_info csiphy_acquire_dev_info = {0}; + csiphy_acquire_dev_info.combo_mode = 0; + + acquire_dev_cmd.session_handle = s->session_handle; + acquire_dev_cmd.handle_type = CAM_HANDLE_USER_POINTER; + acquire_dev_cmd.num_resources = 1; + acquire_dev_cmd.resource_hdl = (uint64_t)&csiphy_acquire_dev_info; + + ret = cam_control(s->csiphy_fd, CAM_ACQUIRE_DEV, &acquire_dev_cmd, sizeof(acquire_dev_cmd)); + LOGD("acquire csiphy dev: %d", ret); + s->csiphy_dev_handle = acquire_dev_cmd.dev_handle; + + // acquires done + + // config ISP + void *buf0 = alloc_w_mmu_hdl(s->video0_fd, 984480, 0x20, CAM_MEM_FLAG_HW_READ_WRITE | CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &s->buf0_handle, s->device_iommu, s->cdm_iommu); + config_isp(s, 0, 0, 1, s->buf0_handle, 0); + + LOG("-- Configuring sensor"); + sensors_i2c(s, init_array_ar0231, 
sizeof(init_array_ar0231)/sizeof(struct i2c_random_wr_payload), + CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG); + sensors_i2c(s, start_reg_array, sizeof(start_reg_array)/sizeof(struct i2c_random_wr_payload), + CAM_SENSOR_PACKET_OPCODE_SENSOR_STREAMON); + sensors_i2c(s, stop_reg_array, sizeof(stop_reg_array)/sizeof(struct i2c_random_wr_payload), + CAM_SENSOR_PACKET_OPCODE_SENSOR_STREAMOFF); + + // config csiphy + LOG("-- Config CSI PHY"); + { + uint32_t cam_packet_handle = 0; + struct cam_packet *pkt = alloc(s->video0_fd, sizeof(struct cam_packet)+sizeof(struct cam_cmd_buf_desc)*1, 8, + CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &cam_packet_handle); + pkt->num_cmd_buf = 1; + pkt->kmd_cmd_buf_index = -1; + struct cam_cmd_buf_desc *buf_desc = (struct cam_cmd_buf_desc *)&pkt->payload; + + buf_desc[0].size = buf_desc[0].length = sizeof(struct cam_csiphy_info); + buf_desc[0].type = CAM_CMD_BUF_GENERIC; + struct cam_csiphy_info *csiphy_info = alloc(s->video0_fd, buf_desc[0].size, 8, CAM_MEM_FLAG_KMD_ACCESS | CAM_MEM_FLAG_UMD_ACCESS | CAM_MEM_FLAG_CMD_BUF_TYPE, &buf_desc[0].mem_handle); + + csiphy_info->lane_mask = 0x1f; + csiphy_info->lane_assign = 0x3210;// skip clk. How is this 16 bit for 5 channels?? 
+ csiphy_info->csiphy_3phase = 0x0; // no 3 phase, only 2 conductors per lane + csiphy_info->combo_mode = 0x0; + csiphy_info->lane_cnt = 0x4; + csiphy_info->secure_mode = 0x0; + csiphy_info->settle_time = 2800000000; + csiphy_info->data_rate = 44000000; + + static struct cam_config_dev_cmd config_dev_cmd = {}; + config_dev_cmd.session_handle = s->session_handle; + config_dev_cmd.dev_handle = s->csiphy_dev_handle; + config_dev_cmd.offset = 0; + config_dev_cmd.packet_handle = cam_packet_handle; + + int ret = cam_control(s->csiphy_fd, CAM_CONFIG_DEV, &config_dev_cmd, sizeof(config_dev_cmd)); + assert(ret == 0); + + release(s->video0_fd, buf_desc[0].mem_handle); + release(s->video0_fd, cam_packet_handle); + } + + // link devices + LOG("-- Link devices"); + static struct cam_req_mgr_link_info req_mgr_link_info = {0}; + req_mgr_link_info.session_hdl = s->session_handle; + req_mgr_link_info.num_devices = 2; + req_mgr_link_info.dev_hdls[0] = s->isp_dev_handle; + req_mgr_link_info.dev_hdls[1] = s->sensor_dev_handle; + ret = cam_control(s->video0_fd, CAM_REQ_MGR_LINK, &req_mgr_link_info, sizeof(req_mgr_link_info)); + LOGD("link: %d", ret); + s->link_handle = req_mgr_link_info.link_hdl; + + static struct cam_req_mgr_link_control req_mgr_link_control = {0}; + req_mgr_link_control.ops = 0; + req_mgr_link_control.session_hdl = s->session_handle; + req_mgr_link_control.num_links = 1; + req_mgr_link_control.link_hdls[0] = s->link_handle; + ret = cam_control(s->video0_fd, CAM_REQ_MGR_LINK_CONTROL, &req_mgr_link_control, sizeof(req_mgr_link_control)); + LOGD("link control: %d", ret); + + // start devices + LOG("-- Start devices"); + ret = device_control(s->isp_fd, CAM_START_DEV, s->session_handle, s->isp_dev_handle); + LOGD("start isp: %d", ret); + ret = device_control(s->csiphy_fd, CAM_START_DEV, s->session_handle, s->csiphy_dev_handle); + LOGD("start csiphy: %d", ret); + ret = device_control(s->sensor_fd, CAM_START_DEV, s->session_handle, s->sensor_dev_handle); + LOGD("start 
sensor: %d", ret); + + for (int i = 0; i < FRAME_BUF_COUNT; i++) { + LOG("-- Initting buffer %d", i); + enqueue_buffer(s, i); + } +} + +void cameras_init(DualCameraState *s) { + camera_init(&s->rear, CAMERA_ID_AR0231, 0, 20); + camera_init(&s->wide, CAMERA_ID_AR0231, 1, 20); + camera_init(&s->front, CAMERA_ID_AR0231, 2, 20); +} + +void cameras_open(DualCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front) { + int ret; + + LOG("-- Opening devices"); + // video0 is the target of many ioctls + s->video0_fd = open("/dev/video0", O_RDWR | O_NONBLOCK); + assert(s->video0_fd >= 0); + LOGD("opened video0"); + s->rear.video0_fd = s->front.video0_fd = s->wide.video0_fd = s->video0_fd; + + // video1 is the target of some ioctls + s->video1_fd = open("/dev/video1", O_RDWR | O_NONBLOCK); + assert(s->video1_fd >= 0); + LOGD("opened video1"); + s->rear.video1_fd = s->front.video1_fd = s->wide.video1_fd = s->video1_fd; + + // looks like there's only one of these + s->isp_fd = open("/dev/v4l-subdev1", O_RDWR | O_NONBLOCK); + assert(s->isp_fd >= 0); + LOGD("opened isp"); + s->rear.isp_fd = s->front.isp_fd = s->wide.isp_fd = s->isp_fd; + + // query icp for MMU handles + LOG("-- Query ICP for MMU handles"); + static struct cam_isp_query_cap_cmd isp_query_cap_cmd = {0}; + static struct cam_query_cap_cmd query_cap_cmd = {0}; + query_cap_cmd.handle_type = 1; + query_cap_cmd.caps_handle = (uint64_t)&isp_query_cap_cmd; + query_cap_cmd.size = sizeof(isp_query_cap_cmd); + ret = cam_control(s->isp_fd, CAM_QUERY_CAP, &query_cap_cmd, sizeof(query_cap_cmd)); + assert(ret == 0); + LOGD("using MMU handle: %x", isp_query_cap_cmd.device_iommu.non_secure); + LOGD("using MMU handle: %x", isp_query_cap_cmd.cdm_iommu.non_secure); + int device_iommu = isp_query_cap_cmd.device_iommu.non_secure; + int cdm_iommu = isp_query_cap_cmd.cdm_iommu.non_secure; + s->rear.device_iommu = s->front.device_iommu = s->wide.device_iommu = 
device_iommu; + s->rear.cdm_iommu = s->front.cdm_iommu = s->wide.cdm_iommu = cdm_iommu; + + // subscribe + LOG("-- Subscribing"); + static struct v4l2_event_subscription sub = {0}; + sub.type = 0x8000000; + sub.id = 0; + ret = ioctl(s->video0_fd, VIDIOC_SUBSCRIBE_EVENT, &sub); + LOGD("isp subscribe: %d", ret); + sub.id = 1; + ret = ioctl(s->video0_fd, VIDIOC_SUBSCRIBE_EVENT, &sub); + LOGD("isp subscribe: %d", ret); + + camera_open(&s->rear, camera_bufs_rear); + //camera_open(&s->front, camera_bufs_front); + // TODO: add bufs for camera wide +} + +static void camera_close(CameraState *s) { + int ret; + + // stop devices + LOG("-- Stop devices"); + ret = device_control(s->sensor_fd, CAM_STOP_DEV, s->session_handle, s->sensor_dev_handle); + LOGD("stop sensor: %d", ret); + ret = device_control(s->isp_fd, CAM_STOP_DEV, s->session_handle, s->isp_dev_handle); + LOGD("stop isp: %d", ret); + ret = device_control(s->csiphy_fd, CAM_STOP_DEV, s->session_handle, s->csiphy_dev_handle); + LOGD("stop csiphy: %d", ret); + + // link control stop + LOG("-- Stop link control"); + static struct cam_req_mgr_link_control req_mgr_link_control = {0}; + req_mgr_link_control.ops = 1; + req_mgr_link_control.session_hdl = s->session_handle; + req_mgr_link_control.num_links = 1; + req_mgr_link_control.link_hdls[0] = s->link_handle; + ret = cam_control(s->video0_fd, CAM_REQ_MGR_LINK_CONTROL, &req_mgr_link_control, sizeof(req_mgr_link_control)); + LOGD("link control stop: %d", ret); + + // unlink + LOG("-- Unlink"); + static struct cam_req_mgr_unlink_info req_mgr_unlink_info = {0}; + req_mgr_unlink_info.session_hdl = s->session_handle; + req_mgr_unlink_info.link_hdl = s->link_handle; + ret = cam_control(s->video0_fd, CAM_REQ_MGR_UNLINK, &req_mgr_unlink_info, sizeof(req_mgr_unlink_info)); + LOGD("unlink: %d", ret); + + // release devices + LOGD("-- Release devices"); + ret = device_control(s->sensor_fd, CAM_RELEASE_DEV, s->session_handle, s->sensor_dev_handle); + LOGD("release sensor: %d", ret); + 
ret = device_control(s->isp_fd, CAM_RELEASE_DEV, s->session_handle, s->isp_dev_handle); + LOGD("release isp: %d", ret); + ret = device_control(s->csiphy_fd, CAM_RELEASE_DEV, s->session_handle, s->csiphy_dev_handle); + LOGD("release csiphy: %d", ret); + + ret = cam_control(s->video0_fd, CAM_REQ_MGR_DESTROY_SESSION, &s->req_mgr_session_info, sizeof(s->req_mgr_session_info)); + LOGD("destroyed session: %d", ret); + + tbuffer_stop(&s->camera_tb); +} + +static void cameras_close(DualCameraState *s) { + camera_close(&s->rear); + //camera_close(&s->front); + //camera_close(&s->wide); +} + +struct video_event_data { + int32_t session_hdl; + int32_t link_hdl; + int32_t frame_id; + int32_t reserved; + uint64_t tv_sec; + uint64_t tv_usec; +}; + +void cameras_run(DualCameraState *s) { + LOG("-- Dequeueing Video events"); + int frame_id = 1; + + while (!do_exit) { + struct pollfd fds[2] = {{0}}; + + fds[0].fd = s->video0_fd; + fds[0].events = POLLPRI; + + fds[1].fd = s->video1_fd; + fds[1].events = POLLPRI; + + int ret = poll(fds, ARRAYSIZE(fds), 1000); + if (ret <= 0) { + if (errno == EINTR || errno == EAGAIN) continue; + LOGE("poll failed (%d - %d)", ret, errno); + break; + } + + for (int i=0; i<2; i++) { + if (!fds[i].revents) continue; + static struct v4l2_event ev = {0}; + ret = ioctl(fds[i].fd, VIDIOC_DQEVENT, &ev); + if (ev.type == 0x8000000) { + struct video_event_data *event_data = (struct video_event_data *)ev.u.data; + uint64_t timestamp = (event_data->tv_sec*1000000000ULL + + event_data->tv_usec*1000); + LOGD("video%d dqevent: %d type:0x%x frame_id:%d timestamp: %llu", i, ret, ev.type, event_data->frame_id, timestamp); + + if (event_data->frame_id != 0) { + for (int j = 0; j < FRAME_BUF_COUNT; j++) { + if (s->rear.request_ids[j] == event_data->frame_id) { + // TODO: support more than rear camera + tbuffer_dispatch(&s->rear.camera_tb, j); + s->rear.camera_bufs_metadata[j].frame_id = frame_id++; + break; + } + } + } + } + } + } + + LOG(" ************** STOPPING 
**************"); + cameras_close(s); +} + +void camera_autoexposure(CameraState *s, float grey_frac) { +} + diff --git a/selfdrive/camerad/cameras/camera_qcom2.h b/selfdrive/camerad/cameras/camera_qcom2.h new file mode 100644 index 00000000000000..b15ea8b128bba2 --- /dev/null +++ b/selfdrive/camerad/cameras/camera_qcom2.h @@ -0,0 +1,86 @@ +#ifndef CAMERA_H +#define CAMERA_H + +#include +#include +#include + +#include "common/mat.h" +#include "common/visionbuf.h" +#include "common/buffering.h" + +#include "camera_common.h" + +#include "media/cam_req_mgr.h" + +#define FRAME_BUF_COUNT 4 +#define METADATA_BUF_COUNT 4 + +#ifdef __cplusplus +extern "C" { +#endif + + +typedef struct CameraState { + CameraInfo ci; + FrameMetadata camera_bufs_metadata[FRAME_BUF_COUNT]; + TBuffer camera_tb; + + int frame_size; + float digital_gain; + mat3 transform; + + int device_iommu; + int cdm_iommu; + + int video0_fd; + int video1_fd; + int isp_fd; + + int sensor_fd; + int csiphy_fd; + + int camera_num; + + VisionBuf *bufs; + + uint32_t session_handle; + + uint32_t sensor_dev_handle; + uint32_t isp_dev_handle; + uint32_t csiphy_dev_handle; + + int32_t link_handle; + + int buf0_handle; + int buf_handle[FRAME_BUF_COUNT]; + int sched_request_id; + int request_ids[FRAME_BUF_COUNT]; + int sync_objs[FRAME_BUF_COUNT]; + + struct cam_req_mgr_session_info req_mgr_session_info; +} CameraState; + +typedef struct DualCameraState { + int device; + + int video0_fd; + int video1_fd; + int isp_fd; + + CameraState rear; + CameraState front; + CameraState wide; +} DualCameraState; + +void cameras_init(DualCameraState *s); +void cameras_open(DualCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front); +void cameras_run(DualCameraState *s); +void camera_autoexposure(CameraState *s, float grey_frac); + +#ifdef __cplusplus +} // extern "C" +#endif + +#endif + diff --git a/selfdrive/camerad/cameras/camera_webcam.cc 
b/selfdrive/camerad/cameras/camera_webcam.cc new file mode 100644 index 00000000000000..e245a6a20af7ac --- /dev/null +++ b/selfdrive/camerad/cameras/camera_webcam.cc @@ -0,0 +1,249 @@ +#include "camera_webcam.h" + +#include +#include +#include +#include + +#include "common/util.h" +#include "common/timing.h" +#include "common/swaglog.h" +#include "buffering.h" + +#include +#include +#include +#include + +extern volatile sig_atomic_t do_exit; +#define FRAME_WIDTH 1164 +#define FRAME_HEIGHT 874 +#define FRAME_WIDTH_FRONT 1152 +#define FRAME_HEIGHT_FRONT 864 +namespace { + +void camera_open(CameraState *s, VisionBuf *camera_bufs, bool rear) { + printf("open cameras"); + assert(camera_bufs); + s->camera_bufs = camera_bufs; +} + +void camera_close(CameraState *s) { + tbuffer_stop(&s->camera_tb); +} + +void camera_release_buffer(void *cookie, int buf_idx) { + CameraState *s = static_cast(cookie); +} + + +void open_gl_stream_def(CameraState * s, int video_id, int width, int height, char ** strm_def) { + printf("OPENGLSTREAM"); + std::string strm_template="v4l2src device=/dev/video%d ! video/x-raw,width=%d,height=%d,framerate=%d/1,format=YUY2 !" + "nvvidconv ! video/x-raw(memory:NVMM),format=I420 !" + "nvvidconv ! video/x-raw,format=BGRx !" + "videoconvert ! video/x-raw,format=BGR !" + "videoscale ! video/x-raw,width=%d,height=%d !" 
+ " appsink "; + * strm_def = (char*)calloc(300,1); + sprintf(*strm_def,strm_template.c_str(),video_id, width, height, s->fps, s->ci.frame_width, s->ci.frame_height); + printf(" GL Stream :[%s]\n",*strm_def); +} + +void camera_init(CameraState *s, int camera_id, unsigned int fps) { + printf("\n\n\n Init Camera %d\n", camera_id); + assert(camera_id < ARRAYSIZE(cameras_supported)); + s->ci = cameras_supported[camera_id]; + assert(s->ci.frame_width != 0); + + s->frame_size = s->ci.frame_height * s->ci.frame_stride; + s->fps = fps; + tbuffer_init2(&s->camera_tb, FRAME_BUF_COUNT, "frame", camera_release_buffer, s); +} + +static void* rear_thread(void *arg) { + int err; + set_thread_name("webcam_rear_thread"); + CameraState* s = (CameraState*)arg; + char * strm_def; + printf("open_GL"); + open_gl_stream_def(s,1, 800, 600, &strm_def); + cv::VideoCapture cap_rear(strm_def); // road + free(strm_def); + + cv::Size size; + size.height = s->ci.frame_height; + size.width = s->ci.frame_width; + + if (!cap_rear.isOpened()) { + err = 1; + } + uint32_t frame_id = 0; + TBuffer* tb = &s->camera_tb; + cv::Mat transformed_mat; + while (!do_exit) { + if (cap_rear.read(transformed_mat)) { + //cv::Size tsize = transformed_mat.size(); + //printf("Raw Rear, W=%d, H=%d\n", tsize.width, tsize.height); + + int transformed_size = transformed_mat.total() * transformed_mat.elemSize(); + + const int buf_idx = tbuffer_select(tb); + s->camera_bufs_metadata[buf_idx] = { + .frame_id = frame_id, + }; + + cl_command_queue q = s->camera_bufs[buf_idx].copy_q; + cl_mem yuv_cl = s->camera_bufs[buf_idx].buf_cl; + cl_event map_event; + void *yuv_buf = (void *)clEnqueueMapBuffer(q, yuv_cl, CL_TRUE, + CL_MAP_WRITE, 0, transformed_size, + 0, NULL, &map_event, &err); + assert(err == 0); + clWaitForEvents(1, &map_event); + clReleaseEvent(map_event); + memcpy(yuv_buf, transformed_mat.data, transformed_size); + + clEnqueueUnmapMemObject(q, yuv_cl, yuv_buf, 0, NULL, &map_event); + clWaitForEvents(1, &map_event); + 
clReleaseEvent(map_event); + tbuffer_dispatch(tb, buf_idx); + + frame_id += 1; + + } + } + transformed_mat.release(); + cap_rear.release(); + return NULL; +} + +void front_thread(CameraState *s) { + int err; + printf("OPEN FRONT"); + char * strm_def; + open_gl_stream_def(s,0,640,480, &strm_def); + cv::VideoCapture cap_front(strm_def); // driver + free(strm_def); + + cv::Size size; + size.height = s->ci.frame_height; + size.width = s->ci.frame_width; + + if (!cap_front.isOpened()) { + err = 1; + } + + uint32_t frame_id = 0; + TBuffer* tb = &s->camera_tb; + cv::Mat transformed_mat; + + while (!do_exit) { + if(cap_front.read(transformed_mat)){ + //cv::Size tsize = transformed_mat.size(); + //printf("Raw Front, W=%d, H=%d\n", tsize.width, tsize.height); + + + int transformed_size = transformed_mat.total() * transformed_mat.elemSize(); + + const int buf_idx = tbuffer_select(tb); + s->camera_bufs_metadata[buf_idx] = { + .frame_id = frame_id, + }; + + cl_command_queue q = s->camera_bufs[buf_idx].copy_q; + cl_mem yuv_cl = s->camera_bufs[buf_idx].buf_cl; + cl_event map_event; + void *yuv_buf = (void *)clEnqueueMapBuffer(q, yuv_cl, CL_TRUE, + CL_MAP_WRITE, 0, transformed_size, + 0, NULL, &map_event, &err); + assert(err == 0); + clWaitForEvents(1, &map_event); + clReleaseEvent(map_event); + memcpy(yuv_buf, transformed_mat.data, transformed_size); + + clEnqueueUnmapMemObject(q, yuv_cl, yuv_buf, 0, NULL, &map_event); + clWaitForEvents(1, &map_event); + clReleaseEvent(map_event); + tbuffer_dispatch(tb, buf_idx); + + frame_id += 1; + } + } + transformed_mat.release(); + cap_front.release(); + return; +} + +} // namespace + +CameraInfo cameras_supported[CAMERA_ID_MAX] = { + // road facing + [CAMERA_ID_LGC920] = { + .frame_width = FRAME_WIDTH, + .frame_height = FRAME_HEIGHT, + .frame_stride = FRAME_WIDTH*3, + .bayer = false, + .bayer_flip = false, + }, + // driver facing + [CAMERA_ID_LGC615] = { + .frame_width = FRAME_WIDTH_FRONT, + .frame_height = FRAME_HEIGHT_FRONT, + 
.frame_stride = FRAME_WIDTH_FRONT*3, + .bayer = false, + .bayer_flip = false, + }, +}; + +void cameras_init(DualCameraState *s) { + memset(s, 0, sizeof(*s)); + + camera_init(&s->rear, CAMERA_ID_LGC920, 20); + s->rear.transform = (mat3){{ + 1.0, 0.0, 0.0, + 0.0, 1.0, 0.0, + 0.0, 0.0, 1.0, + }}; + camera_init(&s->front, CAMERA_ID_LGC615, 10); + s->front.transform = (mat3){{ + 1.0, 0.0, 0.0, + 0.0, 1.0, 0.0, + 0.0, 0.0, 1.0, + }}; +} + +void camera_autoexposure(CameraState *s, float grey_frac) {} + +void cameras_open(DualCameraState *s, VisionBuf *camera_bufs_rear, + VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, + VisionBuf *camera_bufs_front) { + assert(camera_bufs_rear); + assert(camera_bufs_front); + int err; + + printf("*** open front ***"); + camera_open(&s->front, camera_bufs_front, false); + + printf("*** open rear ***"); + camera_open(&s->rear, camera_bufs_rear, true); +} + +void cameras_close(DualCameraState *s) { + camera_close(&s->rear); + camera_close(&s->front); +} + +void cameras_run(DualCameraState *s) { + set_thread_name("webcam_thread"); + int err; + pthread_t rear_thread_handle; + err = pthread_create(&rear_thread_handle, NULL, + rear_thread, &s->rear); + assert(err == 0); + front_thread(&s->front); + + err = pthread_join(rear_thread_handle, NULL); + assert(err == 0); + cameras_close(s); +} diff --git a/selfdrive/camerad/cameras/camera_webcam.h b/selfdrive/camerad/cameras/camera_webcam.h new file mode 100644 index 00000000000000..28c3e6520955aa --- /dev/null +++ b/selfdrive/camerad/cameras/camera_webcam.h @@ -0,0 +1,58 @@ +#ifndef CAMERA_WEBCAM +#define CAMERA_WEBCAM + +#include + +#ifdef __APPLE__ +#include +#else +#include +#endif + +#include "common/mat.h" + +#include "buffering.h" +#include "common/visionbuf.h" +#include "camera_common.h" + +#define FRAME_BUF_COUNT 16 + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct CameraState { + int camera_id; + CameraInfo ci; + int frame_size; + + VisionBuf *camera_bufs; + 
FrameMetadata camera_bufs_metadata[FRAME_BUF_COUNT]; + TBuffer camera_tb; + + int fps; + float digital_gain; + + float cur_gain_frac; + + mat3 transform; +} CameraState; + + +typedef struct DualCameraState { + int ispif_fd; + + CameraState rear; + CameraState front; +} DualCameraState; + +void cameras_init(DualCameraState *s); +void cameras_open(DualCameraState *s, VisionBuf *camera_bufs_rear, VisionBuf *camera_bufs_focus, VisionBuf *camera_bufs_stats, VisionBuf *camera_bufs_front); +void cameras_run(DualCameraState *s); +void cameras_close(DualCameraState *s); +void camera_autoexposure(CameraState *s, float grey_frac); +#ifdef __cplusplus +} // extern "C" +#endif + +#endif diff --git a/selfdrive/camerad/cameras/debayer.cl b/selfdrive/camerad/cameras/debayer.cl index 344ead035672ec..c9e3716dfd1aa6 100644 --- a/selfdrive/camerad/cameras/debayer.cl +++ b/selfdrive/camerad/cameras/debayer.cl @@ -81,17 +81,28 @@ __kernel void debayer10(__global uchar const * const in, float4 p = convert_float4(pint); +#if NEW == 1 + // now it's 0x2a + const float black_level = 42.0f; + // max on black level + p = max(0.0, p - black_level); +#else // 64 is the black level of the sensor, remove // (changed to 56 for HDR) const float black_level = 56.0f; + // TODO: switch to max here? p = (p - black_level); +#endif + +#if NEW == 0 // correct vignetting (no pow function?) // see https://www.eecis.udel.edu/~jye/lab_research/09/JiUp.pdf the A (4th order) const float r = ((oy - RGB_HEIGHT/2)*(oy - RGB_HEIGHT/2) + (ox - RGB_WIDTH/2)*(ox - RGB_WIDTH/2)); const float fake_f = 700.0f; // should be 910, but this fits... 
const float lil_a = (1.0f + r/(fake_f*fake_f)); p = p * lil_a * lil_a; +#endif // rescale to 1.0 #if HDR @@ -114,8 +125,12 @@ __kernel void debayer10(__global uchar const * const in, float3 c1 = (float3)(p.s0, (p.s1+p.s2)/2.0f, p.s3); #endif +#if NEW + // TODO: new color correction +#else // color correction c1 = color_correct(c1); +#endif #if HDR // srgb gamma isn't right for YUV, so it's disabled for now diff --git a/selfdrive/camerad/cameras/sensor2_i2c.h b/selfdrive/camerad/cameras/sensor2_i2c.h new file mode 100644 index 00000000000000..621d28251e4d44 --- /dev/null +++ b/selfdrive/camerad/cameras/sensor2_i2c.h @@ -0,0 +1,690 @@ +struct i2c_random_wr_payload start_reg_array[] = {{0x301a, 0x1c}}; +//struct i2c_random_wr_payload stop_reg_array[] = {{0x301a, 0x10d8}}; +struct i2c_random_wr_payload stop_reg_array[] = {{0x301a, 0x18}};; + +struct i2c_random_wr_payload init_array_ar0231[] = { + {0x3092, 0x0C24}, + {0x337A, 0x0C80}, + {0x3520, 0x1288}, + {0x3522, 0x880C}, + {0x3524, 0x0C12}, + {0x352C, 0x1212}, + {0x354A, 0x007F}, + {0x350C, 28}, + {0x3506, 0x3333}, + {0x3508, 0x3333}, + {0x3100, 0x4000}, + {0x3280, 0x0FA0}, + {0x3282, 0x0FA0}, + {0x3284, 0x0FA0}, + {0x3286, 0x0FA0}, + {0x3288, 0x0FA0}, + {0x328A, 0x0FA0}, + {0x328C, 0x0FA0}, + {0x328E, 0x0FA0}, + {0x3290, 0x0FA0}, + {0x3292, 0x0FA0}, + {0x3294, 0x0FA0}, + {0x3296, 0x0FA0}, + {0x3298, 0x0FA0}, + {0x329A, 0x0FA0}, + {0x329C, 0x0FA0}, + {0x329E, 0x0FA0}, + + // SEQ_DATA_PORT? 
+ {0x2512, 0x8000}, + {0x2510, 0x0905}, + {0x2510, 0x3350}, + {0x2510, 0x2004}, + {0x2510, 0x1460}, + {0x2510, 0x1578}, + {0x2510, 0x0901}, + {0x2510, 0x7B24}, + {0x2510, 0xFF24}, + {0x2510, 0xFF24}, + {0x2510, 0xEA24}, + {0x2510, 0x1022}, + {0x2510, 0x2410}, + {0x2510, 0x155A}, + {0x2510, 0x0901}, + {0x2510, 0x1400}, + {0x2510, 0x24FF}, + {0x2510, 0x24FF}, + {0x2510, 0x24EA}, + {0x2510, 0x2324}, + {0x2510, 0x647A}, + {0x2510, 0x2404}, + {0x2510, 0x052C}, + {0x2510, 0x400A}, + {0x2510, 0xFF0A}, + {0x2510, 0xFF0A}, + {0x2510, 0x1008}, + {0x2510, 0x3851}, + {0x2510, 0x1440}, + {0x2510, 0x0004}, + {0x2510, 0x0801}, + {0x2510, 0x0408}, + {0x2510, 0x1180}, + {0x2510, 0x2652}, + {0x2510, 0x1518}, + {0x2510, 0x0906}, + {0x2510, 0x1348}, + {0x2510, 0x1002}, + {0x2510, 0x1016}, + {0x2510, 0x1181}, + {0x2510, 0x1189}, + {0x2510, 0x1056}, + {0x2510, 0x1210}, + {0x2510, 0x0901}, + {0x2510, 0x0D09}, + {0x2510, 0x1413}, + {0x2510, 0x8809}, + {0x2510, 0x2B15}, + {0x2510, 0x8809}, + {0x2510, 0x0311}, + {0x2510, 0xD909}, + {0x2510, 0x1214}, + {0x2510, 0x4109}, + {0x2510, 0x0312}, + {0x2510, 0x1409}, + {0x2510, 0x0110}, + {0x2510, 0xD612}, + {0x2510, 0x1012}, + {0x2510, 0x1212}, + {0x2510, 0x1011}, + {0x2510, 0xDD11}, + {0x2510, 0xD910}, + {0x2510, 0x5609}, + {0x2510, 0x1511}, + {0x2510, 0xDB09}, + {0x2510, 0x1511}, + {0x2510, 0x9B09}, + {0x2510, 0x0F11}, + {0x2510, 0xBB12}, + {0x2510, 0x1A12}, + {0x2510, 0x1014}, + {0x2510, 0x6012}, + {0x2510, 0x5010}, + {0x2510, 0x7610}, + {0x2510, 0xE609}, + {0x2510, 0x0812}, + {0x2510, 0x4012}, + {0x2510, 0x6009}, + {0x2510, 0x290B}, + {0x2510, 0x0904}, + {0x2510, 0x1440}, + {0x2510, 0x0923}, + {0x2510, 0x15C8}, + {0x2510, 0x13C8}, + {0x2510, 0x092C}, + {0x2510, 0x1588}, + {0x2510, 0x1388}, + {0x2510, 0x0C09}, + {0x2510, 0x0C14}, + {0x2510, 0x4109}, + {0x2510, 0x1112}, + {0x2510, 0x6212}, + {0x2510, 0x6011}, + {0x2510, 0xBF11}, + {0x2510, 0xBB10}, + {0x2510, 0x6611}, + {0x2510, 0xFB09}, + {0x2510, 0x3511}, + {0x2510, 0xBB12}, + {0x2510, 0x6312}, 
+ {0x2510, 0x6014}, + {0x2510, 0x0015}, + {0x2510, 0x0011}, + {0x2510, 0xB812}, + {0x2510, 0xA012}, + {0x2510, 0x0010}, + {0x2510, 0x2610}, + {0x2510, 0x0013}, + {0x2510, 0x0011}, + {0x2510, 0x0008}, + {0x2510, 0x3053}, + {0x2510, 0x4215}, + {0x2510, 0x4013}, + {0x2510, 0x4010}, + {0x2510, 0x0210}, + {0x2510, 0x1611}, + {0x2510, 0x8111}, + {0x2510, 0x8910}, + {0x2510, 0x5612}, + {0x2510, 0x1009}, + {0x2510, 0x010D}, + {0x2510, 0x0815}, + {0x2510, 0xC015}, + {0x2510, 0xD013}, + {0x2510, 0x5009}, + {0x2510, 0x1313}, + {0x2510, 0xD009}, + {0x2510, 0x0215}, + {0x2510, 0xC015}, + {0x2510, 0xC813}, + {0x2510, 0xC009}, + {0x2510, 0x0515}, + {0x2510, 0x8813}, + {0x2510, 0x8009}, + {0x2510, 0x0213}, + {0x2510, 0x8809}, + {0x2510, 0x0411}, + {0x2510, 0xC909}, + {0x2510, 0x0814}, + {0x2510, 0x0109}, + {0x2510, 0x0B11}, + {0x2510, 0xD908}, + {0x2510, 0x1400}, + {0x2510, 0x091A}, + {0x2510, 0x1440}, + {0x2510, 0x0903}, + {0x2510, 0x1214}, + {0x2510, 0x0901}, + {0x2510, 0x10D6}, + {0x2510, 0x1210}, + {0x2510, 0x1212}, + {0x2510, 0x1210}, + {0x2510, 0x11DD}, + {0x2510, 0x11D9}, + {0x2510, 0x1056}, + {0x2510, 0x0917}, + {0x2510, 0x11DB}, + {0x2510, 0x0913}, + {0x2510, 0x11FB}, + {0x2510, 0x0905}, + {0x2510, 0x11BB}, + {0x2510, 0x121A}, + {0x2510, 0x1210}, + {0x2510, 0x1460}, + {0x2510, 0x1250}, + {0x2510, 0x1076}, + {0x2510, 0x10E6}, + {0x2510, 0x0901}, + {0x2510, 0x15A8}, + {0x2510, 0x0901}, + {0x2510, 0x13A8}, + {0x2510, 0x1240}, + {0x2510, 0x1260}, + {0x2510, 0x0925}, + {0x2510, 0x13AD}, + {0x2510, 0x0902}, + {0x2510, 0x0907}, + {0x2510, 0x1588}, + {0x2510, 0x0901}, + {0x2510, 0x138D}, + {0x2510, 0x0B09}, + {0x2510, 0x0914}, + {0x2510, 0x4009}, + {0x2510, 0x0B13}, + {0x2510, 0x8809}, + {0x2510, 0x1C0C}, + {0x2510, 0x0920}, + {0x2510, 0x1262}, + {0x2510, 0x1260}, + {0x2510, 0x11BF}, + {0x2510, 0x11BB}, + {0x2510, 0x1066}, + {0x2510, 0x090A}, + {0x2510, 0x11FB}, + {0x2510, 0x093B}, + {0x2510, 0x11BB}, + {0x2510, 0x1263}, + {0x2510, 0x1260}, + {0x2510, 0x1400}, + {0x2510, 0x1508}, 
+ {0x2510, 0x11B8}, + {0x2510, 0x12A0}, + {0x2510, 0x1200}, + {0x2510, 0x1026}, + {0x2510, 0x1000}, + {0x2510, 0x1300}, + {0x2510, 0x1100}, + {0x2510, 0x437A}, + {0x2510, 0x0609}, + {0x2510, 0x0B05}, + {0x2510, 0x0708}, + {0x2510, 0x4137}, + {0x2510, 0x502C}, + {0x2510, 0x2CFE}, + {0x2510, 0x15FE}, + {0x2510, 0x0C2C}, + + // end SEQ_DATA_PORT + {0x32e6,0xe0}, + + // exposure time + {0x1008,0x36f}, + {0x100c,0x58f}, + {0x100e,0x7af}, + {0x1010,0x14f}, + {0x3230,0x312}, + {0x3232,0x532}, + {0x3234,0x752}, + {0x3236,0xf2}, + + {0x3566, 0x0}, + {0x32D0, 0x3A02}, + {0x32D2, 0x3508}, + {0x32D4, 0x3702}, + {0x32D6, 0x3C04}, + {0x32DC, 0x370A}, + {0x30b0, 0x200}, + {0x3082, 0x0}, + {0x33E0, 0x0080}, + {0x3180, 0x0080}, + {0x33E4, 0x0080}, + {0x33E0, 0x0C00}, + {0x33E0, 0x0000}, + + {0x31B4, 0x2185}, + {0x31B6, 0x1146}, + {0x31B8, 0x3047}, + + {0x31BA, 0x186}, + {0x31BC, 0x805}, + + // additions + // mipi + 4 lanes + {0x31AE, 0x0204}, + // DATA_FORMAT_RAW = 12 + // DATA_FORMAT_OUTPUT = 0? + //{0x31AC, 0x0C08}, + {0x31AC, 0x0C0A}, + + // 0x2B = CSI_RAW10 + {0x3342, 0x122B}, + {0x3346, 0x122B}, + {0x334a, 0x122B}, + {0x334e, 0x122B}, + + // 10-bit + {0x3036, 0xA}, +}; + +struct i2c_random_wr_payload poke_array_ov7750[] = { + {0x3208, 0x0}, {0x380e, 0x1a}, {0x380f, 0xf0}, {0x3500, 0x0}, {0x3501, 0x0}, {0x3502, 0x10}, {0x350a, 0x0}, {0x350b, 0x10}, {0x3208, 0x10}, {0x3208, 0xa0}, + //{0x3208, 0x0}, {0x380e, 0x1a}, {0x380f, 0xf0}, {0x3500, 0x0}, {0x3501, 0x0}, {0x3502, 0x10}, {0x350a, 0x0}, {0x350b, 0x10}, {0x3208, 0x10}, {0x3208, 0xa0}, +}; + +struct i2c_random_wr_payload preinit_array_ov7750[] = { + {0x103, 0x1}, + {0x303b, 0x2}, + {0x302b, 0x80}, +}; + +struct i2c_random_wr_payload init_array_ov7750[] = { + // 2nd batch + {0x3005, 0x0}, + {0x3012, 0xc0}, + {0x3013, 0xd2}, + {0x3014, 0x4}, + {0x3016, 0xf0}, + {0x3017, 0xf0}, + {0x3018, 0xf0}, + {0x301a, 0xf0}, + {0x301b, 0xf0}, + {0x301c, 0xf0}, + {0x3023, 0x5}, + {0x3037, 0xf0}, + {0x3098, 0x4}, + {0x3099, 0x28}, + {0x309a, 
0x5}, + {0x309b, 0x4}, + {0x30b0, 0xa}, + {0x30b1, 0x1}, + {0x30b3, 0x64}, + {0x30b4, 0x3}, + {0x30b5, 0x5}, + {0x3106, 0xda}, + {0x3500, 0x0}, + {0x3501, 0x1f}, + {0x3502, 0x80}, + {0x3503, 0x7}, + {0x3509, 0x10}, + {0x350b, 0x10}, + {0x3600, 0x1c}, + {0x3602, 0x62}, + {0x3620, 0xb7}, + {0x3622, 0x4}, + {0x3626, 0x21}, + {0x3627, 0x30}, + {0x3630, 0x44}, + {0x3631, 0x35}, + {0x3634, 0x60}, + {0x3636, 0x0}, + {0x3662, 0x1}, + {0x3663, 0x70}, + {0x3664, 0xf0}, + {0x3666, 0xa}, + {0x3669, 0x1a}, + {0x366a, 0x0}, + {0x366b, 0x50}, + {0x3673, 0x1}, + {0x3674, 0xff}, + {0x3675, 0x3}, + {0x3705, 0xc1}, + {0x3709, 0x40}, + {0x373c, 0x8}, + {0x3742, 0x0}, + {0x3757, 0xb3}, + {0x3788, 0x0}, + {0x37a8, 0x1}, + {0x37a9, 0xc0}, + {0x3800, 0x0}, + {0x3801, 0x4}, + {0x3802, 0x0}, + {0x3803, 0x4}, + {0x3804, 0x2}, + {0x3805, 0x8b}, + {0x3806, 0x1}, + {0x3807, 0xeb}, + {0x3808, 0x2}, + {0x3809, 0x80}, + {0x380a, 0x1}, + {0x380b, 0xe0}, + {0x380c, 0x3}, + {0x380d, 0xa0}, + {0x380e, 0x6}, + {0x380f, 0xbc}, + {0x3810, 0x0}, + {0x3811, 0x4}, + {0x3812, 0x0}, + {0x3813, 0x5}, + {0x3814, 0x11}, + {0x3815, 0x11}, + {0x3820, 0x40}, + {0x3821, 0x0}, + {0x382f, 0xe}, + {0x3832, 0x0}, + {0x3833, 0x5}, + {0x3834, 0x0}, + {0x3835, 0xc}, + {0x3837, 0x0}, + {0x3b80, 0x0}, + {0x3b81, 0xa5}, + {0x3b82, 0x10}, + {0x3b83, 0x0}, + {0x3b84, 0x8}, + {0x3b85, 0x0}, + {0x3b86, 0x1}, + {0x3b87, 0x0}, + {0x3b88, 0x0}, + {0x3b89, 0x0}, + {0x3b8a, 0x0}, + {0x3b8b, 0x5}, + {0x3b8c, 0x0}, + {0x3b8d, 0x0}, + {0x3b8e, 0x0}, + {0x3b8f, 0x1a}, + {0x3b94, 0x5}, + {0x3b95, 0xf2}, + {0x3b96, 0x40}, + {0x3c00, 0x89}, + {0x3c01, 0x63}, + {0x3c02, 0x1}, + {0x3c03, 0x0}, + {0x3c04, 0x0}, + {0x3c05, 0x3}, + {0x3c06, 0x0}, + {0x3c07, 0x6}, + {0x3c0c, 0x1}, + {0x3c0d, 0xd0}, + {0x3c0e, 0x2}, + {0x3c0f, 0xa}, + {0x4001, 0x42}, + {0x4004, 0x4}, + {0x4005, 0x0}, + {0x404e, 0x1}, + {0x4300, 0xff}, + {0x4301, 0x0}, + {0x4315, 0x0}, + {0x4501, 0x48}, + {0x4600, 0x0}, + {0x4601, 0x4e}, + {0x4801, 0xf}, + {0x4806, 0xf}, + {0x4819, 
0xaa}, + {0x4823, 0x3e}, + {0x4837, 0x19}, + {0x4a0d, 0x0}, + {0x4a47, 0x7f}, + {0x4a49, 0xf0}, + {0x4a4b, 0x30}, + {0x5000, 0x85}, + {0x5001, 0x80}, +}; + +struct i2c_random_wr_payload init_array_ov8856[] = { + // part 1 184 + {0x103, 0x1}, + {0x302, 0x3c}, + {0x303, 0x1}, + {0x31e, 0xc}, + {0x3000, 0x0}, + {0x300e, 0x0}, + {0x3010, 0x0}, + {0x3015, 0x84}, + {0x3018, 0x72}, + {0x3033, 0x24}, + {0x3500, 0x0}, + {0x3501, 0x4c}, + {0x3502, 0xe0}, + {0x3503, 0x8}, + {0x3505, 0x83}, + {0x3508, 0x1}, + {0x3509, 0x80}, + {0x350c, 0x0}, + {0x350d, 0x80}, + {0x350e, 0x4}, + {0x350f, 0x0}, + {0x3510, 0x0}, + {0x3511, 0x2}, + {0x3512, 0x0}, + {0x3600, 0x72}, + {0x3601, 0x40}, + {0x3602, 0x30}, + {0x3610, 0xc5}, + {0x3611, 0x58}, + {0x3612, 0x5c}, + {0x3613, 0x5a}, + {0x3614, 0x60}, + {0x3628, 0xff}, + {0x3629, 0xff}, + {0x362a, 0xff}, + {0x3633, 0x10}, + {0x3634, 0x10}, + {0x3635, 0x10}, + {0x3636, 0x10}, + {0x3663, 0x8}, + {0x3669, 0x34}, + {0x366e, 0x8}, + {0x3706, 0x86}, + {0x370b, 0x7e}, + {0x3714, 0x27}, + {0x3730, 0x12}, + {0x3733, 0x10}, + {0x3764, 0x0}, + {0x3765, 0x0}, + {0x3769, 0x62}, + {0x376a, 0x2a}, + {0x376b, 0x3b}, + {0x3780, 0x0}, + {0x3781, 0x24}, + {0x3782, 0x0}, + {0x3783, 0x23}, + {0x3798, 0x2f}, + {0x37a1, 0x60}, + {0x37a8, 0x6a}, + {0x37ab, 0x3f}, + {0x37c2, 0x14}, + {0x37c3, 0xf1}, + {0x37c9, 0x80}, + {0x37cb, 0x3}, + {0x37cc, 0xa}, + {0x37cd, 0x16}, + {0x37ce, 0x1f}, + {0x3800, 0x0}, + {0x3801, 0x0}, + {0x3802, 0x0}, + {0x3803, 0xc}, + {0x3804, 0xc}, + {0x3805, 0xdf}, + {0x3806, 0x9}, + {0x3807, 0xa3}, + {0x3808, 0x6}, + {0x3809, 0x60}, + {0x380a, 0x4}, + {0x380b, 0xc8}, + {0x380c, 0x7}, + {0x380d, 0x8c}, + {0x380e, 0x9}, + {0x380f, 0xb2}, + {0x3810, 0x0}, + {0x3811, 0x8}, + {0x3812, 0x0}, + {0x3813, 0x2}, + {0x3814, 0x3}, + {0x3815, 0x1}, + {0x3816, 0x0}, + {0x3817, 0x0}, + {0x3818, 0x0}, + {0x3819, 0x0}, + {0x3820, 0x90}, + {0x3821, 0x67}, + {0x382a, 0x3}, + {0x382b, 0x1}, + {0x3830, 0x6}, + {0x3836, 0x2}, + {0x3862, 0x4}, + {0x3863, 0x8}, + 
{0x3cc0, 0x33}, + {0x3d85, 0x17}, + {0x3d8c, 0x73}, + {0x3d8d, 0xde}, + {0x4001, 0xe0}, + {0x4003, 0x40}, + {0x4008, 0x0}, + {0x4009, 0x5}, + {0x400f, 0x80}, + {0x4010, 0xf0}, + {0x4011, 0xff}, + {0x4012, 0x2}, + {0x4013, 0x1}, + {0x4014, 0x1}, + {0x4015, 0x1}, + {0x4042, 0x0}, + {0x4043, 0x80}, + {0x4044, 0x0}, + {0x4045, 0x80}, + {0x4046, 0x0}, + {0x4047, 0x80}, + {0x4048, 0x0}, + {0x4049, 0x80}, + {0x4041, 0x3}, + {0x404c, 0x20}, + {0x404d, 0x0}, + {0x404e, 0x20}, + {0x4203, 0x80}, + {0x4307, 0x30}, + {0x4317, 0x0}, + {0x4503, 0x8}, + {0x4601, 0x80}, + {0x4816, 0x53}, + {0x481b, 0x58}, + {0x481f, 0x27}, + {0x4837, 0x16}, + {0x5000, 0x77}, + {0x5001, 0xe}, + {0x5004, 0x0}, + {0x502e, 0x0}, + {0x5030, 0x41}, + {0x5795, 0x0}, + {0x5796, 0x10}, + {0x5797, 0x10}, + {0x5798, 0x73}, + {0x5799, 0x73}, + {0x579a, 0x0}, + {0x579b, 0x28}, + {0x579c, 0x0}, + {0x579d, 0x16}, + {0x579e, 0x6}, + {0x579f, 0x20}, + {0x57a0, 0x4}, + {0x57a1, 0xa0}, + {0x5780, 0x14}, + {0x5781, 0xf}, + {0x5782, 0x44}, + {0x5783, 0x2}, + {0x5784, 0x1}, + {0x5785, 0x1}, + {0x5786, 0x0}, + {0x5787, 0x4}, + {0x5788, 0x2}, + {0x5789, 0xf}, + {0x578a, 0xfd}, + {0x578b, 0xf5}, + {0x578c, 0xf5}, + {0x578d, 0x3}, + {0x578e, 0x8}, + {0x578f, 0xc}, + {0x5790, 0x8}, + {0x5791, 0x4}, + {0x5792, 0x0}, + {0x5793, 0x52}, + {0x5794, 0xa3}, + {0x5a08, 0x2}, + {0x5b00, 0x2}, + {0x5b01, 0x10}, + {0x5b02, 0x3}, + {0x5b03, 0xcf}, + {0x5b05, 0x6c}, + {0x5e00, 0x0}, + + // part 2 45 + {0x3501, 0x9a}, + {0x3502, 0x20}, + {0x366d, 0x0}, + {0x366e, 0x10}, + {0x3714, 0x23}, + {0x37c2, 0x4}, + {0x3800, 0x0}, + {0x3801, 0x0}, + {0x3802, 0x0}, + {0x3803, 0xc}, + {0x3804, 0xc}, + {0x3805, 0xdf}, + {0x3806, 0x9}, + {0x3807, 0xa3}, + {0x3808, 0xc}, + {0x3809, 0xc0}, + {0x380a, 0x9}, + {0x380b, 0x90}, + {0x380c, 0x7}, + {0x380d, 0x8c}, + {0x380e, 0x9}, + {0x380f, 0xb2}, + {0x3811, 0x10}, + {0x3813, 0x4}, + {0x3814, 0x1}, + {0x3820, 0xc6}, + {0x3821, 0x40}, + {0x382a, 0x1}, + {0x4009, 0xb}, + {0x4601, 0x80}, + {0x5003, 0xc8}, + 
{0x5006, 0x0}, + {0x5007, 0x0}, + {0x5795, 0x2}, + {0x5796, 0x20}, + {0x5797, 0x20}, + {0x5798, 0xd5}, + {0x5799, 0xd5}, + {0x579b, 0x50}, + {0x579d, 0x2c}, + {0x579e, 0xc}, + {0x579f, 0x40}, + {0x57a0, 0x9}, + {0x57a1, 0x40}, + {0x5e10, 0xfc}, +}; + diff --git a/selfdrive/camerad/include/media/cam_cpas.h b/selfdrive/camerad/include/media/cam_cpas.h new file mode 100644 index 00000000000000..c5cbac82e71b50 --- /dev/null +++ b/selfdrive/camerad/include/media/cam_cpas.h @@ -0,0 +1,25 @@ +#ifndef __UAPI_CAM_CPAS_H__ +#define __UAPI_CAM_CPAS_H__ + +#include "cam_defs.h" + +#define CAM_FAMILY_CAMERA_SS 1 +#define CAM_FAMILY_CPAS_SS 2 + +/** + * struct cam_cpas_query_cap - CPAS query device capability payload + * + * @camera_family : Camera family type + * @reserved : Reserved field for alignment + * @camera_version : Camera platform version + * @cpas_version : Camera CPAS version within camera platform + * + */ +struct cam_cpas_query_cap { + uint32_t camera_family; + uint32_t reserved; + struct cam_hw_version camera_version; + struct cam_hw_version cpas_version; +}; + +#endif /* __UAPI_CAM_CPAS_H__ */ diff --git a/selfdrive/camerad/include/media/cam_defs.h b/selfdrive/camerad/include/media/cam_defs.h new file mode 100644 index 00000000000000..e006463d2aa6c3 --- /dev/null +++ b/selfdrive/camerad/include/media/cam_defs.h @@ -0,0 +1,477 @@ +#ifndef __UAPI_CAM_DEFS_H__ +#define __UAPI_CAM_DEFS_H__ + +#include +#include +#include + + +/* camera op codes */ +#define CAM_COMMON_OPCODE_BASE 0x100 +#define CAM_QUERY_CAP (CAM_COMMON_OPCODE_BASE + 0x1) +#define CAM_ACQUIRE_DEV (CAM_COMMON_OPCODE_BASE + 0x2) +#define CAM_START_DEV (CAM_COMMON_OPCODE_BASE + 0x3) +#define CAM_STOP_DEV (CAM_COMMON_OPCODE_BASE + 0x4) +#define CAM_CONFIG_DEV (CAM_COMMON_OPCODE_BASE + 0x5) +#define CAM_RELEASE_DEV (CAM_COMMON_OPCODE_BASE + 0x6) +#define CAM_SD_SHUTDOWN (CAM_COMMON_OPCODE_BASE + 0x7) +#define CAM_FLUSH_REQ (CAM_COMMON_OPCODE_BASE + 0x8) +#define CAM_COMMON_OPCODE_MAX 
(CAM_COMMON_OPCODE_BASE + 0x9) + +#define CAM_EXT_OPCODE_BASE 0x200 +#define CAM_CONFIG_DEV_EXTERNAL (CAM_EXT_OPCODE_BASE + 0x1) + +/* camera handle type */ +#define CAM_HANDLE_USER_POINTER 1 +#define CAM_HANDLE_MEM_HANDLE 2 + +/* Generic Blob CmdBuffer header properties */ +#define CAM_GENERIC_BLOB_CMDBUFFER_SIZE_MASK 0xFFFFFF00 +#define CAM_GENERIC_BLOB_CMDBUFFER_SIZE_SHIFT 8 +#define CAM_GENERIC_BLOB_CMDBUFFER_TYPE_MASK 0xFF +#define CAM_GENERIC_BLOB_CMDBUFFER_TYPE_SHIFT 0 + +/* Command Buffer Types */ +#define CAM_CMD_BUF_DMI 0x1 +#define CAM_CMD_BUF_DMI16 0x2 +#define CAM_CMD_BUF_DMI32 0x3 +#define CAM_CMD_BUF_DMI64 0x4 +#define CAM_CMD_BUF_DIRECT 0x5 +#define CAM_CMD_BUF_INDIRECT 0x6 +#define CAM_CMD_BUF_I2C 0x7 +#define CAM_CMD_BUF_FW 0x8 +#define CAM_CMD_BUF_GENERIC 0x9 +#define CAM_CMD_BUF_LEGACY 0xA + +/** + * enum flush_type_t - Identifies the various flush types + * + * @CAM_FLUSH_TYPE_REQ: Flush specific request + * @CAM_FLUSH_TYPE_ALL: Flush all requests belonging to a context + * @CAM_FLUSH_TYPE_MAX: Max enum to validate flush type + * + */ +enum flush_type_t { + CAM_FLUSH_TYPE_REQ, + CAM_FLUSH_TYPE_ALL, + CAM_FLUSH_TYPE_MAX +}; + +/** + * struct cam_control - Structure used by ioctl control for camera + * + * @op_code: This is the op code for camera control + * @size: Control command size + * @handle_type: User pointer or shared memory handle + * @reserved: Reserved field for 64 bit alignment + * @handle: Control command payload + */ +struct cam_control { + uint32_t op_code; + uint32_t size; + uint32_t handle_type; + uint32_t reserved; + uint64_t handle; +}; + +/* camera IOCTL */ +#define VIDIOC_CAM_CONTROL \ + _IOWR('V', BASE_VIDIOC_PRIVATE, struct cam_control) + +/** + * struct cam_hw_version - Structure for HW version of camera devices + * + * @major : Hardware version major + * @minor : Hardware version minor + * @incr : Hardware version increment + * @reserved : Reserved for 64 bit aligngment + */ +struct cam_hw_version { + uint32_t major; + 
uint32_t minor; + uint32_t incr; + uint32_t reserved; +}; + +/** + * struct cam_iommu_handle - Structure for IOMMU handles of camera hw devices + * + * @non_secure: Device Non Secure IOMMU handle + * @secure: Device Secure IOMMU handle + * + */ +struct cam_iommu_handle { + int32_t non_secure; + int32_t secure; +}; + +/* camera secure mode */ +#define CAM_SECURE_MODE_NON_SECURE 0 +#define CAM_SECURE_MODE_SECURE 1 + +/* Camera Format Type */ +#define CAM_FORMAT_BASE 0 +#define CAM_FORMAT_MIPI_RAW_6 1 +#define CAM_FORMAT_MIPI_RAW_8 2 +#define CAM_FORMAT_MIPI_RAW_10 3 +#define CAM_FORMAT_MIPI_RAW_12 4 +#define CAM_FORMAT_MIPI_RAW_14 5 +#define CAM_FORMAT_MIPI_RAW_16 6 +#define CAM_FORMAT_MIPI_RAW_20 7 +#define CAM_FORMAT_QTI_RAW_8 8 +#define CAM_FORMAT_QTI_RAW_10 9 +#define CAM_FORMAT_QTI_RAW_12 10 +#define CAM_FORMAT_QTI_RAW_14 11 +#define CAM_FORMAT_PLAIN8 12 +#define CAM_FORMAT_PLAIN16_8 13 +#define CAM_FORMAT_PLAIN16_10 14 +#define CAM_FORMAT_PLAIN16_12 15 +#define CAM_FORMAT_PLAIN16_14 16 +#define CAM_FORMAT_PLAIN16_16 17 +#define CAM_FORMAT_PLAIN32_20 18 +#define CAM_FORMAT_PLAIN64 19 +#define CAM_FORMAT_PLAIN128 20 +#define CAM_FORMAT_ARGB 21 +#define CAM_FORMAT_ARGB_10 22 +#define CAM_FORMAT_ARGB_12 23 +#define CAM_FORMAT_ARGB_14 24 +#define CAM_FORMAT_DPCM_10_6_10 25 +#define CAM_FORMAT_DPCM_10_8_10 26 +#define CAM_FORMAT_DPCM_12_6_12 27 +#define CAM_FORMAT_DPCM_12_8_12 28 +#define CAM_FORMAT_DPCM_14_8_14 29 +#define CAM_FORMAT_DPCM_14_10_14 30 +#define CAM_FORMAT_NV21 31 +#define CAM_FORMAT_NV12 32 +#define CAM_FORMAT_TP10 33 +#define CAM_FORMAT_YUV422 34 +#define CAM_FORMAT_PD8 35 +#define CAM_FORMAT_PD10 36 +#define CAM_FORMAT_UBWC_NV12 37 +#define CAM_FORMAT_UBWC_NV12_4R 38 +#define CAM_FORMAT_UBWC_TP10 39 +#define CAM_FORMAT_UBWC_P010 40 +#define CAM_FORMAT_PLAIN8_SWAP 41 +#define CAM_FORMAT_PLAIN8_10 42 +#define CAM_FORMAT_PLAIN8_10_SWAP 43 +#define CAM_FORMAT_YV12 44 +#define CAM_FORMAT_Y_ONLY 45 +#define CAM_FORMAT_MAX 46 + +/* camera rotaion */ 
+#define CAM_ROTATE_CW_0_DEGREE 0 +#define CAM_ROTATE_CW_90_DEGREE 1 +#define CAM_RORATE_CW_180_DEGREE 2 +#define CAM_ROTATE_CW_270_DEGREE 3 + +/* camera Color Space */ +#define CAM_COLOR_SPACE_BASE 0 +#define CAM_COLOR_SPACE_BT601_FULL 1 +#define CAM_COLOR_SPACE_BT601625 2 +#define CAM_COLOR_SPACE_BT601525 3 +#define CAM_COLOR_SPACE_BT709 4 +#define CAM_COLOR_SPACE_DEPTH 5 +#define CAM_COLOR_SPACE_MAX 6 + +/* camera buffer direction */ +#define CAM_BUF_INPUT 1 +#define CAM_BUF_OUTPUT 2 +#define CAM_BUF_IN_OUT 3 + +/* camera packet device Type */ +#define CAM_PACKET_DEV_BASE 0 +#define CAM_PACKET_DEV_IMG_SENSOR 1 +#define CAM_PACKET_DEV_ACTUATOR 2 +#define CAM_PACKET_DEV_COMPANION 3 +#define CAM_PACKET_DEV_EEPOM 4 +#define CAM_PACKET_DEV_CSIPHY 5 +#define CAM_PACKET_DEV_OIS 6 +#define CAM_PACKET_DEV_FLASH 7 +#define CAM_PACKET_DEV_FD 8 +#define CAM_PACKET_DEV_JPEG_ENC 9 +#define CAM_PACKET_DEV_JPEG_DEC 10 +#define CAM_PACKET_DEV_VFE 11 +#define CAM_PACKET_DEV_CPP 12 +#define CAM_PACKET_DEV_CSID 13 +#define CAM_PACKET_DEV_ISPIF 14 +#define CAM_PACKET_DEV_IFE 15 +#define CAM_PACKET_DEV_ICP 16 +#define CAM_PACKET_DEV_LRME 17 +#define CAM_PACKET_DEV_MAX 18 + + +/* constants */ +#define CAM_PACKET_MAX_PLANES 3 + +/** + * struct cam_plane_cfg - Plane configuration info + * + * @width: Plane width in pixels + * @height: Plane height in lines + * @plane_stride: Plane stride in pixel + * @slice_height: Slice height in line (not used by ISP) + * @meta_stride: UBWC metadata stride + * @meta_size: UBWC metadata plane size + * @meta_offset: UBWC metadata offset + * @packer_config: UBWC packer config + * @mode_config: UBWC mode config + * @tile_config: UBWC tile config + * @h_init: UBWC horizontal initial coordinate in pixels + * @v_init: UBWC vertical initial coordinate in lines + * + */ +struct cam_plane_cfg { + uint32_t width; + uint32_t height; + uint32_t plane_stride; + uint32_t slice_height; + uint32_t meta_stride; + uint32_t meta_size; + uint32_t meta_offset; + uint32_t 
packer_config; + uint32_t mode_config; + uint32_t tile_config; + uint32_t h_init; + uint32_t v_init; +}; + +/** + * struct cam_cmd_buf_desc - Command buffer descriptor + * + * @mem_handle: Command buffer handle + * @offset: Command start offset + * @size: Size of the command buffer in bytes + * @length: Used memory in command buffer in bytes + * @type: Type of the command buffer + * @meta_data: Data type for private command buffer + * Between UMD and KMD + * + */ +struct cam_cmd_buf_desc { + int32_t mem_handle; + uint32_t offset; + uint32_t size; + uint32_t length; + uint32_t type; + uint32_t meta_data; +}; + +/** + * struct cam_buf_io_cfg - Buffer io configuration for buffers + * + * @mem_handle: Mem_handle array for the buffers. + * @offsets: Offsets for each planes in the buffer + * @planes: Per plane information + * @width: Main plane width in pixel + * @height: Main plane height in lines + * @format: Format of the buffer + * @color_space: Color space for the buffer + * @color_pattern: Color pattern in the buffer + * @bpp: Bit per pixel + * @rotation: Rotation information for the buffer + * @resource_type: Resource type associated with the buffer + * @fence: Fence handle + * @early_fence: Fence handle for early signal + * @aux_cmd_buf: An auxiliary command buffer that may be + * used for programming the IO + * @direction: Direction of the config + * @batch_size: Batch size in HFR mode + * @subsample_pattern: Subsample pattern. Used in HFR mode. It + * should be consistent with batchSize and + * CAMIF programming. + * @subsample_period: Subsample period. Used in HFR mode. It + * should be consistent with batchSize and + * CAMIF programming. 
+ * @framedrop_pattern: Framedrop pattern + * @framedrop_period: Framedrop period + * @flag: Flags for extra information + * @direction: Buffer direction: input or output + * @padding: Padding for the structure + * + */ +struct cam_buf_io_cfg { + int32_t mem_handle[CAM_PACKET_MAX_PLANES]; + uint32_t offsets[CAM_PACKET_MAX_PLANES]; + struct cam_plane_cfg planes[CAM_PACKET_MAX_PLANES]; + uint32_t format; + uint32_t color_space; + uint32_t color_pattern; + uint32_t bpp; + uint32_t rotation; + uint32_t resource_type; + int32_t fence; + int32_t early_fence; + struct cam_cmd_buf_desc aux_cmd_buf; + uint32_t direction; + uint32_t batch_size; + uint32_t subsample_pattern; + uint32_t subsample_period; + uint32_t framedrop_pattern; + uint32_t framedrop_period; + uint32_t flag; + uint32_t padding; +}; + +/** + * struct cam_packet_header - Camera packet header + * + * @op_code: Camera packet opcode + * @size: Size of the camera packet in bytes + * @request_id: Request id for this camera packet + * @flags: Flags for the camera packet + * @padding: Padding + * + */ +struct cam_packet_header { + uint32_t op_code; + uint32_t size; + uint64_t request_id; + uint32_t flags; + uint32_t padding; +}; + +/** + * struct cam_patch_desc - Patch structure + * + * @dst_buf_hdl: Memory handle for the dest buffer + * @dst_offset: Offset byte in the dest buffer + * @src_buf_hdl: Memory handle for the source buffer + * @src_offset: Offset byte in the source buffer + * + */ +struct cam_patch_desc { + int32_t dst_buf_hdl; + uint32_t dst_offset; + int32_t src_buf_hdl; + uint32_t src_offset; +}; + +/** + * struct cam_packet - Camera packet structure + * + * @header: Camera packet header + * @cmd_buf_offset: Command buffer start offset + * @num_cmd_buf: Number of the command buffer in the packet + * @io_config_offset: Buffer io configuration start offset + * @num_io_configs: Number of the buffer io configurations + * @patch_offset: Patch offset for the patch structure + * @num_patches: Number of the 
patch structure + * @kmd_cmd_buf_index: Command buffer index which contains extra + * space for the KMD buffer + * @kmd_cmd_buf_offset: Offset from the beginning of the command + * buffer for KMD usage. + * @payload: Camera packet payload + * + */ +struct cam_packet { + struct cam_packet_header header; + uint32_t cmd_buf_offset; + uint32_t num_cmd_buf; + uint32_t io_configs_offset; + uint32_t num_io_configs; + uint32_t patch_offset; + uint32_t num_patches; + uint32_t kmd_cmd_buf_index; + uint32_t kmd_cmd_buf_offset; + uint64_t payload[1]; + +}; + +/** + * struct cam_release_dev_cmd - Control payload for release devices + * + * @session_handle: Session handle for the release + * @dev_handle: Device handle for the release + */ +struct cam_release_dev_cmd { + int32_t session_handle; + int32_t dev_handle; +}; + +/** + * struct cam_start_stop_dev_cmd - Control payload for start/stop device + * + * @session_handle: Session handle for the start/stop command + * @dev_handle: Device handle for the start/stop command + * + */ +struct cam_start_stop_dev_cmd { + int32_t session_handle; + int32_t dev_handle; +}; + +/** + * struct cam_config_dev_cmd - Command payload for configure device + * + * @session_handle: Session handle for the command + * @dev_handle: Device handle for the command + * @offset: Offset byte in the packet handle. + * @packet_handle: Packet memory handle for the actual packet: + * struct cam_packet. 
+ * + */ +struct cam_config_dev_cmd { + int32_t session_handle; + int32_t dev_handle; + uint64_t offset; + uint64_t packet_handle; +}; + +/** + * struct cam_query_cap_cmd - Payload for query device capability + * + * @size: Handle size + * @handle_type: User pointer or shared memory handle + * @caps_handle: Device specific query command payload + * + */ +struct cam_query_cap_cmd { + uint32_t size; + uint32_t handle_type; + uint64_t caps_handle; +}; + +/** + * struct cam_acquire_dev_cmd - Control payload for acquire devices + * + * @session_handle: Session handle for the acquire command + * @dev_handle: Device handle to be returned + * @handle_type: Resource handle type: + * 1 = user pointer, 2 = mem handle + * @num_resources: Number of the resources to be acquired + * @resources_hdl: Resource handle that refers to the actual + * resource array. Each item in this + * array is device specific resource structure + * + */ +struct cam_acquire_dev_cmd { + int32_t session_handle; + int32_t dev_handle; + uint32_t handle_type; + uint32_t num_resources; + uint64_t resource_hdl; +}; + +/** + * struct cam_flush_dev_cmd - Control payload for flush devices + * + * @version: Version + * @session_handle: Session handle for the acquire command + * @dev_handle: Device handle to be returned + * @flush_type: Flush type: + * 0 = flush specific request + * 1 = flush all + * @reserved: Reserved for 64 bit aligngment + * @req_id: Request id that needs to cancel + * + */ +struct cam_flush_dev_cmd { + uint64_t version; + int32_t session_handle; + int32_t dev_handle; + uint32_t flush_type; + uint32_t reserved; + int64_t req_id; +}; + +#endif /* __UAPI_CAM_DEFS_H__ */ diff --git a/selfdrive/camerad/include/media/cam_fd.h b/selfdrive/camerad/include/media/cam_fd.h new file mode 100644 index 00000000000000..8feb6e4da89a79 --- /dev/null +++ b/selfdrive/camerad/include/media/cam_fd.h @@ -0,0 +1,127 @@ +#ifndef __UAPI_CAM_FD_H__ +#define __UAPI_CAM_FD_H__ + +#include "cam_defs.h" + +#define 
CAM_FD_MAX_FACES 35 +#define CAM_FD_RAW_RESULT_ENTRIES 512 + +/* FD Op Codes */ +#define CAM_PACKET_OPCODES_FD_FRAME_UPDATE 0x0 + +/* FD Command Buffer identifiers */ +#define CAM_FD_CMD_BUFFER_ID_GENERIC 0x0 +#define CAM_FD_CMD_BUFFER_ID_CDM 0x1 +#define CAM_FD_CMD_BUFFER_ID_MAX 0x2 + +/* FD Blob types */ +#define CAM_FD_BLOB_TYPE_SOC_CLOCK_BW_REQUEST 0x0 +#define CAM_FD_BLOB_TYPE_RAW_RESULTS_REQUIRED 0x1 + +/* FD Resource IDs */ +#define CAM_FD_INPUT_PORT_ID_IMAGE 0x0 +#define CAM_FD_INPUT_PORT_ID_MAX 0x1 + +#define CAM_FD_OUTPUT_PORT_ID_RESULTS 0x0 +#define CAM_FD_OUTPUT_PORT_ID_RAW_RESULTS 0x1 +#define CAM_FD_OUTPUT_PORT_ID_WORK_BUFFER 0x2 +#define CAM_FD_OUTPUT_PORT_ID_MAX 0x3 + +/** + * struct cam_fd_soc_clock_bw_request - SOC clock, bandwidth request info + * + * @clock_rate : Clock rate required while processing frame + * @bandwidth : Bandwidth required while processing frame + * @reserved : Reserved for future use + */ +struct cam_fd_soc_clock_bw_request { + uint64_t clock_rate; + uint64_t bandwidth; + uint64_t reserved[4]; +}; + +/** + * struct cam_fd_face - Face properties + * + * @prop1 : Property 1 of face + * @prop2 : Property 2 of face + * @prop3 : Property 3 of face + * @prop4 : Property 4 of face + * + * Do not change this layout, this is inline with how HW writes + * these values directly when the buffer is programmed to HW + */ +struct cam_fd_face { + uint32_t prop1; + uint32_t prop2; + uint32_t prop3; + uint32_t prop4; +}; + +/** + * struct cam_fd_results - FD results layout + * + * @faces : Array of faces with face properties + * @face_count : Number of faces detected + * @reserved : Reserved for alignment + * + * Do not change this layout, this is inline with how HW writes + * these values directly when the buffer is programmed to HW + */ +struct cam_fd_results { + struct cam_fd_face faces[CAM_FD_MAX_FACES]; + uint32_t face_count; + uint32_t reserved[3]; +}; + +/** + * struct cam_fd_hw_caps - Face properties + * + * @core_version : FD core 
version + * @wrapper_version : FD wrapper version + * @raw_results_available : Whether raw results are available on this HW + * @supported_modes : Modes supported by this HW. + * @reserved : Reserved for future use + */ +struct cam_fd_hw_caps { + struct cam_hw_version core_version; + struct cam_hw_version wrapper_version; + uint32_t raw_results_available; + uint32_t supported_modes; + uint64_t reserved; +}; + +/** + * struct cam_fd_query_cap_cmd - FD Query capabilities information + * + * @device_iommu : FD IOMMU handles + * @cdm_iommu : CDM iommu handles + * @hw_caps : FD HW capabilities + * @reserved : Reserved for alignment + */ +struct cam_fd_query_cap_cmd { + struct cam_iommu_handle device_iommu; + struct cam_iommu_handle cdm_iommu; + struct cam_fd_hw_caps hw_caps; + uint64_t reserved; +}; + +/** + * struct cam_fd_acquire_dev_info - FD acquire device information + * + * @clk_bw_request : SOC clock, bandwidth request + * @priority : Priority for this acquire + * @mode : Mode in which to run FD HW. 
+ * @get_raw_results : Whether this acquire needs face raw results + * while frame processing + * @reserved : Reserved field for 64 bit alignment + */ +struct cam_fd_acquire_dev_info { + struct cam_fd_soc_clock_bw_request clk_bw_request; + uint32_t priority; + uint32_t mode; + uint32_t get_raw_results; + uint32_t reserved[13]; +}; + +#endif /* __UAPI_CAM_FD_H__ */ diff --git a/selfdrive/camerad/include/media/cam_icp.h b/selfdrive/camerad/include/media/cam_icp.h new file mode 100644 index 00000000000000..680d05b630a67e --- /dev/null +++ b/selfdrive/camerad/include/media/cam_icp.h @@ -0,0 +1,179 @@ +#ifndef __UAPI_CAM_ICP_H__ +#define __UAPI_CAM_ICP_H__ + +#include "cam_defs.h" + +/* icp, ipe, bps, cdm(ipe/bps) are used in querycap */ +#define CAM_ICP_DEV_TYPE_A5 1 +#define CAM_ICP_DEV_TYPE_IPE 2 +#define CAM_ICP_DEV_TYPE_BPS 3 +#define CAM_ICP_DEV_TYPE_IPE_CDM 4 +#define CAM_ICP_DEV_TYPE_BPS_CDM 5 +#define CAM_ICP_DEV_TYPE_MAX 5 + +/* definitions needed for icp aquire device */ +#define CAM_ICP_RES_TYPE_BPS 1 +#define CAM_ICP_RES_TYPE_IPE_RT 2 +#define CAM_ICP_RES_TYPE_IPE 3 +#define CAM_ICP_RES_TYPE_MAX 4 + +/* packet opcode types */ +#define CAM_ICP_OPCODE_IPE_UPDATE 0 +#define CAM_ICP_OPCODE_BPS_UPDATE 1 + +/* IPE input port resource type */ +#define CAM_ICP_IPE_INPUT_IMAGE_FULL 0x0 +#define CAM_ICP_IPE_INPUT_IMAGE_DS4 0x1 +#define CAM_ICP_IPE_INPUT_IMAGE_DS16 0x2 +#define CAM_ICP_IPE_INPUT_IMAGE_DS64 0x3 +#define CAM_ICP_IPE_INPUT_IMAGE_FULL_REF 0x4 +#define CAM_ICP_IPE_INPUT_IMAGE_DS4_REF 0x5 +#define CAM_ICP_IPE_INPUT_IMAGE_DS16_REF 0x6 +#define CAM_ICP_IPE_INPUT_IMAGE_DS64_REF 0x7 + +/* IPE output port resource type */ +#define CAM_ICP_IPE_OUTPUT_IMAGE_DISPLAY 0x8 +#define CAM_ICP_IPE_OUTPUT_IMAGE_VIDEO 0x9 +#define CAM_ICP_IPE_OUTPUT_IMAGE_FULL_REF 0xA +#define CAM_ICP_IPE_OUTPUT_IMAGE_DS4_REF 0xB +#define CAM_ICP_IPE_OUTPUT_IMAGE_DS16_REF 0xC +#define CAM_ICP_IPE_OUTPUT_IMAGE_DS64_REF 0xD + +#define CAM_ICP_IPE_IMAGE_MAX 0xE + +/* BPS input port resource 
type */ +#define CAM_ICP_BPS_INPUT_IMAGE 0x0 + +/* BPS output port resource type */ +#define CAM_ICP_BPS_OUTPUT_IMAGE_FULL 0x1 +#define CAM_ICP_BPS_OUTPUT_IMAGE_DS4 0x2 +#define CAM_ICP_BPS_OUTPUT_IMAGE_DS16 0x3 +#define CAM_ICP_BPS_OUTPUT_IMAGE_DS64 0x4 +#define CAM_ICP_BPS_OUTPUT_IMAGE_STATS_BG 0x5 +#define CAM_ICP_BPS_OUTPUT_IMAGE_STATS_BHIST 0x6 +#define CAM_ICP_BPS_OUTPUT_IMAGE_REG1 0x7 +#define CAM_ICP_BPS_OUTPUT_IMAGE_REG2 0x8 + +#define CAM_ICP_BPS_IO_IMAGES_MAX 0x9 + +/* Command meta types */ +#define CAM_ICP_CMD_META_GENERIC_BLOB 0x1 + +/* Generic blob types */ +#define CAM_ICP_CMD_GENERIC_BLOB_CLK 0x1 +#define CAM_ICP_CMD_GENERIC_BLOB_CFG_IO 0x2 + +/** + * struct cam_icp_clk_bw_request + * + * @budget_ns: Time required to process frame + * @frame_cycles: Frame cycles needed to process the frame + * @rt_flag: Flag to indicate real time stream + * @uncompressed_bw: Bandwidth required to process frame + * @compressed_bw: Compressed bandwidth to process frame + */ +struct cam_icp_clk_bw_request { + uint64_t budget_ns; + uint32_t frame_cycles; + uint32_t rt_flag; + uint64_t uncompressed_bw; + uint64_t compressed_bw; +}; + +/** + * struct cam_icp_dev_ver - Device information for particular hw type + * + * This is used to get device version info of + * ICP, IPE, BPS and CDM related IPE and BPS from firmware + * and use this info in CAM_QUERY_CAP IOCTL + * + * @dev_type: hardware type for the cap info(icp, ipe, bps, cdm(ipe/bps)) + * @reserved: reserved field + * @hw_ver: major, minor and incr values of a device version + */ +struct cam_icp_dev_ver { + uint32_t dev_type; + uint32_t reserved; + struct cam_hw_version hw_ver; +}; + +/** + * struct cam_icp_ver - ICP version info + * + * This strcuture is used for fw and api version + * this is used to get firmware version and api version from firmware + * and use this info in CAM_QUERY_CAP IOCTL + * + * @major: FW version major + * @minor: FW version minor + * @revision: FW version increment + */ +struct cam_icp_ver 
{ + uint32_t major; + uint32_t minor; + uint32_t revision; + uint32_t reserved; +}; + +/** + * struct cam_icp_query_cap_cmd - ICP query device capability payload + * + * @dev_iommu_handle: icp iommu handles for secure/non secure modes + * @cdm_iommu_handle: iommu handles for secure/non secure modes + * @fw_version: firmware version info + * @api_version: api version info + * @num_ipe: number of ipes + * @num_bps: number of bps + * @dev_ver: returned device capability array + */ +struct cam_icp_query_cap_cmd { + struct cam_iommu_handle dev_iommu_handle; + struct cam_iommu_handle cdm_iommu_handle; + struct cam_icp_ver fw_version; + struct cam_icp_ver api_version; + uint32_t num_ipe; + uint32_t num_bps; + struct cam_icp_dev_ver dev_ver[CAM_ICP_DEV_TYPE_MAX]; +}; + +/** + * struct cam_icp_res_info - ICP output resource info + * + * @format: format of the resource + * @width: width in pixels + * @height: height in lines + * @fps: fps + */ +struct cam_icp_res_info { + uint32_t format; + uint32_t width; + uint32_t height; + uint32_t fps; +}; + +/** + * struct cam_icp_acquire_dev_info - An ICP device info + * + * @scratch_mem_size: Output param - size of scratch memory + * @dev_type: device type (IPE_RT/IPE_NON_RT/BPS) + * @io_config_cmd_size: size of IO config command + * @io_config_cmd_handle: IO config command for each acquire + * @secure_mode: camera mode (secure/non secure) + * @chain_info: chaining info of FW device handles + * @in_res: resource info used for clock and bandwidth calculation + * @num_out_res: number of output resources + * @out_res: output resource + */ +struct cam_icp_acquire_dev_info { + uint32_t scratch_mem_size; + uint32_t dev_type; + uint32_t io_config_cmd_size; + int32_t io_config_cmd_handle; + uint32_t secure_mode; + int32_t chain_info; + struct cam_icp_res_info in_res; + uint32_t num_out_res; + struct cam_icp_res_info out_res[1]; +} __attribute__((__packed__)); + +#endif /* __UAPI_CAM_ICP_H__ */ diff --git 
a/selfdrive/camerad/include/media/cam_isp.h b/selfdrive/camerad/include/media/cam_isp.h new file mode 100644 index 00000000000000..266840d38c2547 --- /dev/null +++ b/selfdrive/camerad/include/media/cam_isp.h @@ -0,0 +1,379 @@ +#ifndef __UAPI_CAM_ISP_H__ +#define __UAPI_CAM_ISP_H__ + +#include "cam_defs.h" +#include "cam_isp_vfe.h" +#include "cam_isp_ife.h" + + +/* ISP driver name */ +#define CAM_ISP_DEV_NAME "cam-isp" + +/* HW type */ +#define CAM_ISP_HW_BASE 0 +#define CAM_ISP_HW_CSID 1 +#define CAM_ISP_HW_VFE 2 +#define CAM_ISP_HW_IFE 3 +#define CAM_ISP_HW_ISPIF 4 +#define CAM_ISP_HW_MAX 5 + +/* Color Pattern */ +#define CAM_ISP_PATTERN_BAYER_RGRGRG 0 +#define CAM_ISP_PATTERN_BAYER_GRGRGR 1 +#define CAM_ISP_PATTERN_BAYER_BGBGBG 2 +#define CAM_ISP_PATTERN_BAYER_GBGBGB 3 +#define CAM_ISP_PATTERN_YUV_YCBYCR 4 +#define CAM_ISP_PATTERN_YUV_YCRYCB 5 +#define CAM_ISP_PATTERN_YUV_CBYCRY 6 +#define CAM_ISP_PATTERN_YUV_CRYCBY 7 +#define CAM_ISP_PATTERN_MAX 8 + +/* Usage Type */ +#define CAM_ISP_RES_USAGE_SINGLE 0 +#define CAM_ISP_RES_USAGE_DUAL 1 +#define CAM_ISP_RES_USAGE_MAX 2 + +/* Resource ID */ +#define CAM_ISP_RES_ID_PORT 0 +#define CAM_ISP_RES_ID_CLK 1 +#define CAM_ISP_RES_ID_MAX 2 + +/* Resource Type - Type of resource for the resource id + * defined in cam_isp_vfe.h, cam_isp_ife.h + */ + +/* Lane Type in input resource for Port */ +#define CAM_ISP_LANE_TYPE_DPHY 0 +#define CAM_ISP_LANE_TYPE_CPHY 1 +#define CAM_ISP_LANE_TYPE_MAX 2 + +/* ISP Resurce Composite Group ID */ +#define CAM_ISP_RES_COMP_GROUP_NONE 0 +#define CAM_ISP_RES_COMP_GROUP_ID_0 1 +#define CAM_ISP_RES_COMP_GROUP_ID_1 2 +#define CAM_ISP_RES_COMP_GROUP_ID_2 3 +#define CAM_ISP_RES_COMP_GROUP_ID_3 4 +#define CAM_ISP_RES_COMP_GROUP_ID_4 5 +#define CAM_ISP_RES_COMP_GROUP_ID_5 6 +#define CAM_ISP_RES_COMP_GROUP_ID_MAX 6 + +/* ISP packet opcode for ISP */ +#define CAM_ISP_PACKET_OP_BASE 0 +#define CAM_ISP_PACKET_INIT_DEV 1 +#define CAM_ISP_PACKET_UPDATE_DEV 2 +#define CAM_ISP_PACKET_OP_MAX 3 + +/* ISP packet 
meta_data type for command buffer */ +#define CAM_ISP_PACKET_META_BASE 0 +#define CAM_ISP_PACKET_META_LEFT 1 +#define CAM_ISP_PACKET_META_RIGHT 2 +#define CAM_ISP_PACKET_META_COMMON 3 +#define CAM_ISP_PACKET_META_DMI_LEFT 4 +#define CAM_ISP_PACKET_META_DMI_RIGHT 5 +#define CAM_ISP_PACKET_META_DMI_COMMON 6 +#define CAM_ISP_PACKET_META_CLOCK 7 +#define CAM_ISP_PACKET_META_CSID 8 +#define CAM_ISP_PACKET_META_DUAL_CONFIG 9 +#define CAM_ISP_PACKET_META_GENERIC_BLOB_LEFT 10 +#define CAM_ISP_PACKET_META_GENERIC_BLOB_RIGHT 11 +#define CAM_ISP_PACKET_META_GENERIC_BLOB_COMMON 12 + +/* DSP mode */ +#define CAM_ISP_DSP_MODE_NONE 0 +#define CAM_ISP_DSP_MODE_ONE_WAY 1 +#define CAM_ISP_DSP_MODE_ROUND 2 + +/* ISP Generic Cmd Buffer Blob types */ +#define CAM_ISP_GENERIC_BLOB_TYPE_HFR_CONFIG 0 +#define CAM_ISP_GENERIC_BLOB_TYPE_CLOCK_CONFIG 1 +#define CAM_ISP_GENERIC_BLOB_TYPE_BW_CONFIG 2 + +/* Query devices */ +/** + * struct cam_isp_dev_cap_info - A cap info for particular hw type + * + * @hw_type: Hardware type for the cap info + * @reserved: reserved field for alignment + * @hw_version: Hardware version + * + */ +struct cam_isp_dev_cap_info { + uint32_t hw_type; + uint32_t reserved; + struct cam_hw_version hw_version; +}; + +/** + * struct cam_isp_query_cap_cmd - ISP query device capability payload + * + * @device_iommu: returned iommu handles for device + * @cdm_iommu: returned iommu handles for cdm + * @num_dev: returned number of device capabilities + * @reserved: reserved field for alignment + * @dev_caps: returned device capability array + * + */ +struct cam_isp_query_cap_cmd { + struct cam_iommu_handle device_iommu; + struct cam_iommu_handle cdm_iommu; + int32_t num_dev; + uint32_t reserved; + struct cam_isp_dev_cap_info dev_caps[CAM_ISP_HW_MAX]; +}; + +/* Acquire Device */ +/** + * struct cam_isp_out_port_info - An output port resource info + * + * @res_type: output resource type defined in file + * cam_isp_vfe.h or cam_isp_ife.h + * @format: output format of the 
resource + * @wdith: output width in pixels + * @height: output height in lines + * @comp_grp_id: composite group id for the resource. + * @split_point: split point in pixels for the dual VFE. + * @secure_mode: flag to tell if output should be run in secure + * mode or not. See cam_defs.h for definition + * @reserved: reserved field for alignment + * + */ +struct cam_isp_out_port_info { + uint32_t res_type; + uint32_t format; + uint32_t width; + uint32_t height; + uint32_t comp_grp_id; + uint32_t split_point; + uint32_t secure_mode; + uint32_t reserved; +}; + +/** + * struct cam_isp_in_port_info - An input port resource info + * + * @res_type: input resource type define in file + * cam_isp_vfe.h or cam_isp_ife.h + * @lane_type: lane type: c-phy or d-phy. + * @lane_num: active lane number + * @lane_cfg: lane configurations: 4 bits per lane + * @vc: input virtual channel number + * @dt: input data type number + * @format: input format + * @test_pattern: test pattern for the testgen + * @usage_type: whether dual vfe is required + * @left_start: left input start offset in pixels + * @left_stop: left input stop offset in pixels + * @left_width: left input width in pixels + * @right_start: right input start offset in pixels. + * Only for Dual VFE + * @right_stop: right input stop offset in pixels. + * Only for Dual VFE + * @right_width: right input width in pixels. 
+ * Only for dual VFE + * @line_start: top of the line number + * @line_stop: bottome of the line number + * @height: input height in lines + * @pixel_clk; sensor output clock + * @batch_size: batch size for HFR mode + * @dsp_mode: DSP stream mode (Defines as CAM_ISP_DSP_MODE_*) + * @hbi_cnt: HBI count for the camif input + * @reserved: Reserved field for alignment + * @num_out_res: number of the output resource associated + * @data: payload that contains the output resources + * + */ +struct cam_isp_in_port_info { + uint32_t res_type; + uint32_t lane_type; + uint32_t lane_num; + uint32_t lane_cfg; + uint32_t vc; + uint32_t dt; + uint32_t format; + uint32_t test_pattern; + uint32_t usage_type; + uint32_t left_start; + uint32_t left_stop; + uint32_t left_width; + uint32_t right_start; + uint32_t right_stop; + uint32_t right_width; + uint32_t line_start; + uint32_t line_stop; + uint32_t height; + uint32_t pixel_clk; + uint32_t batch_size; + uint32_t dsp_mode; + uint32_t hbi_cnt; + uint32_t custom_csid; + uint32_t reserved; + uint32_t num_out_res; + struct cam_isp_out_port_info data[1]; +}; + +/** + * struct cam_isp_resource - A resource bundle + * + * @resoruce_id: resource id for the resource bundle + * @length: length of the while resource blob + * @handle_type: type of the resource handle + * @reserved: reserved field for alignment + * @res_hdl: resource handle that points to the + * resource array; + * + */ +struct cam_isp_resource { + uint32_t resource_id; + uint32_t length; + uint32_t handle_type; + uint32_t reserved; + uint64_t res_hdl; +}; + +/** + * struct cam_isp_port_hfr_config - HFR configuration for this port + * + * @resource_type: Resource type + * @subsample_pattern: Subsample pattern. Used in HFR mode. It + * should be consistent with batchSize and + * CAMIF programming. + * @subsample_period: Subsample period. Used in HFR mode. It + * should be consistent with batchSize and + * CAMIF programming. 
+ * @framedrop_pattern: Framedrop pattern + * @framedrop_period: Framedrop period + * @reserved: Reserved for alignment + */ +struct cam_isp_port_hfr_config { + uint32_t resource_type; + uint32_t subsample_pattern; + uint32_t subsample_period; + uint32_t framedrop_pattern; + uint32_t framedrop_period; + uint32_t reserved; +} __attribute__((packed)); + +/** + * struct cam_isp_resource_hfr_config - Resource HFR configuration + * + * @num_ports: Number of ports + * @reserved: Reserved for alignment + * @port_hfr_config: HFR configuration for each IO port + */ +struct cam_isp_resource_hfr_config { + uint32_t num_ports; + uint32_t reserved; + struct cam_isp_port_hfr_config port_hfr_config[1]; +} __attribute__((packed)); + +/** + * struct cam_isp_dual_split_params - dual isp spilt parameters + * + * @split_point: Split point information x, where (0 < x < width) + * left ISP's input ends at x + righ padding and + * Right ISP's input starts at x - left padding + * @right_padding: Padding added past the split point for left + * ISP's input + * @left_padding: Padding added before split point for right + * ISP's input + * @reserved: Reserved filed for alignment + * + */ +struct cam_isp_dual_split_params { + uint32_t split_point; + uint32_t right_padding; + uint32_t left_padding; + uint32_t reserved; +}; + +/** + * struct cam_isp_dual_stripe_config - stripe config per bus client + * + * @offset: Start horizontal offset relative to + * output buffer + * In UBWC mode, this value indicates the H_INIT + * value in pixel + * @width: Width of the stripe in bytes + * @tileconfig Ubwc meta tile config. 
Contain the partial + * tile info + * @port_id: port id of ISP output + * + */ +struct cam_isp_dual_stripe_config { + uint32_t offset; + uint32_t width; + uint32_t tileconfig; + uint32_t port_id; +}; + +/** + * struct cam_isp_dual_config - dual isp configuration + * + * @num_ports Number of isp output ports + * @reserved Reserved field for alignment + * @split_params: Inpput split parameters + * @stripes: Stripe information + * + */ +struct cam_isp_dual_config { + uint32_t num_ports; + uint32_t reserved; + struct cam_isp_dual_split_params split_params; + struct cam_isp_dual_stripe_config stripes[1]; +} __attribute__((packed)); + +/** + * struct cam_isp_clock_config - Clock configuration + * + * @usage_type: Usage type (Single/Dual) + * @num_rdi: Number of RDI votes + * @left_pix_hz: Pixel Clock for Left ISP + * @right_pix_hz: Pixel Clock for Right ISP, valid only if Dual + * @rdi_hz: RDI Clock. ISP clock will be max of RDI and + * PIX clocks. For a particular context which ISP + * HW the RDI is allocated to is not known to UMD. + * Hence pass the clock and let KMD decide. 
+ */ +struct cam_isp_clock_config { + uint32_t usage_type; + uint32_t num_rdi; + uint64_t left_pix_hz; + uint64_t right_pix_hz; + uint64_t rdi_hz[1]; +} __attribute__((packed)); + +/** + * struct cam_isp_bw_vote - Bandwidth vote information + * + * @resource_id: Resource ID + * @reserved: Reserved field for alignment + * @cam_bw_bps: Bandwidth vote for CAMNOC + * @ext_bw_bps: Bandwidth vote for path-to-DDR after CAMNOC + */ + +struct cam_isp_bw_vote { + uint32_t resource_id; + uint32_t reserved; + uint64_t cam_bw_bps; + uint64_t ext_bw_bps; +} __attribute__((packed)); + +/** + * struct cam_isp_bw_config - Bandwidth configuration + * + * @usage_type: Usage type (Single/Dual) + * @num_rdi: Number of RDI votes + * @left_pix_vote: Bandwidth vote for left ISP + * @right_pix_vote: Bandwidth vote for right ISP + * @rdi_vote: RDI bandwidth requirements + */ + +struct cam_isp_bw_config { + uint32_t usage_type; + uint32_t num_rdi; + struct cam_isp_bw_vote left_pix_vote; + struct cam_isp_bw_vote right_pix_vote; + struct cam_isp_bw_vote rdi_vote[1]; +} __attribute__((packed)); + +#endif /* __UAPI_CAM_ISP_H__ */ diff --git a/selfdrive/camerad/include/media/cam_isp_ife.h b/selfdrive/camerad/include/media/cam_isp_ife.h new file mode 100644 index 00000000000000..f5e72813fc0d2d --- /dev/null +++ b/selfdrive/camerad/include/media/cam_isp_ife.h @@ -0,0 +1,39 @@ +#ifndef __UAPI_CAM_ISP_IFE_H__ +#define __UAPI_CAM_ISP_IFE_H__ + +/* IFE output port resource type (global unique)*/ +#define CAM_ISP_IFE_OUT_RES_BASE 0x3000 + +#define CAM_ISP_IFE_OUT_RES_FULL (CAM_ISP_IFE_OUT_RES_BASE + 0) +#define CAM_ISP_IFE_OUT_RES_DS4 (CAM_ISP_IFE_OUT_RES_BASE + 1) +#define CAM_ISP_IFE_OUT_RES_DS16 (CAM_ISP_IFE_OUT_RES_BASE + 2) +#define CAM_ISP_IFE_OUT_RES_RAW_DUMP (CAM_ISP_IFE_OUT_RES_BASE + 3) +#define CAM_ISP_IFE_OUT_RES_FD (CAM_ISP_IFE_OUT_RES_BASE + 4) +#define CAM_ISP_IFE_OUT_RES_PDAF (CAM_ISP_IFE_OUT_RES_BASE + 5) +#define CAM_ISP_IFE_OUT_RES_RDI_0 (CAM_ISP_IFE_OUT_RES_BASE + 6) +#define 
CAM_ISP_IFE_OUT_RES_RDI_1 (CAM_ISP_IFE_OUT_RES_BASE + 7) +#define CAM_ISP_IFE_OUT_RES_RDI_2 (CAM_ISP_IFE_OUT_RES_BASE + 8) +#define CAM_ISP_IFE_OUT_RES_RDI_3 (CAM_ISP_IFE_OUT_RES_BASE + 9) +#define CAM_ISP_IFE_OUT_RES_STATS_HDR_BE (CAM_ISP_IFE_OUT_RES_BASE + 10) +#define CAM_ISP_IFE_OUT_RES_STATS_HDR_BHIST (CAM_ISP_IFE_OUT_RES_BASE + 11) +#define CAM_ISP_IFE_OUT_RES_STATS_TL_BG (CAM_ISP_IFE_OUT_RES_BASE + 12) +#define CAM_ISP_IFE_OUT_RES_STATS_BF (CAM_ISP_IFE_OUT_RES_BASE + 13) +#define CAM_ISP_IFE_OUT_RES_STATS_AWB_BG (CAM_ISP_IFE_OUT_RES_BASE + 14) +#define CAM_ISP_IFE_OUT_RES_STATS_BHIST (CAM_ISP_IFE_OUT_RES_BASE + 15) +#define CAM_ISP_IFE_OUT_RES_STATS_RS (CAM_ISP_IFE_OUT_RES_BASE + 16) +#define CAM_ISP_IFE_OUT_RES_STATS_CS (CAM_ISP_IFE_OUT_RES_BASE + 17) +#define CAM_ISP_IFE_OUT_RES_STATS_IHIST (CAM_ISP_IFE_OUT_RES_BASE + 18) +#define CAM_ISP_IFE_OUT_RES_MAX (CAM_ISP_IFE_OUT_RES_BASE + 19) + + +/* IFE input port resource type (global unique) */ +#define CAM_ISP_IFE_IN_RES_BASE 0x4000 + +#define CAM_ISP_IFE_IN_RES_TPG (CAM_ISP_IFE_IN_RES_BASE + 0) +#define CAM_ISP_IFE_IN_RES_PHY_0 (CAM_ISP_IFE_IN_RES_BASE + 1) +#define CAM_ISP_IFE_IN_RES_PHY_1 (CAM_ISP_IFE_IN_RES_BASE + 2) +#define CAM_ISP_IFE_IN_RES_PHY_2 (CAM_ISP_IFE_IN_RES_BASE + 3) +#define CAM_ISP_IFE_IN_RES_PHY_3 (CAM_ISP_IFE_IN_RES_BASE + 4) +#define CAM_ISP_IFE_IN_RES_MAX (CAM_ISP_IFE_IN_RES_BASE + 5) + +#endif /* __UAPI_CAM_ISP_IFE_H__ */ diff --git a/selfdrive/camerad/include/media/cam_isp_vfe.h b/selfdrive/camerad/include/media/cam_isp_vfe.h new file mode 100644 index 00000000000000..e48db2f98d91c2 --- /dev/null +++ b/selfdrive/camerad/include/media/cam_isp_vfe.h @@ -0,0 +1,44 @@ +#ifndef __UAPI_CAM_ISP_VFE_H__ +#define __UAPI_CAM_ISP_VFE_H__ + +/* VFE output port resource type (global unique) */ +#define CAM_ISP_VFE_OUT_RES_BASE 0x1000 + +#define CAM_ISP_VFE_OUT_RES_ENC (CAM_ISP_VFE_OUT_RES_BASE + 0) +#define CAM_ISP_VFE_OUT_RES_VIEW (CAM_ISP_VFE_OUT_RES_BASE + 1) +#define CAM_ISP_VFE_OUT_RES_VID 
(CAM_ISP_VFE_OUT_RES_BASE + 2) +#define CAM_ISP_VFE_OUT_RES_RDI_0 (CAM_ISP_VFE_OUT_RES_BASE + 3) +#define CAM_ISP_VFE_OUT_RES_RDI_1 (CAM_ISP_VFE_OUT_RES_BASE + 4) +#define CAM_ISP_VFE_OUT_RES_RDI_2 (CAM_ISP_VFE_OUT_RES_BASE + 5) +#define CAM_ISP_VFE_OUT_RES_RDI_3 (CAM_ISP_VFE_OUT_RES_BASE + 6) +#define CAM_ISP_VFE_OUT_RES_STATS_AEC (CAM_ISP_VFE_OUT_RES_BASE + 7) +#define CAM_ISP_VFE_OUT_RES_STATS_AF (CAM_ISP_VFE_OUT_RES_BASE + 8) +#define CAM_ISP_VFE_OUT_RES_STATS_AWB (CAM_ISP_VFE_OUT_RES_BASE + 9) +#define CAM_ISP_VFE_OUT_RES_STATS_RS (CAM_ISP_VFE_OUT_RES_BASE + 10) +#define CAM_ISP_VFE_OUT_RES_STATS_CS (CAM_ISP_VFE_OUT_RES_BASE + 11) +#define CAM_ISP_VFE_OUT_RES_STATS_IHIST (CAM_ISP_VFE_OUT_RES_BASE + 12) +#define CAM_ISP_VFE_OUT_RES_STATS_SKIN (CAM_ISP_VFE_OUT_RES_BASE + 13) +#define CAM_ISP_VFE_OUT_RES_STATS_BG (CAM_ISP_VFE_OUT_RES_BASE + 14) +#define CAM_ISP_VFE_OUT_RES_STATS_BF (CAM_ISP_VFE_OUT_RES_BASE + 15) +#define CAM_ISP_VFE_OUT_RES_STATS_BE (CAM_ISP_VFE_OUT_RES_BASE + 16) +#define CAM_ISP_VFE_OUT_RES_STATS_BHIST (CAM_ISP_VFE_OUT_RES_BASE + 17) +#define CAM_ISP_VFE_OUT_RES_STATS_BF_SCALE (CAM_ISP_VFE_OUT_RES_BASE + 18) +#define CAM_ISP_VFE_OUT_RES_STATS_HDR_BE (CAM_ISP_VFE_OUT_RES_BASE + 19) +#define CAM_ISP_VFE_OUT_RES_STATS_HDR_BHIST (CAM_ISP_VFE_OUT_RES_BASE + 20) +#define CAM_ISP_VFE_OUT_RES_STATS_AEC_BG (CAM_ISP_VFE_OUT_RES_BASE + 21) +#define CAM_ISP_VFE_OUT_RES_CAMIF_RAW (CAM_ISP_VFE_OUT_RES_BASE + 22) +#define CAM_ISP_VFE_OUT_RES_IDEAL_RAW (CAM_ISP_VFE_OUT_RES_BASE + 23) +#define CAM_ISP_VFE_OUT_RES_MAX (CAM_ISP_VFE_OUT_RES_BASE + 24) + +/* VFE input port_ resource type (global unique) */ +#define CAM_ISP_VFE_IN_RES_BASE 0x2000 + +#define CAM_ISP_VFE_IN_RES_TPG (CAM_ISP_VFE_IN_RES_BASE + 0) +#define CAM_ISP_VFE_IN_RES_PHY_0 (CAM_ISP_VFE_IN_RES_BASE + 1) +#define CAM_ISP_VFE_IN_RES_PHY_1 (CAM_ISP_VFE_IN_RES_BASE + 2) +#define CAM_ISP_VFE_IN_RES_PHY_2 (CAM_ISP_VFE_IN_RES_BASE + 3) +#define CAM_ISP_VFE_IN_RES_PHY_3 (CAM_ISP_VFE_IN_RES_BASE + 4) 
+#define CAM_ISP_VFE_IN_RES_FE (CAM_ISP_VFE_IN_RES_BASE + 5) +#define CAM_ISP_VFE_IN_RES_MAX (CAM_ISP_VFE_IN_RES_BASE + 6) + +#endif /* __UAPI_CAM_ISP_VFE_H__ */ diff --git a/selfdrive/camerad/include/media/cam_jpeg.h b/selfdrive/camerad/include/media/cam_jpeg.h new file mode 100644 index 00000000000000..f3082f3bfe295b --- /dev/null +++ b/selfdrive/camerad/include/media/cam_jpeg.h @@ -0,0 +1,117 @@ +#ifndef __UAPI_CAM_JPEG_H__ +#define __UAPI_CAM_JPEG_H__ + +#include "cam_defs.h" + +/* enc, dma, cdm(enc/dma) are used in querycap */ +#define CAM_JPEG_DEV_TYPE_ENC 0 +#define CAM_JPEG_DEV_TYPE_DMA 1 +#define CAM_JPEG_DEV_TYPE_MAX 2 + +#define CAM_JPEG_NUM_DEV_PER_RES_MAX 1 + +/* definitions needed for jpeg aquire device */ +#define CAM_JPEG_RES_TYPE_ENC 0 +#define CAM_JPEG_RES_TYPE_DMA 1 +#define CAM_JPEG_RES_TYPE_MAX 2 + +/* packet opcode types */ +#define CAM_JPEG_OPCODE_ENC_UPDATE 0 +#define CAM_JPEG_OPCODE_DMA_UPDATE 1 + +/* ENC input port resource type */ +#define CAM_JPEG_ENC_INPUT_IMAGE 0x0 + +/* ENC output port resource type */ +#define CAM_JPEG_ENC_OUTPUT_IMAGE 0x1 + +#define CAM_JPEG_ENC_IO_IMAGES_MAX 0x2 + +/* DMA input port resource type */ +#define CAM_JPEG_DMA_INPUT_IMAGE 0x0 + +/* DMA output port resource type */ +#define CAM_JPEG_DMA_OUTPUT_IMAGE 0x1 + +#define CAM_JPEG_DMA_IO_IMAGES_MAX 0x2 + +#define CAM_JPEG_IMAGE_MAX 0x2 + +/** + * struct cam_jpeg_dev_ver - Device information for particular hw type + * + * This is used to get device version info of JPEG ENC, JPEG DMA + * from hardware and use this info in CAM_QUERY_CAP IOCTL + * + * @size : Size of struct passed + * @dev_type: Hardware type for the cap info(jpeg enc, jpeg dma) + * @hw_ver: Major, minor and incr values of a device version + */ +struct cam_jpeg_dev_ver { + uint32_t size; + uint32_t dev_type; + struct cam_hw_version hw_ver; +}; + +/** + * struct cam_jpeg_query_cap_cmd - JPEG query device capability payload + * + * @dev_iommu_handle: Jpeg iommu handles for secure/non secure + * modes + 
* @cdm_iommu_handle: Iommu handles for secure/non secure modes + * @num_enc: Number of encoder + * @num_dma: Number of dma + * @dev_ver: Returned device capability array + */ +struct cam_jpeg_query_cap_cmd { + struct cam_iommu_handle dev_iommu_handle; + struct cam_iommu_handle cdm_iommu_handle; + uint32_t num_enc; + uint32_t num_dma; + struct cam_jpeg_dev_ver dev_ver[CAM_JPEG_DEV_TYPE_MAX]; +}; + +/** + * struct cam_jpeg_res_info - JPEG output resource info + * + * @format: Format of the resource + * @width: Width in pixels + * @height: Height in lines + * @fps: Fps + */ +struct cam_jpeg_res_info { + uint32_t format; + uint32_t width; + uint32_t height; + uint32_t fps; +}; + +/** + * struct cam_jpeg_acquire_dev_info - An JPEG device info + * + * @dev_type: Device type (ENC/DMA) + * @reserved: Reserved Bytes + * @in_res: In resource info + * @in_res: Iut resource info + */ +struct cam_jpeg_acquire_dev_info { + uint32_t dev_type; + uint32_t reserved; + struct cam_jpeg_res_info in_res; + struct cam_jpeg_res_info out_res; +}; + +/** + * struct cam_jpeg_config_inout_param_info - JPEG Config time + * input output params + * + * @clk_index: Input Param- clock selection index.(-1 default) + * @output_size: Output Param - jpeg encode/dma output size in + * bytes + */ +struct cam_jpeg_config_inout_param_info { + int32_t clk_index; + int32_t output_size; +}; + +#endif /* __UAPI_CAM_JPEG_H__ */ diff --git a/selfdrive/camerad/include/media/cam_lrme.h b/selfdrive/camerad/include/media/cam_lrme.h new file mode 100644 index 00000000000000..97d957835ee3b7 --- /dev/null +++ b/selfdrive/camerad/include/media/cam_lrme.h @@ -0,0 +1,65 @@ +#ifndef __UAPI_CAM_LRME_H__ +#define __UAPI_CAM_LRME_H__ + +#include "cam_defs.h" + +/* LRME Resource Types */ + +enum CAM_LRME_IO_TYPE { + CAM_LRME_IO_TYPE_TAR, + CAM_LRME_IO_TYPE_REF, + CAM_LRME_IO_TYPE_RES, + CAM_LRME_IO_TYPE_DS2, +}; + +#define CAM_LRME_INPUT_PORT_TYPE_TAR (1 << 0) +#define CAM_LRME_INPUT_PORT_TYPE_REF (1 << 1) + +#define 
CAM_LRME_OUTPUT_PORT_TYPE_DS2 (1 << 0) +#define CAM_LRME_OUTPUT_PORT_TYPE_RES (1 << 1) + +#define CAM_LRME_DEV_MAX 1 + + +struct cam_lrme_hw_version { + uint32_t gen; + uint32_t rev; + uint32_t step; +}; + +struct cam_lrme_dev_cap { + struct cam_lrme_hw_version clc_hw_version; + struct cam_lrme_hw_version bus_rd_hw_version; + struct cam_lrme_hw_version bus_wr_hw_version; + struct cam_lrme_hw_version top_hw_version; + struct cam_lrme_hw_version top_titan_version; +}; + +/** + * struct cam_lrme_query_cap_cmd - LRME query device capability payload + * + * @dev_iommu_handle: LRME iommu handles for secure/non secure + * modes + * @cdm_iommu_handle: Iommu handles for secure/non secure modes + * @num_devices: number of hardware devices + * @dev_caps: Returned device capability array + */ +struct cam_lrme_query_cap_cmd { + struct cam_iommu_handle device_iommu; + struct cam_iommu_handle cdm_iommu; + uint32_t num_devices; + struct cam_lrme_dev_cap dev_caps[CAM_LRME_DEV_MAX]; +}; + +struct cam_lrme_soc_info { + uint64_t clock_rate; + uint64_t bandwidth; + uint64_t reserved[4]; +}; + +struct cam_lrme_acquire_args { + struct cam_lrme_soc_info lrme_soc_info; +}; + +#endif /* __UAPI_CAM_LRME_H__ */ + diff --git a/selfdrive/camerad/include/media/cam_req_mgr.h b/selfdrive/camerad/include/media/cam_req_mgr.h new file mode 100644 index 00000000000000..ae65649964ff0a --- /dev/null +++ b/selfdrive/camerad/include/media/cam_req_mgr.h @@ -0,0 +1,436 @@ +#ifndef __UAPI_LINUX_CAM_REQ_MGR_H +#define __UAPI_LINUX_CAM_REQ_MGR_H + +#include +#include +#include +#include +#include + +#define CAM_REQ_MGR_VNODE_NAME "cam-req-mgr-devnode" + +#define CAM_DEVICE_TYPE_BASE (MEDIA_ENT_F_OLD_BASE) +#define CAM_VNODE_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE) +#define CAM_SENSOR_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE + 1) +#define CAM_IFE_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE + 2) +#define CAM_ICP_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE + 3) +#define CAM_LRME_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE + 4) +#define 
CAM_JPEG_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE + 5) +#define CAM_FD_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE + 6) +#define CAM_CPAS_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE + 7) +#define CAM_CSIPHY_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE + 8) +#define CAM_ACTUATOR_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE + 9) +#define CAM_CCI_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE + 10) +#define CAM_FLASH_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE + 11) +#define CAM_EEPROM_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE + 12) +#define CAM_OIS_DEVICE_TYPE (CAM_DEVICE_TYPE_BASE + 13) + +/* cam_req_mgr hdl info */ +#define CAM_REQ_MGR_HDL_IDX_POS 8 +#define CAM_REQ_MGR_HDL_IDX_MASK ((1 << CAM_REQ_MGR_HDL_IDX_POS) - 1) +#define CAM_REQ_MGR_GET_HDL_IDX(hdl) (hdl & CAM_REQ_MGR_HDL_IDX_MASK) + +/** + * Max handles supported by cam_req_mgr + * It includes both session and device handles + */ +#define CAM_REQ_MGR_MAX_HANDLES 64 +#define MAX_LINKS_PER_SESSION 2 + +/* V4L event type which user space will subscribe to */ +#define V4L_EVENT_CAM_REQ_MGR_EVENT (V4L2_EVENT_PRIVATE_START + 0) + +/* Specific event ids to get notified in user space */ +#define V4L_EVENT_CAM_REQ_MGR_SOF 0 +#define V4L_EVENT_CAM_REQ_MGR_ERROR 1 +#define V4L_EVENT_CAM_REQ_MGR_SOF_BOOT_TS 2 + +/* SOF Event status */ +#define CAM_REQ_MGR_SOF_EVENT_SUCCESS 0 +#define CAM_REQ_MGR_SOF_EVENT_ERROR 1 + +/* Link control operations */ +#define CAM_REQ_MGR_LINK_ACTIVATE 0 +#define CAM_REQ_MGR_LINK_DEACTIVATE 1 + +/** + * Request Manager : flush_type + * @CAM_REQ_MGR_FLUSH_TYPE_ALL: Req mgr will remove all the pending + * requests from input/processing queue. + * @CAM_REQ_MGR_FLUSH_TYPE_CANCEL_REQ: Req mgr will remove only particular + * request id from input/processing queue. 
+ * @CAM_REQ_MGR_FLUSH_TYPE_MAX: Max number of the flush type + * @opcode: CAM_REQ_MGR_FLUSH_REQ + */ +#define CAM_REQ_MGR_FLUSH_TYPE_ALL 0 +#define CAM_REQ_MGR_FLUSH_TYPE_CANCEL_REQ 1 +#define CAM_REQ_MGR_FLUSH_TYPE_MAX 2 + +/** + * Request Manager : Sync Mode type + * @CAM_REQ_MGR_SYNC_MODE_NO_SYNC: Req mgr will apply non-sync mode for this + * request. + * @CAM_REQ_MGR_SYNC_MODE_SYNC: Req mgr will apply sync mode for this request. + */ +#define CAM_REQ_MGR_SYNC_MODE_NO_SYNC 0 +#define CAM_REQ_MGR_SYNC_MODE_SYNC 1 + +/** + * struct cam_req_mgr_event_data + * @session_hdl: session handle + * @link_hdl: link handle + * @frame_id: frame id + * @reserved: reserved for 64 bit aligngment + * @req_id: request id + * @tv_sec: timestamp in seconds + * @tv_usec: timestamp in micro seconds + */ +struct cam_req_mgr_event_data { + int32_t session_hdl; + int32_t link_hdl; + int32_t frame_id; + int32_t reserved; + int64_t req_id; + uint64_t tv_sec; + uint64_t tv_usec; +}; + +/** + * struct cam_req_mgr_session_info + * @session_hdl: In/Output param - session_handle + * @opcode1: CAM_REQ_MGR_CREATE_SESSION + * @opcode2: CAM_REQ_MGR_DESTROY_SESSION + */ +struct cam_req_mgr_session_info { + int32_t session_hdl; + int32_t reserved; +}; + +/** + * struct cam_req_mgr_link_info + * @session_hdl: Input param - Identifier for CSL session + * @num_devices: Input Param - Num of devices to be linked + * @dev_hdls: Input param - List of device handles to be linked + * @link_hdl: Output Param -Identifier for link + * @opcode: CAM_REQ_MGR_LINK + */ +struct cam_req_mgr_link_info { + int32_t session_hdl; + uint32_t num_devices; + int32_t dev_hdls[CAM_REQ_MGR_MAX_HANDLES]; + int32_t link_hdl; +}; + +/** + * struct cam_req_mgr_unlink_info + * @session_hdl: input param - session handle + * @link_hdl: input param - link handle + * @opcode: CAM_REQ_MGR_UNLINK + */ +struct cam_req_mgr_unlink_info { + int32_t session_hdl; + int32_t link_hdl; +}; + +/** + * struct cam_req_mgr_flush_info + * @brief: User 
can tell drivers to flush a particular request id or + * flush all requests from its pending processing queue. Flush is a + * blocking call and driver shall ensure all requests are flushed + * before returning. + * @session_hdl: Input param - Identifier for CSL session + * @link_hdl: Input Param -Identifier for link + * @flush_type: User can cancel a particular req id or can flush + * all requests in queue + * @reserved: reserved for 64 bit aligngment + * @req_id: field is valid only if flush type is cancel request + * for flush all this field value is not considered. + * @opcode: CAM_REQ_MGR_FLUSH_REQ + */ +struct cam_req_mgr_flush_info { + int32_t session_hdl; + int32_t link_hdl; + uint32_t flush_type; + uint32_t reserved; + int64_t req_id; +}; + +/** struct cam_req_mgr_sched_info + * @session_hdl: Input param - Identifier for CSL session + * @link_hdl: Input Param -Identifier for link + * inluding itself. + * @bubble_enable: Input Param - Cam req mgr will do bubble recovery if this + * flag is set. 
+ * @sync_mode: Type of Sync mode for this request + * @req_id: Input Param - Request Id from which all requests will be flushed + */ +struct cam_req_mgr_sched_request { + int32_t session_hdl; + int32_t link_hdl; + int32_t bubble_enable; + int32_t sync_mode; + int64_t req_id; +}; + +/** + * struct cam_req_mgr_sync_mode + * @session_hdl: Input param - Identifier for CSL session + * @sync_mode: Input Param - Type of sync mode + * @num_links: Input Param - Num of links in sync mode (Valid only + * when sync_mode is one of SYNC enabled modes) + * @link_hdls: Input Param - Array of link handles to be in sync mode + * (Valid only when sync_mode is one of SYNC + * enabled modes) + * @master_link_hdl: Input Param - To dictate which link's SOF drives system + * (Valid only when sync_mode is one of SYNC + * enabled modes) + * + * @opcode: CAM_REQ_MGR_SYNC_MODE + */ +struct cam_req_mgr_sync_mode { + int32_t session_hdl; + int32_t sync_mode; + int32_t num_links; + int32_t link_hdls[MAX_LINKS_PER_SESSION]; + int32_t master_link_hdl; + int32_t reserved; +}; + +/** + * struct cam_req_mgr_link_control + * @ops: Link operations: activate/deactive + * @session_hdl: Input param - Identifier for CSL session + * @num_links: Input Param - Num of links + * @reserved: reserved field + * @link_hdls: Input Param - Links to be activated/deactivated + * + * @opcode: CAM_REQ_MGR_LINK_CONTROL + */ +struct cam_req_mgr_link_control { + int32_t ops; + int32_t session_hdl; + int32_t num_links; + int32_t reserved; + int32_t link_hdls[MAX_LINKS_PER_SESSION]; +}; + +/** + * cam_req_mgr specific opcode ids + */ +#define CAM_REQ_MGR_CREATE_DEV_NODES (CAM_COMMON_OPCODE_MAX + 1) +#define CAM_REQ_MGR_CREATE_SESSION (CAM_COMMON_OPCODE_MAX + 2) +#define CAM_REQ_MGR_DESTROY_SESSION (CAM_COMMON_OPCODE_MAX + 3) +#define CAM_REQ_MGR_LINK (CAM_COMMON_OPCODE_MAX + 4) +#define CAM_REQ_MGR_UNLINK (CAM_COMMON_OPCODE_MAX + 5) +#define CAM_REQ_MGR_SCHED_REQ (CAM_COMMON_OPCODE_MAX + 6) +#define CAM_REQ_MGR_FLUSH_REQ 
(CAM_COMMON_OPCODE_MAX + 7) +#define CAM_REQ_MGR_SYNC_MODE (CAM_COMMON_OPCODE_MAX + 8) +#define CAM_REQ_MGR_ALLOC_BUF (CAM_COMMON_OPCODE_MAX + 9) +#define CAM_REQ_MGR_MAP_BUF (CAM_COMMON_OPCODE_MAX + 10) +#define CAM_REQ_MGR_RELEASE_BUF (CAM_COMMON_OPCODE_MAX + 11) +#define CAM_REQ_MGR_CACHE_OPS (CAM_COMMON_OPCODE_MAX + 12) +#define CAM_REQ_MGR_LINK_CONTROL (CAM_COMMON_OPCODE_MAX + 13) +/* end of cam_req_mgr opcodes */ + +#define CAM_MEM_FLAG_HW_READ_WRITE (1<<0) +#define CAM_MEM_FLAG_HW_READ_ONLY (1<<1) +#define CAM_MEM_FLAG_HW_WRITE_ONLY (1<<2) +#define CAM_MEM_FLAG_KMD_ACCESS (1<<3) +#define CAM_MEM_FLAG_UMD_ACCESS (1<<4) +#define CAM_MEM_FLAG_PROTECTED_MODE (1<<5) +#define CAM_MEM_FLAG_CMD_BUF_TYPE (1<<6) +#define CAM_MEM_FLAG_PIXEL_BUF_TYPE (1<<7) +#define CAM_MEM_FLAG_STATS_BUF_TYPE (1<<8) +#define CAM_MEM_FLAG_PACKET_BUF_TYPE (1<<9) +#define CAM_MEM_FLAG_CACHE (1<<10) +#define CAM_MEM_FLAG_HW_SHARED_ACCESS (1<<11) + +#define CAM_MEM_MMU_MAX_HANDLE 16 + +/* Maximum allowed buffers in existence */ +#define CAM_MEM_BUFQ_MAX 1024 + +#define CAM_MEM_MGR_SECURE_BIT_POS 15 +#define CAM_MEM_MGR_HDL_IDX_SIZE 15 +#define CAM_MEM_MGR_HDL_FD_SIZE 16 +#define CAM_MEM_MGR_HDL_IDX_END_POS 16 +#define CAM_MEM_MGR_HDL_FD_END_POS 32 + +#define CAM_MEM_MGR_HDL_IDX_MASK ((1 << CAM_MEM_MGR_HDL_IDX_SIZE) - 1) + +#define GET_MEM_HANDLE(idx, fd) \ + ((idx & CAM_MEM_MGR_HDL_IDX_MASK) | \ + (fd << (CAM_MEM_MGR_HDL_FD_END_POS - CAM_MEM_MGR_HDL_FD_SIZE))) \ + +#define GET_FD_FROM_HANDLE(hdl) \ + (hdl >> (CAM_MEM_MGR_HDL_FD_END_POS - CAM_MEM_MGR_HDL_FD_SIZE)) \ + +#define CAM_MEM_MGR_GET_HDL_IDX(hdl) (hdl & CAM_MEM_MGR_HDL_IDX_MASK) + +#define CAM_MEM_MGR_SET_SECURE_HDL(hdl, flag) \ + ((flag) ? 
(hdl |= (1 << CAM_MEM_MGR_SECURE_BIT_POS)) : \ + ((hdl) &= ~(1 << CAM_MEM_MGR_SECURE_BIT_POS))) + +#define CAM_MEM_MGR_IS_SECURE_HDL(hdl) \ + (((hdl) & \ + (1<> CAM_MEM_MGR_SECURE_BIT_POS) + +/** + * memory allocation type + */ +#define CAM_MEM_DMA_NONE 0 +#define CAM_MEM_DMA_BIDIRECTIONAL 1 +#define CAM_MEM_DMA_TO_DEVICE 2 +#define CAM_MEM_DMA_FROM_DEVICE 3 + + +/** + * memory cache operation + */ +#define CAM_MEM_CLEAN_CACHE 1 +#define CAM_MEM_INV_CACHE 2 +#define CAM_MEM_CLEAN_INV_CACHE 3 + + +/** + * struct cam_mem_alloc_out_params + * @buf_handle: buffer handle + * @fd: output buffer file descriptor + * @vaddr: virtual address pointer + */ +struct cam_mem_alloc_out_params { + uint32_t buf_handle; + int32_t fd; + uint64_t vaddr; +}; + +/** + * struct cam_mem_map_out_params + * @buf_handle: buffer handle + * @reserved: reserved for future + * @vaddr: virtual address pointer + */ +struct cam_mem_map_out_params { + uint32_t buf_handle; + uint32_t reserved; + uint64_t vaddr; +}; + +/** + * struct cam_mem_mgr_alloc_cmd + * @len: size of buffer to allocate + * @align: alignment of the buffer + * @mmu_hdls: array of mmu handles + * @num_hdl: number of handles + * @flags: flags of the buffer + * @out: out params + */ +/* CAM_REQ_MGR_ALLOC_BUF */ +struct cam_mem_mgr_alloc_cmd { + uint64_t len; + uint64_t align; + int32_t mmu_hdls[CAM_MEM_MMU_MAX_HANDLE]; + uint32_t num_hdl; + uint32_t flags; + struct cam_mem_alloc_out_params out; +}; + +/** + * struct cam_mem_mgr_map_cmd + * @mmu_hdls: array of mmu handles + * @num_hdl: number of handles + * @flags: flags of the buffer + * @fd: output buffer file descriptor + * @reserved: reserved field + * @out: out params + */ + +/* CAM_REQ_MGR_MAP_BUF */ +struct cam_mem_mgr_map_cmd { + int32_t mmu_hdls[CAM_MEM_MMU_MAX_HANDLE]; + uint32_t num_hdl; + uint32_t flags; + int32_t fd; + uint32_t reserved; + struct cam_mem_map_out_params out; +}; + +/** + * struct cam_mem_mgr_map_cmd + * @buf_handle: buffer handle + * @reserved: reserved 
field + */ +/* CAM_REQ_MGR_RELEASE_BUF */ +struct cam_mem_mgr_release_cmd { + int32_t buf_handle; + uint32_t reserved; +}; + +/** + * struct cam_mem_mgr_map_cmd + * @buf_handle: buffer handle + * @ops: cache operations + */ +/* CAM_REQ_MGR_CACHE_OPS */ +struct cam_mem_cache_ops_cmd { + int32_t buf_handle; + uint32_t mem_cache_ops; +}; + +/** + * Request Manager : error message type + * @CAM_REQ_MGR_ERROR_TYPE_DEVICE: Device error message, fatal to session + * @CAM_REQ_MGR_ERROR_TYPE_REQUEST: Error on a single request, not fatal + * @CAM_REQ_MGR_ERROR_TYPE_BUFFER: Buffer was not filled, not fatal + */ +#define CAM_REQ_MGR_ERROR_TYPE_DEVICE 0 +#define CAM_REQ_MGR_ERROR_TYPE_REQUEST 1 +#define CAM_REQ_MGR_ERROR_TYPE_BUFFER 2 + +/** + * struct cam_req_mgr_error_msg + * @error_type: type of error + * @request_id: request id of frame + * @device_hdl: device handle + * @linke_hdl: link_hdl + * @resource_size: size of the resource + */ +struct cam_req_mgr_error_msg { + uint32_t error_type; + uint32_t request_id; + int32_t device_hdl; + int32_t link_hdl; + uint64_t resource_size; +}; + +/** + * struct cam_req_mgr_frame_msg + * @request_id: request id of the frame + * @frame_id: frame id of the frame + * @timestamp: timestamp of the frame + * @link_hdl: link handle associated with this message + * @sof_status: sof status success or fail + */ +struct cam_req_mgr_frame_msg { + uint64_t request_id; + uint64_t frame_id; + uint64_t timestamp; + int32_t link_hdl; + uint32_t sof_status; +}; + +/** + * struct cam_req_mgr_message + * @session_hdl: session to which the frame belongs to + * @reserved: reserved field + * @u: union which can either be error or frame message + */ +struct cam_req_mgr_message { + int32_t session_hdl; + int32_t reserved; + union { + struct cam_req_mgr_error_msg err_msg; + struct cam_req_mgr_frame_msg frame_msg; + } u; +}; +#endif /* __UAPI_LINUX_CAM_REQ_MGR_H */ diff --git a/selfdrive/camerad/include/media/cam_sensor.h 
b/selfdrive/camerad/include/media/cam_sensor.h new file mode 100644 index 00000000000000..f5af6047f5ac88 --- /dev/null +++ b/selfdrive/camerad/include/media/cam_sensor.h @@ -0,0 +1,477 @@ +#ifndef __UAPI_CAM_SENSOR_H__ +#define __UAPI_CAM_SENSOR_H__ + +#include +#include +#include + +#define CAM_SENSOR_PROBE_CMD (CAM_COMMON_OPCODE_MAX + 1) +#define CAM_FLASH_MAX_LED_TRIGGERS 3 +#define MAX_OIS_NAME_SIZE 32 +#define CAM_CSIPHY_SECURE_MODE_ENABLED 1 +/** + * struct cam_sensor_query_cap - capabilities info for sensor + * + * @slot_info : Indicates about the slotId or cell Index + * @secure_camera : Camera is in secure/Non-secure mode + * @pos_pitch : Sensor position pitch + * @pos_roll : Sensor position roll + * @pos_yaw : Sensor position yaw + * @actuator_slot_id : Actuator slot id which connected to sensor + * @eeprom_slot_id : EEPROM slot id which connected to sensor + * @ois_slot_id : OIS slot id which connected to sensor + * @flash_slot_id : Flash slot id which connected to sensor + * @csiphy_slot_id : CSIphy slot id which connected to sensor + * + */ +struct cam_sensor_query_cap { + uint32_t slot_info; + uint32_t secure_camera; + uint32_t pos_pitch; + uint32_t pos_roll; + uint32_t pos_yaw; + uint32_t actuator_slot_id; + uint32_t eeprom_slot_id; + uint32_t ois_slot_id; + uint32_t flash_slot_id; + uint32_t csiphy_slot_id; +} __attribute__((packed)); + +/** + * struct cam_csiphy_query_cap - capabilities info for csiphy + * + * @slot_info : Indicates about the slotId or cell Index + * @version : CSIphy version + * @clk lane : Of the 5 lanes, informs lane configured + * as clock lane + * @reserved + */ +struct cam_csiphy_query_cap { + uint32_t slot_info; + uint32_t version; + uint32_t clk_lane; + uint32_t reserved; +} __attribute__((packed)); + +/** + * struct cam_actuator_query_cap - capabilities info for actuator + * + * @slot_info : Indicates about the slotId or cell Index + * @reserved + */ +struct cam_actuator_query_cap { + uint32_t slot_info; + uint32_t 
reserved; +} __attribute__((packed)); + +/** + * struct cam_eeprom_query_cap_t - capabilities info for eeprom + * + * @slot_info : Indicates about the slotId or cell Index + * @eeprom_kernel_probe : Indicates about the kernel or userspace probe + */ +struct cam_eeprom_query_cap_t { + uint32_t slot_info; + uint16_t eeprom_kernel_probe; + uint16_t reserved; +} __attribute__((packed)); + +/** + * struct cam_ois_query_cap_t - capabilities info for ois + * + * @slot_info : Indicates about the slotId or cell Index + */ +struct cam_ois_query_cap_t { + uint32_t slot_info; + uint16_t reserved; +} __attribute__((packed)); + +/** + * struct cam_cmd_i2c_info - Contains slave I2C related info + * + * @slave_addr : Slave address + * @i2c_freq_mode : 4 bits are used for I2c freq mode + * @cmd_type : Explains type of command + */ +struct cam_cmd_i2c_info { + uint16_t slave_addr; + uint8_t i2c_freq_mode; + uint8_t cmd_type; +} __attribute__((packed)); + +/** + * struct cam_ois_opcode - Contains OIS opcode + * + * @prog : OIS FW prog register address + * @coeff : OIS FW coeff register address + * @pheripheral : OIS pheripheral + * @memory : OIS memory + */ +struct cam_ois_opcode { + uint32_t prog; + uint32_t coeff; + uint32_t pheripheral; + uint32_t memory; +} __attribute__((packed)); + +/** + * struct cam_cmd_ois_info - Contains OIS slave info + * + * @slave_addr : OIS i2c slave address + * @i2c_freq_mode : i2c frequency mode + * @cmd_type : Explains type of command + * @ois_fw_flag : indicates if fw is present or not + * @is_ois_calib : indicates the calibration data is available + * @ois_name : OIS name + * @opcode : opcode + */ +struct cam_cmd_ois_info { + uint16_t slave_addr; + uint8_t i2c_freq_mode; + uint8_t cmd_type; + uint8_t ois_fw_flag; + uint8_t is_ois_calib; + char ois_name[MAX_OIS_NAME_SIZE]; + struct cam_ois_opcode opcode; +} __attribute__((packed)); + +/** + * struct cam_cmd_probe - Contains sensor slave info + * + * @data_type : Slave register data type + * 
@addr_type : Slave register address type + * @op_code : Don't Care + * @cmd_type : Explains type of command + * @reg_addr : Slave register address + * @expected_data : Data expected at slave register address + * @data_mask : Data mask if only few bits are valid + * @camera_id : Indicates the slot to which camera + * needs to be probed + * @reserved + */ +struct cam_cmd_probe { + uint8_t data_type; + uint8_t addr_type; + uint8_t op_code; + uint8_t cmd_type; + uint32_t reg_addr; + uint32_t expected_data; + uint32_t data_mask; + uint16_t camera_id; + uint16_t reserved; +} __attribute__((packed)); + +/** + * struct cam_power_settings - Contains sensor power setting info + * + * @power_seq_type : Type of power sequence + * @reserved + * @config_val_low : Lower 32 bit value configuration value + * @config_val_high : Higher 32 bit value configuration value + * + */ +struct cam_power_settings { + uint16_t power_seq_type; + uint16_t reserved; + uint32_t config_val_low; + uint32_t config_val_high; +} __attribute__((packed)); + +/** + * struct cam_cmd_power - Explains about the power settings + * + * @count : Number of power settings follows + * @reserved + * @cmd_type : Explains type of command + * @power_settings : Contains power setting info + */ +struct cam_cmd_power { + uint16_t count; + uint8_t reserved; + uint8_t cmd_type; + struct cam_power_settings power_settings[1]; +} __attribute__((packed)); + +/** + * struct i2c_rdwr_header - header of READ/WRITE I2C command + * + * @ count : Number of registers / data / reg-data pairs + * @ op_code : Operation code + * @ cmd_type : Command buffer type + * @ data_type : I2C data type + * @ addr_type : I2C address type + * @ reserved + */ +struct i2c_rdwr_header { + uint16_t count; + uint8_t op_code; + uint8_t cmd_type; + uint8_t data_type; + uint8_t addr_type; + uint16_t reserved; +} __attribute__((packed)); + +/** + * struct i2c_random_wr_payload - payload for I2C random write + * + * @ reg_addr : Register address + * @ reg_data 
: Register data + * + */ +struct i2c_random_wr_payload { + uint32_t reg_addr; + uint32_t reg_data; +} __attribute__((packed)); + +/** + * struct cam_cmd_i2c_random_wr - I2C random write command + * @ header : header of READ/WRITE I2C command + * @ random_wr_payload : payload for I2C random write + */ +struct cam_cmd_i2c_random_wr { + struct i2c_rdwr_header header; + struct i2c_random_wr_payload random_wr_payload[1]; +} __attribute__((packed)); + +/** + * struct cam_cmd_read - I2C read command + * @ reg_data : Register data + * @ reserved + */ +struct cam_cmd_read { + uint32_t reg_data; + uint32_t reserved; +} __attribute__((packed)); + +/** + * struct cam_cmd_i2c_continuous_wr - I2C continuous write command + * @ header : header of READ/WRITE I2C command + * @ reg_addr : Register address + * @ data_read : I2C read command + */ +struct cam_cmd_i2c_continuous_wr { + struct i2c_rdwr_header header; + uint32_t reg_addr; + struct cam_cmd_read data_read[1]; +} __attribute__((packed)); + +/** + * struct cam_cmd_i2c_random_rd - I2C random read command + * @ header : header of READ/WRITE I2C command + * @ data_read : I2C read command + */ +struct cam_cmd_i2c_random_rd { + struct i2c_rdwr_header header; + struct cam_cmd_read data_read[1]; +} __attribute__((packed)); + +/** + * struct cam_cmd_i2c_continuous_rd - I2C continuous continuous read command + * @ header : header of READ/WRITE I2C command + * @ reg_addr : Register address + * + */ +struct cam_cmd_i2c_continuous_rd { + struct i2c_rdwr_header header; + uint32_t reg_addr; +} __attribute__((packed)); + +/** + * struct cam_cmd_conditional_wait - Conditional wait command + * @data_type : Data type + * @addr_type : Address type + * @op_code : Opcode + * @cmd_type : Explains type of command + * @timeout : Timeout for retries + * @reserved + * @reg_addr : Register Address + * @reg_data : Register data + * @data_mask : Data mask if only few bits are valid + * @camera_id : Indicates the slot to which camera + * needs to be 
probed + * + */ +struct cam_cmd_conditional_wait { + uint8_t data_type; + uint8_t addr_type; + uint8_t op_code; + uint8_t cmd_type; + uint16_t timeout; + uint16_t reserved; + uint32_t reg_addr; + uint32_t reg_data; + uint32_t data_mask; +} __attribute__((packed)); + +/** + * struct cam_cmd_unconditional_wait - Un-conditional wait command + * @delay : Delay + * @op_code : Opcode + * @cmd_type : Explains type of command + */ +struct cam_cmd_unconditional_wait { + int16_t delay; + uint8_t op_code; + uint8_t cmd_type; +} __attribute__((packed)); + +/** + * cam_csiphy_info: Provides cmdbuffer structre + * @lane_mask : Lane mask details + * @lane_assign : Lane sensor will be using + * @csiphy_3phase : Total number of lanes + * @combo_mode : Info regarding combo_mode is enable / disable + * @lane_cnt : Total number of lanes + * @secure_mode : Secure mode flag to enable / disable + * @3phase : Details whether 3Phase / 2Phase operation + * @settle_time : Settling time in ms + * @data_rate : Data rate + * + */ +struct cam_csiphy_info { + uint16_t lane_mask; + uint16_t lane_assign; + uint8_t csiphy_3phase; + uint8_t combo_mode; + uint8_t lane_cnt; + uint8_t secure_mode; + uint64_t settle_time; + uint64_t data_rate; +} __attribute__((packed)); + +/** + * cam_csiphy_acquire_dev_info : Information needed for + * csiphy at the time of acquire + * @combo_mode : Indicates the device mode of operation + * @reserved + * + */ +struct cam_csiphy_acquire_dev_info { + uint32_t combo_mode; + uint32_t reserved; +} __attribute__((packed)); + +/** + * cam_sensor_acquire_dev : Updates sensor acuire cmd + * @device_handle : Updates device handle + * @session_handle : Session handle for acquiring device + * @handle_type : Resource handle type + * @reserved + * @info_handle : Handle to additional info + * needed for sensor sub modules + * + */ +struct cam_sensor_acquire_dev { + uint32_t session_handle; + uint32_t device_handle; + uint32_t handle_type; + uint32_t reserved; + uint64_t info_handle; 
+} __attribute__((packed)); + +/** + * cam_sensor_streamon_dev : StreamOn command for the sensor + * @session_handle : Session handle for acquiring device + * @device_handle : Updates device handle + * @handle_type : Resource handle type + * @reserved + * @info_handle : Information Needed at the time of streamOn + * + */ +struct cam_sensor_streamon_dev { + uint32_t session_handle; + uint32_t device_handle; + uint32_t handle_type; + uint32_t reserved; + uint64_t info_handle; +} __attribute__((packed)); + +/** + * struct cam_flash_init : Init command for the flash + * @flash_type : flash hw type + * @reserved + * @cmd_type : command buffer type + */ +struct cam_flash_init { + uint8_t flash_type; + uint16_t reserved; + uint8_t cmd_type; +} __attribute__((packed)); + +/** + * struct cam_flash_set_rer : RedEyeReduction command buffer + * + * @count : Number of flash leds + * @opcode : Command buffer opcode + * CAM_FLASH_FIRE_RER + * @cmd_type : command buffer operation type + * @num_iteration : Number of led turn on/off sequence + * @reserved + * @led_on_delay_ms : flash led turn on time in ms + * @led_off_delay_ms : flash led turn off time in ms + * @led_current_ma : flash led current in ma + * + */ +struct cam_flash_set_rer { + uint16_t count; + uint8_t opcode; + uint8_t cmd_type; + uint16_t num_iteration; + uint16_t reserved; + uint32_t led_on_delay_ms; + uint32_t led_off_delay_ms; + uint32_t led_current_ma[CAM_FLASH_MAX_LED_TRIGGERS]; +} __attribute__((packed)); + +/** + * struct cam_flash_set_on_off : led turn on/off command buffer + * + * @count : Number of Flash leds + * @opcode : command buffer opcodes + * CAM_FLASH_FIRE_LOW + * CAM_FLASH_FIRE_HIGH + * CAM_FLASH_OFF + * @cmd_type : command buffer operation type + * @led_current_ma : flash led current in ma + * + */ +struct cam_flash_set_on_off { + uint16_t count; + uint8_t opcode; + uint8_t cmd_type; + uint32_t led_current_ma[CAM_FLASH_MAX_LED_TRIGGERS]; +} __attribute__((packed)); + +/** + * struct 
cam_flash_query_curr : query current command buffer + * + * @reserved + * @opcode : command buffer opcode + * @cmd_type : command buffer operation type + * @query_current_ma : battery current in ma + * + */ +struct cam_flash_query_curr { + uint16_t reserved; + uint8_t opcode; + uint8_t cmd_type; + uint32_t query_current_ma; +} __attribute__ ((packed)); + +/** + * struct cam_flash_query_cap : capabilities info for flash + * + * @slot_info : Indicates about the slotId or cell Index + * @max_current_flash : max supported current for flash + * @max_duration_flash : max flash turn on duration + * @max_current_torch : max supported current for torch + * + */ +struct cam_flash_query_cap_info { + uint32_t slot_info; + uint32_t max_current_flash[CAM_FLASH_MAX_LED_TRIGGERS]; + uint32_t max_duration_flash[CAM_FLASH_MAX_LED_TRIGGERS]; + uint32_t max_current_torch[CAM_FLASH_MAX_LED_TRIGGERS]; +} __attribute__ ((packed)); + +#endif diff --git a/selfdrive/camerad/include/media/cam_sensor_cmn_header.h b/selfdrive/camerad/include/media/cam_sensor_cmn_header.h new file mode 100644 index 00000000000000..24617b39bd1266 --- /dev/null +++ b/selfdrive/camerad/include/media/cam_sensor_cmn_header.h @@ -0,0 +1,391 @@ +/* Copyright (c) 2017-2018, The Linux Foundation. All rights reserved. + * + * This program is free software; you can redistribute it and/or modify + * it under the terms of the GNU General Public License version 2 and + * only version 2 as published by the Free Software Foundation. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. 
+ */ + +#ifndef _CAM_SENSOR_CMN_HEADER_ +#define _CAM_SENSOR_CMN_HEADER_ + +#include +#include +#include + +#define MAX_REGULATOR 5 +#define MAX_POWER_CONFIG 12 + +#define MAX_PER_FRAME_ARRAY 32 +#define BATCH_SIZE_MAX 16 + +#define CAM_SENSOR_NAME "cam-sensor" +#define CAM_ACTUATOR_NAME "cam-actuator" +#define CAM_CSIPHY_NAME "cam-csiphy" +#define CAM_FLASH_NAME "cam-flash" +#define CAM_EEPROM_NAME "cam-eeprom" +#define CAM_OIS_NAME "cam-ois" + +#define MAX_SYSTEM_PIPELINE_DELAY 2 + +#define CAM_PKT_NOP_OPCODE 127 + +enum camera_sensor_cmd_type { + CAMERA_SENSOR_CMD_TYPE_INVALID, + CAMERA_SENSOR_CMD_TYPE_PROBE, + CAMERA_SENSOR_CMD_TYPE_PWR_UP, + CAMERA_SENSOR_CMD_TYPE_PWR_DOWN, + CAMERA_SENSOR_CMD_TYPE_I2C_INFO, + CAMERA_SENSOR_CMD_TYPE_I2C_RNDM_WR, + CAMERA_SENSOR_CMD_TYPE_I2C_RNDM_RD, + CAMERA_SENSOR_CMD_TYPE_I2C_CONT_WR, + CAMERA_SENSOR_CMD_TYPE_I2C_CONT_RD, + CAMERA_SENSOR_CMD_TYPE_WAIT, + CAMERA_SENSOR_FLASH_CMD_TYPE_INIT_INFO, + CAMERA_SENSOR_FLASH_CMD_TYPE_FIRE, + CAMERA_SENSOR_FLASH_CMD_TYPE_RER, + CAMERA_SENSOR_FLASH_CMD_TYPE_QUERYCURR, + CAMERA_SENSOR_FLASH_CMD_TYPE_WIDGET, + CAMERA_SENSOR_CMD_TYPE_RD_DATA, + CAMERA_SENSOR_FLASH_CMD_TYPE_INIT_FIRE, + CAMERA_SENSOR_CMD_TYPE_MAX, +}; + +enum camera_sensor_i2c_op_code { + CAMERA_SENSOR_I2C_OP_INVALID, + CAMERA_SENSOR_I2C_OP_RNDM_WR, + CAMERA_SENSOR_I2C_OP_RNDM_WR_VERF, + CAMERA_SENSOR_I2C_OP_CONT_WR_BRST, + CAMERA_SENSOR_I2C_OP_CONT_WR_BRST_VERF, + CAMERA_SENSOR_I2C_OP_CONT_WR_SEQN, + CAMERA_SENSOR_I2C_OP_CONT_WR_SEQN_VERF, + CAMERA_SENSOR_I2C_OP_MAX, +}; + +enum camera_sensor_wait_op_code { + CAMERA_SENSOR_WAIT_OP_INVALID, + CAMERA_SENSOR_WAIT_OP_COND, + CAMERA_SENSOR_WAIT_OP_HW_UCND, + CAMERA_SENSOR_WAIT_OP_SW_UCND, + CAMERA_SENSOR_WAIT_OP_MAX, +}; + +enum camera_flash_opcode { + CAMERA_SENSOR_FLASH_OP_INVALID, + CAMERA_SENSOR_FLASH_OP_OFF, + CAMERA_SENSOR_FLASH_OP_FIRELOW, + CAMERA_SENSOR_FLASH_OP_FIREHIGH, + CAMERA_SENSOR_FLASH_OP_MAX, +}; + +enum camera_sensor_i2c_type { + 
CAMERA_SENSOR_I2C_TYPE_INVALID, + CAMERA_SENSOR_I2C_TYPE_BYTE, + CAMERA_SENSOR_I2C_TYPE_WORD, + CAMERA_SENSOR_I2C_TYPE_3B, + CAMERA_SENSOR_I2C_TYPE_DWORD, + CAMERA_SENSOR_I2C_TYPE_MAX, +}; + +enum i2c_freq_mode { + I2C_STANDARD_MODE, + I2C_FAST_MODE, + I2C_CUSTOM_MODE, + I2C_FAST_PLUS_MODE, + I2C_MAX_MODES, +}; + +enum position_roll { + ROLL_0 = 0, + ROLL_90 = 90, + ROLL_180 = 180, + ROLL_270 = 270, + ROLL_INVALID = 360, +}; + +enum position_yaw { + FRONT_CAMERA_YAW = 0, + REAR_CAMERA_YAW = 180, + INVALID_YAW = 360, +}; + +enum position_pitch { + LEVEL_PITCH = 0, + INVALID_PITCH = 360, +}; + +enum sensor_sub_module { + SUB_MODULE_SENSOR, + SUB_MODULE_ACTUATOR, + SUB_MODULE_EEPROM, + SUB_MODULE_LED_FLASH, + SUB_MODULE_CSID, + SUB_MODULE_CSIPHY, + SUB_MODULE_OIS, + SUB_MODULE_EXT, + SUB_MODULE_MAX, +}; + +enum msm_camera_power_seq_type { + SENSOR_MCLK, + SENSOR_VANA, + SENSOR_VDIG, + SENSOR_VIO, + SENSOR_VAF, + SENSOR_VAF_PWDM, + SENSOR_CUSTOM_REG1, + SENSOR_CUSTOM_REG2, + SENSOR_RESET, + SENSOR_STANDBY, + SENSOR_CUSTOM_GPIO1, + SENSOR_CUSTOM_GPIO2, + SENSOR_SEQ_TYPE_MAX, +}; + +enum cam_sensor_packet_opcodes { + CAM_SENSOR_PACKET_OPCODE_SENSOR_STREAMON, + CAM_SENSOR_PACKET_OPCODE_SENSOR_UPDATE, + CAM_SENSOR_PACKET_OPCODE_SENSOR_INITIAL_CONFIG, + CAM_SENSOR_PACKET_OPCODE_SENSOR_PROBE, + CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, + CAM_SENSOR_PACKET_OPCODE_SENSOR_STREAMOFF, + CAM_SENSOR_PACKET_OPCODE_SENSOR_NOP = 127 +}; + +enum cam_actuator_packet_opcodes { + CAM_ACTUATOR_PACKET_OPCODE_INIT, + CAM_ACTUATOR_PACKET_AUTO_MOVE_LENS, + CAM_ACTUATOR_PACKET_MANUAL_MOVE_LENS +}; + +enum cam_eeprom_packet_opcodes { + CAM_EEPROM_PACKET_OPCODE_INIT +}; + +enum cam_ois_packet_opcodes { + CAM_OIS_PACKET_OPCODE_INIT, + CAM_OIS_PACKET_OPCODE_OIS_CONTROL +}; + +enum msm_bus_perf_setting { + S_INIT, + S_PREVIEW, + S_VIDEO, + S_CAPTURE, + S_ZSL, + S_STEREO_VIDEO, + S_STEREO_CAPTURE, + S_DEFAULT, + S_LIVESHOT, + S_DUAL, + S_EXIT +}; + +enum msm_camera_device_type_t { + 
MSM_CAMERA_I2C_DEVICE, + MSM_CAMERA_PLATFORM_DEVICE, + MSM_CAMERA_SPI_DEVICE, +}; + +enum cam_flash_device_type { + CAMERA_FLASH_DEVICE_TYPE_PMIC = 0, + CAMERA_FLASH_DEVICE_TYPE_I2C, + CAMERA_FLASH_DEVICE_TYPE_GPIO, +}; + +enum cci_i2c_master_t { + MASTER_0, + MASTER_1, + MASTER_MAX, +}; + +enum camera_vreg_type { + VREG_TYPE_DEFAULT, + VREG_TYPE_CUSTOM, +}; + +enum cam_sensor_i2c_cmd_type { + CAM_SENSOR_I2C_WRITE_RANDOM, + CAM_SENSOR_I2C_WRITE_BURST, + CAM_SENSOR_I2C_WRITE_SEQ, + CAM_SENSOR_I2C_READ, + CAM_SENSOR_I2C_POLL +}; + +struct common_header { + uint16_t first_word; + uint8_t third_byte; + uint8_t cmd_type; +}; + +struct camera_vreg_t { + const char *reg_name; + int min_voltage; + int max_voltage; + int op_mode; + uint32_t delay; + const char *custom_vreg_name; + enum camera_vreg_type type; +}; + +struct msm_camera_gpio_num_info { + uint16_t gpio_num[SENSOR_SEQ_TYPE_MAX]; + uint8_t valid[SENSOR_SEQ_TYPE_MAX]; +}; + +struct msm_cam_clk_info { + const char *clk_name; + long clk_rate; + uint32_t delay; +}; + +struct msm_pinctrl_info { + struct pinctrl *pinctrl; + struct pinctrl_state *gpio_state_active; + struct pinctrl_state *gpio_state_suspend; + bool use_pinctrl; +}; + +struct cam_sensor_i2c_reg_array { + uint32_t reg_addr; + uint32_t reg_data; + uint32_t delay; + uint32_t data_mask; +}; + +struct cam_sensor_i2c_reg_setting { + struct cam_sensor_i2c_reg_array *reg_setting; + unsigned short size; + enum camera_sensor_i2c_type addr_type; + enum camera_sensor_i2c_type data_type; + unsigned short delay; +}; + +/*struct i2c_settings_list { + struct cam_sensor_i2c_reg_setting i2c_settings; + enum cam_sensor_i2c_cmd_type op_code; + struct list_head list; +}; + +struct i2c_settings_array { + struct list_head list_head; + int32_t is_settings_valid; + int64_t request_id; +}; + +struct i2c_data_settings { + struct i2c_settings_array init_settings; + struct i2c_settings_array config_settings; + struct i2c_settings_array streamon_settings; + struct i2c_settings_array 
streamoff_settings; + struct i2c_settings_array *per_frame; +};*/ + +struct cam_sensor_power_ctrl_t { + struct device *dev; + struct cam_sensor_power_setting *power_setting; + uint16_t power_setting_size; + struct cam_sensor_power_setting *power_down_setting; + uint16_t power_down_setting_size; + struct msm_camera_gpio_num_info *gpio_num_info; + struct msm_pinctrl_info pinctrl_info; + uint8_t cam_pinctrl_status; +}; + +struct cam_camera_slave_info { + uint16_t sensor_slave_addr; + uint16_t sensor_id_reg_addr; + uint16_t sensor_id; + uint16_t sensor_id_mask; +}; + +struct msm_sensor_init_params { + int modes_supported; + unsigned int sensor_mount_angle; +}; + +enum msm_sensor_camera_id_t { + CAMERA_0, + CAMERA_1, + CAMERA_2, + CAMERA_3, + CAMERA_4, + CAMERA_5, + CAMERA_6, + MAX_CAMERAS, +}; + +struct msm_sensor_id_info_t { + unsigned short sensor_id_reg_addr; + unsigned short sensor_id; + unsigned short sensor_id_mask; +}; + +enum msm_sensor_output_format_t { + MSM_SENSOR_BAYER, + MSM_SENSOR_YCBCR, + MSM_SENSOR_META, +}; + +struct cam_sensor_power_setting { + enum msm_camera_power_seq_type seq_type; + unsigned short seq_val; + long config_val; + unsigned short delay; + void *data[10]; +}; + +struct cam_sensor_board_info { + struct cam_camera_slave_info slave_info; + int32_t sensor_mount_angle; + int32_t secure_mode; + int modes_supported; + int32_t pos_roll; + int32_t pos_yaw; + int32_t pos_pitch; + int32_t subdev_id[SUB_MODULE_MAX]; + int32_t subdev_intf[SUB_MODULE_MAX]; + const char *misc_regulator; + struct cam_sensor_power_ctrl_t power_info; +}; + +enum msm_camera_vreg_name_t { + CAM_VDIG, + CAM_VIO, + CAM_VANA, + CAM_VAF, + CAM_V_CUSTOM1, + CAM_V_CUSTOM2, + CAM_VREG_MAX, +}; + +struct msm_camera_gpio_conf { + void *cam_gpiomux_conf_tbl; + uint8_t cam_gpiomux_conf_tbl_size; + struct gpio *cam_gpio_common_tbl; + uint8_t cam_gpio_common_tbl_size; + struct gpio *cam_gpio_req_tbl; + uint8_t cam_gpio_req_tbl_size; + uint32_t gpio_no_mux; + uint32_t *camera_off_table; 
+ uint8_t camera_off_table_size; + uint32_t *camera_on_table; + uint8_t camera_on_table_size; + struct msm_camera_gpio_num_info *gpio_num_info; +}; + +/*for tof camera Begin*/ +enum EEPROM_DATA_OP_T{ + EEPROM_DEFAULT_DATA = 0, + EEPROM_INIT_DATA, + EEPROM_CONFIG_DATA, + EEPROM_STREAMON_DATA, + EEPROM_STREAMOFF_DATA, + EEPROM_OTHER_DATA, +}; +/*for tof camera End*/ +#endif /* _CAM_SENSOR_CMN_HEADER_ */ diff --git a/selfdrive/camerad/include/media/cam_sync.h b/selfdrive/camerad/include/media/cam_sync.h new file mode 100644 index 00000000000000..4a8781fc823d54 --- /dev/null +++ b/selfdrive/camerad/include/media/cam_sync.h @@ -0,0 +1,134 @@ +#ifndef __UAPI_CAM_SYNC_H__ +#define __UAPI_CAM_SYNC_H__ + +#include +#include +#include +#include + +#define CAM_SYNC_DEVICE_NAME "cam_sync_device" + +/* V4L event which user space will subscribe to */ +#define CAM_SYNC_V4L_EVENT (V4L2_EVENT_PRIVATE_START + 0) + +/* Specific event ids to get notified in user space */ +#define CAM_SYNC_V4L_EVENT_ID_CB_TRIG 0 + +/* Size of opaque payload sent to kernel for safekeeping until signal time */ +#define CAM_SYNC_USER_PAYLOAD_SIZE 2 + +/* Device type for sync device needed for device discovery */ +#define CAM_SYNC_DEVICE_TYPE (MEDIA_ENT_F_OLD_BASE) + +#define CAM_SYNC_GET_PAYLOAD_PTR(ev, type) \ + (type *)((char *)ev.u.data + sizeof(struct cam_sync_ev_header)) + +#define CAM_SYNC_GET_HEADER_PTR(ev) \ + ((struct cam_sync_ev_header *)ev.u.data) + +#define CAM_SYNC_STATE_INVALID 0 +#define CAM_SYNC_STATE_ACTIVE 1 +#define CAM_SYNC_STATE_SIGNALED_SUCCESS 2 +#define CAM_SYNC_STATE_SIGNALED_ERROR 3 + +/** + * struct cam_sync_ev_header - Event header for sync event notification + * + * @sync_obj: Sync object + * @status: Status of the object + */ +struct cam_sync_ev_header { + int32_t sync_obj; + int32_t status; +}; + +/** + * struct cam_sync_info - Sync object creation information + * + * @name: Optional string representation of the sync object + * @sync_obj: Sync object returned after creation 
in kernel + */ +struct cam_sync_info { + char name[64]; + int32_t sync_obj; +}; + +/** + * struct cam_sync_signal - Sync object signaling struct + * + * @sync_obj: Sync object to be signaled + * @sync_state: State of the sync object to which it should be signaled + */ +struct cam_sync_signal { + int32_t sync_obj; + uint32_t sync_state; +}; + +/** + * struct cam_sync_merge - Merge information for sync objects + * + * @sync_objs: Pointer to sync objects + * @num_objs: Number of objects in the array + * @merged: Merged sync object + */ +struct cam_sync_merge { + __u64 sync_objs; + uint32_t num_objs; + int32_t merged; +}; + +/** + * struct cam_sync_userpayload_info - Payload info from user space + * + * @sync_obj: Sync object for which payload has to be registered for + * @reserved: Reserved + * @payload: Pointer to user payload + */ +struct cam_sync_userpayload_info { + int32_t sync_obj; + uint32_t reserved; + __u64 payload[CAM_SYNC_USER_PAYLOAD_SIZE]; +}; + +/** + * struct cam_sync_wait - Sync object wait information + * + * @sync_obj: Sync object to wait on + * @reserved: Reserved + * @timeout_ms: Timeout in milliseconds + */ +struct cam_sync_wait { + int32_t sync_obj; + uint32_t reserved; + uint64_t timeout_ms; +}; + +/** + * struct cam_private_ioctl_arg - Sync driver ioctl argument + * + * @id: IOCTL command id + * @size: Size of command payload + * @result: Result of command execution + * @reserved: Reserved + * @ioctl_ptr: Pointer to user data + */ +struct cam_private_ioctl_arg { + __u32 id; + __u32 size; + __u32 result; + __u32 reserved; + __u64 ioctl_ptr; +}; + +#define CAM_PRIVATE_IOCTL_CMD \ + _IOWR('V', BASE_VIDIOC_PRIVATE, struct cam_private_ioctl_arg) + +#define CAM_SYNC_CREATE 0 +#define CAM_SYNC_DESTROY 1 +#define CAM_SYNC_SIGNAL 2 +#define CAM_SYNC_MERGE 3 +#define CAM_SYNC_REGISTER_PAYLOAD 4 +#define CAM_SYNC_DEREGISTER_PAYLOAD 5 +#define CAM_SYNC_WAIT 6 + +#endif /* __UAPI_CAM_SYNC_H__ */ diff --git a/selfdrive/camerad/main.cc 
b/selfdrive/camerad/main.cc index 15611681704244..48322b0a6578d0 100644 --- a/selfdrive/camerad/main.cc +++ b/selfdrive/camerad/main.cc @@ -4,6 +4,10 @@ #ifdef QCOM #include "cameras/camera_qcom.h" +#elif QCOM2 +#include "cameras/camera_qcom2.h" +#elif WEBCAM +#include "cameras/camera_webcam.h" #else #include "cameras/camera_frame_stream.h" #endif @@ -160,31 +164,37 @@ void* frontview_thread(void *arg) { } int ui_idx = tbuffer_select(&s->ui_front_tb); + int rgb_idx = ui_idx; FrameMetadata frame_data = s->cameras.front.camera_bufs_metadata[buf_idx]; double t1 = millis_since_boot(); - err = clSetKernelArg(s->krnl_debayer_front, 0, sizeof(cl_mem), &s->front_camera_bufs_cl[buf_idx]); - assert(err == 0); - err = clSetKernelArg(s->krnl_debayer_front, 1, sizeof(cl_mem), &s->rgb_front_bufs_cl[ui_idx]); - assert(err == 0); - float digital_gain = 1.0; - err = clSetKernelArg(s->krnl_debayer_front, 2, sizeof(float), &digital_gain); - assert(err == 0); - cl_event debayer_event; - const size_t debayer_work_size = s->rgb_front_height; - const size_t debayer_local_work_size = 128; - err = clEnqueueNDRangeKernel(q, s->krnl_debayer_front, 1, NULL, - &debayer_work_size, &debayer_local_work_size, 0, 0, &debayer_event); - assert(err == 0); + if (s->cameras.front.ci.bayer) { + err = clSetKernelArg(s->krnl_debayer_front, 0, sizeof(cl_mem), &s->front_camera_bufs_cl[buf_idx]); + cl_check_error(err); + err = clSetKernelArg(s->krnl_debayer_front, 1, sizeof(cl_mem), &s->rgb_front_bufs_cl[rgb_idx]); + cl_check_error(err); + float digital_gain = 1.0; + err = clSetKernelArg(s->krnl_debayer_front, 2, sizeof(float), &digital_gain); + assert(err == 0); + + const size_t debayer_work_size = s->rgb_front_height; // doesn't divide evenly, is this okay? 
+ const size_t debayer_local_work_size = 128; + err = clEnqueueNDRangeKernel(q, s->krnl_debayer_front, 1, NULL, + &debayer_work_size, &debayer_local_work_size, 0, 0, &debayer_event); + assert(err == 0); + } else { + assert(s->rgb_front_buf_size >= s->cameras.front.frame_size); + assert(s->rgb_front_stride == s->cameras.front.ci.frame_stride); + err = clEnqueueCopyBuffer(q, s->front_camera_bufs_cl[buf_idx], s->rgb_front_bufs_cl[rgb_idx], + 0, 0, s->rgb_front_buf_size, 0, 0, &debayer_event); + assert(err == 0); + } clWaitForEvents(1, &debayer_event); clReleaseEvent(debayer_event); - tbuffer_release(&s->cameras.front.camera_tb, buf_idx); - visionbuf_sync(&s->rgb_front_bufs[ui_idx], VISIONBUF_SYNC_FROM_DEVICE); - // set front camera metering target Message *msg = monitoring_sock->receive(true); if (msg != NULL) { @@ -288,27 +298,29 @@ void* frontview_thread(void *arg) { // send frame event { - capnp::MallocMessageBuilder msg; - cereal::Event::Builder event = msg.initRoot(); - event.setLogMonoTime(nanos_since_boot()); - - auto framed = event.initFrontFrame(); - framed.setFrameId(frame_data.frame_id); - framed.setEncodeId(cnt); - framed.setTimestampEof(frame_data.timestamp_eof); - framed.setFrameLength(frame_data.frame_length); - framed.setIntegLines(frame_data.integ_lines); - framed.setGlobalGain(frame_data.global_gain); - framed.setLensPos(frame_data.lens_pos); - framed.setLensSag(frame_data.lens_sag); - framed.setLensErr(frame_data.lens_err); - framed.setLensTruePos(frame_data.lens_true_pos); - framed.setGainFrac(frame_data.gain_frac); - framed.setFrameType(cereal::FrameData::FrameType::FRONT); + if (s->front_frame_sock != NULL) { + capnp::MallocMessageBuilder msg; + cereal::Event::Builder event = msg.initRoot(); + event.setLogMonoTime(nanos_since_boot()); + + auto framed = event.initFrontFrame(); + framed.setFrameId(frame_data.frame_id); + framed.setEncodeId(cnt); + framed.setTimestampEof(frame_data.timestamp_eof); + framed.setFrameLength(frame_data.frame_length); + 
framed.setIntegLines(frame_data.integ_lines); + framed.setGlobalGain(frame_data.global_gain); + framed.setLensPos(frame_data.lens_pos); + framed.setLensSag(frame_data.lens_sag); + framed.setLensErr(frame_data.lens_err); + framed.setLensTruePos(frame_data.lens_true_pos); + framed.setGainFrac(frame_data.gain_frac); + framed.setFrameType(cereal::FrameData::FrameType::FRONT); - auto words = capnp::messageToFlatArray(msg); - auto bytes = words.asBytes(); - s->front_frame_sock->send((char*)bytes.begin(), bytes.size()); + auto words = capnp::messageToFlatArray(msg); + auto bytes = words.asBytes(); + s->front_frame_sock->send((char*)bytes.begin(), bytes.size()); + } } /*FILE *f = fopen("/tmp/test2", "wb"); @@ -421,43 +433,48 @@ void* processing_thread(void *arg) { // send frame event { - capnp::MallocMessageBuilder msg; - cereal::Event::Builder event = msg.initRoot(); - event.setLogMonoTime(nanos_since_boot()); + if (s->frame_sock != NULL) { + capnp::MallocMessageBuilder msg; + cereal::Event::Builder event = msg.initRoot(); + event.setLogMonoTime(nanos_since_boot()); + + auto framed = event.initFrame(); + framed.setFrameId(frame_data.frame_id); + framed.setEncodeId(cnt); + framed.setTimestampEof(frame_data.timestamp_eof); + framed.setFrameLength(frame_data.frame_length); + framed.setIntegLines(frame_data.integ_lines); + framed.setGlobalGain(frame_data.global_gain); + framed.setLensPos(frame_data.lens_pos); + framed.setLensSag(frame_data.lens_sag); + framed.setLensErr(frame_data.lens_err); + framed.setLensTruePos(frame_data.lens_true_pos); + framed.setGainFrac(frame_data.gain_frac); - auto framed = event.initFrame(); - framed.setFrameId(frame_data.frame_id); - framed.setEncodeId(cnt); - framed.setTimestampEof(frame_data.timestamp_eof); - framed.setFrameLength(frame_data.frame_length); - framed.setIntegLines(frame_data.integ_lines); - framed.setGlobalGain(frame_data.global_gain); - framed.setLensPos(frame_data.lens_pos); - framed.setLensSag(frame_data.lens_sag); - 
framed.setLensErr(frame_data.lens_err); - framed.setLensTruePos(frame_data.lens_true_pos); - framed.setGainFrac(frame_data.gain_frac); #ifdef QCOM - kj::ArrayPtr focus_vals(&s->cameras.rear.focus[0], NUM_FOCUS); - kj::ArrayPtr focus_confs(&s->cameras.rear.confidence[0], NUM_FOCUS); - framed.setFocusVal(focus_vals); - framed.setFocusConf(focus_confs); + kj::ArrayPtr focus_vals(&s->cameras.rear.focus[0], NUM_FOCUS); + kj::ArrayPtr focus_confs(&s->cameras.rear.confidence[0], NUM_FOCUS); + framed.setFocusVal(focus_vals); + framed.setFocusConf(focus_confs); #endif -#ifndef QCOM - framed.setImage(kj::arrayPtr((const uint8_t*)s->yuv_ion[yuv_idx].addr, s->yuv_buf_size)); +// TODO: add this back +#if !defined(QCOM) && !defined(QCOM2) +//#ifndef QCOM + framed.setImage(kj::arrayPtr((const uint8_t*)s->yuv_ion[yuv_idx].addr, s->yuv_buf_size)); #endif - kj::ArrayPtr transform_vs(&s->yuv_transform.v[0], 9); - framed.setTransform(transform_vs); + kj::ArrayPtr transform_vs(&s->yuv_transform.v[0], 9); + framed.setTransform(transform_vs); - if (s->frame_sock != NULL) { auto words = capnp::messageToFlatArray(msg); auto bytes = words.asBytes(); s->frame_sock->send((char*)bytes.begin(), bytes.size()); } } +#ifndef QCOM2 + // TODO: fix on QCOM2, giving scanline error // one thumbnail per 5 seconds (instead of %5 == 0 posenet) if (cnt % 100 == 3) { uint8_t* thumbnail_buffer = NULL; @@ -521,6 +538,7 @@ void* processing_thread(void *arg) { free(thumbnail_buffer); } +#endif tbuffer_dispatch(&s->ui_tb, ui_idx); @@ -862,15 +880,21 @@ cl_program build_debayer_program(VisionState *s, assert(rgb_width == frame_width/2); assert(rgb_height == frame_height/2); + #ifdef QCOM2 + int dnew = 1; + #else + int dnew = 0; + #endif + char args[4096]; snprintf(args, sizeof(args), "-cl-fast-relaxed-math -cl-denorms-are-zero " "-DFRAME_WIDTH=%d -DFRAME_HEIGHT=%d -DFRAME_STRIDE=%d " "-DRGB_WIDTH=%d -DRGB_HEIGHT=%d -DRGB_STRIDE=%d " - "-DBAYER_FLIP=%d -DHDR=%d", + "-DBAYER_FLIP=%d -DHDR=%d -DNEW=%d", frame_width, 
frame_height, frame_stride, rgb_width, rgb_height, rgb_stride, - bayer_flip, hdr); + bayer_flip, hdr, dnew); return CLU_LOAD_FROM_FILE(s->context, s->device_id, "cameras/debayer.cl", args); } @@ -886,7 +910,7 @@ void cl_init(VisionState *s) { &s->device_id, &num_devices); assert(err == 0); - cl_print_info(platform_id, s->device_id); + //cl_print_info(platform_id, s->device_id); printf("\n"); s->context = clCreateContext(NULL, 1, &s->device_id, NULL, NULL, &err); @@ -908,9 +932,11 @@ void init_buffers(VisionState *s) { for (int i=0; icamera_bufs[i] = visionbuf_allocate_cl(s->frame_size, s->device_id, s->context, &s->camera_bufs_cl[i]); - // TODO: make lengths correct - s->focus_bufs[i] = visionbuf_allocate(0xb80); - s->stats_bufs[i] = visionbuf_allocate(0xb80); + #ifndef QCOM2 + // TODO: make lengths correct + s->focus_bufs[i] = visionbuf_allocate(0xb80); + s->stats_bufs[i] = visionbuf_allocate(0xb80); + #endif } for (int i=0; irgb_width = s->frame_width; s->rgb_height = s->frame_height; } - for (int i=0; irgb_width, s->rgb_height, &s->rgb_bufs[i]); s->rgb_bufs_cl[i] = visionbuf_to_cl(&s->rgb_bufs[i], s->device_id, s->context); @@ -939,8 +964,13 @@ void init_buffers(VisionState *s) { tbuffer_init(&s->ui_tb, UI_BUF_COUNT, "rgb"); //assert(s->cameras.front.ci.bayer); - s->rgb_front_width = s->cameras.front.ci.frame_width/2; - s->rgb_front_height = s->cameras.front.ci.frame_height/2; + if (s->cameras.front.ci.bayer) { + s->rgb_front_width = s->cameras.front.ci.frame_width/2; + s->rgb_front_height = s->cameras.front.ci.frame_height/2; + } else { + s->rgb_front_width = s->cameras.front.ci.frame_width; + s->rgb_front_height = s->cameras.front.ci.frame_height; + } for (int i=0; irgb_front_width, s->rgb_front_height, &s->rgb_front_bufs[i]); @@ -959,7 +989,6 @@ void init_buffers(VisionState *s) { s->yuv_width = s->rgb_width; s->yuv_height = s->rgb_height; s->yuv_buf_size = s->rgb_width * s->rgb_height * 3 / 2; - for (int i=0; iyuv_ion[i] = 
visionbuf_allocate_cl(s->yuv_buf_size, s->device_id, s->context, &s->yuv_cl[i]); s->yuv_bufs[i].y = (uint8_t*)s->yuv_ion[i].addr; @@ -1007,7 +1036,6 @@ void init_buffers(VisionState *s) { s->krnl_debayer_front = clCreateKernel(s->prg_debayer_front, "debayer10", &err); assert(err == 0); } - rgb_to_yuv_init(&s->rgb_to_yuv_state, s->context, s->device_id, s->yuv_width, s->yuv_height, s->rgb_stride); rgb_to_yuv_init(&s->front_rgb_to_yuv_state, s->context, s->device_id, s->yuv_front_width, s->yuv_front_height, s->rgb_front_stride); } @@ -1053,10 +1081,13 @@ void party(VisionState *s) { processing_thread, s); assert(err == 0); +#ifndef QCOM2 + // TODO: fix front camera on qcom2 pthread_t frontview_thread_handle; err = pthread_create(&frontview_thread_handle, NULL, frontview_thread, s); assert(err == 0); +#endif // priority for cameras err = set_realtime_priority(1); @@ -1071,9 +1102,11 @@ void party(VisionState *s) { zsock_signal(s->terminate_pub, 0); +#ifndef QCOM2 LOG("joining frontview_thread"); err = pthread_join(frontview_thread_handle, NULL); assert(err == 0); +#endif LOG("joining visionserver_thread"); err = pthread_join(visionserver_thread_handle, NULL); @@ -1109,7 +1142,7 @@ int main(int argc, char *argv[]) { init_buffers(s); -#ifdef QCOM +#if defined(QCOM) || defined(QCOM2) s->msg_context = Context::create(); s->frame_sock = PubSocket::create(s->msg_context, "frame"); s->front_frame_sock = PubSocket::create(s->msg_context, "frontFrame"); @@ -1118,12 +1151,10 @@ int main(int argc, char *argv[]) { assert(s->front_frame_sock != NULL); assert(s->thumbnail_sock != NULL); #endif - cameras_open(&s->cameras, &s->camera_bufs[0], &s->focus_bufs[0], &s->stats_bufs[0], &s->front_camera_bufs[0]); - party(s); -#ifdef QCOM +#if defined(QCOM) || defined(QCOM2) delete s->frame_sock; delete s->front_frame_sock; delete s->thumbnail_sock; diff --git a/selfdrive/camerad/snapshot/snapshot.py b/selfdrive/camerad/snapshot/snapshot.py index 0da8d679736f8e..4ba10bf4bea392 100755 --- 
a/selfdrive/camerad/snapshot/snapshot.py +++ b/selfdrive/camerad/snapshot/snapshot.py @@ -22,6 +22,9 @@ def snapshot(): params = Params() front_camera_allowed = int(params.get("RecordFront")) + if params.get("IsTakingSnapshot") == b"1": + return None + params.put("IsTakingSnapshot", "1") params.put("Offroad_IsTakingSnapshot", json.dumps(OFFROAD_ALERTS["Offroad_IsTakingSnapshot"])) time.sleep(2.0) # Give thermald time to read the param, or if just started give camerad time to start diff --git a/selfdrive/camerad/test/camera/test.c b/selfdrive/camerad/test/camera/test.c new file mode 100644 index 00000000000000..8a7d44517b0351 --- /dev/null +++ b/selfdrive/camerad/test/camera/test.c @@ -0,0 +1,48 @@ +#include +#include +#include + +#include "camera_qcom.h" + +bool do_exit = false; + +void cloudlog_e(int levelnum, const char* filename, int lineno, const char* func, + const char* fmt, ...) { + va_list args; + va_start(args, fmt); + vprintf(fmt, args); + printf("\n"); +} + +void set_thread_name(const char* name) { +} + +// tbuffers + +void tbuffer_init2(TBuffer *tb, int num_bufs, const char* name, + void (*release_cb)(void* c, int idx), + void* cb_cookie) { + printf("tbuffer_init2\n"); +} + +void tbuffer_dispatch(TBuffer *tb, int idx) { + printf("tbuffer_dispatch\n"); +} + +void tbuffer_stop(TBuffer *tb) { + printf("tbuffer_stop\n"); +} + +int main() { + DualCameraState s; + cameras_init(&s); + VisionBuf camera_bufs_rear[0x10] = {0}; + VisionBuf camera_bufs_focus[0x10] = {0}; + VisionBuf camera_bufs_stats[0x10] = {0}; + VisionBuf camera_bufs_front[0x10] = {0}; + cameras_open(&s, + camera_bufs_rear, camera_bufs_focus, + camera_bufs_stats, camera_bufs_front); + cameras_close(&s); +} + diff --git a/selfdrive/camerad/test/camera/test.sh b/selfdrive/camerad/test/camera/test.sh new file mode 100755 index 00000000000000..195a5a6a6b9678 --- /dev/null +++ b/selfdrive/camerad/test/camera/test.sh @@ -0,0 +1,12 @@ +#!/bin/sh + +gcc -I ../../include \ + -I 
~/one/phonelibs/android_system_core/include -I ~/one/phonelibs/opencl/include \ + -I ~/one/selfdrive/visiond/cameras \ + -I /usr/local/lib -I /usr/lib/aarch64-linux-gnu -I /usr/lib -I /data/op_rk3399_setup/external/snpe/lib/lib -I /data/data/com.termux/files/usr/lib -I /data/op_rk3399_setup/support_files/lib + + test.c ../../cameras/camera_qcom.c \ + -l:libczmq.a -l:libzmq.a -lgnustl_shared -lm -llog -lcutils \ + -l:libcapn.a -l:libcapnp.a -l:libkj.a \ + ~/one/cereal/gen/c/log.capnp.o + diff --git a/selfdrive/camerad/test/frame_test.py b/selfdrive/camerad/test/frame_test.py new file mode 100755 index 00000000000000..2518e80344c8e0 --- /dev/null +++ b/selfdrive/camerad/test/frame_test.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python3 +import time +import numpy as np +import cereal.messaging as messaging +from PIL import ImageFont, ImageDraw, Image + +font = ImageFont.truetype("arial", size=72) +def get_frame(idx): + img = np.zeros((874, 1164, 3), np.uint8) + img[100:400, 100:100+(idx%10)*100] = 255 + + # big number + im2 = Image.new("RGB", (200,200)) + draw = ImageDraw.Draw(im2) + draw.text((10, 100), "%02d" % idx, font=font) + img[400:600, 400:600] = np.array(im2.getdata()).reshape((200,200,3)) + return img.tostring() + +if __name__ == "__main__": + from common.realtime import Ratekeeper + rk = Ratekeeper(20) + + pm = messaging.PubMaster(['frame']) + frm = [get_frame(x) for x in range(30)] + idx = 0 + while 1: + print("send %d" % idx) + dat = messaging.new_message('frame') + dat.valid = True + dat.frame = { + "frameId": idx, + "image": frm[idx%len(frm)], + } + pm.send('frame', dat) + + idx += 1 + rk.keep_time() + #time.sleep(1.0) + diff --git a/selfdrive/camerad/test/stress_restart.sh b/selfdrive/camerad/test/stress_restart.sh new file mode 100755 index 00000000000000..0445dcba798229 --- /dev/null +++ b/selfdrive/camerad/test/stress_restart.sh @@ -0,0 +1,9 @@ +#!/bin/sh +cd .. +while :; do + ./camerad & + pid="$!" 
+ sleep 2 + kill -2 $pid + wait $pid +done diff --git a/selfdrive/camerad/test/yuv_bench/Makefile b/selfdrive/camerad/test/yuv_bench/Makefile new file mode 100644 index 00000000000000..4a47356d167cbf --- /dev/null +++ b/selfdrive/camerad/test/yuv_bench/Makefile @@ -0,0 +1,57 @@ +CC = clang +CXX = clang++ + +PHONELIBS = ../../../../phonelibs + +WARN_FLAGS = -Werror=implicit-function-declaration \ + -Werror=incompatible-pointer-types \ + -Werror=int-conversion \ + -Werror=return-type \ + -Werror=format-extra-args \ + -Wno-deprecated-declarations + +CFLAGS = -std=gnu11 -fPIC -O2 $(WARN_FLAGS) + +UNAME_M := $(shell uname -m) +ifeq ($(UNAME_M),x86_64) + OPENCL_LIBS = -framework OpenCL +else + OPENCL_FLAGS = -I$(PHONELIBS)/opencl/include + OPENCL_LIBS = -L/system/vendor/lib64 -lgsl -lCB -lOpenCL +endif + +OBJS += yuv_bench.o \ + ../../../common/util.o \ + ../../clutil.o + +OUTPUT = yuv + +.PHONY: all +all: $(OUTPUT) + +$(OUTPUT): $(OBJS) + @echo "[ LINK ] $@" + $(CXX) -fPIC -o '$@' $^ \ + -L/usr/lib \ + $(OPENCL_LIBS) + + +%.o: %.cc + @echo "[ CXX ] $@" + $(CXX) $(CXXFLAGS) -MMD \ + -I../.. -I../../.. -I ../../../.. \ + $(OPENCL_FLAGS) \ + -c -o '$@' '$<' + + +%.o: %.c + @echo "[ CC ] $@" + $(CC) $(CFLAGS) -MMD \ + -I../.. -I../../.. -I ../../../.. 
\ + $(OPENCL_FLAGS) \ + -c -o '$@' '$<' + +.PHONY: clean +clean: + rm -f $(OUTPUT) $(OBJS) $(DEPS) + diff --git a/selfdrive/camerad/test/yuv_bench/cnv.py b/selfdrive/camerad/test/yuv_bench/cnv.py new file mode 100644 index 00000000000000..86fd0c99823ac4 --- /dev/null +++ b/selfdrive/camerad/test/yuv_bench/cnv.py @@ -0,0 +1,19 @@ +import numpy as np +import cv2 + +# img_bgr = np.zeros((874, 1164, 3), dtype=np.uint8) +# for y in range(874): +# for k in range(1164*3): +# img_bgr[y, k//3, k%3] = k ^ y + +# cv2.imwrite("img_rgb.png", img_bgr) + + +cl = np.fromstring(open("out_cl.bin", "rb").read(), dtype=np.uint8) + +cl_r = cl.reshape(874 * 3 // 2, -1) + +cv2.imwrite("out_y.png", cl_r[:874]) + +cl_bgr = cv2.cvtColor(cl_r, cv2.COLOR_YUV2BGR_I420) +cv2.imwrite("out_cl.png", cl_bgr) diff --git a/selfdrive/camerad/test/yuv_bench/yuv.cl b/selfdrive/camerad/test/yuv_bench/yuv.cl new file mode 100644 index 00000000000000..03ce340a3862c3 --- /dev/null +++ b/selfdrive/camerad/test/yuv_bench/yuv.cl @@ -0,0 +1,116 @@ + +#define PIX_PER_WI_X 1 +#define PIX_PER_WI_Y 1 + +#define scn 3 +#define bidx 2 +#define uidx 0 + +#define R_COMP x +#define G_COMP y +#define B_COMP z + +__constant float c_RGB2YUVCoeffs_420[8] = { 0.256999969f, 0.50399971f, 0.09799957f, -0.1479988098f, -0.2909994125f, + 0.438999176f, -0.3679990768f, -0.0709991455f }; + +__kernel void RGB2YUV_YV12_IYUV(__global const uchar* srcptr, int src_step, int src_offset, + __global uchar* dstptr, int dst_step, int dst_offset, + int rows, int cols) +{ + int x = get_global_id(0) * PIX_PER_WI_X; + int y = get_global_id(1) * PIX_PER_WI_Y; + + if (x < cols/2) + { + int src_index = mad24(y << 1, src_step, mad24(x << 1, scn, src_offset)); + int ydst_index = mad24(y << 1, dst_step, (x << 1) + dst_offset); + int y_rows = rows / 3 * 2; + int vsteps[2] = { cols >> 1, dst_step - (cols >> 1)}; + __constant float* coeffs = c_RGB2YUVCoeffs_420; + + #pragma unroll + for (int cy = 0; cy < PIX_PER_WI_Y; ++cy) + { + if (y < rows / 3) + { + 
__global const uchar* src1 = srcptr + src_index; + __global const uchar* src2 = src1 + src_step; + __global uchar* ydst1 = dstptr + ydst_index; + __global uchar* ydst2 = ydst1 + dst_step; + + __global uchar* udst = dstptr + mad24(y_rows + (y>>1), dst_step, dst_offset + (y%2)*(cols >> 1) + x); + __global uchar* vdst = udst + mad24(y_rows >> 2, dst_step, y_rows % 4 ? vsteps[y%2] : 0); + +#if PIX_PER_WI_X == 2 + int s11 = *((__global const int*) src1); + int s12 = *((__global const int*) src1 + 1); + int s13 = *((__global const int*) src1 + 2); +#if scn == 4 + int s14 = *((__global const int*) src1 + 3); +#endif + int s21 = *((__global const int*) src2); + int s22 = *((__global const int*) src2 + 1); + int s23 = *((__global const int*) src2 + 2); +#if scn == 4 + int s24 = *((__global const int*) src2 + 3); +#endif + float src_pix1[scn * 4], src_pix2[scn * 4]; + + *((float4*) src_pix1) = convert_float4(as_uchar4(s11)); + *((float4*) src_pix1 + 1) = convert_float4(as_uchar4(s12)); + *((float4*) src_pix1 + 2) = convert_float4(as_uchar4(s13)); +#if scn == 4 + *((float4*) src_pix1 + 3) = convert_float4(as_uchar4(s14)); +#endif + *((float4*) src_pix2) = convert_float4(as_uchar4(s21)); + *((float4*) src_pix2 + 1) = convert_float4(as_uchar4(s22)); + *((float4*) src_pix2 + 2) = convert_float4(as_uchar4(s23)); +#if scn == 4 + *((float4*) src_pix2 + 3) = convert_float4(as_uchar4(s24)); +#endif + uchar4 y1, y2; + y1.x = convert_uchar_sat(fma(coeffs[0], src_pix1[ 2-bidx], fma(coeffs[1], src_pix1[ 1], fma(coeffs[2], src_pix1[ bidx], 16.5f)))); + y1.y = convert_uchar_sat(fma(coeffs[0], src_pix1[ scn+2-bidx], fma(coeffs[1], src_pix1[ scn+1], fma(coeffs[2], src_pix1[ scn+bidx], 16.5f)))); + y1.z = convert_uchar_sat(fma(coeffs[0], src_pix1[2*scn+2-bidx], fma(coeffs[1], src_pix1[2*scn+1], fma(coeffs[2], src_pix1[2*scn+bidx], 16.5f)))); + y1.w = convert_uchar_sat(fma(coeffs[0], src_pix1[3*scn+2-bidx], fma(coeffs[1], src_pix1[3*scn+1], fma(coeffs[2], src_pix1[3*scn+bidx], 16.5f)))); + 
y2.x = convert_uchar_sat(fma(coeffs[0], src_pix2[ 2-bidx], fma(coeffs[1], src_pix2[ 1], fma(coeffs[2], src_pix2[ bidx], 16.5f)))); + y2.y = convert_uchar_sat(fma(coeffs[0], src_pix2[ scn+2-bidx], fma(coeffs[1], src_pix2[ scn+1], fma(coeffs[2], src_pix2[ scn+bidx], 16.5f)))); + y2.z = convert_uchar_sat(fma(coeffs[0], src_pix2[2*scn+2-bidx], fma(coeffs[1], src_pix2[2*scn+1], fma(coeffs[2], src_pix2[2*scn+bidx], 16.5f)))); + y2.w = convert_uchar_sat(fma(coeffs[0], src_pix2[3*scn+2-bidx], fma(coeffs[1], src_pix2[3*scn+1], fma(coeffs[2], src_pix2[3*scn+bidx], 16.5f)))); + + *((__global int*) ydst1) = as_int(y1); + *((__global int*) ydst2) = as_int(y2); + + float uv[4] = { fma(coeffs[3], src_pix1[ 2-bidx], fma(coeffs[4], src_pix1[ 1], fma(coeffs[5], src_pix1[ bidx], 128.5f))), + fma(coeffs[5], src_pix1[ 2-bidx], fma(coeffs[6], src_pix1[ 1], fma(coeffs[7], src_pix1[ bidx], 128.5f))), + fma(coeffs[3], src_pix1[2*scn+2-bidx], fma(coeffs[4], src_pix1[2*scn+1], fma(coeffs[5], src_pix1[2*scn+bidx], 128.5f))), + fma(coeffs[5], src_pix1[2*scn+2-bidx], fma(coeffs[6], src_pix1[2*scn+1], fma(coeffs[7], src_pix1[2*scn+bidx], 128.5f))) }; + + udst[0] = convert_uchar_sat(uv[uidx] ); + vdst[0] = convert_uchar_sat(uv[1 - uidx]); + udst[1] = convert_uchar_sat(uv[2 + uidx]); + vdst[1] = convert_uchar_sat(uv[3 - uidx]); +#else + float4 src_pix1 = convert_float4(vload4(0, src1)); + float4 src_pix2 = convert_float4(vload4(0, src1+scn)); + float4 src_pix3 = convert_float4(vload4(0, src2)); + float4 src_pix4 = convert_float4(vload4(0, src2+scn)); + + ydst1[0] = convert_uchar_sat(fma(coeffs[0], src_pix1.R_COMP, fma(coeffs[1], src_pix1.G_COMP, fma(coeffs[2], src_pix1.B_COMP, 16.5f)))); + ydst1[1] = convert_uchar_sat(fma(coeffs[0], src_pix2.R_COMP, fma(coeffs[1], src_pix2.G_COMP, fma(coeffs[2], src_pix2.B_COMP, 16.5f)))); + ydst2[0] = convert_uchar_sat(fma(coeffs[0], src_pix3.R_COMP, fma(coeffs[1], src_pix3.G_COMP, fma(coeffs[2], src_pix3.B_COMP, 16.5f)))); + ydst2[1] = 
convert_uchar_sat(fma(coeffs[0], src_pix4.R_COMP, fma(coeffs[1], src_pix4.G_COMP, fma(coeffs[2], src_pix4.B_COMP, 16.5f)))); + + float uv[2] = { fma(coeffs[3], src_pix1.R_COMP, fma(coeffs[4], src_pix1.G_COMP, fma(coeffs[5], src_pix1.B_COMP, 128.5f))), + fma(coeffs[5], src_pix1.R_COMP, fma(coeffs[6], src_pix1.G_COMP, fma(coeffs[7], src_pix1.B_COMP, 128.5f))) }; + + udst[0] = convert_uchar_sat(uv[uidx] ); + vdst[0] = convert_uchar_sat(uv[1-uidx]); +#endif + ++y; + src_index += 2*src_step; + ydst_index += 2*dst_step; + } + } + } +} diff --git a/selfdrive/camerad/test/yuv_bench/yuv_bench.cc b/selfdrive/camerad/test/yuv_bench/yuv_bench.cc new file mode 100644 index 00000000000000..22e71c41282b2d --- /dev/null +++ b/selfdrive/camerad/test/yuv_bench/yuv_bench.cc @@ -0,0 +1,145 @@ +#include +#include +#include + +#include +#include + +// #include + +#ifdef __APPLE__ +#include +#else +#include +#endif + +#include "common/util.h" +#include "common/timing.h" +#include "common/mat.h" +#include "clutil.h" + +int main() { + + int rgb_width = 1164; + int rgb_height = 874; + + int rgb_stride = rgb_width*3; + + size_t out_size = rgb_width*rgb_height*3/2; + + uint8_t* rgb_buf = (uint8_t*)calloc(1, rgb_width*rgb_height*3); + uint8_t* out = (uint8_t*)calloc(1, out_size); + + for (int y=0; ywidth = width; s->height = height; - char args[1024]; + char args[8024]; + printf("snprintf"); snprintf(args, sizeof(args), - "-cl-fast-relaxed-math -cl-denorms-are-zero " + "-cl-denorms-are-zero " #ifdef CL_DEBUG "-DCL_DEBUG " #endif "-DWIDTH=%d -DHEIGHT=%d -DUV_WIDTH=%d -DUV_HEIGHT=%d -DRGB_STRIDE=%d -DRGB_SIZE=%d", width, height, width/ 2, height / 2, rgb_stride, width * height); - cl_program prg = CLU_LOAD_FROM_FILE(ctx, device_id, "transforms/rgb_to_yuv.cl", args); + printf("loading transforms"); + cl_program prg = CLU_LOAD_FROM_FILE(ctx, device_id, "/data/openpilot/selfdrive/camerad/transforms/rgb_to_yuv.cl", args); s->rgb_to_yuv_krnl = clCreateKernel(prg, "rgb_to_yuv", &err); assert(err == 
0); diff --git a/selfdrive/controls/controlsd.py b/selfdrive/controls/controlsd.py index 64fc7a91e2ada1..aeb19a46e7e667 100755 --- a/selfdrive/controls/controlsd.py +++ b/selfdrive/controls/controlsd.py @@ -553,7 +553,7 @@ def controlsd_thread(sm=None, pm=None, can_sock=None): logAllAliveAndValidInfoToTinklad(sm=sm, tinklaClient=tinklaClient) if not sm['pathPlan'].mpcSolutionValid: events.append(create_event('plannerError', [ET.NO_ENTRY, ET.IMMEDIATE_DISABLE])) - if not sm['pathPlan'].sensorValid: + if not sm['pathPlan'].sensorValid and os.getenv("NOSENSOR") is None: events.append(create_event('sensorDataInvalid', [ET.NO_ENTRY, ET.PERMANENT])) if not sm['pathPlan'].paramsValid: events.append(create_event('vehicleModelInvalid', [ET.WARNING])) diff --git a/selfdrive/manager.py b/selfdrive/manager.py index 687ee5669428f7..e2b7875d105187 100755 --- a/selfdrive/manager.py +++ b/selfdrive/manager.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3.7 +#!/usr/bin/env python3 import os import time import sys @@ -14,6 +14,7 @@ import datetime from common.basedir import BASEDIR, PARAMS +WEBCAM = os.getenv("WEBCAM") is not None from common.android import ANDROID sys.path.append(os.path.join(BASEDIR, "pyextra")) os.environ['BASEDIR'] = BASEDIR @@ -115,7 +116,7 @@ def unblock_stdout(): for i in range(3,-1,-1): print("....%d" % i) time.sleep(1) - subprocess.check_call(["scons", "-c"], cwd=BASEDIR, env=env) + #subprocess.check_call(["scons", "-c"], cwd=BASEDIR, env=env) shutil.rmtree("/tmp/scons_cache") else: raise RuntimeError("scons build failed") @@ -192,10 +193,13 @@ def get_running(): persistent_processes = [ 'tinklad', 'thermald', - 'logmessaged', 'ui', - 'uploader', ] +if not WEBCAM: + persistent_processes += [ + 'logmessaged', + 'uploader', + ] if ANDROID: persistent_processes += [ 'logcatd', @@ -206,17 +210,22 @@ def get_running(): car_started_processes = [ 'controlsd', 'plannerd', - 'loggerd', 'radard', 'dmonitoringd', 'calibrationd', 'paramsd', 'camerad', 'modeld', - 'proclogd', 
'ubloxd', 'locationd', + 'loggerd', ] + +if not WEBCAM: + car_started_processes += [ + 'proclogd', + ] + if ANDROID: car_started_processes += [ 'sensord', @@ -364,7 +373,9 @@ def manager_init(should_register=True): raise Exception("server registration failed") else: dongle_id = "c"*16 - + #BB + if not dongle_id: + dongle_id = "nada" # set dongle id cloudlog.info("dongle id is " + dongle_id) os.environ['DONGLE_ID'] = dongle_id @@ -564,7 +575,7 @@ def main(): ("LongitudinalControl", "0"), ("LimitSetSpeed", "0"), ("LimitSetSpeedNeural", "0"), - ("LastUpdateTime", datetime.datetime.now().isoformat().encode('utf8')), + ("LastUpdateTime", datetime.datetime.utcnow().isoformat().encode('utf8')), ("OpenpilotEnabledToggle", "1"), ("LaneChangeEnabled", "1"), ] diff --git a/selfdrive/modeld/SConscript b/selfdrive/modeld/SConscript index b60e9f579deb28..a683e1ceb7706a 100644 --- a/selfdrive/modeld/SConscript +++ b/selfdrive/modeld/SConscript @@ -1,7 +1,7 @@ Import('env', 'arch','is_tbp', 'messaging', 'common', 'gpucommon', 'visionipc') lenv = env.Clone() -libs = [messaging, common, 'OpenCL', 'SNPE', 'capnp', 'zmq', 'kj', 'yuv', gpucommon, visionipc] +libs = [messaging, common, 'OpenCL', 'capnp', 'zmq', 'kj', 'yuv', gpucommon, visionipc] common_src = [ "models/commonmodel.c", @@ -11,12 +11,20 @@ common_src = [ if is_tbp: arch = "aarch64_TBP" if arch == "aarch64": - libs += ['gsl', 'CB', 'gnustl_shared'] + libs += ['SNPE', 'gsl', 'CB', 'gnustl_shared'] elif arch == "aarch64_TBP": libs += ['pthread'] + # for tensorflow support + common_src = [ + "models/commonmodel.c", + "transforms/loadyuv.c", + "transforms/transform.c"] + common_src += ['runners/tfmodel.cc'] + # tell runners to use it + lenv['CFLAGS'].append("-DUSE_TF_MODEL") + lenv['CXXFLAGS'].append("-DUSE_TF_MODEL") else: libs += ['symphony-cpu', 'pthread'] - # for tensorflow support common_src += ['runners/tfmodel.cc'] # tell runners to use it diff --git a/selfdrive/modeld/__init__.py b/selfdrive/modeld/__init__.py new file 
mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/selfdrive/modeld/dmonitoringmodeld b/selfdrive/modeld/dmonitoringmodeld index 710e0e595b208f..51b520d74333e0 100755 --- a/selfdrive/modeld/dmonitoringmodeld +++ b/selfdrive/modeld/dmonitoringmodeld @@ -1,5 +1,5 @@ #!/bin/sh -export LD_LIBRARY_PATH="/data/pythonpath/phonelibs/snpe/aarch64-android-clang3.8:$HOME/openpilot/phonelibs/snpe/x86_64-linux-clang:$LD_LIBRARY_PATH" -export ADSP_LIBRARY_PATH="/data/pythonpath/phonelibs/snpe/aarch64-android-clang3.8/" +export LD_LIBRARY_PATH="/data/pythonpath/phonelibs/snpe/aarch64/:$HOME/openpilot/phonelibs/snpe/larch64:$HOME/openpilot/phonelibs/snpe/x86_64-linux-clang:$LD_LIBRARY_PATH" +export ADSP_LIBRARY_PATH="/data/pythonpath/phonelibs/snpe/aarch64/" exec ./_dmonitoringmodeld diff --git a/selfdrive/modeld/modeld b/selfdrive/modeld/modeld index bbe7006afc747e..5369034dfeeede 100755 --- a/selfdrive/modeld/modeld +++ b/selfdrive/modeld/modeld @@ -1,4 +1,4 @@ #!/bin/sh -export LD_LIBRARY_PATH="/data/pythonpath/phonelibs/snpe/aarch64-android-clang3.8/:$HOME/openpilot/phonelibs/snpe/x86_64-linux-clang:$LD_LIBRARY_PATH" +export LD_LIBRARY_PATH="/data/pythonpath/phonelibs/snpe/aarch64/:$HOME/openpilot/phonelibs/snpe/larch64:$HOME/openpilot/phonelibs/snpe/x86_64-linux-clang:$LD_LIBRARY_PATH" exec ./_modeld diff --git a/selfdrive/modeld/models/dmonitoring.cc b/selfdrive/modeld/models/dmonitoring.cc index 787608f91a5b49..58464b5357d593 100644 --- a/selfdrive/modeld/models/dmonitoring.cc +++ b/selfdrive/modeld/models/dmonitoring.cc @@ -10,8 +10,19 @@ #define MODEL_HEIGHT 320 #define FULL_W 426 +#if defined(QCOM) || defined(QCOM2) +#define input_lambda(x) (x - 128.f) * 0.0078125f +#else +#define input_lambda(x) x // for non SNPE running platforms, assume keras model instead has lambda layer +#endif + void dmonitoring_init(DMonitoringModelState* s) { - s->m = new DefaultRunModel("../../models/dmonitoring_model_q.dlc", (float*)&s->output, OUTPUT_SIZE, USE_DSP_RUNTIME); +#if 
defined(QCOM) || defined(QCOM2) + const char* model_path = "../../models/dmonitoring_model_q.dlc"; +#else + const char* model_path = "../../models/dmonitoring_model.dlc"; +#endif + s->m = new DefaultRunModel(model_path, (float*)&s->output, OUTPUT_SIZE, USE_DSP_RUNTIME); } DMonitoringResult dmonitoring_eval_frame(DMonitoringModelState* s, void* stream_buf, int width, int height) { @@ -84,27 +95,28 @@ DMonitoringResult dmonitoring_eval_frame(DMonitoringModelState* s, void* stream_ for (int r = 0; r < MODEL_HEIGHT/2; r++) { for (int c = 0; c < MODEL_WIDTH/2; c++) { // Y_ul - net_input_buf[(c*MODEL_HEIGHT/2) + r] = (resized_buf[(2*r*resized_width) + (2*c)] - 128.f) * 0.0078125f; + net_input_buf[(c*MODEL_HEIGHT/2) + r] = input_lambda(resized_buf[(2*r*resized_width) + (2*c)]); // Y_ur - net_input_buf[(c*MODEL_HEIGHT/2) + r + ((MODEL_WIDTH/2)*(MODEL_HEIGHT/2))] = (resized_buf[(2*r*resized_width) + (2*c+1)] - 128.f) * 0.0078125f; + net_input_buf[(c*MODEL_HEIGHT/2) + r + ((MODEL_WIDTH/2)*(MODEL_HEIGHT/2))] = input_lambda(resized_buf[(2*r*resized_width) + (2*c+1)]); // Y_dl - net_input_buf[(c*MODEL_HEIGHT/2) + r + (2*(MODEL_WIDTH/2)*(MODEL_HEIGHT/2))] = (resized_buf[(2*r*resized_width+1) + (2*c)] - 128.f) * 0.0078125f; + net_input_buf[(c*MODEL_HEIGHT/2) + r + (2*(MODEL_WIDTH/2)*(MODEL_HEIGHT/2))] = input_lambda(resized_buf[(2*r*resized_width+1) + (2*c)]); // Y_dr - net_input_buf[(c*MODEL_HEIGHT/2) + r + (3*(MODEL_WIDTH/2)*(MODEL_HEIGHT/2))] = (resized_buf[(2*r*resized_width+1) + (2*c+1)] - 128.f) * 0.0078125f; + net_input_buf[(c*MODEL_HEIGHT/2) + r + (3*(MODEL_WIDTH/2)*(MODEL_HEIGHT/2))] = input_lambda(resized_buf[(2*r*resized_width+1) + (2*c+1)]); // U - net_input_buf[(c*MODEL_HEIGHT/2) + r + (4*(MODEL_WIDTH/2)*(MODEL_HEIGHT/2))] = (resized_buf[(resized_width*resized_height) + (r*resized_width/2) + c] - 128.f) * 0.0078125f; + net_input_buf[(c*MODEL_HEIGHT/2) + r + (4*(MODEL_WIDTH/2)*(MODEL_HEIGHT/2))] = input_lambda(resized_buf[(resized_width*resized_height) + 
(r*resized_width/2) + c]); // V - net_input_buf[(c*MODEL_HEIGHT/2) + r + (5*(MODEL_WIDTH/2)*(MODEL_HEIGHT/2))] = (resized_buf[(resized_width*resized_height) + ((resized_width/2)*(resized_height/2)) + (r*resized_width/2) + c] - 128.f) * 0.0078125f; + net_input_buf[(c*MODEL_HEIGHT/2) + r + (5*(MODEL_WIDTH/2)*(MODEL_HEIGHT/2))] = input_lambda(resized_buf[(resized_width*resized_height) + ((resized_width/2)*(resized_height/2)) + (r*resized_width/2) + c]); } } - // FILE *dump_yuv_file = fopen("/sdcard/rawdump.yuv", "wb"); - // fwrite(raw_buf, height*width*3/2, sizeof(uint8_t), dump_yuv_file); - // fclose(dump_yuv_file); + //printf("preprocess completed. %d \n", yuv_buf_len); + //FILE *dump_yuv_file = fopen("/tmp/rawdump.yuv", "wb"); + //fwrite(raw_buf, height*width*3/2, sizeof(uint8_t), dump_yuv_file); + //fclose(dump_yuv_file); - // FILE *dump_yuv_file2 = fopen("/sdcard/inputdump.yuv", "wb"); - // fwrite(net_input_buf, MODEL_HEIGHT*MODEL_WIDTH*3/2, sizeof(float), dump_yuv_file2); - // fclose(dump_yuv_file2); + //FILE *dump_yuv_file2 = fopen("/tmp/inputdump.yuv", "wb"); + //fwrite(net_input_buf, MODEL_HEIGHT*MODEL_WIDTH*3/2, sizeof(float), dump_yuv_file2); + //fclose(dump_yuv_file2); delete[] cropped_buf; delete[] resized_buf; diff --git a/selfdrive/modeld/models/driving.cc b/selfdrive/modeld/models/driving.cc index b544a3e59ca035..2033ee8ec35631 100644 --- a/selfdrive/modeld/models/driving.cc +++ b/selfdrive/modeld/models/driving.cc @@ -205,14 +205,18 @@ void fill_meta(cereal::ModelData::MetaData::Builder meta, const float * meta_dat meta.setDesirePrediction(desire_pred); } -void fill_longi(cereal::ModelData::LongitudinalData::Builder longi, const float * long_v_data, const float * long_a_data) { +void fill_longi(cereal::ModelData::LongitudinalData::Builder longi, const float * long_x_data, const float * long_v_data, const float * long_a_data) { // just doing 10 vals, 1 every sec for now + float dist_arr[TIME_DISTANCE/10]; float speed_arr[TIME_DISTANCE/10]; float 
accel_arr[TIME_DISTANCE/10]; for (int i=0; i dist(&dist_arr[0], ARRAYSIZE(dist_arr)); + longi.setDistances(dist); kj::ArrayPtr speed(&speed_arr[0], ARRAYSIZE(speed_arr)); longi.setSpeeds(speed); kj::ArrayPtr accel(&accel_arr[0], ARRAYSIZE(accel_arr)); @@ -237,7 +241,7 @@ void model_publish(PubSocket *sock, uint32_t frame_id, auto right_lane = framed.initRightLane(); fill_path(right_lane, net_outputs.right_lane, true, -1.8); auto longi = framed.initLongitudinal(); - fill_longi(longi, net_outputs.long_v, net_outputs.long_a); + fill_longi(longi, net_outputs.long_x, net_outputs.long_v, net_outputs.long_a); // Find the distribution that corresponds to the current lead diff --git a/selfdrive/modeld/runners/keras_runner.py b/selfdrive/modeld/runners/keras_runner.py new file mode 100755 index 00000000000000..4dfd1976733452 --- /dev/null +++ b/selfdrive/modeld/runners/keras_runner.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python3 +# TODO: why are the keras models saved with python 2? +from __future__ import print_function + +import tensorflow as tf +import os +import sys +import tensorflow.keras as keras +import numpy as np +from tensorflow.keras.models import Model +from tensorflow.keras.models import load_model + +def read(sz): + dd = [] + gt = 0 + while gt < sz*4: + st = os.read(0, sz*4 - gt) + assert(len(st) > 0) + dd.append(st) + gt += len(st) + return np.fromstring(b''.join(dd), dtype=np.float32) + +def write(d): + os.write(1, d.tobytes()) + +def run_loop(m): + isize = m.inputs[0].shape[1] + osize = m.outputs[0].shape[1] + print("ready to run keras model %d -> %d" % (isize, osize), file=sys.stderr) + while 1: + idata = read(isize).reshape((1, isize)) + ret = m.predict_on_batch(idata) + write(ret) + +if __name__ == "__main__": + print(tf.__version__, file=sys.stderr) + # limit gram alloc + gpus = tf.config.experimental.list_physical_devices('GPU') + if len(gpus) > 0: + tf.config.experimental.set_virtual_device_configuration(gpus[0], 
[tf.config.experimental.VirtualDeviceConfiguration(memory_limit=2048)]) + + m = load_model(sys.argv[1]) + print(m, file=sys.stderr) + bs = [int(np.product(ii.shape[1:])) for ii in m.inputs] + ri = keras.layers.Input((sum(bs),)) + + tii = [] + acc = 0 + for i, ii in enumerate(m.inputs): + print(ii, file=sys.stderr) + ti = keras.layers.Lambda(lambda x: x[:,acc:acc+bs[i]], output_shape=(1, bs[i]))(ri) + acc += bs[i] + tr = keras.layers.Reshape(ii.shape[1:])(ti) + tii.append(tr) + no = keras.layers.Concatenate()(m(tii)) + m = Model(inputs=ri, outputs=[no]) + run_loop(m) + diff --git a/selfdrive/modeld/runners/tfmodel.cc b/selfdrive/modeld/runners/tfmodel.cc index 502ac4ba63a6e7..0f50614b8f6b98 100644 --- a/selfdrive/modeld/runners/tfmodel.cc +++ b/selfdrive/modeld/runners/tfmodel.cc @@ -91,8 +91,12 @@ void TFModel::addDesire(float *state, int state_size) { void TFModel::execute(float *net_input_buf, int buf_size) { // order must be this pwrite(net_input_buf, buf_size); - pwrite(desire_input_buf, desire_state_size); - pwrite(rnn_input_buf, rnn_state_size); + if (desire_input_buf != NULL) { + pwrite(desire_input_buf, desire_state_size); + } + if (rnn_input_buf != NULL) { + pwrite(rnn_input_buf, rnn_state_size); + } pread(output, output_size); } diff --git a/selfdrive/modeld/test/opencl_hooks/build.sh b/selfdrive/modeld/test/opencl_hooks/build.sh new file mode 100755 index 00000000000000..03f81153540515 --- /dev/null +++ b/selfdrive/modeld/test/opencl_hooks/build.sh @@ -0,0 +1,3 @@ +#!/bin/sh +gcc -fPIC -I /data/openpilot/phonelibs/opencl/include -shared hook.c + diff --git a/selfdrive/modeld/test/opencl_hooks/hook.c b/selfdrive/modeld/test/opencl_hooks/hook.c new file mode 100644 index 00000000000000..f2ee2c0d511362 --- /dev/null +++ b/selfdrive/modeld/test/opencl_hooks/hook.c @@ -0,0 +1,155 @@ +#include +#include +#include +#include +#include +#include + +static inline uint64_t nanos_since_boot() { + struct timespec t; + clock_gettime(CLOCK_BOOTTIME, &t); + return 
t.tv_sec * 1000000000ULL + t.tv_nsec; +} + +struct kernel { + cl_kernel k; + const char *name; + cl_program p; +}; + + +int k_index = 0; +struct kernel kk[0x1000] = {0}; + +FILE *f = NULL; + +cl_program clCreateProgramWithSource(cl_context context, + cl_uint count, + const char **strings, + const size_t *lengths, + cl_int *errcode_ret) { + printf("clCreateProgramWithSource: %d\n", count); + + if (f == NULL) { + f = fopen("/tmp/kernels.cl", "w"); + } + + fprintf(f, "/* ************************ PROGRAM BREAK ****************************/\n"); + for (int i = 0; i < count; i++) { + fprintf(f, "%s\n", strings[i]); + if (i != 0) fprintf(f, "/* ************************ SECTION BREAK ****************************/\n"); + } + fflush(f); + + cl_program (*my_clCreateProgramWithSource)(cl_context context, + cl_uint count, + const char **strings, + const size_t *lengths, + cl_int *errcode_ret) = dlsym(RTLD_NEXT, "REAL_clCreateProgramWithSource"); + + return my_clCreateProgramWithSource(context, count, strings, lengths, errcode_ret); +} + +cl_program clCreateProgramWithBinary(cl_context context, + cl_uint num_devices, + const cl_device_id *device_list, + const size_t *lengths, + const unsigned char **binaries, + cl_int *binary_status, + cl_int *errcode_ret) { + printf("clCreateProgramWithBinary\n"); + + cl_program (*my_clCreateProgramWithBinary)(cl_context context, + cl_uint num_devices, + const cl_device_id *device_list, + const size_t *lengths, + const unsigned char **binaries, + cl_int *binary_status, + cl_int *errcode_ret) = dlsym(RTLD_NEXT, "REAL_clCreateProgramWithBinary"); + + return my_clCreateProgramWithBinary(context, num_devices, device_list, lengths, binaries, binary_status, errcode_ret); +} + +cl_kernel clCreateKernel(cl_program program, const char *kernel_name, cl_int *errcode_ret) { + cl_kernel (*my_clCreateKernel)(cl_program program, const char *kernel_name, cl_int *errcode_ret); + my_clCreateKernel = dlsym(RTLD_NEXT, "REAL_clCreateKernel"); + cl_kernel ret = 
my_clCreateKernel(program, kernel_name, errcode_ret); + //printf("clCreateKernel: %s -> %p\n", kernel_name, ret); + + char *tmp = (char*)malloc(strlen(kernel_name)+1); + strcpy(tmp, kernel_name); + + kk[k_index].k = ret; + kk[k_index].name = tmp; + kk[k_index].p = program; + k_index++; + return ret; +} + + +uint64_t start_time = 0; +int cnt = 0; + +cl_int clEnqueueNDRangeKernel(cl_command_queue command_queue, + cl_kernel kernel, + cl_uint work_dim, + const size_t *global_work_offset, + const size_t *global_work_size, + const size_t *local_work_size, + cl_uint num_events_in_wait_list, + const cl_event *event_wait_list, + cl_event *event) { + + cl_int (*my_clEnqueueNDRangeKernel)(cl_command_queue, cl_kernel, cl_uint, + const size_t *, const size_t *, const size_t *, + cl_uint, const cl_event *, cl_event *) = NULL; + my_clEnqueueNDRangeKernel = dlsym(RTLD_NEXT, "REAL_clEnqueueNDRangeKernel"); + + if (start_time == 0) { + start_time = nanos_since_boot(); + } + + // get kernel name + const char *name = NULL; + cl_program p; + for (int i = 0; i < k_index; i++) { + if (kk[i].k == kernel) { + name = kk[i].name; + p = kk[i].p; + break; + } + } + + uint64_t tb = nanos_since_boot(); + cl_int ret = my_clEnqueueNDRangeKernel(command_queue, kernel, work_dim, + global_work_offset, global_work_size, local_work_size, + num_events_in_wait_list, event_wait_list, event); + uint64_t te = nanos_since_boot(); + + printf("%10lu run%8d in %5ld us command_queue:%p work_dim:%d event:%p ", (tb-start_time)/1000, cnt++, (te-tb)/1000, + command_queue, work_dim, event); + for (int i = 0; i < work_dim; i++) { + printf("%4zu ", global_work_size[i]); + } + printf("%p %s\n", p, name); + return ret; +} + +void *dlsym(void *handle, const char *symbol) { + void *(*my_dlsym)(void *handle, const char *symbol) = (void*)dlopen-0x2d4; + if (memcmp("REAL_", symbol, 5) == 0) { + return my_dlsym(handle, symbol+5); + } else if (strcmp("clEnqueueNDRangeKernel", symbol) == 0) { + return clEnqueueNDRangeKernel; + } 
else if (strcmp("clCreateKernel", symbol) == 0) { + return clCreateKernel; + } else if (strcmp("clCreateProgramWithSource", symbol) == 0) { + return clCreateProgramWithSource; + } else if (strcmp("clCreateProgramWithBinary", symbol) == 0) { + return clCreateProgramWithBinary; + } else { + printf("dlsym %s\n", symbol); + return my_dlsym(handle, symbol); + } +} + diff --git a/selfdrive/modeld/test/polyfit/.gitignore b/selfdrive/modeld/test/polyfit/.gitignore new file mode 100644 index 00000000000000..ba2906d0666cf7 --- /dev/null +++ b/selfdrive/modeld/test/polyfit/.gitignore @@ -0,0 +1 @@ +main diff --git a/selfdrive/modeld/test/polyfit/Makefile b/selfdrive/modeld/test/polyfit/Makefile new file mode 100644 index 00000000000000..1b8ce740e5b8b7 --- /dev/null +++ b/selfdrive/modeld/test/polyfit/Makefile @@ -0,0 +1,15 @@ +PHONELIBS = ../../../../phonelibs + +EIGEN_FLAGS = -I$(PHONELIBS)/eigen + +CXXFLAGS += $(EIGEN_FLAGS) +LDFLAGS += -lm + +.PHONY: clean + +main: main.cc data.h + g++ -O2 $(EIGEN_FLAGS) -o main main.cc -lm + + +clean: + rm -f main diff --git a/selfdrive/modeld/test/polyfit/data.h b/selfdrive/modeld/test/polyfit/data.h new file mode 100644 index 00000000000000..0eaedf29ee086d --- /dev/null +++ b/selfdrive/modeld/test/polyfit/data.h @@ -0,0 +1,8 @@ +#pragma once + +#define MODEL_PATH_DISTANCE 192 +#define POLYFIT_DEGREE 4 + +float pts[MODEL_PATH_DISTANCE] = {3.1261718, 3.1642578, 3.0548828, 3.1125, 3.190625, 3.01875, 2.9816406, 3.1222656, 2.9728515, 2.9826171, 3.034375, 3.0392578, 3.1642578, 3.0792968, 3.0011718, 3.0705078, 2.9904296, 3.0089843, 3.0597656, 3.0978515, 2.9210937, 2.9992187, 2.9474609, 2.9621093, 2.9289062, 2.89375, 2.7975585, 2.9015625, 2.8175781, 2.9132812, 2.8175781, 2.7501953, 2.8332031, 2.8166015, 2.7638671, 2.8878906, 2.7599609, 2.6999023, 2.6720703, 2.6398437, 2.7243164, 2.6120117, 2.6588867, 2.5558593, 2.5978515, 2.5485351, 2.4269531, 2.5001953, 2.4855468, 2.4367187, 2.2973144, 2.2812011, 2.2890136, 2.39375, 2.2836425, 2.3815429, 
2.2138183, 2.1964843, 2.1840332, 2.1759765, 2.0421875, 2.1034667, 2.0281494, 2.0880859, 1.9706542, 1.9276855, 1.8522155, 1.8991821, 1.7780273, 1.8180053, 1.8326843, 1.8270385, 1.7182128, 1.6439941, 1.5360839, 1.68385, 1.4584472, 1.5955322, 1.6002929, 1.4157226, 1.4704101, 1.2936523, 1.2990234, 1.4281738, 1.4357421, 1.409375, 1.2511718, 1.2194335, 1.1554687, 1.043164, 1.0954101, 1.0392578, 1.0895507, 1.0880859, 0.897168, 0.83369142, 0.86494142, 0.87763673, 0.85322267, 0.72968751, 0.57832032, 0.73066407, 0.78828126, 0.69160157, 0.64375, 0.5919922, 0.5529297, 0.52070314, 0.60957032, 0.51093751, 0.3576172, 0.49921876, 0.284375, 0.21992187, 0.25214845, 0.30683595, 0.30976564, 0.2716797, 0.22089843, 0.25507814, 0.084179685, 0.071484372, 0.1828125, 0.15644531, 0.13789062, 0.054882813, 0.021679688, -0.091601565, -0.0203125, -0.13359375, -0.037890624, -0.29765624, -0.15605469, -0.30351561, -0.055468749, -0.22148438, -0.246875, -0.31718749, -0.25468749, -0.35234374, -0.16484375, -0.56523436, -0.56523436, -0.39921874, -0.58671874, -0.45585936, -0.50859374, -0.44023436, -0.42656249, -0.56328124, -0.70195311, -0.403125, -0.76445311, -0.98710936, -0.7625, -0.75273436, -0.825, -0.996875, -0.86210936, -0.99492186, -0.85625, -0.88359374, -0.97148436, -1.0320313, -1.1609375, -1.1296875, -1.0203125, -1.0691407, -1.2371094, -1.1277344, -1.2214844, -1.1921875, -1.2996094, -1.2917969, -1.3699219, -1.434375, -1.3699219, -1.3601563, -1.5730469, -1.3152344, -1.4851563, -1.48125, -1.5925782, -1.746875, -1.5847657, -1.6003907, -1.5984375, -1.7703125, -1.8328125, -1.8152344, -1.9714844, -1.9421875}; + +float stds[MODEL_PATH_DISTANCE] = {1.0945262, 1.156862, 1.0777057, 1.1501777, 1.234844, 1.0140595, 1.2004665, 1.1926303, 1.1269455, 1.0362904, 0.98873031, 0.88530254, 1.0078473, 0.93637651, 0.90959895, 0.86409503, 0.86353016, 0.74534553, 0.78025728, 0.88014913, 0.75756663, 0.77129823, 0.75581717, 0.79222, 0.84098673, 0.79402477, 0.85648865, 0.80315614, 0.77346581, 0.73097658, 0.72557795, 
0.72930044, 0.666103, 0.77142948, 0.704379, 0.6806078, 0.67680347, 0.71318036, 0.72244918, 0.66123307, 0.62547487, 0.67786956, 0.68404138, 0.70508122, 0.62400025, 0.72325015, 0.73942852, 0.67811751, 0.70370805, 0.65040058, 0.6870054, 0.66093785, 0.666103, 0.70040214, 0.65300173, 0.69714534, 0.65825552, 0.64833081, 0.6464982, 0.75850725, 0.69627059, 0.71659386, 0.69307244, 0.61554217, 0.62015557, 0.61998636, 0.67650336, 0.68142927, 0.6278621, 0.612294, 0.62592906, 0.63736153, 0.74233508, 0.69297016, 0.69621509, 0.67229682, 0.64879686, 0.72361159, 0.70229048, 0.60928106, 0.62712252, 0.66923952, 0.65802008, 0.68361813, 0.61587888, 0.63348651, 0.60727841, 0.64873856, 0.68847752, 0.58432156, 0.61683363, 0.63311422, 0.64981711, 0.57369792, 0.62604266, 0.62162364, 0.62066346, 0.62808979, 0.58524042, 0.63537884, 0.65367514, 0.63900274, 0.61089778, 0.62513435, 0.6470505, 0.63952166, 0.5937764, 0.64310449, 0.64330715, 0.64322031, 0.64632386, 0.60827911, 0.58887208, 0.61959165, 0.70725286, 0.64287293, 0.62326396, 0.65896219, 0.55610275, 0.6658656, 0.65681434, 0.583188, 0.6311124, 0.559652, 0.71419227, 0.62490743, 0.66699386, 0.62032485, 0.663036, 0.61414057, 0.66179425, 0.59399503, 0.65203643, 0.67839557, 0.63698763, 0.617452, 0.61022842, 0.7398752, 0.65657932, 0.68718743, 0.67901206, 0.66126263, 0.69949967, 0.70709819, 0.713336, 0.68130863, 0.68652785, 0.67028236, 0.7626031, 0.65259206, 0.72977453, 0.66049516, 0.64261246, 0.66906089, 0.69762796, 0.73719794, 0.69081914, 0.69849437, 0.72435051, 0.62354708, 0.68812829, 0.7193296, 0.66211933, 0.69278532, 0.7518425, 0.69661695, 0.672491, 0.71539241, 0.7369433, 0.66120356, 0.79088491, 0.77491313, 0.79442614, 0.7878198, 0.78881842, 0.70690477, 0.80707121, 0.78768665, 0.7215547, 0.75226194, 0.72196257, 0.765799, 0.77267712, 0.75844234, 0.81038833, 0.81188059, 0.79864907, 0.816436, 0.845298, 0.85074174, 0.73668873, 0.83516812}; diff --git a/selfdrive/modeld/test/polyfit/main.cc b/selfdrive/modeld/test/polyfit/main.cc new file mode 
100644 index 00000000000000..b649ee9ca5c407 --- /dev/null +++ b/selfdrive/modeld/test/polyfit/main.cc @@ -0,0 +1,52 @@ +#include +#include + +#include + +#include "data.h" + +Eigen::Matrix vander; + +void poly_init(){ + // Build Vandermonde matrix + for(int i = 0; i < MODEL_PATH_DISTANCE; i++) { + for(int j = 0; j < POLYFIT_DEGREE; j++) { + vander(i, j) = pow(i, POLYFIT_DEGREE-j-1); + } + } +} + +void poly_fit(float *in_pts, float *in_stds, float *out) { + // References to inputs + Eigen::Map > pts(in_pts, MODEL_PATH_DISTANCE); + Eigen::Map > std(in_stds, MODEL_PATH_DISTANCE); + Eigen::Map > p(out, POLYFIT_DEGREE); + + // Build Least Squares equations + Eigen::Matrix lhs = vander.array().colwise() / std.array(); + Eigen::Matrix rhs = pts.array() / std.array(); + + Eigen::Matrix scale = 1. / (lhs.array()*lhs.array()).sqrt().colwise().sum(); + lhs = lhs * scale.asDiagonal(); + + // Solve inplace + Eigen::ColPivHouseholderQR > qr(lhs); + p = qr.solve(rhs); + + p = p.transpose() * scale.asDiagonal(); +} + +int main(void) { + poly_init(); + + + float poly[4]; + poly_fit(pts, stds, poly); + + std::cout << "["; + std::cout << poly[0] << ","; + std::cout << poly[1] << ","; + std::cout << poly[2] << ","; + std::cout << poly[3]; + std::cout << "]" << std::endl; +} diff --git a/selfdrive/modeld/test/polyfit/test.py b/selfdrive/modeld/test/polyfit/test.py new file mode 100644 index 00000000000000..0caadf352a294d --- /dev/null +++ b/selfdrive/modeld/test/polyfit/test.py @@ -0,0 +1,24 @@ +import numpy as np + +pts = np.array([3.1261718, 3.1642578, 3.0548828, 3.1125, 3.190625, 3.01875, 2.9816406, 3.1222656, 2.9728515, 2.9826171, 3.034375, 3.0392578, 3.1642578, 3.0792968, 3.0011718, 3.0705078, 2.9904296, 3.0089843, 3.0597656, 3.0978515, 2.9210937, 2.9992187, 2.9474609, 2.9621093, 2.9289062, 2.89375, 2.7975585, 2.9015625, 2.8175781, 2.9132812, 2.8175781, 2.7501953, 2.8332031, 2.8166015, 2.7638671, 2.8878906, 2.7599609, 2.6999023, 2.6720703, 2.6398437, 2.7243164, 2.6120117, 
2.6588867, 2.5558593, 2.5978515, 2.5485351, 2.4269531, 2.5001953, 2.4855468, 2.4367187, 2.2973144, 2.2812011, 2.2890136, 2.39375, 2.2836425, 2.3815429, 2.2138183, 2.1964843, 2.1840332, 2.1759765, 2.0421875, 2.1034667, 2.0281494, 2.0880859, 1.9706542, 1.9276855, 1.8522155, 1.8991821, 1.7780273, 1.8180053, 1.8326843, 1.8270385, 1.7182128, 1.6439941, 1.5360839, 1.68385, 1.4584472, 1.5955322, 1.6002929, 1.4157226, 1.4704101, 1.2936523, 1.2990234, 1.4281738, 1.4357421, 1.409375, 1.2511718, 1.2194335, 1.1554687, 1.043164, 1.0954101, 1.0392578, 1.0895507, 1.0880859, 0.897168, 0.83369142, 0.86494142, 0.87763673, 0.85322267, 0.72968751, 0.57832032, 0.73066407, 0.78828126, 0.69160157, 0.64375, 0.5919922, 0.5529297, 0.52070314, 0.60957032, 0.51093751, 0.3576172, 0.49921876, 0.284375, 0.21992187, 0.25214845, 0.30683595, 0.30976564, 0.2716797, 0.22089843, 0.25507814, 0.084179685, 0.071484372, 0.1828125, 0.15644531, 0.13789062, 0.054882813, 0.021679688, -0.091601565, -0.0203125, -0.13359375, -0.037890624, -0.29765624, -0.15605469, -0.30351561, -0.055468749, -0.22148438, -0.246875, -0.31718749, -0.25468749, -0.35234374, -0.16484375, -0.56523436, -0.56523436, -0.39921874, -0.58671874, -0.45585936, -0.50859374, -0.44023436, -0.42656249, -0.56328124, -0.70195311, -0.403125, -0.76445311, -0.98710936, -0.7625, -0.75273436, -0.825, -0.996875, -0.86210936, -0.99492186, -0.85625, -0.88359374, -0.97148436, -1.0320313, -1.1609375, -1.1296875, -1.0203125, -1.0691407, -1.2371094, -1.1277344, -1.2214844, -1.1921875, -1.2996094, -1.2917969, -1.3699219, -1.434375, -1.3699219, -1.3601563, -1.5730469, -1.3152344, -1.4851563, -1.48125, -1.5925782, -1.746875, -1.5847657, -1.6003907, -1.5984375, -1.7703125, -1.8328125, -1.8152344, -1.9714844, -1.9421875]) + +stds = np.array([1.0945262, 1.156862, 1.0777057, 1.1501777, 1.234844, 1.0140595, 1.2004665, 1.1926303, 1.1269455, 1.0362904, 0.98873031, 0.88530254, 1.0078473, 0.93637651, 0.90959895, 0.86409503, 0.86353016, 0.74534553, 0.78025728, 0.88014913, 
0.75756663, 0.77129823, 0.75581717, 0.79222, 0.84098673, 0.79402477, 0.85648865, 0.80315614, 0.77346581, 0.73097658, 0.72557795, 0.72930044, 0.666103, 0.77142948, 0.704379, 0.6806078, 0.67680347, 0.71318036, 0.72244918, 0.66123307, 0.62547487, 0.67786956, 0.68404138, 0.70508122, 0.62400025, 0.72325015, 0.73942852, 0.67811751, 0.70370805, 0.65040058, 0.6870054, 0.66093785, 0.666103, 0.70040214, 0.65300173, 0.69714534, 0.65825552, 0.64833081, 0.6464982, 0.75850725, 0.69627059, 0.71659386, 0.69307244, 0.61554217, 0.62015557, 0.61998636, 0.67650336, 0.68142927, 0.6278621, 0.612294, 0.62592906, 0.63736153, 0.74233508, 0.69297016, 0.69621509, 0.67229682, 0.64879686, 0.72361159, 0.70229048, 0.60928106, 0.62712252, 0.66923952, 0.65802008, 0.68361813, 0.61587888, 0.63348651, 0.60727841, 0.64873856, 0.68847752, 0.58432156, 0.61683363, 0.63311422, 0.64981711, 0.57369792, 0.62604266, 0.62162364, 0.62066346, 0.62808979, 0.58524042, 0.63537884, 0.65367514, 0.63900274, 0.61089778, 0.62513435, 0.6470505, 0.63952166, 0.5937764, 0.64310449, 0.64330715, 0.64322031, 0.64632386, 0.60827911, 0.58887208, 0.61959165, 0.70725286, 0.64287293, 0.62326396, 0.65896219, 0.55610275, 0.6658656, 0.65681434, 0.583188, 0.6311124, 0.559652, 0.71419227, 0.62490743, 0.66699386, 0.62032485, 0.663036, 0.61414057, 0.66179425, 0.59399503, 0.65203643, 0.67839557, 0.63698763, 0.617452, 0.61022842, 0.7398752, 0.65657932, 0.68718743, 0.67901206, 0.66126263, 0.69949967, 0.70709819, 0.713336, 0.68130863, 0.68652785, 0.67028236, 0.7626031, 0.65259206, 0.72977453, 0.66049516, 0.64261246, 0.66906089, 0.69762796, 0.73719794, 0.69081914, 0.69849437, 0.72435051, 0.62354708, 0.68812829, 0.7193296, 0.66211933, 0.69278532, 0.7518425, 0.69661695, 0.672491, 0.71539241, 0.7369433, 0.66120356, 0.79088491, 0.77491313, 0.79442614, 0.7878198, 0.78881842, 0.70690477, 0.80707121, 0.78768665, 0.7215547, 0.75226194, 0.72196257, 0.765799, 0.77267712, 0.75844234, 0.81038833, 0.81188059, 0.79864907, 0.816436, 0.845298, 0.85074174, 
0.73668873, 0.83516812]) + +order = 3 + +x = np.arange(0, len(pts)) +print(np.polyfit(x, pts, order, w=1/stds)) + +# Do polyfit manually +w = 1.0 / stds +lhs = np.vander(x, order+1).astype(np.float64) +rhs = pts + +lhs *= np.atleast_2d(w).T +rhs *= w + +scale = np.sqrt((lhs*lhs).sum(axis=0)) +lhs = lhs / scale +c, resids, rank, s = np.linalg.lstsq(lhs, rhs, rcond=None) +c = (c.T/scale).T +print(c) diff --git a/selfdrive/modeld/test/snpe_benchmark/.gitignore b/selfdrive/modeld/test/snpe_benchmark/.gitignore new file mode 100644 index 00000000000000..d83a1b2ff5c336 --- /dev/null +++ b/selfdrive/modeld/test/snpe_benchmark/.gitignore @@ -0,0 +1 @@ +benchmark diff --git a/selfdrive/modeld/test/snpe_benchmark/benchmark.cc b/selfdrive/modeld/test/snpe_benchmark/benchmark.cc new file mode 100644 index 00000000000000..0e8f4faf494c78 --- /dev/null +++ b/selfdrive/modeld/test/snpe_benchmark/benchmark.cc @@ -0,0 +1,190 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace std; + +int64_t timespecDiff(struct timespec *timeA_p, struct timespec *timeB_p) { + return ((timeA_p->tv_sec * 1000000000) + timeA_p->tv_nsec) - ((timeB_p->tv_sec * 1000000000) + timeB_p->tv_nsec); +} + +void PrintErrorStringAndExit() { + cout << "ERROR!" 
<< endl; + const char* const errStr = zdl::DlSystem::getLastErrorString(); + std::cerr << errStr << std::endl; + std::exit(EXIT_FAILURE); +} + + +zdl::DlSystem::Runtime_t checkRuntime() { + static zdl::DlSystem::Version_t Version = zdl::SNPE::SNPEFactory::getLibraryVersion(); + static zdl::DlSystem::Runtime_t Runtime; + std::cout << "SNPE Version: " << Version.asString().c_str() << std::endl; //Print Version number + if (zdl::SNPE::SNPEFactory::isRuntimeAvailable(zdl::DlSystem::Runtime_t::DSP)) { + std::cout << "Using DSP runtime" << std::endl; + Runtime = zdl::DlSystem::Runtime_t::DSP; + } else if (zdl::SNPE::SNPEFactory::isRuntimeAvailable(zdl::DlSystem::Runtime_t::GPU)) { + std::cout << "Using GPU runtime" << std::endl; + Runtime = zdl::DlSystem::Runtime_t::GPU; + } else { + std::cout << "Using cpu runtime" << std::endl; + Runtime = zdl::DlSystem::Runtime_t::CPU; + } + return Runtime; +} + +void test(char *filename) { + static zdl::DlSystem::Runtime_t runtime = checkRuntime(); + std::unique_ptr container; + container = zdl::DlContainer::IDlContainer::open(filename); + + if (!container) { PrintErrorStringAndExit(); } + cout << "start build" << endl; + std::unique_ptr snpe; + { + snpe = NULL; + zdl::SNPE::SNPEBuilder snpeBuilder(container.get()); + snpe = snpeBuilder.setOutputLayers({}) + .setRuntimeProcessor(runtime) + .setUseUserSuppliedBuffers(false) + //.setDebugMode(true) + .build(); + if (!snpe) { + cout << "ERROR!" 
<< endl; + const char* const errStr = zdl::DlSystem::getLastErrorString(); + std::cerr << errStr << std::endl; + } + cout << "ran snpeBuilder" << endl; + } + + const auto &strList_opt = snpe->getInputTensorNames(); + if (!strList_opt) throw std::runtime_error("Error obtaining input tensor names"); + + cout << "get input tensor names done" << endl; + const auto &strList = *strList_opt; + static zdl::DlSystem::TensorMap inputTensorMap; + static zdl::DlSystem::TensorMap outputTensorMap; + vector > inputs; + for (int i = 0; i < strList.size(); i++) { + cout << "input name: " << strList.at(i) << endl; + + const auto &inputDims_opt = snpe->getInputDimensions(strList.at(i)); + const auto &inputShape = *inputDims_opt; + inputs.push_back(zdl::SNPE::SNPEFactory::getTensorFactory().createTensor(inputShape)); + inputTensorMap.add(strList.at(i), inputs[i].get()); + } + + struct timespec start, end; + cout << "**** starting benchmark ****" << endl; + for (int i = 0; i < 50; i++) { + clock_gettime(CLOCK_MONOTONIC, &start); + assert(snpe->execute(inputTensorMap, outputTensorMap)); + clock_gettime(CLOCK_MONOTONIC, &end); + uint64_t timeElapsed = timespecDiff(&end, &start); + printf("time: %f ms\n", timeElapsed*1.0/1e6); + } +} + +void get_testframe(int index, std::unique_ptr &input) { + FILE * pFile; + string filepath="/data/ipt/quantize_samples/sample_input_"+std::to_string(index); + pFile = fopen(filepath.c_str(),"rb"); + int length = 1*6*160*320*4; + float * frame_buffer = new float[length/4]; // 32/8 + fread(frame_buffer, length, 1, pFile); + // std::cout << *(frame_buffer+length/4-1) << std::endl; + std::copy(frame_buffer, frame_buffer+(length/4), input->begin()); +} + +void SaveITensor(const std::string& path, const zdl::DlSystem::ITensor* tensor) +{ + std::ofstream os(path, std::ofstream::binary); + if (!os) + { + std::cerr << "Failed to open output file for writing: " << path << "\n"; + std::exit(EXIT_FAILURE); + } + for ( auto it = tensor->cbegin(); it != tensor->cend(); 
++it ) + { + float f = *it; + if (!os.write(reinterpret_cast(&f), sizeof(float))) + { + std::cerr << "Failed to write data to: " << path << "\n"; + std::exit(EXIT_FAILURE); + } + } +} + +void testrun(char* modelfile) { + static zdl::DlSystem::Runtime_t runtime = checkRuntime(); + std::unique_ptr container; + container = zdl::DlContainer::IDlContainer::open(modelfile); + + if (!container) { PrintErrorStringAndExit(); } + cout << "start build" << endl; + std::unique_ptr snpe; + { + snpe = NULL; + zdl::SNPE::SNPEBuilder snpeBuilder(container.get()); + snpe = snpeBuilder.setOutputLayers({}) + .setRuntimeProcessor(runtime) + .setUseUserSuppliedBuffers(false) + //.setDebugMode(true) + .build(); + if (!snpe) { + cout << "ERROR!" << endl; + const char* const errStr = zdl::DlSystem::getLastErrorString(); + std::cerr << errStr << std::endl; + } + cout << "ran snpeBuilder" << endl; + } + + const auto &strList_opt = snpe->getInputTensorNames(); + if (!strList_opt) throw std::runtime_error("Error obtaining input tensor names"); + cout << "get input tensor names done" << endl; + + const auto &strList = *strList_opt; + static zdl::DlSystem::TensorMap inputTensorMap; + static zdl::DlSystem::TensorMap outputTensorMap; + + assert (strList.size() == 1); + const auto &inputDims_opt = snpe->getInputDimensions(strList.at(0)); + const auto &inputShape = *inputDims_opt; + std::cout << "winkwink" << std::endl; + + for (int i=0;i<10000;i++) { + std::unique_ptr input; + input = zdl::SNPE::SNPEFactory::getTensorFactory().createTensor(inputShape); + get_testframe(i,input); + snpe->execute(input.get(), outputTensorMap); + zdl::DlSystem::StringList tensorNames = outputTensorMap.getTensorNames(); + std::for_each( tensorNames.begin(), tensorNames.end(), [&](const char* name) { + std::ostringstream path; + path << "/data/opt/Result_" << std::to_string(i) << ".raw"; + auto tensorPtr = outputTensorMap.getTensor(name); + SaveITensor(path.str(), tensorPtr); + }); + } +} + +int main(int argc, char* 
argv[]) { + if (argc < 2) { + printf("usage: %s \n", argv[0]); + return -1; + } + + if (argc == 2) { + while(1) test(argv[1]); + } else if (argc == 3) { + testrun(argv[1]); + } + return 0; +} + diff --git a/selfdrive/modeld/test/snpe_benchmark/benchmark.sh b/selfdrive/modeld/test/snpe_benchmark/benchmark.sh new file mode 100755 index 00000000000000..e87da7db4d4020 --- /dev/null +++ b/selfdrive/modeld/test/snpe_benchmark/benchmark.sh @@ -0,0 +1,4 @@ +#!/bin/sh -e +clang++ -I /data/openpilot/phonelibs/snpe/include/ -L/data/pythonpath/phonelibs/snpe/aarch64 -lSNPE benchmark.cc -o benchmark +export LD_LIBRARY_PATH="/data/pythonpath/phonelibs/snpe/aarch64/:$HOME/openpilot/phonelibs/snpe/x86_64/:$LD_LIBRARY_PATH" +exec ./benchmark $1 diff --git a/selfdrive/modeld/test/tf_test/build.sh b/selfdrive/modeld/test/tf_test/build.sh new file mode 100755 index 00000000000000..4e92ca06981999 --- /dev/null +++ b/selfdrive/modeld/test/tf_test/build.sh @@ -0,0 +1,2 @@ +#!/bin/bash +clang++ -I /home/batman/one/external/tensorflow/include/ -L /home/batman/one/external/tensorflow/lib -Wl,-rpath=/home/batman/one/external/tensorflow/lib main.cc -ltensorflow diff --git a/selfdrive/modeld/test/tf_test/main.cc b/selfdrive/modeld/test/tf_test/main.cc new file mode 100644 index 00000000000000..bd325c81c88ad7 --- /dev/null +++ b/selfdrive/modeld/test/tf_test/main.cc @@ -0,0 +1,69 @@ +#include +#include +#include +#include "tensorflow/c/c_api.h" + +void* read_file(const char* path, size_t* out_len) { + FILE* f = fopen(path, "r"); + if (!f) { + return NULL; + } + fseek(f, 0, SEEK_END); + long f_len = ftell(f); + rewind(f); + + char* buf = (char*)calloc(f_len, 1); + assert(buf); + + size_t num_read = fread(buf, f_len, 1, f); + fclose(f); + + if (num_read != 1) { + free(buf); + return NULL; + } + + if (out_len) { + *out_len = f_len; + } + + return buf; +} + +static void DeallocateBuffer(void* data, size_t) { + free(data); +} + +int main(int argc, char* argv[]) { + TF_Buffer* buf; + TF_Graph* graph; 
+ TF_Status* status; + char *path = argv[1]; + + // load model + { + size_t model_size; + char tmp[1024]; + snprintf(tmp, sizeof(tmp), "%s.pb", path); + printf("loading model %s\n", tmp); + uint8_t *model_data = (uint8_t *)read_file(tmp, &model_size); + buf = TF_NewBuffer(); + buf->data = model_data; + buf->length = model_size; + buf->data_deallocator = DeallocateBuffer; + printf("loaded model of size %d\n", model_size); + } + + // import graph + status = TF_NewStatus(); + graph = TF_NewGraph(); + TF_ImportGraphDefOptions *opts = TF_NewImportGraphDefOptions(); + TF_GraphImportGraphDef(graph, buf, opts, status); + TF_DeleteImportGraphDefOptions(opts); + TF_DeleteBuffer(buf); + if (TF_GetCode(status) != TF_OK) { + printf("FAIL: %s\n", TF_Message(status)); + } else { + printf("SUCCESS\n"); + } +} diff --git a/selfdrive/modeld/test/tf_test/pb_loader.py b/selfdrive/modeld/test/tf_test/pb_loader.py new file mode 100755 index 00000000000000..d4db20eb661b10 --- /dev/null +++ b/selfdrive/modeld/test/tf_test/pb_loader.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python3 +import sys +import tensorflow as tf + +with open(sys.argv[1], "rb") as f: + graph_def = tf.compat.v1.GraphDef() + graph_def.ParseFromString(f.read()) + #tf.io.write_graph(graph_def, '', sys.argv[1]+".try") + diff --git a/selfdrive/modeld/visiontest.c b/selfdrive/modeld/visiontest.c new file mode 100644 index 00000000000000..325e257e86ff2a --- /dev/null +++ b/selfdrive/modeld/visiontest.c @@ -0,0 +1,160 @@ +#include +#include +#include + +#define CL_USE_DEPRECATED_OPENCL_1_2_APIS +#ifdef __APPLE__ +#include +#else +#include +#endif + +#include "clutil.h" +#include "transforms/transform.h" + +typedef struct { + int disable_model; + Transform transform; + + int in_width; + int in_height; + int out_width; + int out_height; + + cl_context context; + cl_command_queue command_queue; + cl_device_id device_id; + + size_t in_yuv_size; + cl_mem in_yuv_cl; + + cl_mem out_y_cl, out_u_cl, out_v_cl; +} VisionTest; + +void 
initialize_opencl(VisionTest* visiontest) { + // init cl + /* Get Platform and Device Info */ + cl_platform_id platform_ids[16] = {0}; + cl_uint num_platforms; + int err = clGetPlatformIDs(16, platform_ids, &num_platforms); + if (err != 0) { + fprintf(stderr, "cl error: %d\n", err); + } + assert(err == 0); + + // try to find a CPU device + cl_device_id device_id = NULL; + for (int i=0; icontext = clCreateContext(NULL, 1, &device_id, NULL, NULL, &err); + assert(err == 0); + + visiontest->device_id = device_id; +} + +VisionTest* visiontest_create(int temporal_model, int disable_model, + int input_width, int input_height, + int model_input_width, int model_input_height) { + int err = 0; + + VisionTest* const vt = calloc(1, sizeof(*vt)); + assert(vt); + + vt->disable_model = disable_model; + vt->in_width = input_width; + vt->in_height = input_height; + vt->out_width = model_input_width; + vt->out_height = model_input_height; + + initialize_opencl(vt); + + transform_init(&vt->transform, vt->context, vt->device_id); + + + assert((vt->in_width%2) == 0 && (vt->in_height%2) == 0); + vt->in_yuv_size = vt->in_width*vt->in_height*3/2; + vt->in_yuv_cl = clCreateBuffer(vt->context, CL_MEM_READ_WRITE, + vt->in_yuv_size, NULL, &err); + assert(err == 0); + + vt->out_y_cl = clCreateBuffer(vt->context, CL_MEM_READ_WRITE, + vt->out_width*vt->out_width, NULL, &err); + assert(err == 0); + vt->out_u_cl = clCreateBuffer(vt->context, CL_MEM_READ_WRITE, + vt->out_width*vt->out_width/4, NULL, &err); + assert(err == 0); + vt->out_v_cl = clCreateBuffer(vt->context, CL_MEM_READ_WRITE, + vt->out_width*vt->out_width/4, NULL, &err); + assert(err == 0); + + vt->command_queue = clCreateCommandQueue(vt->context, vt->device_id, 0, &err); + assert(err == 0); + + return vt; +} + +void visiontest_destroy(VisionTest* vt) { + transform_destroy(&vt->transform); + + int err = 0; + + err = clReleaseMemObject(vt->in_yuv_cl); + assert(err == 0); + err = clReleaseMemObject(vt->out_y_cl); + assert(err == 0); + 
err = clReleaseMemObject(vt->out_u_cl); + assert(err == 0); + err = clReleaseMemObject(vt->out_v_cl); + assert(err == 0); + + err = clReleaseCommandQueue(vt->command_queue); + assert(err == 0); + + err = clReleaseContext(vt->context); + assert(err == 0); + + free(vt); +} + +void visiontest_transform(VisionTest* vt, const uint8_t* yuv_data, + uint8_t* out_y, uint8_t* out_u, uint8_t* out_v, + const float* transform) { + int err = 0; + + err = clEnqueueWriteBuffer(vt->command_queue, vt->in_yuv_cl, CL_FALSE, + 0, vt->in_yuv_size, yuv_data, 0, NULL, NULL); + assert(err == 0); + + mat3 transform_m = *(const mat3*)transform; + + transform_queue(&vt->transform, vt->command_queue, + vt->in_yuv_cl, vt->in_width, vt->in_height, + vt->out_y_cl, vt->out_u_cl, vt->out_v_cl, + vt->out_width, vt->out_height, + transform_m); + + err = clEnqueueReadBuffer(vt->command_queue, vt->out_y_cl, CL_FALSE, + 0, vt->out_width*vt->out_height, out_y, + 0, NULL, NULL); + assert(err == 0); + err = clEnqueueReadBuffer(vt->command_queue, vt->out_u_cl, CL_FALSE, + 0, vt->out_width*vt->out_height/4, out_u, + 0, NULL, NULL); + assert(err == 0); + err = clEnqueueReadBuffer(vt->command_queue, vt->out_v_cl, CL_FALSE, + 0, vt->out_width*vt->out_height/4, out_v, + 0, NULL, NULL); + assert(err == 0); + + clFinish(vt->command_queue); +} + diff --git a/selfdrive/modeld/visiontest.mk b/selfdrive/modeld/visiontest.mk new file mode 100644 index 00000000000000..f1aa7afdbaa85b --- /dev/null +++ b/selfdrive/modeld/visiontest.mk @@ -0,0 +1,105 @@ +CC:=clang +CXX:=clang++ +OPT_FLAGS:=-O2 -g -ggdb3 + +UNAME_S := $(shell uname -s) +ifeq ($(UNAME_S),Linux) + SHARED_FLAGS=-Wl,--whole-archive $^ -Wl,--no-whole-archive +endif +ifeq ($(UNAME_S),Darwin) + SHARED_FLAGS=-Wl,-force_load $^ +endif + +PHONELIBS := ../../phonelibs +BASEDIR := ../.. 
+ +WARN_FLAGS = -Werror=implicit-function-declaration \ + -Werror=incompatible-pointer-types \ + -Werror=int-conversion \ + -Werror=return-type \ + -Werror=format-extra-args + +CFLAGS = -std=gnu11 -g -fPIC $(OPT_FLAGS) $(WARN_FLAGS) +CXXFLAGS = -std=c++14 -fPIC $(OPT_FLAGS) $(WARN_FLAGS) + +EIGEN_FLAGS = -I$(PHONELIBS)/eigen + +CEREAL_LIBS = $(BASEDIR)/cereal/libmessaging.a + +OPENCV_LIBS = -lopencv_video -lopencv_core -lopencv_imgproc + +ifeq ($(UNAME_S),Darwin) + VT_LDFLAGS += $(PHONELIBS)/capnp-c/mac/lib/libcapnp_c.a \ + $(PHONELIBS)/zmq/mac/lib/libczmq.a \ + $(PHONELIBS)/zmq/mac/lib/libzmq.a \ + -framework OpenCL + + OPENCV_LIBS += -L/usr/local/opt/opencv@2/lib + OPENCV_FLAGS += -I/usr/local/opt/opencv@2/include + +else + VT_LDFLAGS += $(CEREAL_LIBS) \ + -L/system/vendor/lib64 \ + -L$(BASEDIR)/external/zmq/lib/ \ + -l:libczmq.a -l:libzmq.a \ + -lOpenCL +endif + +.PHONY: all visiontest clean test +all: visiontest + +libvisiontest_inputs := visiontest.c \ + transforms/transform.c \ + transforms/loadyuv.c \ + ../common/clutil.c \ + $(BASEDIR)/selfdrive/common/util.c \ + $(CEREAL_OBJS) + +visiontest: libvisiontest.so +all-tests := $(addsuffix .test, $(basename $(wildcard test_*))) + +%.o: %.cc + @echo "[ CXX ] $@" + $(CXX) $(CXXFLAGS) -MMD \ + -I. -I.. -I../.. \ + -Wall \ + -I$(BASEDIR)/ -I$(BASEDIR)/selfdrive -I$(BASEDIR)/selfdrive/common \ + $(EIGEN_FLAGS) \ + $(OPENCV_FLAGS) \ + $(CEREAL_CXXFLAGS) \ + -c -o '$@' '$<' + +%.o: %.c + @echo "[ CXX ] $@" + $(CC) $(CFLAGS) -MMD \ + -I. -I.. -I../.. \ + -Wall \ + -I$(BASEDIR)/ -I$(BASEDIR)/selfdrive -I$(BASEDIR)/selfdrive/common \ + $(CEREAL_CFLAGS) \ + -c -o '$@' '$<' + +libvisiontest.so: $(libvisiontest_inputs) + $(eval $@_TMP := $(shell mktemp)) + $(CC) -std=gnu11 -shared -fPIC -O2 -g \ + -Werror=implicit-function-declaration -Werror=incompatible-pointer-types \ + -Werror=int-conversion -Wno-pointer-to-int-cast \ + -I. 
-DCLU_NO_CACHE \ + $^ -o $($@_TMP) \ + -I$(PHONELIBS)/opencl/include \ + -I$(BASEDIR)/selfdrive/common \ + $(CEREAL_CXXFLAGS) \ + $(CEREAL_CFLAGS) \ + -I$(BASEDIR)/external/zmq/include \ + -I$(BASEDIR)/ -I$(BASEDIR)/selfdrive \ + -lstdc++ \ + $(VT_LDFLAGS) \ + -lm -lpthread + mv $($@_TMP) $@ + +test : $(all-tests) + +test_%.test : test_% + @./'$<' || echo FAIL + +clean: + rm -rf *.o *.so *.a diff --git a/selfdrive/modeld/visiontest.py b/selfdrive/modeld/visiontest.py new file mode 100644 index 00000000000000..f9661bf463163e --- /dev/null +++ b/selfdrive/modeld/visiontest.py @@ -0,0 +1,135 @@ +import os +import subprocess +from cffi import FFI +from common.basedir import BASEDIR + +# Initialize visiontest. Ignore output. +_visiond_dir = os.path.dirname(os.path.abspath(__file__)) +_libvisiontest = "libvisiontest.so" +try: # bacause this crashes somtimes when running pipeline + subprocess.check_output(["make", "-C", _visiond_dir, "-f", + os.path.join(_visiond_dir, "visiontest.mk"), + _libvisiontest]) +except: + pass + +class VisionTest(): + """A version of the vision model that can be run on a desktop. + + WARNING: This class is not thread safe. VisionTest objects cannot be + created or used on multiple threads simultaneously. + """ + + ffi = FFI() + ffi.cdef(""" + typedef unsigned char uint8_t; + + struct VisionTest; + typedef struct VisionTest VisionTest; + + VisionTest* visiontest_create(int temporal_model, int disable_model, + int input_width, int input_height, + int model_input_width, int model_input_height); + void visiontest_destroy(VisionTest* visiontest); + + void visiontest_transform(VisionTest* vt, const uint8_t* yuv_data, + uint8_t* out_y, uint8_t* out_u, uint8_t* out_v, + const float* transform); + """) + + clib = ffi.dlopen(os.path.join(_visiond_dir, _libvisiontest)) + + def __init__(self, input_size, model_input_size, model): + """Create a wrapper around visiond for off-device python code. 
+ + Inputs: + input_size: The size of YUV images passed to transform. + model_input_size: The size of YUV images passed to the model. + model: The name of the model to use. "temporal", "yuv", or None to disable the + model (used to disable OpenCL). + """ + self._input_size = input_size + self._model_input_size = model_input_size + + if model is None: + disable_model = 1 + temporal_model = 0 + elif model == "yuv": + disable_model = 0 + temporal_model = 0 + elif model == "temporal": + disable_model = 0 + temporal_model = 1 + else: + raise ValueError("Bad model name: {}".format(model)) + + prevdir = os.getcwd() + os.chdir(_visiond_dir) # tmp hack to find kernels + os.environ['BASEDIR'] = BASEDIR + self._visiontest_c = self.clib.visiontest_create( + temporal_model, disable_model, self._input_size[0], self._input_size[1], + self._model_input_size[0], self._model_input_size[1]) + os.chdir(prevdir) + + @property + def input_size(self): + return self._input_size + + @property + def model_input_size(self): + return self._model_input_size + + def transform(self, yuv_data, transform): + y_len = self.model_input_size[0] * self.model_input_size[1] + t_y_ptr = bytearray(y_len) + t_u_ptr = bytearray(y_len // 4) + t_v_ptr = bytearray(y_len // 4) + + self.transform_output_buffer(yuv_data, t_y_ptr, t_u_ptr, t_v_ptr, + transform) + + return t_y_ptr, t_u_ptr, t_v_ptr + + def transform_contiguous(self, yuv_data, transform): + y_ol = self.model_input_size[0] * self.model_input_size[1] + uv_ol = y_ol // 4 + result = bytearray(y_ol * 3 // 2) + result_view = memoryview(result) + t_y_ptr = result_view[:y_ol] + t_u_ptr = result_view[y_ol:y_ol + uv_ol] + t_v_ptr = result_view[y_ol + uv_ol:] + + self.transform_output_buffer(yuv_data, t_y_ptr, t_u_ptr, t_v_ptr, + transform) + return result + + + def transform_output_buffer(self, yuv_data, y_out, u_out, v_out, + transform): + assert len(yuv_data) == self.input_size[0] * self.input_size[1] * 3/2 + + cast = self.ffi.cast + from_buffer = 
self.ffi.from_buffer + yuv_ptr = cast("unsigned char*", from_buffer(yuv_data)) + transform_ptr = self.ffi.new("float[]", transform) + + y_out_ptr = cast("unsigned char*", from_buffer(y_out)) + u_out_ptr = cast("unsigned char*", from_buffer(u_out)) + v_out_ptr = cast("unsigned char*", from_buffer(v_out)) + + self.clib.visiontest_transform(self._visiontest_c, yuv_ptr, y_out_ptr, + u_out_ptr, v_out_ptr, transform_ptr) + + def close(self): + self.clib.visiontest_destroy(self._visiontest_c) + self._visiontest_c = None + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + +if __name__ == "__main__": + VisionTest((560, 304), (320, 160), "temporal") diff --git a/selfdrive/tbp_manager.py b/selfdrive/tbp_manager.py deleted file mode 100755 index 687ee5669428f7..00000000000000 --- a/selfdrive/tbp_manager.py +++ /dev/null @@ -1,613 +0,0 @@ -#!/usr/bin/env python3.7 -import os -import time -import sys -import fcntl -import errno -import signal -import shutil -import subprocess -import time -from selfdrive.tinklad.tinkla_interface import TinklaClient -from cereal import tinkla -from selfdrive.car.tesla.readconfig import CarSettings -import datetime - -from common.basedir import BASEDIR, PARAMS -from common.android import ANDROID -sys.path.append(os.path.join(BASEDIR, "pyextra")) -os.environ['BASEDIR'] = BASEDIR - -TOTAL_SCONS_NODES = 1195 -prebuilt = os.path.exists(os.path.join(BASEDIR, 'prebuilt')) - -# Create folders needed for msgq -try: - os.mkdir("/dev/shm") -except FileExistsError: - pass -except PermissionError: - print("WARNING: failed to make /dev/shm") - -if ANDROID: - os.chmod("/dev/shm", 0o777) - -def unblock_stdout(): - # get a non-blocking stdout - child_pid, child_pty = os.forkpty() - if child_pid != 0: # parent - - # child is in its own process group, manually pass kill signals - signal.signal(signal.SIGINT, lambda signum, frame: os.kill(child_pid, signal.SIGINT)) - signal.signal(signal.SIGTERM, lambda signum, 
frame: os.kill(child_pid, signal.SIGTERM)) - - fcntl.fcntl(sys.stdout, fcntl.F_SETFL, - fcntl.fcntl(sys.stdout, fcntl.F_GETFL) | os.O_NONBLOCK) - - while True: - try: - dat = os.read(child_pty, 4096) - except OSError as e: - if e.errno == errno.EIO: - break - continue - - if not dat: - break - - try: - sys.stdout.write(dat.decode('utf8')) - except (OSError, IOError, UnicodeDecodeError): - pass - - # os.wait() returns a tuple with the pid and a 16 bit value - # whose low byte is the signal number and whose high byte is the exit satus - exit_status = os.wait()[1] >> 8 - os._exit(exit_status) - - -if __name__ == "__main__": - unblock_stdout() - from common.spinner import Spinner -else: - from common.spinner import FakeSpinner as Spinner - -import importlib -import traceback -from multiprocessing import Process - -# Run scons -spinner = Spinner() -spinner.update("0") - -if not prebuilt: - for retry in [True, False]: - # run scons - env = os.environ.copy() - env['SCONS_PROGRESS'] = "1" - env['SCONS_CACHE'] = "1" - - nproc = os.cpu_count() - j_flag = "" if nproc is None else "-j%d" % (nproc - 1) - scons = subprocess.Popen(["scons", j_flag], cwd=BASEDIR, env=env, stderr=subprocess.PIPE) - - # Read progress from stderr and update spinner - while scons.poll() is None: - try: - line = scons.stderr.readline() - if line is None: - continue - - line = line.rstrip() - prefix = b'progress: ' - if line.startswith(prefix): - i = int(line[len(prefix):]) - if spinner is not None: - spinner.update("%d" % (50.0 * (i / TOTAL_SCONS_NODES))) - elif len(line): - print(line.decode('utf8')) - except Exception: - pass - - if scons.returncode != 0: - if retry: - print("scons build failed, cleaning in") - for i in range(3,-1,-1): - print("....%d" % i) - time.sleep(1) - subprocess.check_call(["scons", "-c"], cwd=BASEDIR, env=env) - shutil.rmtree("/tmp/scons_cache") - else: - raise RuntimeError("scons build failed") - else: - break - -import cereal -import cereal.messaging as messaging - -from 
common.params import Params -import selfdrive.crash as crash -from selfdrive.swaglog import cloudlog -from selfdrive.registration import register -from selfdrive.version import version, dirty -from selfdrive.loggerd.config import ROOT -from selfdrive.launcher import launcher -from common import android -from common.apk import update_apks, pm_apply_packages, start_offroad -from common.manager_helpers import print_cpu_usage - -ThermalStatus = cereal.log.ThermalData.ThermalStatus - -# comment out anything you don't want to run -managed_processes = { - "tinklad": "selfdrive.tinklad.tinklad", - "thermald": "selfdrive.thermald.thermald", - "uploader": "selfdrive.loggerd.uploader", - "deleter": "selfdrive.loggerd.deleter", - "controlsd": "selfdrive.controls.controlsd", - "plannerd": "selfdrive.controls.plannerd", - "radard": "selfdrive.controls.radard", - "dmonitoringd": "selfdrive.controls.dmonitoringd", - "ubloxd": ("selfdrive/locationd", ["./ubloxd"]), - "loggerd": ("selfdrive/loggerd", ["./loggerd"]), - "logmessaged": "selfdrive.logmessaged", - "locationd": "selfdrive.locationd.locationd", - "tombstoned": "selfdrive.tombstoned", - "logcatd": ("selfdrive/logcatd", ["./logcatd"]), - "proclogd": ("selfdrive/proclogd", ["./proclogd"]), - "boardd": ("selfdrive/boardd", ["./boardd"]), # not used directly - "pandad": "selfdrive.pandad", - "ui": ("selfdrive/ui", ["./ui"]), - "calibrationd": "selfdrive.locationd.calibrationd", - "paramsd": ("selfdrive/locationd", ["./paramsd"]), - "camerad": ("selfdrive/camerad", ["./camerad"]), - "sensord": ("selfdrive/sensord", ["./sensord"]), - "clocksd": ("selfdrive/clocksd", ["./clocksd"]), - "gpsd": ("selfdrive/sensord", ["./gpsd"]), - "updated": "selfdrive.updated", - "dmonitoringmodeld": ("selfdrive/modeld", ["./dmonitoringmodeld"]), - "modeld": ("selfdrive/modeld", ["./modeld"]), -} - -daemon_processes = { - "manage_athenad": ("selfdrive.athena.manage_athenad", "AthenadPid"), -} - -running = {} -def get_running(): - return running - 
-# due to qualcomm kernel bugs SIGKILLing camerad sometimes causes page table corruption -unkillable_processes = ['camerad'] - -# processes to end with SIGINT instead of SIGTERM -interrupt_processes = [] - -# processes to end with SIGKILL instead of SIGTERM -kill_processes = ['sensord', 'paramsd'] - -# processes to end if thermal conditions exceed Green parameters -green_temp_processes = ['uploader'] - -persistent_processes = [ - 'tinklad', - 'thermald', - 'logmessaged', - 'ui', - 'uploader', -] -if ANDROID: - persistent_processes += [ - 'logcatd', - 'tombstoned', - 'updated', - ] - -car_started_processes = [ - 'controlsd', - 'plannerd', - 'loggerd', - 'radard', - 'dmonitoringd', - 'calibrationd', - 'paramsd', - 'camerad', - 'modeld', - 'proclogd', - 'ubloxd', - 'locationd', -] -if ANDROID: - car_started_processes += [ - 'sensord', - 'clocksd', - 'gpsd', - 'dmonitoringmodeld', - 'deleter', - ] - -def register_managed_process(name, desc, car_started=False): - global managed_processes, car_started_processes, persistent_processes - print("registering %s" % name) - managed_processes[name] = desc - if car_started: - car_started_processes.append(name) - else: - persistent_processes.append(name) - -# ****************** process management functions ****************** -def nativelauncher(pargs, cwd): - # exec the process - os.chdir(cwd) - - # because when extracted from pex zips permissions get lost -_- - os.chmod(pargs[0], 0o700) - - os.execvp(pargs[0], pargs) - -def start_managed_process(name): - if name in running or name not in managed_processes: - return - proc = managed_processes[name] - if isinstance(proc, str): - cloudlog.info("starting python %s" % proc) - running[name] = Process(name=name, target=launcher, args=(proc,)) - else: - pdir, pargs = proc - cwd = os.path.join(BASEDIR, pdir) - cloudlog.info("starting process %s" % name) - running[name] = Process(name=name, target=nativelauncher, args=(pargs, cwd)) - running[name].start() - -def start_daemon_process(name): 
- params = Params() - proc, pid_param = daemon_processes[name] - pid = params.get(pid_param, encoding='utf-8') - - if pid is not None: - try: - os.kill(int(pid), 0) - with open(f'/proc/{pid}/cmdline') as f: - if proc in f.read(): - # daemon is running - return - except (OSError, FileNotFoundError): - # process is dead - pass - - cloudlog.info("starting daemon %s" % name) - proc = subprocess.Popen(['python', '-m', proc], - stdin=open('/dev/null', 'r'), - stdout=open('/dev/null', 'w'), - stderr=open('/dev/null', 'w'), - preexec_fn=os.setpgrp) - - params.put(pid_param, str(proc.pid)) - -def prepare_managed_process(p): - proc = managed_processes[p] - if isinstance(proc, str): - # import this python - cloudlog.info("preimporting %s" % proc) - importlib.import_module(proc) - elif os.path.isfile(os.path.join(BASEDIR, proc[0], "Makefile")): - # build this process - cloudlog.info("building %s" % (proc,)) - try: - subprocess.check_call(["make", "-j4"], cwd=os.path.join(BASEDIR, proc[0])) - except subprocess.CalledProcessError: - # make clean if the build failed - cloudlog.warning("building %s failed, make clean" % (proc, )) - subprocess.check_call(["make", "clean"], cwd=os.path.join(BASEDIR, proc[0])) - subprocess.check_call(["make", "-j4"], cwd=os.path.join(BASEDIR, proc[0])) - - -def join_process(process, timeout): - # Process().join(timeout) will hang due to a python 3 bug: https://bugs.python.org/issue28382 - # We have to poll the exitcode instead - t = time.time() - while time.time() - t < timeout and process.exitcode is None: - time.sleep(0.001) - - -def kill_managed_process(name): - if name not in running or name not in managed_processes: - return - cloudlog.info("killing %s" % name) - - if running[name].exitcode is None: - if name in interrupt_processes: - os.kill(running[name].pid, signal.SIGINT) - elif name in kill_processes: - os.kill(running[name].pid, signal.SIGKILL) - else: - running[name].terminate() - - join_process(running[name], 5) - - if 
running[name].exitcode is None: - if name in unkillable_processes: - cloudlog.critical("unkillable process %s failed to exit! rebooting in 15 if it doesn't die" % name) - join_process(running[name], 15) - if running[name].exitcode is None: - cloudlog.critical("FORCE REBOOTING PHONE!") - os.system("date >> /sdcard/unkillable_reboot") - os.system("reboot") - raise RuntimeError - else: - cloudlog.info("killing %s with SIGKILL" % name) - os.kill(running[name].pid, signal.SIGKILL) - running[name].join() - - cloudlog.info("%s is dead with %d" % (name, running[name].exitcode)) - del running[name] - - -def cleanup_all_processes(signal, frame): - cloudlog.info("caught ctrl-c %s %s" % (signal, frame)) - - if ANDROID: - pm_apply_packages('disable') - - for name in list(running.keys()): - kill_managed_process(name) - cloudlog.info("everything is dead") - -# ****************** run loop ****************** - -def manager_init(should_register=True): - if should_register: - reg_res = register() - if reg_res: - dongle_id, dongle_secret = reg_res - else: - raise Exception("server registration failed") - else: - dongle_id = "c"*16 - - # set dongle id - cloudlog.info("dongle id is " + dongle_id) - os.environ['DONGLE_ID'] = dongle_id - - cloudlog.info("dirty is %d" % dirty) - if not dirty: - os.environ['CLEAN'] = '1' - - cloudlog.bind_global(dongle_id=dongle_id, version=version, dirty=dirty, is_eon=True) - crash.bind_user(id=dongle_id) - crash.bind_extra(version=version, dirty=dirty, is_eon=True) - - os.umask(0) - try: - os.mkdir(ROOT, 0o777) - except OSError: - pass - - # ensure shared libraries are readable by apks - if ANDROID: - os.chmod(BASEDIR, 0o755) - os.chmod(os.path.join(BASEDIR, "cereal"), 0o755) - os.chmod(os.path.join(BASEDIR, "cereal", "libmessaging_shared.so"), 0o755) - -def system(cmd): - try: - cloudlog.info("running %s" % cmd) - subprocess.check_output(cmd, stderr=subprocess.STDOUT, shell=True) - except subprocess.CalledProcessError as e: - cloudlog.event("running 
failed", - cmd=e.cmd, - output=e.output[-1024:], - returncode=e.returncode) - -def sendUserInfoToTinkla(params, tinklaClient): - carSettings = CarSettings() - gitRemote = params.get("GitRemote") - gitBranch = params.get("GitBranch") - gitHash = params.get("GitCommit") - dongleId = params.get("DongleId") - userHandle = carSettings.userHandle - info = tinkla.Interface.UserInfo.new_message( - openPilotId=dongleId, - userHandle=userHandle, - gitRemote=gitRemote, - gitBranch=gitBranch, - gitHash=gitHash - ) - tinklaClient.setUserInfo(info) - -def manager_thread(): - # now loop - thermal_sock = messaging.sub_sock('thermal') - - if os.getenv("GET_CPU_USAGE"): - proc_sock = messaging.sub_sock('procLog', conflate=True) - - cloudlog.info("manager start") - cloudlog.info({"environ": os.environ}) - - # save boot log - subprocess.call(["./loggerd", "--bootlog"], cwd=os.path.join(BASEDIR, "selfdrive/loggerd")) - - params = Params() - - # start daemon processes - for p in daemon_processes: - start_daemon_process(p) - - # start persistent processes - for p in persistent_processes: - start_managed_process(p) - - # start offroad - if ANDROID: - pm_apply_packages('enable') - start_offroad() - - if os.getenv("NOBOARD") is None: - start_managed_process("pandad") - - if os.getenv("BLOCK") is not None: - for k in os.getenv("BLOCK").split(","): - del managed_processes[k] - - logger_dead = False - - # Tinkla interface - last_tinklad_send_attempt_time = 0 - tinklaClient = TinklaClient() - sendUserInfoToTinkla(params=params, tinklaClient=tinklaClient) - start_t = time.time() - first_proc = None - - while 1: - msg = messaging.recv_sock(thermal_sock, wait=True) - - # heavyweight batch processes are gated on favorable thermal conditions - if msg.thermal.thermalStatus >= ThermalStatus.yellow: - for p in green_temp_processes: - if p in persistent_processes: - kill_managed_process(p) - else: - for p in green_temp_processes: - if p in persistent_processes: - start_managed_process(p) - - # Attempt 
to send pending messages if there's any that queued while offline - # Seems this loop runs every second or so, throttle to once every 30s - now = time.time() - if now - last_tinklad_send_attempt_time >= 30: - tinklaClient.attemptToSendPendingMessages() - last_tinklad_send_attempt_time = now - - if msg.thermal.freeSpace < 0.05: - logger_dead = True - - if msg.thermal.started: - for p in car_started_processes: - if p == "loggerd" and logger_dead: - kill_managed_process(p) - else: - start_managed_process(p) - else: - logger_dead = False - for p in reversed(car_started_processes): - kill_managed_process(p) - - # check the status of all processes, did any of them die? - running_list = ["%s%s\u001b[0m" % ("\u001b[32m" if running[p].is_alive() else "\u001b[31m", p) for p in running] - #cloudlog.debug(' '.join(running_list)) - - # Exit main loop when uninstall is needed - if params.get("DoUninstall", encoding='utf8') == "1": - break - - if os.getenv("GET_CPU_USAGE"): - dt = time.time() - start_t - - # Get first sample - if dt > 30 and first_proc is None: - first_proc = messaging.recv_sock(proc_sock) - - # Get last sample and exit - if dt > 90: - first_proc = first_proc - last_proc = messaging.recv_sock(proc_sock, wait=True) - - cleanup_all_processes(None, None) - sys.exit(print_cpu_usage(first_proc, last_proc)) - -def manager_prepare(spinner=None): - - carSettings = CarSettings() - # build all processes - os.chdir(os.path.dirname(os.path.abspath(__file__))) - - # Spinner has to start from 70 here - total = 100.0 if prebuilt else 50.0 - - for i, p in enumerate(managed_processes): - if spinner is not None: - spinText = carSettings.spinnerText - spinner.update(spinText % ((100.0 - total) + total * (i + 1) / len(managed_processes),)) - prepare_managed_process(p) - -def uninstall(): - cloudlog.warning("uninstalling") - with open('/cache/recovery/command', 'w') as f: - f.write('--wipe_data\n') - # IPowerManager.reboot(confirm=false, reason="recovery", wait=true) - 
android.reboot(reason="recovery") - -def main(): - os.environ['PARAMS_PATH'] = PARAMS - - # the flippening! - os.system('LD_LIBRARY_PATH="" content insert --uri content://settings/system --bind name:s:user_rotation --bind value:i:1') - - # disable bluetooth - os.system('service call bluetooth_manager 8') - - params = Params() - params.manager_start() - - default_params = [ - ("CommunityFeaturesToggle", "0"), - ("CompletedTrainingVersion", "0"), - ("IsMetric", "0"), - ("RecordFront", "0"), - ("HasAcceptedTerms", "0"), - ("HasCompletedSetup", "0"), - ("IsUploadRawEnabled", "1"), - ("IsLdwEnabled", "1"), - ("IsGeofenceEnabled", "-1"), - ("SpeedLimitOffset", "0"), - ("LongitudinalControl", "0"), - ("LimitSetSpeed", "0"), - ("LimitSetSpeedNeural", "0"), - ("LastUpdateTime", datetime.datetime.now().isoformat().encode('utf8')), - ("OpenpilotEnabledToggle", "1"), - ("LaneChangeEnabled", "1"), - ] - - # set unset params - for k, v in default_params: - if params.get(k) is None: - params.put(k, v) - - # is this chffrplus? 
- if os.getenv("PASSIVE") is not None: - params.put("Passive", str(int(os.getenv("PASSIVE")))) - - if params.get("Passive") is None: - raise Exception("Passive must be set to continue") - - if ANDROID: - update_apks() - manager_init() - manager_prepare(spinner) - spinner.close() - - if os.getenv("PREPAREONLY") is not None: - return - - # SystemExit on sigterm - signal.signal(signal.SIGTERM, lambda signum, frame: sys.exit(1)) - - try: - manager_thread() - except SystemExit: - raise - except Exception: - traceback.print_exc() - crash.capture_exception() - print ("EXIT ON EXCEPTION") - finally: - cleanup_all_processes(None, None) - - if params.get("DoUninstall", encoding='utf8') == "1": - uninstall() - -if __name__ == "__main__": - main() - # manual exit because we are forked - sys.exit(0) diff --git a/selfdrive/thermald/thermald.py b/selfdrive/thermald/thermald.py index 4efd18bb84f9ef..6da27e293d53b3 100644 --- a/selfdrive/thermald/thermald.py +++ b/selfdrive/thermald/thermald.py @@ -19,6 +19,11 @@ from selfdrive.pandad import get_expected_signature from selfdrive.car.tesla.readconfig import read_config_file,CarSettings from selfdrive.thermald.power_monitoring import PowerMonitoring, get_battery_capacity, get_battery_status, get_battery_current, get_battery_voltage, get_usb_present +WEBCAM = os.getenv("WEBCAM") is not None +if WEBCAM: + from backports.datetime_fromisoformat import MonkeyPatch + MonkeyPatch.patch_fromisoformat() + FW_SIGNATURE = get_expected_signature() diff --git a/selfdrive/ui/ui b/selfdrive/ui/ui index e1c1748e3040c4..7ea68031f21013 100755 --- a/selfdrive/ui/ui +++ b/selfdrive/ui/ui @@ -1,3 +1,8 @@ #!/bin/sh export LD_LIBRARY_PATH="/system/lib64:$LD_LIBRARY_PATH" -exec ./_ui +TBP=/data/tinkla_buddy_pro +if test -f "$TBP"; then + exec /usr/bin/startx ./_ui +else + exec ./_ui +fi diff --git a/selfdrive/visiond/models/driving.cc b/selfdrive/visiond/models/driving.cc deleted file mode 100644 index d19dc147fc689d..00000000000000 --- 
a/selfdrive/visiond/models/driving.cc +++ /dev/null @@ -1,287 +0,0 @@ -#include -#include -#include -#include - -#ifdef QCOM -#include -#else -#include -#endif - -#include "common/timing.h" -#include "driving.h" - -#define MODEL_WIDTH 512 -#define MODEL_HEIGHT 256 -#define MODEL_NAME "driving_model_dlc" - -#define LEAD_MDN_N 5 // probs for 5 groups -#define MDN_VALS 4 // output xyva for each lead group -#define SELECTION 3 //output 3 group (lead now, in 2s and 6s) -#define MDN_GROUP_SIZE 11 -#define SPEED_BUCKETS 100 -#define OUTPUT_SIZE ((MODEL_PATH_DISTANCE*2) + (2*(MODEL_PATH_DISTANCE*2 + 1)) + MDN_GROUP_SIZE*LEAD_MDN_N + SELECTION) -#ifdef TEMPORAL - #define TEMPORAL_SIZE 512 -#else - #define TEMPORAL_SIZE 0 -#endif - -// #define DUMP_YUV - -Eigen::Matrix vander; - -void model_init(ModelState* s, cl_device_id device_id, cl_context context, int temporal) { - model_input_init(&s->in, MODEL_WIDTH, MODEL_HEIGHT, device_id, context); - const int output_size = OUTPUT_SIZE + TEMPORAL_SIZE; - s->output = (float*)malloc(output_size * sizeof(float)); - memset(s->output, 0, output_size * sizeof(float)); - s->m = new DefaultRunModel("../../models/driving_model.dlc", s->output, output_size, USE_GPU_RUNTIME); -#ifdef TEMPORAL - assert(temporal); - s->m->addRecurrent(&s->output[OUTPUT_SIZE], TEMPORAL_SIZE); -#endif -#ifdef DESIRE - s->desire = (float*)malloc(DESIRE_SIZE * sizeof(float)); - for (int i = 0; i < DESIRE_SIZE; i++) s->desire[i] = 0.0; - s->m->addDesire(s->desire, DESIRE_SIZE); -#endif - - // Build Vandermonde matrix - for(int i = 0; i < MODEL_PATH_DISTANCE; i++) { - for(int j = 0; j < POLYFIT_DEGREE - 1; j++) { - vander(i, j) = pow(i, POLYFIT_DEGREE-j-1); - } - } -} - -ModelData model_eval_frame(ModelState* s, cl_command_queue q, - cl_mem yuv_cl, int width, int height, - mat3 transform, void* sock, float *desire_in) { - struct { - float *path; - float *left_lane; - float *right_lane; - float *lead; - float *speed; - } net_outputs = {NULL}; - -#ifdef DESIRE - if 
(desire_in != NULL) { - for (int i = 0; i < DESIRE_SIZE; i++) s->desire[i] = desire_in[i]; - } -#endif - - //for (int i = 0; i < OUTPUT_SIZE + TEMPORAL_SIZE; i++) { printf("%f ", s->output[i]); } printf("\n"); - - float *net_input_buf = model_input_prepare(&s->in, q, yuv_cl, width, height, transform); - - #ifdef DUMP_YUV - FILE *dump_yuv_file = fopen("/sdcard/dump.yuv", "wb"); - fwrite(net_input_buf, MODEL_HEIGHT*MODEL_WIDTH*3/2, sizeof(float), dump_yuv_file); - fclose(dump_yuv_file); - assert(1==2); - #endif - - //printf("readinggggg \n"); - //FILE *f = fopen("goof_frame", "r"); - //fread(net_input_buf, sizeof(float), MODEL_HEIGHT*MODEL_WIDTH*3/2, f); - //fclose(f); - //sleep(1); - //printf("%i \n",OUTPUT_SIZE); - //printf("%i \n",MDN_GROUP_SIZE); - s->m->execute(net_input_buf); - - // net outputs - net_outputs.path = &s->output[0]; - net_outputs.left_lane = &s->output[MODEL_PATH_DISTANCE*2]; - net_outputs.right_lane = &s->output[MODEL_PATH_DISTANCE*2 + MODEL_PATH_DISTANCE*2 + 1]; - net_outputs.lead = &s->output[MODEL_PATH_DISTANCE*2 + (MODEL_PATH_DISTANCE*2 + 1)*2]; - //net_outputs.speed = &s->output[OUTPUT_SIZE - SPEED_BUCKETS]; - - ModelData model = {0}; - - for (int i=0; i net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + 8]) { - mdn_max_idx = i; - } - } - model.lead.prob = sigmoid(net_outputs.lead[LEAD_MDN_N*MDN_GROUP_SIZE]); - model.lead.dist = net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE] * max_dist; - model.lead.std = softplus(net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + MDN_VALS]) * max_dist; - model.lead.rel_y = net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + 1]; - model.lead.rel_y_std = softplus(net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + MDN_VALS + 1]); - model.lead.rel_v = net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + 2] * max_rel_vel; - model.lead.rel_v_std = softplus(net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + MDN_VALS + 2]) * max_rel_vel; - model.lead.rel_a = net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + 3]; - model.lead.rel_a_std = 
softplus(net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + MDN_VALS + 3]); - - // Find the distribution that corresponds to the lead in 2s - mdn_max_idx = 0; - for (int i=1; i net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + 9]) { - mdn_max_idx = i; - } - } - model.lead_future.prob = sigmoid(net_outputs.lead[LEAD_MDN_N*MDN_GROUP_SIZE + 1]); - model.lead_future.dist = net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE] * max_dist; - model.lead_future.std = softplus(net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + MDN_VALS]) * max_dist; - model.lead_future.rel_y = net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + 1]; - model.lead_future.rel_y_std = softplus(net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + MDN_VALS + 1]); - model.lead_future.rel_v = net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + 2] * max_rel_vel; - model.lead_future.rel_v_std = softplus(net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + MDN_VALS + 2]) * max_rel_vel; - model.lead_future.rel_a = net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + 3]; - model.lead_future.rel_a_std = softplus(net_outputs.lead[mdn_max_idx*MDN_GROUP_SIZE + MDN_VALS + 3]); - - - // get speed percentiles numbers represent 5th, 15th, ... 
95th percentile - for (int i=0; i < SPEED_PERCENTILES; i++) { - model.speed[i] = ((float) SPEED_BUCKETS)/2.0; - } - //float sum = 0; - //for (int idx = 0; idx < SPEED_BUCKETS; idx++) { - // sum += net_outputs.speed[idx]; - // int idx_percentile = (sum + .05) * SPEED_PERCENTILES; - // if (idx_percentile < SPEED_PERCENTILES ){ - // model.speed[idx_percentile] = ((float)idx)/2.0; - // } - //} - // make sure no percentiles are skipped - //for (int i=SPEED_PERCENTILES-1; i > 0; i--){ - // if (model.speed[i-1] > model.speed[i]){ - // model.speed[i-1] = model.speed[i]; - // } - //} - return model; -} - -void model_free(ModelState* s) { - free(s->output); - model_input_free(&s->in); - delete s->m; -} - -void poly_fit(float *in_pts, float *in_stds, float *out, int dx0, int dx1) { - // References to inputs - Eigen::Map > pts(in_pts, MODEL_PATH_DISTANCE); - Eigen::Map > std(in_stds, MODEL_PATH_DISTANCE); - Eigen::Map > p(out, POLYFIT_DEGREE - 1); - - float y0 = pts[dx0]; - - pts = pts.array() - y0; - - // Build Least Squares equations - Eigen::Matrix lhs = vander.array().colwise() / std.array(); - Eigen::Matrix rhs = pts.array() / std.array(); - - // Improve numerical stability - Eigen::Matrix scale = 1. 
/ (lhs.array()*lhs.array()).sqrt().colwise().sum(); - lhs = lhs * scale.asDiagonal(); - - // Solve inplace - Eigen::ColPivHouseholderQR > qr(lhs); - p = qr.solve(rhs); - - // Apply scale to output - p = p.transpose() * scale.asDiagonal(); - out[3] = y0; - - //if dx1 is not zero then change slope slightly to force through the second point as well - if (dx1 > 0) { - out[2] = (pts[dx1] - out[0] * pow(dx1,3) - out[1] * pow(dx1,2) - out[3] ) / dx1; - } -} - -void fill_path(cereal::ModelData::PathData::Builder path, const PathData path_data) { - if (std::getenv("DEBUG")){ - kj::ArrayPtr stds(&path_data.stds[0], ARRAYSIZE(path_data.stds)); - path.setStds(stds); - - kj::ArrayPtr points(&path_data.points[0], ARRAYSIZE(path_data.points)); - path.setPoints(points); - } - - kj::ArrayPtr poly(&path_data.poly[0], ARRAYSIZE(path_data.poly)); - path.setPoly(poly); - path.setProb(path_data.prob); - path.setStd(path_data.std); -} - -void fill_lead(cereal::ModelData::LeadData::Builder lead, const LeadData lead_data) { - lead.setDist(lead_data.dist); - lead.setProb(lead_data.prob); - lead.setStd(lead_data.std); - lead.setRelY(lead_data.rel_y); - lead.setRelYStd(lead_data.rel_y_std); - lead.setRelVel(lead_data.rel_v); - lead.setRelVelStd(lead_data.rel_v_std); - lead.setRelA(lead_data.rel_a); - lead.setRelAStd(lead_data.rel_a_std); -} - -void model_publish(PubSocket *sock, uint32_t frame_id, - const ModelData data, uint64_t timestamp_eof) { - // make msg - capnp::MallocMessageBuilder msg; - cereal::Event::Builder event = msg.initRoot(); - event.setLogMonoTime(nanos_since_boot()); - - auto framed = event.initModel(); - framed.setFrameId(frame_id); - framed.setTimestampEof(timestamp_eof); - - kj::ArrayPtr speed(&data.speed[0], ARRAYSIZE(data.speed)); - framed.setSpeed(speed); - - - auto lpath = framed.initPath(); - fill_path(lpath, data.path); - auto left_lane = framed.initLeftLane(); - fill_path(left_lane, data.left_lane); - auto right_lane = framed.initRightLane(); - 
fill_path(right_lane, data.right_lane); - - auto lead = framed.initLead(); - fill_lead(lead, data.lead); - auto lead_future = framed.initLeadFuture(); - fill_lead(lead_future, data.lead_future); - - - // send message - auto words = capnp::messageToFlatArray(msg); - auto bytes = words.asBytes(); - sock->send((char*)bytes.begin(), bytes.size()); - } - - diff --git a/selfdrive/visiond/models/driving.h b/selfdrive/visiond/models/driving.h deleted file mode 100644 index 6036806fdee91a..00000000000000 --- a/selfdrive/visiond/models/driving.h +++ /dev/null @@ -1,44 +0,0 @@ -#ifndef MODEL_H -#define MODEL_H - -// gate this here -#define TEMPORAL -#define DESIRE - -#ifdef DESIRE - #define DESIRE_SIZE 8 -#endif - -#include "common/mat.h" -#include "common/modeldata.h" -#include "common/util.h" - -#include "commonmodel.h" -#include "runners/run.h" - -#include "cereal/gen/cpp/log.capnp.h" -#include -#include -#include "messaging.hpp" - - -typedef struct ModelState { - ModelInput in; - float *output; - RunModel *m; -#ifdef DESIRE - float *desire; -#endif -} ModelState; - -void model_init(ModelState* s, cl_device_id device_id, - cl_context context, int temporal); -ModelData model_eval_frame(ModelState* s, cl_command_queue q, - cl_mem yuv_cl, int width, int height, - mat3 transform, void* sock, float *desire_in); -void model_free(ModelState* s); -void poly_fit(float *in_pts, float *in_stds, float *out, int dx0, int dx1); - -void model_publish(PubSocket* sock, uint32_t frame_id, - const ModelData data, uint64_t timestamp_eof); -#endif diff --git a/start_tbp.sh b/start_tbp.sh index c30c981f2fbb37..53964c39e404f8 100755 --- a/start_tbp.sh +++ b/start_tbp.sh @@ -1,3 +1,4 @@ #!/bin/bash +export PYTHONPATH=/data/openpilot cd /data/openpilot/selfdrive -PASSIVE=0 NOSENSOR=1 ./manager.py +PASSIVE=0 NOSENSOR=1 WEBCAM=1 ./manager.py diff --git a/tbp_strt.sh b/tbp_strt.sh deleted file mode 100755 index e4ce9c86be76c4..00000000000000 --- a/tbp_strt.sh +++ /dev/null @@ -1,112 +0,0 @@ 
-#!/bin/bash - -export OMP_NUM_THREADS=1 -export MKL_NUM_THREADS=1 -export NUMEXPR_NUM_THREADS=1 -export OPENBLAS_NUM_THREADS=1 -export VECLIB_MAXIMUM_THREADS=1 - -if [ -z "$BASEDIR" ]; then - BASEDIR="/data/openpilot" -fi - -if [ -z "$PASSIVE" ]; then - export PASSIVE="1" -fi - -. /data/openpilot/selfdrive/car/tesla/readconfig.sh -STAGING_ROOT="/data/safe_staging" - - -function launch { - # Wifi scan - wpa_cli IFNAME=wlan0 SCAN - - #BB here was to prevent the autoupdate; need to find another way - # # apply update - # if [ $do_auto_update == "True" ]; then - # if [ "$(git rev-parse HEAD)" != "$(git rev-parse @{u})" ]; then - # git reset --hard @{u} && - # git clean -xdf && - - # # Touch all files on release2 after checkout to prevent rebuild - # BRANCH=$(git rev-parse --abbrev-ref HEAD) - # if [[ "$BRANCH" == "release2" ]]; then - # touch ** - # fi - - # Check to see if there's a valid overlay-based update available. Conditions - # are as follows: - # - # 1. The BASEDIR init file has to exist, with a newer modtime than anything in - # the BASEDIR Git repo. This checks for local development work or the user - # switching branches/forks, which should not be overwritten. - # 2. The FINALIZED consistent file has to exist, indicating there's an update - # that completed successfully and synced to disk. - - - - if [ $do_auto_update == "True" ] && [ -f "${BASEDIR}/.overlay_init" ]; then - find ${BASEDIR}/.git -newer ${BASEDIR}/.overlay_init | grep -q '.' 2> /dev/null - if [ $? -eq 0 ]; then - echo "${BASEDIR} has been modified, skipping overlay update installation" - else - if [ -f "${STAGING_ROOT}/finalized/.overlay_consistent" ]; then - if [ ! 
-d /data/safe_staging/old_openpilot ]; then - echo "Valid overlay update found, installing" - LAUNCHER_LOCATION="${BASH_SOURCE[0]}" - - mv $BASEDIR /data/safe_staging/old_openpilot - mv "${STAGING_ROOT}/finalized" $BASEDIR - - # The mv changed our working directory to /data/safe_staging/old_openpilot - cd "${BASEDIR}" - - echo "Restarting launch script ${LAUNCHER_LOCATION}" - exec "${LAUNCHER_LOCATION}" - else - echo "openpilot backup found, not updating" - # TODO: restore backup? This means the updater didn't start after swapping - fi - fi - fi - fi - - # no cpu rationing for now - echo 0-3 > /dev/cpuset/background/cpus - echo 0-3 > /dev/cpuset/system-background/cpus - echo 0-3 > /dev/cpuset/foreground/boost/cpus - echo 0-3 > /dev/cpuset/foreground/cpus - echo 0-3 > /dev/cpuset/android/cpus - - DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )" - - # Remove old NEOS update file - # TODO: move this code to the updater - if [ -d /data/neoupdate ]; then - rm -rf /data/neoupdate - fi - - # Check for NEOS update - if [ $(< /VERSION) != "14" ]; then - if [ -f "$DIR/scripts/continue.sh" ]; then - cp "$DIR/scripts/continue.sh" "/data/data/com.termux/files/continue.sh" - fi - - "$DIR/installer/updater/updater" "file://$DIR/installer/updater/update.json" - fi - - - # handle pythonpath - ln -sfn $(pwd) /data/pythonpath - export PYTHONPATH="$PWD" - - # start manager - cd selfdrive - ./tbp_manager.py - - # if broken, keep on screen error - while true; do sleep 1; done -} - -launch diff --git a/tools/LICENSE b/tools/LICENSE new file mode 100644 index 00000000000000..b8fd9e299c97ad --- /dev/null +++ b/tools/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2018 comma.ai + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, 
sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/tools/README.md b/tools/README.md new file mode 100644 index 00000000000000..81ccb178c3f6da --- /dev/null +++ b/tools/README.md @@ -0,0 +1,312 @@ +openpilot-tools +============ + +Repo which contains tools to facilitate development and debugging of [openpilot](openpilot.comma.ai). + +![Imgur](https://i.imgur.com/IdfBgwK.jpg) + + +Table of Contents +============ + + + * [Requirements](#requirements) + * [Setup](#setup) + * [Tool examples](#tool-examples) + * [Replay driving data](#replay-driving-data) + * [Debug car controls](#debug-car-controls) + * [Stream replayed CAN messages to EON](#stream-replayed-can-messages-to-eon) + * [Stream EON video data to a PC](#stream-eon-video-data-to-a-pc) + * [Welcomed contributions](#welcomed-contributions) + + + +Requirements +============ + +openpilot-tools and the following setup steps are developed and tested on Ubuntu 16.04, MacOS 10.14.2 and Python 3.7.3. + +Setup +============ +TODO: These instructions maybe outdated, follow ubuntu_setup.sh setup instructions + +1. 
Install native dependencies (Mac and Ubuntu sections listed below) + + **Ubuntu** + + - core tools + ```bash + sudo apt install git curl python-pip + sudo pip install --upgrade pip>=18.0 pipenv + ``` + + - ffmpeg (tested with 3.3.2) + ```bash + sudo apt install ffmpeg libavformat-dev libavcodec-dev libavdevice-dev libavutil-dev libswscale-dev libavresample-dev libavfilter-dev + ``` + + - build tools + ```bash + sudo apt install autoconf automake clang clang-3.8 libtool pkg-config build-essential + ``` + + - libarchive-dev (tested with 3.1.2-11ubuntu0.16.04.4) + ```bash + sudo apt install libarchive-dev + ``` + + - qt python binding (tested with python-qt4, 4.11.4+dfsg-1build4) + ```bash + sudo apt install python-qt4 + ``` + + - zmq 4.2.3 (required for replay) + ```bash + curl -LO https://github.com/zeromq/libzmq/releases/download/v4.2.3/zeromq-4.2.3.tar.gz + tar xfz zeromq-4.2.3.tar.gz + cd zeromq-4.2.3 + ./autogen.sh + ./configure CPPFLAGS=-DPIC CFLAGS=-fPIC CXXFLAGS=-fPIC LDFLAGS=-fPIC --disable-shared --enable-static + make + sudo make install + ``` + + **Mac** + + - brew + ``` bash + /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" + ``` + + - core tools + ``` bash + brew install git + sudo pip install --upgrade pip pipenv + xcode-select --install + ``` + + - ffmpeg (tested with 3.4.1) + ```bash + brew install ffmpeg + ``` + + - build tools + ```bash + brew install autoconf automake libtool llvm pkg-config + ``` + + - libarchive-dev (tested with 3.3.3) + ```bash + brew install libarchive + ``` + + - qt for Mac + ```bash + brew install qt + ``` + + - zmq 4.3.1 (required for replay) + ```bash + brew install zeromq + ``` + +2. 
Install Cap'n Proto + + ```bash + curl -O https://capnproto.org/capnproto-c++-0.6.1.tar.gz + tar xvf capnproto-c++-0.6.1.tar.gz + cd capnproto-c++-0.6.1 + ./configure --prefix=/usr/local CPPFLAGS=-DPIC CFLAGS=-fPIC CXXFLAGS=-fPIC LDFLAGS=-fPIC --disable-shared --enable-static + make -j4 + sudo make install + + cd .. + git clone https://github.com/commaai/c-capnproto.git + cd c-capnproto + git submodule update --init --recursive + autoreconf -f -i -s + CFLAGS="-fPIC" ./configure --prefix=/usr/local + make -j4 + sudo make install + ``` + + + + +2. Clone openpilot if you haven't already + + ```bash + git clone https://github.com/commaai/openpilot.git + cd openpilot + pipenv install # Install dependencies in a virtualenv + pipenv shell # Activate the virtualenv + ``` + + **For Mac users** + + Recompile longitudinal_mpc for mac + + Navigate to: + ``` bash + cd selfdrive/controls/lib/longitudinal_mpc + make clean + make + ``` + +3. Clone tools within openpilot, and install dependencies + + ```bash + cd tools + pip install -r requirements.txt # Install openpilot-tools dependencies in virtualenv + ``` + +4. Add openpilot to your `PYTHONPATH`. + + For bash users: + ```bash + echo 'export PYTHONPATH="$PYTHONPATH:"' >> ~/.bashrc + source ~/.bashrc + ``` + +5. Add some folders to root + ```bash + sudo mkdir /data + sudo mkdir /data/params + sudo chown $USER /data/params + ``` + +6. Try out some tools! + + +Tool examples +============ + + +Replay driving data +------------- + +**Hardware needed**: none + +`unlogger.py` replays data collected with [chffrplus](https://github.com/commaai/chffrplus) or [openpilot](https://github.com/commaai/openpilot). 
+ +Unlogger with remote data: + +``` +# Log in via browser +python lib/auth.py + +# Start unlogger +python replay/unlogger.py +#Example: +#python replay/unlogger.py '3533c53bb29502d1|2019-12-10--01-13-27' + +# In another terminal you can run a debug visualizer: +python replay/ui.py # Define the environmental variable HORIZONTAL is the ui layout is too tall +``` + +Unlogger with local data downloaded from device or https://my.comma.ai: + +``` +python replay/unlogger.py + +#Example: + +#python replay/unlogger.py '99c94dc769b5d96e|2018-11-14--13-31-42' /home/batman/unlogger_data + +#Within /home/batman/unlogger_data: +# 99c94dc769b5d96e|2018-11-14--13-31-42--0--fcamera.hevc +# 99c94dc769b5d96e|2018-11-14--13-31-42--0--rlog.bz2 +# ... +``` +![Imgur](https://i.imgur.com/Yppe0h2.png) + +LogReader with remote data + +```python +from tools.lib.logreader import LogReader +from tools.lib.route import Route +route = Route('3533c53bb29502d1|2019-12-10--01-13-27') +log_paths = route.log_paths() +events_seg0 = list(LogReader(log_paths[0])) +print(len(events_seg0), 'events logged in first segment') +``` + +Debug car controls +------------- + +**Hardware needed**: [panda](panda.comma.ai), [giraffe](https://comma.ai/shop/products/giraffe/), joystick + +Use the panda's OBD-II port to connect with your car and a usb cable to connect the panda to your pc. +Also, connect a joystick to your pc. + +`joystickd.py` runs a deamon that reads inputs from a joystick and publishes them over zmq. +`boardd.py` sends the CAN messages from your pc to the panda. +`debug_controls` is a mocked version of `controlsd.py` and uses input from a joystick to send controls to your car. 
+ +Usage: +``` +python carcontrols/joystickd.py + +# In another terminal: +selfdrive/boardd/tests/boardd_old.py # Make sure the safety setting is hardcoded to ALL_OUTPUT + +# In another terminal: +python carcontrols/debug_controls.py + +``` +![Imgur](steer.gif) + + +Stream replayed CAN messages to EON +------------- + +**Hardware needed**: 2 x [panda](panda.comma.ai), [debug board](https://comma.ai/shop/products/panda-debug-board/), [EON](https://comma.ai/shop/products/eon-gold-dashcam-devkit/). + +It is possible to replay CAN messages as they were recorded and forward them to a EON.  +Connect 2 pandas to the debug board. A panda connects to the PC, the other panda connects to the EON. + +Usage: +``` +# With MOCK=1 boardd will read logged can messages from a replay and send them to the panda. +MOCK=1 selfdrive/boardd/tests/boardd_old.py + +# In another terminal: +python replay/unlogger.py + +``` +![Imgur](https://i.imgur.com/AcurZk8.jpg) + + +Stream EON video data to a PC +------------- + +**Hardware needed**: [EON](https://comma.ai/shop/products/eon-gold-dashcam-devkit/), [comma Smays](https://comma.ai/shop/products/comma-smays-adapter/). + +You can connect your EON to your pc using the Ethernet cable provided with the comma Smays and you'll be able to stream data from your EON, in real time, with low latency. A useful application is being able to stream the raw video frames at 20fps, as captured by the EON's camera. + +Usage: +``` +# ssh into the eon and run loggerd with the flag "--stream". In ../selfdrive/manager.py you can change: +# ... +# "loggerd": ("selfdrive/loggerd", ["./loggerd"]), +# ... +# with: +# ... +# "loggerd": ("selfdrive/loggerd", ["./loggerd", "--stream"]), +# ... + +# On the PC: +# To receive frames from the EON and re-publish them. 
Set PYGAME env variable if you want to display the video stream +python streamer/streamerd.py +``` + +![Imgur](stream.gif) + + +Welcomed contributions +============= + +* Documentation: code comments, better tutorials, etc.. +* Support for other platforms other than Ubuntu 16.04. +* Performance improvements: the tools have been developed on high-performance workstations (12+ logical cores with 32+ GB of RAM), so they are not optimized for running efficiently. For example, `ui.py` might not be able to run real-time on most PCs. +* More tools: anything that you think might be helpful to others. diff --git a/tools/__init__.py b/tools/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tools/carcontrols/__init__.py b/tools/carcontrols/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tools/carcontrols/debug_controls.py b/tools/carcontrols/debug_controls.py new file mode 100755 index 00000000000000..9ce98568d4921a --- /dev/null +++ b/tools/carcontrols/debug_controls.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python +import struct +from common.numpy_fast import clip +from common.params import Params +from copy import copy +from cereal import car, log +import cereal.messaging as messaging +from selfdrive.car.car_helpers import get_car +from selfdrive.boardd.boardd import can_list_to_can_capnp + +HwType = log.HealthData.HwType + + +def steer_thread(): + poller = messaging.Poller() + + logcan = messaging.sub_sock('can') + health = messaging.sub_sock('health') + joystick_sock = messaging.sub_sock('testJoystick', conflate=True, poller=poller) + + carstate = messaging.pub_sock('carState') + carcontrol = messaging.pub_sock('carControl') + sendcan = messaging.pub_sock('sendcan') + + button_1_last = 0 + enabled = False + + # wait for health and CAN packets + hw_type = messaging.recv_one(health).health.hwType + has_relay = hw_type in [HwType.blackPanda, HwType.uno] + print("Waiting for CAN messages...") + 
messaging.get_one_can(logcan) + + CI, CP = get_car(logcan, sendcan, has_relay) + Params().put("CarParams", CP.to_bytes()) + + CC = car.CarControl.new_message() + + while True: + + # send + joystick = messaging.recv_one(joystick_sock) + can_strs = messaging.drain_sock_raw(logcan, wait_for_one=True) + CS = CI.update(CC, can_strs) + + # Usually axis run in pairs, up/down for one, and left/right for + # the other. + actuators = car.CarControl.Actuators.new_message() + + if joystick is not None: + axis_3 = clip(-joystick.testJoystick.axes[3] * 1.05, -1., 1.) # -1 to 1 + actuators.steer = axis_3 + actuators.steerAngle = axis_3 * 43. # deg + axis_1 = clip(-joystick.testJoystick.axes[1] * 1.05, -1., 1.) # -1 to 1 + actuators.gas = max(axis_1, 0.) + actuators.brake = max(-axis_1, 0.) + + pcm_cancel_cmd = joystick.testJoystick.buttons[0] + button_1 = joystick.testJoystick.buttons[1] + if button_1 and not button_1_last: + enabled = not enabled + + button_1_last = button_1 + + #print "enable", enabled, "steer", actuators.steer, "accel", actuators.gas - actuators.brake + + hud_alert = 0 + audible_alert = 0 + if joystick.testJoystick.buttons[2]: + audible_alert = "beepSingle" + if joystick.testJoystick.buttons[3]: + audible_alert = "chimeRepeated" + hud_alert = "steerRequired" + + CC.actuators.gas = actuators.gas + CC.actuators.brake = actuators.brake + CC.actuators.steer = actuators.steer + CC.actuators.steerAngle = actuators.steerAngle + CC.hudControl.visualAlert = hud_alert + CC.hudControl.setSpeed = 20 + CC.cruiseControl.cancel = pcm_cancel_cmd + CC.enabled = enabled + can_sends = CI.apply(CC) + sendcan.send(can_list_to_can_capnp(can_sends, msgtype='sendcan')) + + # broadcast carState + cs_send = messaging.new_message('carState') + cs_send.carState = copy(CS) + carstate.send(cs_send.to_bytes()) + + # broadcast carControl + cc_send = messaging.new_message('carControl') + cc_send.carControl = copy(CC) + carcontrol.send(cc_send.to_bytes()) + + +if __name__ == "__main__": + 
steer_thread() diff --git a/tools/carcontrols/joystick_test.py b/tools/carcontrols/joystick_test.py new file mode 100755 index 00000000000000..e031fb506f09ed --- /dev/null +++ b/tools/carcontrols/joystick_test.py @@ -0,0 +1,125 @@ +#!/usr/bin/env python +import pygame + +# Define some colors +BLACK = ( 0, 0, 0) +WHITE = ( 255, 255, 255) + +# This is a simple class that will help us print to the screen +# It has nothing to do with the joysticks, just outputting the +# information. +class TextPrint: + def __init__(self): + self.reset() + self.font = pygame.font.Font(None, 20) + + def printf(self, screen, textString): + textBitmap = self.font.render(textString, True, BLACK) + screen.blit(textBitmap, [self.x, self.y]) + self.y += self.line_height + + def reset(self): + self.x = 10 + self.y = 10 + self.line_height = 15 + + def indent(self): + self.x += 10 + + def unindent(self): + self.x -= 10 + + +pygame.init() + +# Set the width and height of the screen [width,height] +size = [500, 700] +screen = pygame.display.set_mode(size) + +pygame.display.set_caption("My Game") + +#Loop until the user clicks the close button. +done = False + +# Used to manage how fast the screen updates +clock = pygame.time.Clock() + +# Initialize the joysticks +pygame.joystick.init() + +# Get ready to print +textPrint = TextPrint() + +# -------- Main Program Loop ----------- +while done==False: + # EVENT PROCESSING STEP + for event in pygame.event.get(): # User did something + if event.type == pygame.QUIT: # If user clicked close + done=True # Flag that we are done so we exit this loop + + # Possible joystick actions: JOYAXISMOTION JOYBALLMOTION JOYBUTTONDOWN JOYBUTTONUP JOYHATMOTION + if event.type == pygame.JOYBUTTONDOWN: + print("Joystick button pressed.") + if event.type == pygame.JOYBUTTONUP: + print("Joystick button released.") + + + # DRAWING STEP + # First, clear the screen to white. Don't put other drawing commands + # above this, or they will be erased with this command. 
+ screen.fill(WHITE) + textPrint.reset() + + # Get count of joysticks + joystick_count = pygame.joystick.get_count() + + textPrint.printf(screen, "Number of joysticks: {}".format(joystick_count) ) + textPrint.indent() + + # For each joystick: + joystick = pygame.joystick.Joystick(0) + joystick.init() + + textPrint.printf(screen, "Joystick {}".format(0) ) + textPrint.indent() + + # Get the name from the OS for the controller/joystick + name = joystick.get_name() + textPrint.printf(screen, "Joystick name: {}".format(name) ) + + # Usually axis run in pairs, up/down for one, and left/right for + # the other. + axes = joystick.get_numaxes() + textPrint.printf(screen, "Number of axes: {}".format(axes) ) + textPrint.indent() + + for i in range( axes ): + axis = joystick.get_axis( i ) + textPrint.printf(screen, "Axis {} value: {:>6.3f}".format(i, axis) ) + textPrint.unindent() + + buttons = joystick.get_numbuttons() + textPrint.printf(screen, "Number of buttons: {}".format(buttons) ) + textPrint.indent() + + for i in range( buttons ): + button = joystick.get_button( i ) + textPrint.printf(screen, "Button {:>2} value: {}".format(i,button) ) + textPrint.unindent() + + + textPrint.unindent() + + + # ALL CODE TO DRAW SHOULD GO ABOVE THIS COMMENT + + # Go ahead and update the screen with what we've drawn. + pygame.display.flip() + + # Limit to 20 frames per second + clock.tick(20) + +# Close the window and quit. +# If you forget this line, the program will 'hang' +# on exit if running from IDLE. +pygame.quit () diff --git a/tools/carcontrols/joystickd.py b/tools/carcontrols/joystickd.py new file mode 100755 index 00000000000000..58235f450bcef9 --- /dev/null +++ b/tools/carcontrols/joystickd.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python + +# This process publishes joystick events. Such events can be suscribed by +# mocked car controller scripts. 
+ + +### this process needs pygame and can't run on the EON ### + +import pygame +import zmq +import cereal.messaging as messaging + + +def joystick_thread(): + joystick_sock = messaging.pub_sock('testJoystick') + + pygame.init() + + # Used to manage how fast the screen updates + clock = pygame.time.Clock() + + # Initialize the joysticks + pygame.joystick.init() + + # Get count of joysticks + joystick_count = pygame.joystick.get_count() + if joystick_count > 1: + raise ValueError("More than one joystick attached") + elif joystick_count < 1: + raise ValueError("No joystick found") + + # -------- Main Program Loop ----------- + while True: + # EVENT PROCESSING STEP + for event in pygame.event.get(): # User did something + if event.type == pygame.QUIT: # If user clicked close + pass + # Available joystick events: JOYAXISMOTION JOYBALLMOTION JOYBUTTONDOWN JOYBUTTONUP JOYHATMOTION + if event.type == pygame.JOYBUTTONDOWN: + print("Joystick button pressed.") + if event.type == pygame.JOYBUTTONUP: + print("Joystick button released.") + + joystick = pygame.joystick.Joystick(0) + joystick.init() + + # Usually axis run in pairs, up/down for one, and left/right for + # the other. 
+ axes = [] + buttons = [] + + for a in range(joystick.get_numaxes()): + axes.append(joystick.get_axis(a)) + + for b in range(joystick.get_numbuttons()): + buttons.append(bool(joystick.get_button(b))) + + dat = messaging.new_message('testJoystick') + dat.testJoystick.axes = axes + dat.testJoystick.buttons = buttons + joystick_sock.send(dat.to_bytes()) + + # Limit to 100 frames per second + clock.tick(100) + +if __name__ == "__main__": + joystick_thread() diff --git a/tools/clib/.gitignore b/tools/clib/.gitignore new file mode 100644 index 00000000000000..ae8a795a927d01 --- /dev/null +++ b/tools/clib/.gitignore @@ -0,0 +1 @@ +cframereader.cpp diff --git a/tools/clib/FrameReader.cpp b/tools/clib/FrameReader.cpp new file mode 100644 index 00000000000000..8048dbffdc63e6 --- /dev/null +++ b/tools/clib/FrameReader.cpp @@ -0,0 +1,176 @@ +#include "FrameReader.hpp" +#include +#include + +static int ffmpeg_lockmgr_cb(void **arg, enum AVLockOp op) { + pthread_mutex_t *mutex = (pthread_mutex_t *)*arg; + int err; + + switch (op) { + case AV_LOCK_CREATE: + mutex = (pthread_mutex_t *)malloc(sizeof(*mutex)); + if (!mutex) + return AVERROR(ENOMEM); + if ((err = pthread_mutex_init(mutex, NULL))) { + free(mutex); + return AVERROR(err); + } + *arg = mutex; + return 0; + case AV_LOCK_OBTAIN: + if ((err = pthread_mutex_lock(mutex))) + return AVERROR(err); + + return 0; + case AV_LOCK_RELEASE: + if ((err = pthread_mutex_unlock(mutex))) + return AVERROR(err); + + return 0; + case AV_LOCK_DESTROY: + if (mutex) + pthread_mutex_destroy(mutex); + free(mutex); + *arg = NULL; + return 0; + } + return 1; +} + +FrameReader::FrameReader(const char *fn) { + int ret; + + ret = av_lockmgr_register(ffmpeg_lockmgr_cb); + assert(ret >= 0); + + avformat_network_init(); + av_register_all(); + + snprintf(url, sizeof(url)-1,"%s",fn); + t = new std::thread([&]() { this->loaderThread(); }); +} + +void FrameReader::loaderThread() { + int ret; + + if (avformat_open_input(&pFormatCtx, url, NULL, NULL) != 0) { + 
fprintf(stderr, "error loading %s\n", url); + valid = false; + return; + } + av_dump_format(pFormatCtx, 0, url, 0); + + auto pCodecCtxOrig = pFormatCtx->streams[0]->codec; + auto pCodec = avcodec_find_decoder(pCodecCtxOrig->codec_id); + assert(pCodec != NULL); + + pCodecCtx = avcodec_alloc_context3(pCodec); + ret = avcodec_copy_context(pCodecCtx, pCodecCtxOrig); + assert(ret == 0); + + ret = avcodec_open2(pCodecCtx, pCodec, NULL); + assert(ret >= 0); + + sws_ctx = sws_getContext(width, height, AV_PIX_FMT_YUV420P, + width, height, AV_PIX_FMT_BGR24, + SWS_BILINEAR, NULL, NULL, NULL); + assert(sws_ctx != NULL); + + AVPacket *pkt = (AVPacket *)malloc(sizeof(AVPacket)); + assert(pkt != NULL); + bool first = true; + while (av_read_frame(pFormatCtx, pkt)>=0) { + //printf("%d pkt %d %d\n", pkts.size(), pkt->size, pkt->pos); + if (first) { + AVFrame *pFrame = av_frame_alloc(); + int frameFinished; + avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, pkt); + first = false; + } + pkts.push_back(pkt); + pkt = (AVPacket *)malloc(sizeof(AVPacket)); + assert(pkt != NULL); + } + free(pkt); + printf("framereader download done\n"); + joined = true; + + // cache + while (1) { + GOPCache(to_cache.get()); + } +} + + +void FrameReader::GOPCache(int idx) { + AVFrame *pFrame; + int gop = idx - idx%15; + + mcache.lock(); + bool has_gop = cache.find(gop) != cache.end(); + mcache.unlock(); + + if (!has_gop) { + //printf("caching %d\n", gop); + for (int i = gop; i < gop+15; i++) { + if (i >= pkts.size()) break; + //printf("decode %d\n", i); + int frameFinished; + pFrame = av_frame_alloc(); + avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, pkts[i]); + uint8_t *dat = toRGB(pFrame)->data[0]; + mcache.lock(); + cache.insert(std::make_pair(i, dat)); + mcache.unlock(); + } + } +} + +AVFrame *FrameReader::toRGB(AVFrame *pFrame) { + AVFrame *pFrameRGB = av_frame_alloc(); + int numBytes = avpicture_get_size(AV_PIX_FMT_BGR24, pFrame->width, pFrame->height); + uint8_t *buffer = (uint8_t 
*)av_malloc(numBytes*sizeof(uint8_t)); + avpicture_fill((AVPicture *)pFrameRGB, buffer, AV_PIX_FMT_BGR24, pFrame->width, pFrame->height); + sws_scale(sws_ctx, (uint8_t const * const *)pFrame->data, + pFrame->linesize, 0, pFrame->height, + pFrameRGB->data, pFrameRGB->linesize); + return pFrameRGB; +} + +uint8_t *FrameReader::get(int idx) { + if (!valid) return NULL; + waitForReady(); + // TODO: one line? + uint8_t *dat = NULL; + + // lookahead + to_cache.put(idx); + to_cache.put(idx+15); + + mcache.lock(); + auto it = cache.find(idx); + if (it != cache.end()) { + dat = it->second; + } + mcache.unlock(); + + if (dat == NULL) { + to_cache.put_front(idx); + // lookahead + while (dat == NULL) { + // wait for frame + usleep(50*1000); + // check for frame + mcache.lock(); + auto it = cache.find(idx); + if (it != cache.end()) dat = it->second; + mcache.unlock(); + if (dat == NULL) { + printf("."); + fflush(stdout); + } + } + } + return dat; +} + diff --git a/tools/clib/FrameReader.hpp b/tools/clib/FrameReader.hpp new file mode 100644 index 00000000000000..db655d641d64c4 --- /dev/null +++ b/tools/clib/FrameReader.hpp @@ -0,0 +1,58 @@ +#ifndef FRAMEREADER_HPP +#define FRAMEREADER_HPP + +#include +#include +#include +#include +#include +#include +#include + +#include "channel.hpp" + +// independent of QT, needs ffmpeg +extern "C" { +#include +#include +#include +} + + +class FrameReader { +public: + FrameReader(const char *fn); + uint8_t *get(int idx); + AVFrame *toRGB(AVFrame *); + void waitForReady() { + while (!joined) usleep(10*1000); + } + int getRGBSize() { return width*height*3; } + void loaderThread(); + void cacherThread(); +private: + AVFormatContext *pFormatCtx = NULL; + AVCodecContext *pCodecCtx = NULL; + + struct SwsContext *sws_ctx = NULL; + + int width = 1164; + int height = 874; + + std::vector pkts; + + std::thread *t; + bool joined = false; + + std::map cache; + std::mutex mcache; + + void GOPCache(int idx); + channel to_cache; + + bool valid = true; + char 
url[0x400]; +}; + +#endif + diff --git a/tools/clib/SConscript b/tools/clib/SConscript new file mode 100644 index 00000000000000..6f33fe75bc1498 --- /dev/null +++ b/tools/clib/SConscript @@ -0,0 +1,8 @@ +Import('env') +from sysconfig import get_paths +env['CPPPATH'] += [get_paths()['include']] + +from Cython.Build import cythonize +cythonize("cframereader.pyx") +env.SharedLibrary(File('cframereader.so'), ['cframereader.cpp', 'FrameReader.cpp'], LIBS=['avformat', 'avcodec', 'avutil', 'swscale']) + diff --git a/tools/clib/cframereader.pyx b/tools/clib/cframereader.pyx new file mode 100644 index 00000000000000..ffbc277587a028 --- /dev/null +++ b/tools/clib/cframereader.pyx @@ -0,0 +1,20 @@ +# distutils: language = c++ +# cython: language_level=3 + +cdef extern from "FrameReader.hpp": + cdef cppclass CFrameReader "FrameReader": + CFrameReader(const char *) + char *get(int) + +cdef class FrameReader(): + cdef CFrameReader *fr + + def __cinit__(self, fn): + self.fr = new CFrameReader(fn) + + def __dealloc__(self): + del self.fr + + def get(self, idx): + self.fr.get(idx) + diff --git a/tools/clib/channel.hpp b/tools/clib/channel.hpp new file mode 100644 index 00000000000000..d1ce657ceca6be --- /dev/null +++ b/tools/clib/channel.hpp @@ -0,0 +1,35 @@ +#ifndef CHANNEL_HPP +#define CHANNEL_HPP + +#include +#include +#include + +template +class channel { +private: + std::list queue; + std::mutex m; + std::condition_variable cv; +public: + void put(const item &i) { + std::unique_lock lock(m); + queue.push_back(i); + cv.notify_one(); + } + void put_front(const item &i) { + std::unique_lock lock(m); + queue.push_front(i); + cv.notify_one(); + } + item get() { + std::unique_lock lock(m); + cv.wait(lock, [&](){ return !queue.empty(); }); + item result = queue.front(); + queue.pop_front(); + return result; + } +}; + +#endif + diff --git a/tools/lib/__init__.py b/tools/lib/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tools/lib/api.py 
b/tools/lib/api.py new file mode 100644 index 00000000000000..4f9c870561d79e --- /dev/null +++ b/tools/lib/api.py @@ -0,0 +1,37 @@ +import sys +import os +import requests +from tools.lib.auth_config import clear_token +API_HOST = os.getenv('API_HOST', 'https://api.commadotai.com') + +class CommaApi(): + def __init__(self, token=None): + self.session = requests.Session() + self.session.headers['User-agent'] = 'OpenpilotTools' + if token: + self.session.headers['Authorization'] = 'JWT ' + token + + def request(self, method, endpoint, **kwargs): + resp = self.session.request(method, API_HOST + '/' + endpoint, **kwargs) + resp_json = resp.json() + if isinstance(resp_json, dict) and resp_json.get('error'): + if resp.status_code == 401: + clear_token() + raise UnauthorizedError('Unauthorized. Authenticate with tools/lib/auth.py') + + e = APIError(str(resp.status_code) + ":" + resp_json.get('description', str(resp_json['error']))) + e.status_code = resp.status_code + raise e + return resp_json + + def get(self, endpoint, **kwargs): + return self.request('GET', endpoint, **kwargs) + + def post(self, endpoint, **kwargs): + return self.request('POST', endpoint, **kwargs) + +class APIError(Exception): + pass + +class UnauthorizedError(Exception): + pass diff --git a/tools/lib/async_generator.py b/tools/lib/async_generator.py new file mode 100644 index 00000000000000..ef535f27f52cc1 --- /dev/null +++ b/tools/lib/async_generator.py @@ -0,0 +1,352 @@ +import functools +import threading +import inspect +import sys +import select +import struct +from math import sqrt +from collections import OrderedDict, deque +from time import time + +from tools.lib.pollable_queue import PollableQueue, Empty, Full, ExistentialError + +EndSentinel = object() + + +def _sync_inner_generator(input_queue, *args, **kwargs): + func = args[0] + args = args[1:] + + get = input_queue.get + while True: + item = get() + if item is EndSentinel: + return + + cookie, value = item + yield cookie, func(value, 
*args, **kwargs) + + +def _async_streamer_async_inner(input_queue, output_queue, generator_func, args, kwargs): + put = output_queue.put + put_end = True + try: + g = generator_func(input_queue, *args, **kwargs) + for item in g: + put((time(), item)) + g.close() + except ExistentialError: + put_end = False + raise + finally: + if put_end: + put((None, EndSentinel)) + +def _running_mean_var(ltc_stats, x): + old_mean, var = ltc_stats + mean = min(600., 0.98 * old_mean + 0.02 * x) + var = min(5., max(0.1, 0.98 * var + 0.02 * (mean - x) * (old_mean - x))) + return mean, var + +def _find_next_resend(sent_messages, ltc_stats): + if not sent_messages: + return None, None + + oldest_sent_idx = sent_messages._OrderedDict__root[1][2] + send_time, _ = sent_messages[oldest_sent_idx] + + # Assume message has been lost if it is >10 standard deviations from mean. + mean, var = ltc_stats + next_resend_time = send_time + mean + 40. * sqrt(var) + + return oldest_sent_idx, next_resend_time + + +def _do_cleanup(input_queue, output_queue, num_workers, sentinels_received, num_outstanding): + input_fd = input_queue.put_fd() + output_fd = output_queue.get_fd() + + poller = select.epoll() + poller.register(input_fd, select.EPOLLOUT) + poller.register(output_fd, select.EPOLLIN) + + remaining_outputs = [] + end_sentinels_to_send = num_workers - sentinels_received + while sentinels_received < num_workers: + evts = dict(poller.poll(-1 if num_outstanding > 0 else 10.)) + if not evts: + # Workers aren't responding, crash. + break + + if output_fd in evts: + _, maybe_sentinel = output_queue.get() + if maybe_sentinel is EndSentinel: + sentinels_received += 1 + else: + remaining_outputs.append(maybe_sentinel[1]) + num_outstanding -= 1 + + if input_fd in evts: + if end_sentinels_to_send > 0: + input_queue.put_nowait(EndSentinel) + end_sentinels_to_send -= 1 + else: + poller.modify(input_fd, 0) + + # TODO: Raise an exception when a queue thread raises one. 
+ assert sentinels_received == num_workers, (sentinels_received, num_workers) + assert output_queue.empty() + return remaining_outputs + +def _generate_results(input_stream, input_queue, worker_output_queue, output_queue, + num_workers, max_outstanding): + pack_cookie = struct.pack + + # Maps idx -> (send_time, input) + sent_messages = OrderedDict() + oldest_sent_idx = None + next_resend_time = None + ltc_stats = 5., 10. + + # Maps idx -> result + received_messages = {} + next_out = 0 + + # Start things off by pulling the first value. + next_in_item = next(input_stream, EndSentinel) + inputs_remain = next_in_item is not EndSentinel + sentinels_received = 0 + + input_fd = input_queue.put_fd() + worker_output_fd = worker_output_queue.get_fd() + output_fd = output_queue.put_fd() + + poller = select.epoll() + poller.register(input_fd, select.EPOLLOUT) + poller.register(worker_output_fd, select.EPOLLIN) + poller.register(output_fd, 0) + + # Keep sending/retrying until the input stream and sent messages are all done. + while sentinels_received < num_workers and (inputs_remain or sent_messages): + if max_outstanding: + can_send_new = (len(sent_messages) < max_outstanding and + len(received_messages) < max_outstanding and inputs_remain) + else: + can_send_new = inputs_remain + + if (next_resend_time and now >= next_resend_time) or can_send_new: + poller.modify(input_fd, select.EPOLLOUT) + else: + poller.modify(input_fd, 0) + + if next_resend_time: + t = max(0, next_resend_time - now) + evts = dict(poller.poll(t)) + else: + evts = dict(poller.poll()) + now = time() + + if output_fd in evts: + output_queue.put_nowait(received_messages.pop(next_out)) + next_out += 1 + + if next_out not in received_messages: + poller.modify(output_fd, 0) + + if worker_output_fd in evts: + for receive_time, maybe_sentinel in worker_output_queue.get_multiple_nowait(): + # Check for EndSentinel in case of worker crash. 
+ if maybe_sentinel is EndSentinel: + sentinels_received += 1 + continue + idx_bytes, value = maybe_sentinel + idx = struct.unpack("= (3,0): + import queue + import pickle + from io import BytesIO as StringIO +else: + import Queue as queue + import cPickle as pickle + from cStringIO import StringIO + +import subprocess +from aenum import Enum +from lru import LRU +from functools import wraps +from concurrent.futures import ThreadPoolExecutor, as_completed + +from tools.lib.cache import cache_path_for_file_path +from tools.lib.exceptions import DataUnreadableError +try: + from xx.chffr.lib.filereader import FileReader +except ImportError: + from tools.lib.filereader import FileReader +from tools.lib.file_helpers import atomic_write_in_dir +from tools.lib.mkvparse import mkvindex +from tools.lib.route import Route + +H264_SLICE_P = 0 +H264_SLICE_B = 1 +H264_SLICE_I = 2 + +HEVC_SLICE_B = 0 +HEVC_SLICE_P = 1 +HEVC_SLICE_I = 2 + +SLICE_I = 2 # hevc and h264 are the same :) + +class FrameType(Enum): + raw = 1 + h265_stream = 2 + h264_mp4 = 3 + h264_pstream = 4 + ffv1_mkv = 5 + ffvhuff_mkv = 6 + +def fingerprint_video(fn): + with FileReader(fn) as f: + header = f.read(4) + if len(header) == 0: + raise DataUnreadableError("%s is empty" % fn) + elif header == b"\x00\xc0\x12\x00": + return FrameType.raw + elif header == b"\x00\x00\x00\x01": + if 'hevc' in fn: + return FrameType.h265_stream + elif os.path.basename(fn) in ("camera", "acamera"): + return FrameType.h264_pstream + else: + raise NotImplementedError(fn) + elif header == b"\x00\x00\x00\x1c": + return FrameType.h264_mp4 + elif header == b"\x1a\x45\xdf\xa3": + return FrameType.ffv1_mkv + else: + raise NotImplementedError(fn) + + +def ffprobe(fn, fmt=None): + cmd = ["ffprobe", + "-v", "quiet", + "-print_format", "json", + "-show_format", "-show_streams"] + if fmt: + cmd += ["-format", fmt] + cmd += [fn] + + try: + ffprobe_output = subprocess.check_output(cmd) + except subprocess.CalledProcessError as e: + raise 
DataUnreadableError(fn) + + return json.loads(ffprobe_output) + + +def vidindex(fn, typ): + vidindex_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "vidindex") + vidindex = os.path.join(vidindex_dir, "vidindex") + + subprocess.check_call(["make"], cwd=vidindex_dir, stdout=open("/dev/null","w")) + + with tempfile.NamedTemporaryFile() as prefix_f, \ + tempfile.NamedTemporaryFile() as index_f: + try: + subprocess.check_call([vidindex, typ, fn, prefix_f.name, index_f.name]) + except subprocess.CalledProcessError as e: + raise DataUnreadableError("vidindex failed on file %s" % fn) + with open(index_f.name, "rb") as f: + index = f.read() + with open(prefix_f.name, "rb") as f: + prefix = f.read() + + index = np.frombuffer(index, np.uint32).reshape(-1, 2) + + assert index[-1, 0] == 0xFFFFFFFF + assert index[-1, 1] == os.path.getsize(fn) + + return index, prefix + + +def cache_fn(func): + @wraps(func) + def cache_inner(fn, *args, **kwargs): + cache_prefix = kwargs.pop('cache_prefix', None) + cache_path = cache_path_for_file_path(fn, cache_prefix) + + if cache_path and os.path.exists(cache_path): + with open(cache_path, "rb") as cache_file: + cache_value = pickle.load(cache_file) + else: + cache_value = func(fn, *args, **kwargs) + + if cache_path: + with atomic_write_in_dir(cache_path, mode="wb", overwrite=True) as cache_file: + pickle.dump(cache_value, cache_file, -1) + + return cache_value + + return cache_inner + +@cache_fn +def index_stream(fn, typ): + assert typ in ("hevc", "h264") + + with FileReader(fn) as f: + assert os.path.exists(f.name), fn + index, prefix = vidindex(f.name, typ) + probe = ffprobe(f.name, typ) + + return { + 'index': index, + 'global_prefix': prefix, + 'probe': probe + } + +@cache_fn +def index_mp4(fn): + with FileReader(fn) as f: + return vidindex_mp4(f.name) + +@cache_fn +def index_mkv(fn): + with FileReader(fn) as f: + probe = ffprobe(f.name, "matroska") + with open(f.name, "rb") as d_f: + config_record, index = 
mkvindex.mkvindex(d_f) + return { + 'probe': probe, + 'config_record': config_record, + 'index': index + } + +def index_videos(camera_paths, cache_prefix=None): + """Requires that paths in camera_paths are contiguous and of the same type.""" + if len(camera_paths) < 1: + raise ValueError("must provide at least one video to index") + + frame_type = fingerprint_video(camera_paths[0]) + if frame_type == FrameType.h264_pstream: + index_pstream(camera_paths, "h264", cache_prefix) + else: + for fn in camera_paths: + index_video(fn, frame_type, cache_prefix) + +def index_video(fn, frame_type=None, cache_prefix=None): + cache_path = cache_path_for_file_path(fn, cache_prefix) + + if os.path.exists(cache_path): + return + + if frame_type is None: + frame_type = fingerprint_video(fn[0]) + + if frame_type == FrameType.h264_pstream: + #hack: try to index the whole route now + route = Route.from_file_path(fn) + + camera_paths = route.camera_paths() + if fn not in camera_paths: + raise DataUnreadableError("Not a contiguous route camera file: {}".format(fn)) + + print("no pstream cache for %s, indexing route %s now" % (fn, route.name)) + index_pstream(route.camera_paths(), "h264", cache_prefix) + elif frame_type == FrameType.h265_stream: + index_stream(fn, "hevc", cache_prefix=cache_prefix) + elif frame_type == FrameType.h264_mp4: + index_mp4(fn, cache_prefix=cache_prefix) + +def get_video_index(fn, frame_type, cache_prefix=None): + cache_path = cache_path_for_file_path(fn, cache_prefix) + + if not os.path.exists(cache_path): + index_video(fn, frame_type, cache_prefix) + + if not os.path.exists(cache_path): + return None + with open(cache_path, "rb") as cache_file: + return pickle.load(cache_file) + +def pstream_predecompress(fns, probe, indexes, global_prefix, cache_prefix, multithreaded=False): + assert len(fns) == len(indexes) + out_fns = [cache_path_for_file_path(fn, cache_prefix, extension=".predecom.mkv") for fn in fns] + out_exists = map(os.path.exists, out_fns) + if 
all(out_exists): + return + + w = probe['streams'][0]['width'] + h = probe['streams'][0]['height'] + + frame_size = w*h*3/2 # yuv420p + + decompress_proc = subprocess.Popen( + ["ffmpeg", + "-threads", "0" if multithreaded else "1", + "-vsync", "0", + "-f", "h264", + "-i", "pipe:0", + "-threads", "0" if multithreaded else "1", + "-f", "rawvideo", + "-pix_fmt", "yuv420p", + "pipe:1"], + stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=open("/dev/null", "wb")) + + def write_thread(): + for fn in fns: + with FileReader(fn) as f: + decompress_proc.stdin.write(f.read()) + decompress_proc.stdin.close() + + def read_frame(): + frame = None + try: + frame = decompress_proc.stdout.read(frame_size) + except (IOError, ValueError): + pass + if frame is None or frame == "" or len(frame) != frame_size: + raise DataUnreadableError("pre-decompression failed for %s" % fn) + return frame + + t = threading.Thread(target=write_thread) + t.daemon = True + t.start() + + try: + for fn, out_fn, out_exist, index in zip(fns, out_fns, out_exists, indexes): + if out_exist: + for fi in range(index.shape[0]-1): + read_frame() + continue + + with atomic_write_in_dir(out_fn, mode="w+b", overwrite=True) as out_tmp: + compress_proc = subprocess.Popen( + ["ffmpeg", + "-threads", "0" if multithreaded else "1", + "-y", + "-vsync", "0", + "-f", "rawvideo", + "-pix_fmt", "yuv420p", + "-s", "%dx%d" % (w, h), + "-i", "pipe:0", + "-threads", "0" if multithreaded else "1", + "-f", "matroska", + "-vcodec", "ffv1", + "-g", "0", + out_tmp.name], + stdin=subprocess.PIPE, stderr=open("/dev/null", "wb")) + try: + for fi in range(index.shape[0]-1): + frame = read_frame() + compress_proc.stdin.write(frame) + compress_proc.stdin.close() + except: + compress_proc.kill() + raise + + assert compress_proc.wait() == 0 + + cache_path = cache_path_for_file_path(fn, cache_prefix) + with atomic_write_in_dir(cache_path, mode="wb", overwrite=True) as cache_file: + pickle.dump({ + 'predecom': os.path.basename(out_fn), + 
'index': index, + 'probe': probe, + 'global_prefix': global_prefix, + }, cache_file, -1) + + except: + decompress_proc.kill() + raise + finally: + t.join() + + rc = decompress_proc.wait() + if rc != 0: + raise DataUnreadableError(fns[0]) + + +def index_pstream(fns, typ, cache_prefix=None): + if typ != "h264": + raise NotImplementedError(typ) + + if not fns: + raise DataUnreadableError("chffr h264 requires contiguous files") + + out_fns = [cache_path_for_file_path(fn, cache_prefix) for fn in fns] + out_exists = map(os.path.exists, out_fns) + if all(out_exists): return + + # load existing index files to avoid re-doing work + existing_indexes = [] + for out_fn, exists in zip(out_fns, out_exists): + existing = None + if exists: + with open(out_fn, "rb") as cache_file: + existing = pickle.load(cache_file) + existing_indexes.append(existing) + + # probe the first file + if existing_indexes[0]: + probe = existing_indexes[0]['probe'] + else: + with FileReader(fns[0]) as f: + probe = ffprobe(f.name, typ) + + global_prefix = None + + # get the video index of all the segments in this stream + indexes = [] + for i, fn in enumerate(fns): + if existing_indexes[i]: + index = existing_indexes[i]['index'] + prefix = existing_indexes[i]['global_prefix'] + else: + with FileReader(fn) as f: + index, prefix = vidindex(f.name, typ) + if i == 0: + # assert prefix + if not prefix: + raise DataUnreadableError("vidindex failed for %s" % fn) + global_prefix = prefix + indexes.append(index) + + assert global_prefix + + if np.sum(indexes[0][:, 0] == H264_SLICE_I) <= 1: + print("pstream %s is unseekable. pre-decompressing all the segments..." 
% (fns[0])) + pstream_predecompress(fns, probe, indexes, global_prefix, cache_prefix) + return + + # generate what's required to make each segment self-contained + # (the partial GOP from the end of each segments are put asside to add + # to the start of the following segment) + prefix_data = ["" for _ in fns] + prefix_index = [[] for _ in fns] + for i in range(len(fns)-1): + if indexes[i+1][0, 0] == H264_SLICE_I and indexes[i+1][0, 1] <= 1: + # next file happens to start with a i-frame, dont need use this file's end + continue + + index = indexes[i] + if i == 0 and np.sum(index[:, 0] == H264_SLICE_I) <= 1: + raise NotImplementedError("No I-frames in pstream.") + + # find the last GOP in the index + frame_b = len(index)-1 + while frame_b > 0 and index[frame_b, 0] != H264_SLICE_I: + frame_b -= 1 + + assert frame_b >= 0 + assert index[frame_b, 0] == H264_SLICE_I + + end_len = len(index)-frame_b + + with FileReader(fns[i]) as vid: + vid.seek(index[frame_b, 1]) + end_data = vid.read() + + prefix_data[i+1] = end_data + prefix_index[i+1] = index[frame_b:-1] + # indexes[i] = index[:frame_b] + + for i, fn in enumerate(fns): + cache_path = out_fns[i] + + if os.path.exists(cache_path): + continue + + segment_index = { + 'index': indexes[i], + 'global_prefix': global_prefix, + 'probe': probe, + 'prefix_frame_data': prefix_data[i], # data to prefix the first GOP with + 'num_prefix_frames': len(prefix_index[i]), # number of frames to skip in the first GOP + } + + with atomic_write_in_dir(cache_path, mode="wb", overwrite=True) as cache_file: + pickle.dump(segment_index, cache_file, -1) + +def gpu_info(): + ret = [] + for fn in glob.glob("/proc/driver/nvidia/gpus/*/information"): + with open(fn, "r") as f: + dat = f.read() + kvs = dat.strip().split("\n") + kv = {} + for s in kvs: + k, v = s.split(":", 1) + kv[k] = v.strip() + ret.append(kv) + return ret + +def gpu_supports_hevc(gpuinfo): + return ("GTX 10" in gpuinfo['Model'] or "GTX 20" in gpuinfo['Model'] or gpuinfo['Model'] == 
"Graphics Device") + +def find_hevc_gpu(): + for gpuinfo in gpu_info(): + if gpu_supports_hevc(gpuinfo): + return int(gpuinfo['Device Minor']) + return None + +def _ffmpeg_fcamera_input_for_frame_info(frame_info): + st = time.time() + fn, num, count, cache_prefix = frame_info + + assert fn.endswith('.hevc') + sindex = index_stream(fn, "hevc", cache_prefix=cache_prefix) + index = sindex['index'] + prefix = sindex['global_prefix'] + probe = sindex['probe'] + + frame_e = num + count + frame_b = num + # must start decoding on an i-frame + while index[frame_b, 0] != HEVC_SLICE_I: + frame_b -= 1 + offset_b = index[frame_b, 1] + offset_e = index[frame_e, 1] + assert frame_b <= num < frame_e + skip = num - frame_b + + w = probe['streams'][0]['width'] + h = probe['streams'][0]['height'] + assert (h, w) == (874, 1164) + + st2 = time.time() + with FileReader(fn) as f: + f.seek(offset_b) + input_data = f.read(offset_e - offset_b) + et = time.time() + + get_time = et-st + get_time2 = et-st2 + + if get_time > 10.0: + print("TOOK OVER 10 seconds to fetch %r %f %f" % (frame_info, get_time, get_time2)) + + return prefix, input_data, skip, count + +def _ffmpeg_fcamera_input_for_frame(pair): + cookie, frame_info = pair + try: + return cookie, _ffmpeg_fcamera_input_for_frame_info(frame_info) + except Exception as e: + # Let the caller handle exceptions. + return cookie, e + + +def _feed_ffmpeg_fcamera_input_work_loop(frames, proc_stdin, select_pipe_fd, cookie_queue): + last_prefix = None + """ + with ThreadPoolExecutor(64) as pool: + futures = [] + for f in frames: + futures.append(pool.submit(_ffmpeg_fcamera_input_for_frame, f)) + for f in as_completed(futures): + cookie, data = f.result() + if isinstance(data, Exception): + # Just print exceptions for now. + print(data) + continue + prefix, input_data, skip, count = data + cookie_queue.put((cookie, count)) + + # Write zeros for skipped frames, ones for keep frames. 
+ os.write(select_pipe_fd, b"\x00" * skip + b"\x01" * count) + + if prefix != last_prefix: + proc_stdin.write(prefix) + last_prefix = prefix + + proc_stdin.write(input_data) + """ + num_threads = 64 + for cookie, data in async_generator( + num_threads, 8 * num_threads, 8 * num_threads, + reliable=False)(_ffmpeg_fcamera_input_for_frame)(frames): + if isinstance(data, Exception): + # Just print exceptions for now. + print(data) + continue + prefix, input_data, skip, count = data + cookie_queue.put((cookie, count)) + + # Write zeros for skipped frames, ones for keep frames. + os.write(select_pipe_fd, b"\x00" * skip + b"\x01" * count) + + if prefix != last_prefix: + proc_stdin.write(prefix) + last_prefix = prefix + + proc_stdin.write(input_data) + +_FCAMERA_FEED_SUCCESS = object() +def feed_ffmpeg_fcamera_input(frames, proc_stdin, select_pipe_fd, cookie_queue): + print("Feed started on {}".format(threading.current_thread().name)) + try: + _feed_ffmpeg_fcamera_input_work_loop(frames, proc_stdin, select_pipe_fd, cookie_queue) + cookie_queue.put((_FCAMERA_FEED_SUCCESS, None)) + finally: + # Always close ffmpeg input. 
+ proc_stdin.close() + + +def read_file_check_size(f, sz, cookie): + buff = bytearray(sz) + bytes_read = f.readinto(buff) + assert bytes_read == sz, (bytes_read, sz) + return buff + + +import signal +import ctypes +def _set_pdeathsig(sig=signal.SIGTERM): + def f(): + libc = ctypes.CDLL('libc.so.6') + return libc.prctl(1, sig) + return f + +def vidindex_mp4(fn): + try: + xmls = subprocess.check_output(["MP4Box", fn, "-diso", "-out", "/dev/stdout"]) + except subprocess.CalledProcessError as e: + raise DataUnreadableError(fn) + + tree = ET.fromstring(xmls) + + def parse_content(s): + assert s.startswith("data:application/octet-string,") + return s[len("data:application/octet-string,"):].decode("hex") + + avc_element = tree.find(".//AVCSampleEntryBox") + width = int(avc_element.attrib['Width']) + height = int(avc_element.attrib['Height']) + + sps_element = avc_element.find(".//AVCDecoderConfigurationRecord/SequenceParameterSet") + pps_element = avc_element.find(".//AVCDecoderConfigurationRecord/PictureParameterSet") + + sps = parse_content(sps_element.attrib['content']) + pps = parse_content(pps_element.attrib['content']) + + media_header = tree.find("MovieBox/TrackBox/MediaBox/MediaHeaderBox") + time_scale = int(media_header.attrib['TimeScale']) + + sample_sizes = [ + int(entry.attrib['Size']) for entry in tree.findall( + "MovieBox/TrackBox/MediaBox/MediaInformationBox/SampleTableBox/SampleSizeBox/SampleSizeEntry") + ] + + sample_dependency = [ + entry.attrib['dependsOnOther'] == "yes" for entry in tree.findall( + "MovieBox/TrackBox/MediaBox/MediaInformationBox/SampleTableBox/SampleDependencyTypeBox/SampleDependencyEntry") + ] + + assert len(sample_sizes) == len(sample_dependency) + + chunk_offsets = [ + int(entry.attrib['offset']) for entry in tree.findall( + "MovieBox/TrackBox/MediaBox/MediaInformationBox/SampleTableBox/ChunkOffsetBox/ChunkEntry") + ] + + sample_chunk_table = [ + (int(entry.attrib['FirstChunk'])-1, int(entry.attrib['SamplesPerChunk'])) for entry in 
tree.findall( + "MovieBox/TrackBox/MediaBox/MediaInformationBox/SampleTableBox/SampleToChunkBox/SampleToChunkEntry") + ] + + sample_offsets = [None for _ in sample_sizes] + + sample_i = 0 + for i, (first_chunk, samples_per_chunk) in enumerate(sample_chunk_table): + if i == len(sample_chunk_table)-1: + last_chunk = len(chunk_offsets)-1 + else: + last_chunk = sample_chunk_table[i+1][0]-1 + for k in range(first_chunk, last_chunk+1): + sample_offset = chunk_offsets[k] + for _ in range(samples_per_chunk): + sample_offsets[sample_i] = sample_offset + sample_offset += sample_sizes[sample_i] + sample_i += 1 + + assert sample_i == len(sample_sizes) + + pts_offset_table = [ + ( int(entry.attrib['CompositionOffset']), int(entry.attrib['SampleCount']) ) for entry in tree.findall( + "MovieBox/TrackBox/MediaBox/MediaInformationBox/SampleTableBox/CompositionOffsetBox/CompositionOffsetEntry") + ] + sample_pts_offset = [0 for _ in sample_sizes] + sample_i = 0 + for dt, count in pts_offset_table: + for _ in range(count): + sample_pts_offset[sample_i] = dt + sample_i += 1 + + sample_time_table = [ + ( int(entry.attrib['SampleDelta']), int(entry.attrib['SampleCount']) ) for entry in tree.findall( + "MovieBox/TrackBox/MediaBox/MediaInformationBox/SampleTableBox/TimeToSampleBox/TimeToSampleEntry") + ] + sample_time = [None for _ in sample_sizes] + cur_ts = 0 + sample_i = 0 + for dt, count in sample_time_table: + for _ in range(count): + sample_time[sample_i] = (cur_ts + sample_pts_offset[sample_i]) * 1000 / time_scale + + cur_ts += dt + sample_i += 1 + + sample_time.sort() # because we ony decode GOPs in PTS order + + return { + 'width': width, + 'height': height, + 'sample_offsets': sample_offsets, + 'sample_sizes': sample_sizes, + 'sample_dependency': sample_dependency, + 'sample_time': sample_time, + 'sps': sps, + 'pps': pps + } + + +class BaseFrameReader(object): + # properties: frame_type, frame_count, w, h + + def __enter__(self): + return self + + def __exit__(self, *args): + 
self.close() + + def close(self): + pass + + def get(self, num, count=1, pix_fmt="yuv420p"): + raise NotImplementedError + +def FrameReader(fn, cache_prefix=None, readahead=False, readbehind=False, multithreaded=True): + frame_type = fingerprint_video(fn) + if frame_type == FrameType.raw: + return RawFrameReader(fn) + elif frame_type in (FrameType.h265_stream, FrameType.h264_pstream): + index_data = get_video_index(fn, frame_type, cache_prefix) + if index_data is not None and "predecom" in index_data: + cache_path = cache_path_for_file_path(fn, cache_prefix) + return MKVFrameReader( + os.path.join(os.path.dirname(cache_path), index_data["predecom"])) + else: + return StreamFrameReader(fn, frame_type, index_data, + readahead=readahead, readbehind=readbehind, multithreaded=multithreaded) + elif frame_type == FrameType.h264_mp4: + return MP4FrameReader(fn, readahead=readahead) + elif frame_type == FrameType.ffv1_mkv: + return MKVFrameReader(fn) + else: + raise NotImplementedError(frame_type) + +def rgb24toyuv420(rgb): + yuv_from_rgb = np.array([[ 0.299 , 0.587 , 0.114 ], + [-0.14714119, -0.28886916, 0.43601035 ], + [ 0.61497538, -0.51496512, -0.10001026 ]]) + img = np.dot(rgb.reshape(-1, 3), yuv_from_rgb.T).reshape(rgb.shape) + + y_len = img.shape[0] * img.shape[1] + uv_len = y_len / 4 + + ys = img[:, :, 0] + us = (img[::2, ::2, 1] + img[1::2, ::2, 1] + img[::2, 1::2, 1] + img[1::2, 1::2, 1]) / 4 + 128 + vs = (img[::2, ::2, 2] + img[1::2, ::2, 2] + img[::2, 1::2, 2] + img[1::2, 1::2, 2]) / 4 + 128 + + yuv420 = np.empty(y_len + 2 * uv_len, dtype=img.dtype) + yuv420[:y_len] = ys.reshape(-1) + yuv420[y_len:y_len + uv_len] = us.reshape(-1) + yuv420[y_len + uv_len:y_len + 2 * uv_len] = vs.reshape(-1) + + return yuv420.clip(0,255).astype('uint8') + +class RawData(object): + def __init__(self, f): + self.f = _io.FileIO(f, 'rb') + self.lenn = struct.unpack("I", self.f.read(4))[0] + self.count = os.path.getsize(f) / (self.lenn+4) + + def read(self, i): + 
self.f.seek((self.lenn+4)*i + 4) + return self.f.read(self.lenn) + +class RawFrameReader(BaseFrameReader): + def __init__(self, fn): + # raw camera + self.fn = fn + self.frame_type = FrameType.raw + self.rawfile = RawData(self.fn) + self.frame_count = self.rawfile.count + self.w, self.h = 640, 480 + + def load_and_debayer(self, img): + img = np.frombuffer(img, dtype='uint8').reshape(960, 1280) + cimg = np.dstack([img[0::2, 1::2], ( + (img[0::2, 0::2].astype("uint16") + img[1::2, 1::2].astype("uint16")) + >> 1).astype("uint8"), img[1::2, 0::2]]) + return cimg + + + def get(self, num, count=1, pix_fmt="yuv420p"): + assert self.frame_count is not None + assert num+count <= self.frame_count + + if pix_fmt not in ("yuv420p", "rgb24"): + raise ValueError("Unsupported pixel format %r" % pix_fmt) + + app = [] + for i in range(num, num+count): + dat = self.rawfile.read(i) + rgb_dat = self.load_and_debayer(dat) + if pix_fmt == "rgb24": + app.append(rgb_dat) + elif pix_fmt == "yuv420p": + app.append(rgb24toyuv420(rgb_dat)) + else: + raise NotImplementedError + + return app + +def decompress_video_data(rawdat, vid_fmt, w, h, pix_fmt, multithreaded=False): + # using a tempfile is much faster than proc.communicate for some reason + + with tempfile.TemporaryFile() as tmpf: + tmpf.write(rawdat) + tmpf.seek(0) + + proc = subprocess.Popen( + ["ffmpeg", + "-threads", "0" if multithreaded else "1", + "-vsync", "0", + "-f", vid_fmt, + "-flags2", "showall", + "-i", "pipe:0", + "-threads", "0" if multithreaded else "1", + "-f", "rawvideo", + "-pix_fmt", pix_fmt, + "pipe:1"], + stdin=tmpf, stdout=subprocess.PIPE, stderr=open("/dev/null")) + + # dat = proc.communicate()[0] + dat = proc.stdout.read() + if proc.wait() != 0: + raise DataUnreadableError("ffmpeg failed") + + if pix_fmt == "rgb24": + ret = np.frombuffer(dat, dtype=np.uint8).reshape(-1, h, w, 3) + elif pix_fmt == "yuv420p": + ret = np.frombuffer(dat, dtype=np.uint8).reshape(-1, (h*w*3//2)) + elif pix_fmt == "yuv444p": + ret = 
np.frombuffer(dat, dtype=np.uint8).reshape(-1, 3, h, w) + else: + raise NotImplementedError + + return ret + +class VideoStreamDecompressor(object): + def __init__(self, vid_fmt, w, h, pix_fmt, multithreaded=False): + self.vid_fmt = vid_fmt + self.w = w + self.h = h + self.pix_fmt = pix_fmt + + if pix_fmt == "yuv420p": + self.out_size = w*h*3//2 # yuv420p + elif pix_fmt in ("rgb24", "yuv444p"): + self.out_size = w*h*3 + else: + raise NotImplementedError + + self.out_q = queue.Queue() + + self.proc = subprocess.Popen( + ["ffmpeg", + "-threads", "0" if multithreaded else "1", + # "-avioflags", "direct", + "-analyzeduration", "0", + "-probesize", "32", + "-flush_packets", "0", + # "-fflags", "nobuffer", + "-vsync", "0", + "-f", vid_fmt, + "-i", "pipe:0", + "-threads", "0" if multithreaded else "1", + "-f", "rawvideo", + "-pix_fmt", pix_fmt, + "pipe:1"], + stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=open("/dev/null", "wb")) + + def read_thread(): + while True: + r = self.proc.stdout.read(self.out_size) + if len(r) == 0: + break + assert len(r) == self.out_size + self.out_q.put(r) + + self.t = threading.Thread(target=read_thread) + self.t.daemon = True + self.t.start() + + def __enter__(self): + return self + + def __exit__(self, *args): + self.close() + + def write(self, rawdat): + self.proc.stdin.write(rawdat) + self.proc.stdin.flush() + + def read(self): + dat = self.out_q.get(block=True) + + if self.pix_fmt == "rgb24": + ret = np.frombuffer(dat, dtype=np.uint8).reshape((self.h, self.w, 3)) + elif self.pix_fmt == "yuv420p": + ret = np.frombuffer(dat, dtype=np.uint8) + elif self.pix_fmt == "yuv444p": + ret = np.frombuffer(dat, dtype=np.uint8).reshape((3, self.h, self.w)) + else: + assert False + + return ret + + def eos(self): + self.proc.stdin.close() + + def close(self): + self.proc.stdin.close() + self.t.join() + self.proc.wait() + assert self.proc.wait() == 0 + + +class MKVFrameReader(BaseFrameReader): + def __init__(self, fn): + self.fn = fn + + 
#print("MKVFrameReader", fn) + index_data = index_mkv(fn) + stream = index_data['probe']['streams'][0] + self.w = stream['width'] + self.h = stream['height'] + + if stream['codec_name'] == 'ffv1': + self.frame_type = FrameType.ffv1_mkv + elif stream['codec_name'] == 'ffvhuff': + self.frame_type = FrameType.ffvhuff_mkv + else: + raise NotImplementedError + + self.config_record = index_data['config_record'] + self.index = index_data['index'] + + self.frame_count = len(self.index) + + def get(self, num, count=1, pix_fmt="yuv420p"): + assert 0 < num+count <= self.frame_count + + frame_dats = [] + with FileReader(self.fn) as f: + for i in range(num, num+count): + pos, length, _ = self.index[i] + f.seek(pos) + frame_dats.append(f.read(length)) + + of = StringIO() + mkvindex.simple_gen(of, self.config_record, self.w, self.h, frame_dats) + + r = decompress_video_data(of.getvalue(), "matroska", self.w, self.h, pix_fmt) + assert len(r) == count + + return r + + +class GOPReader(object): + def get_gop(self, num): + # returns (start_frame_num, num_frames, frames_to_skip, gop_data) + raise NotImplementedError + + +class DoNothingContextManager(object): + def __enter__(self): return self + def __exit__(*x): pass + + +class GOPFrameReader(BaseFrameReader): + #FrameReader with caching and readahead for formats that are group-of-picture based + + def __init__(self, readahead=False, readbehind=False, multithreaded=True): + self.open_ = True + + self.multithreaded = multithreaded + self.readahead = readahead + self.readbehind = readbehind + self.frame_cache = LRU(64) + + if self.readahead: + self.cache_lock = threading.RLock() + self.readahead_last = None + self.readahead_len = 30 + self.readahead_c = threading.Condition() + self.readahead_thread = threading.Thread(target=self._readahead_thread) + self.readahead_thread.daemon = True + self.readahead_thread.start() + else: + self.cache_lock = DoNothingContextManager() + + def close(self): + if not self.open_: + return + self.open_ = 
False + + if self.readahead: + self.readahead_c.acquire() + self.readahead_c.notify() + self.readahead_c.release() + self.readahead_thread.join() + + def _readahead_thread(self): + while True: + self.readahead_c.acquire() + try: + if not self.open_: + break + self.readahead_c.wait() + finally: + self.readahead_c.release() + if not self.open_: + break + assert self.readahead_last + num, pix_fmt = self.readahead_last + + if self.readbehind: + for k in range(num-1, max(0, num-self.readahead_len), -1): + self._get_one(k, pix_fmt) + else: + for k in range(num, min(self.frame_count, num+self.readahead_len)): + self._get_one(k, pix_fmt) + + def _get_one(self, num, pix_fmt): + assert num < self.frame_count + + if (num, pix_fmt) in self.frame_cache: + return self.frame_cache[(num, pix_fmt)] + + with self.cache_lock: + if (num, pix_fmt) in self.frame_cache: + return self.frame_cache[(num, pix_fmt)] + + frame_b, num_frames, skip_frames, rawdat = self.get_gop(num) + + ret = decompress_video_data(rawdat, self.vid_fmt, self.w, self.h, pix_fmt, + multithreaded=self.multithreaded) + ret = ret[skip_frames:] + assert ret.shape[0] == num_frames + + for i in range(ret.shape[0]): + self.frame_cache[(frame_b+i, pix_fmt)] = ret[i] + + return self.frame_cache[(num, pix_fmt)] + + def get(self, num, count=1, pix_fmt="yuv420p"): + assert self.frame_count is not None + + if num + count > self.frame_count: + raise ValueError("{} > {}".format(num + count, self.frame_count)) + + if pix_fmt not in ("yuv420p", "rgb24", "yuv444p"): + raise ValueError("Unsupported pixel format %r" % pix_fmt) + + ret = [self._get_one(num + i, pix_fmt) for i in range(count)] + + if self.readahead: + self.readahead_last = (num+count, pix_fmt) + self.readahead_c.acquire() + self.readahead_c.notify() + self.readahead_c.release() + + return ret + +class MP4GOPReader(GOPReader): + def __init__(self, fn): + self.fn = fn + self.frame_type = FrameType.h264_mp4 + + self.index = index_mp4(fn) + + self.w = self.index['width'] + 
self.h = self.index['height'] + self.sample_sizes = self.index['sample_sizes'] + self.sample_offsets = self.index['sample_offsets'] + self.sample_dependency = self.index['sample_dependency'] + + self.vid_fmt = "h264" + + self.frame_count = len(self.sample_sizes) + + self.prefix = "\x00\x00\x00\x01"+self.index['sps']+"\x00\x00\x00\x01"+self.index['pps'] + + def _lookup_gop(self, num): + frame_b = num + while frame_b > 0 and self.sample_dependency[frame_b]: + frame_b -= 1 + + frame_e = num+1 + while frame_e < (len(self.sample_dependency)-1) and self.sample_dependency[frame_e]: + frame_e += 1 + + return (frame_b, frame_e) + + def get_gop(self, num): + frame_b, frame_e = self._lookup_gop(num) + assert frame_b <= num < frame_e + + num_frames = frame_e-frame_b + + with FileReader(self.fn) as f: + rawdat = [] + + sample_i = frame_b + while sample_i < frame_e: + size = self.sample_sizes[sample_i] + start_offset = self.sample_offsets[sample_i] + + # try to read contiguously because a read could actually be a http request + sample_i += 1 + while sample_i < frame_e and size < 10000000 and start_offset+size == self.sample_offsets[sample_i]: + size += self.sample_sizes[sample_i] + sample_i += 1 + + f.seek(start_offset) + sampledat = f.read(size) + + # read length-prefixed NALUs and output in Annex-B + i = 0 + while i < len(sampledat): + nal_len, = struct.unpack(">I", sampledat[i:i+4]) + rawdat.append("\x00\x00\x00\x01"+sampledat[i+4:i+4+nal_len]) + i = i+4+nal_len + assert i == len(sampledat) + + rawdat = self.prefix+''.join(rawdat) + + return frame_b, num_frames, 0, rawdat + +class MP4FrameReader(MP4GOPReader, GOPFrameReader): + def __init__(self, fn, readahead=False): + MP4GOPReader.__init__(self, fn) + GOPFrameReader.__init__(self, readahead) + +class StreamGOPReader(GOPReader): + def __init__(self, fn, frame_type, index_data): + self.fn = fn + + self.frame_type = frame_type + self.frame_count = None + self.w, self.h = None, None + + self.prefix = None + self.index = None + 
+ self.index = index_data['index'] + self.prefix = index_data['global_prefix'] + probe = index_data['probe'] + + if self.frame_type == FrameType.h265_stream: + self.prefix_frame_data = None + self.num_prefix_frames = 0 + self.vid_fmt = "hevc" + + elif self.frame_type == FrameType.h264_pstream: + self.prefix_frame_data = index_data['prefix_frame_data'] + self.num_prefix_frames = index_data['num_prefix_frames'] + + self.vid_fmt = "h264" + + i = 0 + while i < self.index.shape[0] and self.index[i, 0] != SLICE_I: + i += 1 + self.first_iframe = i + + if self.frame_type == FrameType.h265_stream: + assert self.first_iframe == 0 + + self.frame_count = len(self.index)-1 + + self.w = probe['streams'][0]['width'] + self.h = probe['streams'][0]['height'] + + + def _lookup_gop(self, num): + frame_b = num + while frame_b > 0 and self.index[frame_b, 0] != SLICE_I: + frame_b -= 1 + + frame_e = num+1 + while frame_e < (len(self.index)-1) and self.index[frame_e, 0] != SLICE_I: + frame_e += 1 + + offset_b = self.index[frame_b, 1] + offset_e = self.index[frame_e, 1] + + return (frame_b, frame_e, offset_b, offset_e) + + def get_gop(self, num): + frame_b, frame_e, offset_b, offset_e = self._lookup_gop(num) + assert frame_b <= num < frame_e + + num_frames = frame_e-frame_b + + with FileReader(self.fn) as f: + f.seek(offset_b) + rawdat = f.read(offset_e-offset_b) + + if num < self.first_iframe: + assert self.prefix_frame_data + rawdat = self.prefix_frame_data + rawdat + + rawdat = self.prefix + rawdat + + skip_frames = 0 + if num < self.first_iframe: + skip_frames = self.num_prefix_frames + + return frame_b, num_frames, skip_frames, rawdat + +class StreamFrameReader(StreamGOPReader, GOPFrameReader): + def __init__(self, fn, frame_type, index_data, readahead=False, readbehind=False, multithreaded=False): + StreamGOPReader.__init__(self, fn, frame_type, index_data) + GOPFrameReader.__init__(self, readahead, readbehind, multithreaded) + + + + +def GOPFrameIterator(gop_reader, pix_fmt, 
multithreaded=True): + # this is really ugly. ill think about how to refactor it when i can think good + + IN_FLIGHT_GOPS = 6 # should be enough that the stream decompressor starts returning data + + with VideoStreamDecompressor( + gop_reader.vid_fmt, gop_reader.w, gop_reader.h, pix_fmt, multithreaded) as dec: + + read_work = [] + + def readthing(): + # print read_work, dec.out_q.qsize() + outf = dec.read() + read_thing = read_work[0] + if read_thing[0] > 0: + read_thing[0] -= 1 + else: + assert read_thing[1] > 0 + yield outf + read_thing[1] -= 1 + + if read_thing[1] == 0: + read_work.pop(0) + + i = 0 + while i < gop_reader.frame_count: + frame_b, num_frames, skip_frames, gop_data = gop_reader.get_gop(i) + dec.write(gop_data) + i += num_frames + read_work.append([skip_frames, num_frames]) + + while len(read_work) >= IN_FLIGHT_GOPS: + for v in readthing(): yield v + + dec.eos() + + while read_work: + for v in readthing(): yield v + + +def FrameIterator(fn, pix_fmt, **kwargs): + fr = FrameReader(fn, **kwargs) + if isinstance(fr, GOPReader): + for v in GOPFrameIterator(fr, pix_fmt, kwargs.get("multithreaded", True)): yield v + else: + for i in range(fr.frame_count): + yield fr.get(i, pix_fmt=pix_fmt)[0] + + +def FrameWriter(ofn, frames, vid_fmt=FrameType.ffvhuff_mkv, pix_fmt="rgb24", framerate=20, multithreaded=False): + if pix_fmt not in ("rgb24", "yuv420p"): + raise NotImplementedError + + if vid_fmt == FrameType.ffv1_mkv: + assert ofn.endswith(".mkv") + vcodec = "ffv1" + elif vid_fmt == FrameType.ffvhuff_mkv: + assert ofn.endswith(".mkv") + vcodec = "ffvhuff" + else: + raise NotImplementedError + + frame_gen = iter(frames) + first_frame = next(frame_gen) + + # assert len(frames) > 1 + if pix_fmt == "rgb24": + h, w = first_frame.shape[:2] + elif pix_fmt == "yuv420p": + w = first_frame.shape[1] + h = 2*first_frame.shape[0]//3 + else: + raise NotImplementedError + + compress_proc = subprocess.Popen( + ["ffmpeg", + "-threads", "0" if multithreaded else "1", + "-y", + 
"-framerate", str(framerate), + "-vsync", "0", + "-f", "rawvideo", + "-pix_fmt", pix_fmt, + "-s", "%dx%d" % (w, h), + "-i", "pipe:0", + "-threads", "0" if multithreaded else "1", + "-f", "matroska", + "-vcodec", vcodec, + "-g", "0", + ofn], + stdin=subprocess.PIPE, stderr=open("/dev/null", "wb")) + try: + compress_proc.stdin.write(first_frame.tobytes()) + for frame in frame_gen: + compress_proc.stdin.write(frame.tobytes()) + compress_proc.stdin.close() + except: + compress_proc.kill() + raise + + assert compress_proc.wait() == 0 + +if __name__ == "__main__": + fn = "cd:/1c79456b0c90f15a/2017-05-10--08-17-00/2/fcamera.hevc" + f = FrameReader(fn) + # print f.get(0, 1).shape + # print f.get(15, 1).shape + for v in GOPFrameIterator(f, "yuv420p"): + print(v) diff --git a/tools/lib/index_log/.gitignore b/tools/lib/index_log/.gitignore new file mode 100644 index 00000000000000..17955f06d8a31e --- /dev/null +++ b/tools/lib/index_log/.gitignore @@ -0,0 +1 @@ +index_log diff --git a/tools/lib/index_log/Makefile b/tools/lib/index_log/Makefile new file mode 100644 index 00000000000000..a0264660f88a65 --- /dev/null +++ b/tools/lib/index_log/Makefile @@ -0,0 +1,19 @@ +CC := gcc +CXX := g++ +PHONELIBS?=../../../../phonelibs + +UNAME_S := $(shell uname -s) +ifeq ($(UNAME_S),Darwin) + CAPNP_FLAGS := /usr/local/lib/libcapnp.a /usr/local/lib/libkj.a +else + CAPNP_FLAGS := -I $(PHONELIBS)/capnp-cpp/include -I $(PHONELIBS)/capnp-cpp/include + CAPNP_LIBS := -L $(PHONELIBS)/capnp-cpp/x64/lib -L $(PHONELIBS)/capnp-cpp/x64/lib -l:libcapnp.a -l:libkj.a +endif + +index_log: index_log.cc + $(eval $@_TMP := $(shell mktemp)) + $(CXX) -std=gnu++11 -o $($@_TMP) \ + index_log.cc \ + $(CAPNP_FLAGS) \ + $(CAPNP_LIBS) + mv $($@_TMP) $@ diff --git a/tools/lib/index_log/index_log.cc b/tools/lib/index_log/index_log.cc new file mode 100644 index 00000000000000..8eb4a5e34ee839 --- /dev/null +++ b/tools/lib/index_log/index_log.cc @@ -0,0 +1,66 @@ +#include +#include +#include +#include + +#include 
+#include + +#include +#include + +#include +#include + +int main(int argc, char** argv) { + + if (argc != 3) { + printf("usage: %s \n", argv[0]); + return 1; + } + + const std::string log_fn = argv[1]; + const std::string index_fn = argv[2]; + + int log_fd = open(log_fn.c_str(), O_RDONLY, 0); + assert(log_fd >= 0); + + off_t log_size = lseek(log_fd, 0, SEEK_END); + lseek(log_fd, 0, SEEK_SET); + + FILE* index_f = NULL; + if (index_fn == "-") { + index_f = stdout; + } else { + index_f = fopen(index_fn.c_str(), "wb"); + } + assert(index_f); + + void* log_data = mmap(NULL, log_size, PROT_READ, MAP_PRIVATE, log_fd, 0); + assert(log_data); + + auto words = kj::arrayPtr((const capnp::word*)log_data, log_size/sizeof(capnp::word)); + while (words.size() > 0) { + + uint64_t idx = ((uintptr_t)words.begin() - (uintptr_t)log_data); + // printf("%llu - %ld\n", idx, words.size()); + const char* idx_bytes = (const char*)&idx; + fwrite(idx_bytes, 8, 1, index_f); + try { + capnp::FlatArrayMessageReader reader(words); + words = kj::arrayPtr(reader.getEnd(), words.end()); + } catch (kj::Exception exc) { + break; + } + + + } + + munmap(log_data, log_size); + + fclose(index_f); + + close(log_fd); + + return 0; +} diff --git a/tools/lib/kbhit.py b/tools/lib/kbhit.py new file mode 100644 index 00000000000000..30587590a42e29 --- /dev/null +++ b/tools/lib/kbhit.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python +import os +import sys +import termios +import atexit +from select import select + +class KBHit: + + def __init__(self): + '''Creates a KBHit object that you can call to do various keyboard things. 
+ ''' + + self.set_kbhit_terminal() + + def set_kbhit_terminal(self): + # Save the terminal settings + self.fd = sys.stdin.fileno() + self.new_term = termios.tcgetattr(self.fd) + self.old_term = termios.tcgetattr(self.fd) + + # New terminal setting unbuffered + self.new_term[3] = (self.new_term[3] & ~termios.ICANON & ~termios.ECHO) + termios.tcsetattr(self.fd, termios.TCSAFLUSH, self.new_term) + + # Support normal-terminal reset at exit + atexit.register(self.set_normal_term) + + def set_normal_term(self): + ''' Resets to normal terminal. On Windows this is a no-op. + ''' + + if os.name == 'nt': + pass + + else: + termios.tcsetattr(self.fd, termios.TCSAFLUSH, self.old_term) + + + def getch(self): + ''' Returns a keyboard character after kbhit() has been called. + Should not be called in the same program as getarrow(). + ''' + return sys.stdin.read(1) + + + def getarrow(self): + ''' Returns an arrow-key code after kbhit() has been called. Codes are + 0 : up + 1 : right + 2 : down + 3 : left + Should not be called in the same program as getch(). + ''' + + if os.name == 'nt': + msvcrt.getch() # skip 0xE0 + c = msvcrt.getch() + vals = [72, 77, 80, 75] + + else: + c = sys.stdin.read(3)[2] + vals = [65, 67, 66, 68] + + return vals.index(ord(c.decode('utf-8'))) + + + def kbhit(self): + ''' Returns True if keyboard character was hit, False otherwise. + ''' + dr,dw,de = select([sys.stdin], [], [], 0) + return dr != [] + + +# Test +if __name__ == "__main__": + + kb = KBHit() + + print('Hit any key, or ESC to exit') + + while True: + + if kb.kbhit(): + c = kb.getch() + if ord(c) == 27: # ESC + break + print(c) + + kb.set_normal_term() + + diff --git a/tools/lib/lazy_property.py b/tools/lib/lazy_property.py new file mode 100644 index 00000000000000..85c038f287a15f --- /dev/null +++ b/tools/lib/lazy_property.py @@ -0,0 +1,12 @@ +class lazy_property(object): + """Defines a property whose value will be computed only once and as needed. 
+ + This can only be used on instance methods. + """ + def __init__(self, func): + self._func = func + + def __get__(self, obj_self, cls): + value = self._func(obj_self) + setattr(obj_self, self._func.__name__, value) + return value diff --git a/tools/lib/logreader.py b/tools/lib/logreader.py new file mode 100755 index 00000000000000..1893f322006a95 --- /dev/null +++ b/tools/lib/logreader.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python3 +import os +import sys +import json +import bz2 +import tempfile +import requests +import subprocess +import urllib.parse +from aenum import Enum +import capnp +import numpy as np + +from tools.lib.exceptions import DataUnreadableError +try: + from xx.chffr.lib.filereader import FileReader +except ImportError: + from tools.lib.filereader import FileReader +from cereal import log as capnp_log + +OP_PATH = os.path.dirname(os.path.dirname(capnp_log.__file__)) + +def index_log(fn): + index_log_dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "index_log") + index_log = os.path.join(index_log_dir, "index_log") + phonelibs_dir = os.path.join(OP_PATH, 'phonelibs') + + subprocess.check_call(["make", "PHONELIBS=" + phonelibs_dir], cwd=index_log_dir, stdout=subprocess.DEVNULL) + + try: + dat = subprocess.check_output([index_log, fn, "-"]) + except subprocess.CalledProcessError: + raise DataUnreadableError("%s capnp is corrupted/truncated" % fn) + return np.frombuffer(dat, dtype=np.uint64) + +def event_read_multiple_bytes(dat): + with tempfile.NamedTemporaryFile() as dat_f: + dat_f.write(dat) + dat_f.flush() + idx = index_log(dat_f.name) + + end_idx = np.uint64(len(dat)) + idx = np.append(idx, end_idx) + + return [capnp_log.Event.from_bytes(dat[idx[i]:idx[i+1]]) + for i in range(len(idx)-1)] + + +# this is an iterator itself, and uses private variables from LogReader +class MultiLogIterator(object): + def __init__(self, log_paths, wraparound=True): + self._log_paths = log_paths + self._wraparound = wraparound + + self._first_log_idx 
= next(i for i in range(len(log_paths)) if log_paths[i] is not None) + self._current_log = self._first_log_idx + self._idx = 0 + self._log_readers = [None]*len(log_paths) + self.start_time = self._log_reader(self._first_log_idx)._ts[0] + + def _log_reader(self, i): + if self._log_readers[i] is None and self._log_paths[i] is not None: + log_path = self._log_paths[i] + print("LogReader:%s" % log_path) + self._log_readers[i] = LogReader(log_path) + + return self._log_readers[i] + + def __iter__(self): + return self + + def _inc(self): + lr = self._log_reader(self._current_log) + if self._idx < len(lr._ents)-1: + self._idx += 1 + else: + self._idx = 0 + self._current_log = next(i for i in range(self._current_log + 1, len(self._log_readers) + 1) if i == len(self._log_readers) or self._log_paths[i] is not None) + # wraparound + if self._current_log == len(self._log_readers): + if self._wraparound: + self._current_log = self._first_log_idx + else: + raise StopIteration + + def __next__(self): + while 1: + lr = self._log_reader(self._current_log) + ret = lr._ents[self._idx] + self._inc() + return ret + + def tell(self): + # returns seconds from start of log + return (self._log_reader(self._current_log)._ts[self._idx] - self.start_time) * 1e-9 + + def seek(self, ts): + # seek to nearest minute + minute = int(ts/60) + if minute >= len(self._log_paths) or self._log_paths[minute] is None: + return False + + self._current_log = minute + + # HACK: O(n) seek afterward + self._idx = 0 + while self.tell() < ts: + self._inc() + return True + + +class LogReader(object): + def __init__(self, fn, canonicalize=True, only_union_types=False): + data_version = None + _, ext = os.path.splitext(urllib.parse.urlparse(fn).path) + with FileReader(fn) as f: + dat = f.read() + + if ext == "": + # old rlogs weren't bz2 compressed + ents = event_read_multiple_bytes(dat) + elif ext == ".bz2": + dat = bz2.decompress(dat) + ents = event_read_multiple_bytes(dat) + else: + raise Exception(f"unknown 
extension {ext}") + + self._ts = [x.logMonoTime for x in ents] + self.data_version = data_version + self._only_union_types = only_union_types + self._ents = ents + + def __iter__(self): + for ent in self._ents: + if self._only_union_types: + try: + ent.which() + yield ent + except capnp.lib.capnp.KjException: + pass + else: + yield ent + +if __name__ == "__main__": + log_path = sys.argv[1] + lr = LogReader(log_path) + for msg in lr: + print(msg) diff --git a/tools/lib/mkvparse/README.md b/tools/lib/mkvparse/README.md new file mode 100644 index 00000000000000..2d82f190b9a113 --- /dev/null +++ b/tools/lib/mkvparse/README.md @@ -0,0 +1,24 @@ +Simple easy-to-use hacky matroska parser + +Define your handler class: + + class MyMatroskaHandler(mkvparse.MatroskaHandler): + def tracks_available(self): + ... + + def segment_info_available(self): + ... + + def frame(self, track_id, timestamp, data, more_laced_blocks, duration, keyframe_flag, invisible_flag, discardable_flag): + ... + +and `mkvparse.mkvparse(file, MyMatroskaHandler())` + + +Supports lacing and setting global timecode scale, subtitles (BlockGroup). Does not support cues, tags, chapters, seeking and so on. Supports resyncing when something bad is encountered in matroska stream. + +Also contains example of generation of Matroska files from python + +Subtitles should remain as text, binary data gets encoded to hex. 
+ +Licence=MIT diff --git a/tools/lib/mkvparse/__init__.py b/tools/lib/mkvparse/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tools/lib/mkvparse/mkvgen.py b/tools/lib/mkvparse/mkvgen.py new file mode 100755 index 00000000000000..963d60d019adec --- /dev/null +++ b/tools/lib/mkvparse/mkvgen.py @@ -0,0 +1,188 @@ +#!/usr/bin/env python +import sys +import random +import math + +# Simple hacky Matroska generator +# Reads mp3 file "q.mp3" and jpeg images from img/0.jpg, img/1.jpg and so on and +# writes Matroska file with mjpeg and mp3 to stdout + +# License=MIT + +# unsigned +def big_endian_number(number): + if(number<0x100): + return chr(number) + return big_endian_number(number>>8) + chr(number&0xFF) + +ben=big_endian_number + +def ebml_encode_number(number): + def trailing_bits(rest_of_number, number_of_bits): + # like big_endian_number, but can do padding zeroes + if number_of_bits==8: + return chr(rest_of_number&0xFF); + else: + return trailing_bits(rest_of_number>>8, number_of_bits-8) + chr(rest_of_number&0xFF) + + if number == -1: + return chr(0xFF) + if number < 2**7 - 1: + return chr(number|0x80) + if number < 2**14 - 1: + return chr(0x40 | (number>>8)) + trailing_bits(number, 8) + if number < 2**21 - 1: + return chr(0x20 | (number>>16)) + trailing_bits(number, 16) + if number < 2**28 - 1: + return chr(0x10 | (number>>24)) + trailing_bits(number, 24) + if number < 2**35 - 1: + return chr(0x08 | (number>>32)) + trailing_bits(number, 32) + if number < 2**42 - 1: + return chr(0x04 | (number>>40)) + trailing_bits(number, 40) + if number < 2**49 - 1: + return chr(0x02 | (number>>48)) + trailing_bits(number, 48) + if number < 2**56 - 1: + return chr(0x01) + trailing_bits(number, 56) + raise Exception("NUMBER TOO BIG") + +def ebml_element(element_id, data, length=None): + if length==None: + length = len(data) + return big_endian_number(element_id) + ebml_encode_number(length) + data + + +def write_ebml_header(f, content_type, 
version, read_version): + f.write( + ebml_element(0x1A45DFA3, "" # EBML + + ebml_element(0x4286, ben(1)) # EBMLVersion + + ebml_element(0x42F7, ben(1)) # EBMLReadVersion + + ebml_element(0x42F2, ben(4)) # EBMLMaxIDLength + + ebml_element(0x42F3, ben(8)) # EBMLMaxSizeLength + + ebml_element(0x4282, content_type) # DocType + + ebml_element(0x4287, ben(version)) # DocTypeVersion + + ebml_element(0x4285, ben(read_version)) # DocTypeReadVersion + )) + +def write_infinite_segment_header(f): + # write segment element header + f.write(ebml_element(0x18538067,"",-1)) # Segment (unknown length) + +def random_uid(): + def rint(): + return int(random.random()*(0x100**4)) + return ben(rint()) + ben(rint()) + ben(rint()) + ben(rint()) + + +def example(): + write_ebml_header(sys.stdout, "matroska", 2, 2) + write_infinite_segment_header(sys.stdout) + + + # write segment info (optional) + sys.stdout.write(ebml_element(0x1549A966, "" # SegmentInfo + + ebml_element(0x73A4, random_uid()) # SegmentUID + + ebml_element(0x7BA9, "mkvgen.py test") # Title + + ebml_element(0x4D80, "mkvgen.py") # MuxingApp + + ebml_element(0x5741, "mkvgen.py") # WritingApp + )) + + # write trans data (codecs etc.) + sys.stdout.write(ebml_element(0x1654AE6B, "" # Tracks + + ebml_element(0xAE, "" # TrackEntry + + ebml_element(0xD7, ben(1)) # TrackNumber + + ebml_element(0x73C5, ben(0x77)) # TrackUID + + ebml_element(0x83, ben(0x01)) # TrackType + #0x01 track is a video track + #0x02 track is an audio track + #0x03 track is a complex track, i.e. 
a combined video and audio track + #0x10 track is a logo track + #0x11 track is a subtitle track + #0x12 track is a button track + #0x20 track is a control track + + ebml_element(0x536E, "mjpeg data") # Name + + ebml_element(0x86, "V_MJPEG") # CodecID + #+ ebml_element(0x23E383, ben(100000000)) # DefaultDuration (opt.), nanoseconds + #+ ebml_element(0x6DE7, ben(100)) # MinCache + + ebml_element(0xE0, "" # Video + + ebml_element(0xB0, ben(640)) # PixelWidth + + ebml_element(0xBA, ben(480)) # PixelHeight + ) + ) + + ebml_element(0xAE, "" # TrackEntry + + ebml_element(0xD7, ben(2)) # TrackNumber + + ebml_element(0x73C5, ben(0x78)) # TrackUID + + ebml_element(0x83, ben(0x02)) # TrackType + #0x01 track is a video track + #0x02 track is an audio track + #0x03 track is a complex track, i.e. a combined video and audio track + #0x10 track is a logo track + #0x11 track is a subtitle track + #0x12 track is a button track + #0x20 track is a control track + + ebml_element(0x536E, "content of mp3 file") # Name + #+ ebml_element(0x6DE7, ben(100)) # MinCache + + ebml_element(0x86, "A_MPEG/L3") # CodecID + #+ ebml_element(0xE1, "") # Audio + ) + )) + + + mp3file = open("q.mp3", "rb") + mp3file.read(500000); + + def mp3framesgenerator(f): + debt="" + while True: + for i in xrange(0,len(debt)+1): + if i >= len(debt)-1: + debt = debt + f.read(8192) + break + #sys.stderr.write("i="+str(i)+" len="+str(len(debt))+"\n") + if ord(debt[i])==0xFF and (ord(debt[i+1]) & 0xF0)==0XF0 and i>700: + if i>0: + yield debt[0:i] + # sys.stderr.write("len="+str(i)+"\n") + debt = debt[i:] + break + + + mp3 = mp3framesgenerator(mp3file) + mp3.next() + + + for i in xrange(0,530): + framefile = open("img/"+str(i)+".jpg", "rb") + framedata = framefile.read() + framefile.close() + + # write cluster (actual video data) + + if random.random()<1: + sys.stdout.write(ebml_element(0x1F43B675, "" # Cluster + + ebml_element(0xE7, ben(int(i*26*4))) # TimeCode, uint, milliseconds + # + ebml_element(0xA7, ben(0)) # 
Position, uint + + ebml_element(0xA3, "" # SimpleBlock + + ebml_encode_number(1) # track number + + chr(0x00) + chr(0x00) # timecode, relative to Cluster timecode, sint16, in milliseconds + + chr(0x00) # flags + + framedata + ))) + + for u in xrange(0,4): + mp3f=mp3.next() + if random.random()<1: + sys.stdout.write(ebml_element(0x1F43B675, "" # Cluster + + ebml_element(0xE7, ben(i*26*4+u*26)) # TimeCode, uint, milliseconds + + ebml_element(0xA3, "" # SimpleBlock + + ebml_encode_number(2) # track number + + chr(0x00) + chr(0x00) # timecode, relative to Cluster timecode, sint16, in milliseconds + + chr(0x00) # flags + + mp3f + ))) + + + + + +if __name__ == '__main__': + example() diff --git a/tools/lib/mkvparse/mkvindex.py b/tools/lib/mkvparse/mkvindex.py new file mode 100644 index 00000000000000..f985280d9d1096 --- /dev/null +++ b/tools/lib/mkvparse/mkvindex.py @@ -0,0 +1,87 @@ +#!/usr/bin/env python +# Copyright (c) 2016, Comma.ai, Inc. + +import sys +import re +import binascii + +from tools.lib.mkvparse import mkvparse +from tools.lib.mkvparse import mkvgen +from tools.lib.mkvparse.mkvgen import ben, ebml_element, ebml_encode_number + +class MatroskaIndex(mkvparse.MatroskaHandler): + # def __init__(self, banlist, nocluster_mode): + # pass + def __init__(self): + self.frameindex = [] + + def tracks_available(self): + _, self.config_record = self.tracks[1]['CodecPrivate'] + + def frame(self, track_id, timestamp, pos, length, more_laced_frames, duration, + keyframe, invisible, discardable): + self.frameindex.append((pos, length, keyframe)) + + + +def mkvindex(f): + handler = MatroskaIndex() + mkvparse.mkvparse(f, handler) + return handler.config_record, handler.frameindex + + +def simple_gen(of, config_record, w, h, framedata): + mkvgen.write_ebml_header(of, "matroska", 2, 2) + mkvgen.write_infinite_segment_header(of) + + of.write(ebml_element(0x1654AE6B, "" # Tracks + + ebml_element(0xAE, "" # TrackEntry + + ebml_element(0xD7, ben(1)) # TrackNumber + + 
ebml_element(0x73C5, ben(1)) # TrackUID + + ebml_element(0x83, ben(1)) # TrackType = video track + + ebml_element(0x86, "V_MS/VFW/FOURCC") # CodecID + + ebml_element(0xE0, "" # Video + + ebml_element(0xB0, ben(w)) # PixelWidth + + ebml_element(0xBA, ben(h)) # PixelHeight + ) + + ebml_element(0x63A2, config_record) # CodecPrivate (ffv1 configuration record) + ) + )) + + blocks = [] + for fd in framedata: + blocks.append( + ebml_element(0xA3, "" # SimpleBlock + + ebml_encode_number(1) # track number + + chr(0x00) + chr(0x00) # timecode, relative to Cluster timecode, sint16, in milliseconds + + chr(0x80) # flags (keyframe) + + fd + ) + ) + + of.write(ebml_element(0x1F43B675, "" # Cluster + + ebml_element(0xE7, ben(0)) # TimeCode, uint, milliseconds + # + ebml_element(0xA7, ben(0)) # Position, uint + + ''.join(blocks))) + +if __name__ == "__main__": + import random + + if len(sys.argv) != 2: + print("usage: %s mkvpath" % sys.argv[0]) + with open(sys.argv[1], "rb") as f: + cr, index = mkvindex(f) + + # cr = "280000003002000030010000010018004646563100cb070000000000000000000000000000000000".decode("hex") + + def geti(i): + pos, length = index[i] + with open(sys.argv[1], "rb") as f: + f.seek(pos) + return f.read(length) + + dats = [geti(random.randrange(200)) for _ in xrange(30)] + + with open("tmpout.mkv", "wb") as of: + simple_gen(of, cr, dats) + diff --git a/tools/lib/mkvparse/mkvparse.py b/tools/lib/mkvparse/mkvparse.py new file mode 100644 index 00000000000000..114fb4cceaf90f --- /dev/null +++ b/tools/lib/mkvparse/mkvparse.py @@ -0,0 +1,761 @@ +# Licence==MIT; Vitaly "_Vi" Shukela 2012 + +# Simple easy-to-use hacky matroska parser + +# Supports SimpleBlock and BlockGroup, lacing, TimecodeScale. +# Does not support seeking, cues, chapters and other features. 
+# No proper EOF handling unfortunately + +# See "mkvuser.py" for the example + +import traceback +from struct import unpack + +import sys +import datetime + +if sys.version < '3': + range=xrange +else: + #identity=lambda x:x + def ord(something): + if type(something)==bytes: + if something == b"": + raise StopIteration + return something[0] + else: + return something + +def get_major_bit_number(n): + ''' + Takes uint8, returns number of the most significant bit plus the number with that bit cleared. + Examples: + 0b10010101 -> (0, 0b00010101) + 0b00010101 -> (3, 0b00000101) + 0b01111111 -> (1, 0b00111111) + ''' + if not n: + raise Exception("Bad number") + i=0x80; + r=0 + while not n&i: + r+=1 + i>>=1 + return (r,n&~i); + +def read_matroska_number(f, unmodified=False, signed=False): + ''' + Read ebml number. Unmodified means don't clear the length bit (as in Element IDs) + Returns the number and it's length as a tuple + + See examples in "parse_matroska_number" function + ''' + if unmodified and signed: + raise Exception("Contradictary arguments") + first_byte=f.read(1) + if(first_byte==""): + raise StopIteration + r = ord(first_byte) + (n,r2) = get_major_bit_number(r) + if not unmodified: + r=r2 + # from now "signed" means "negative" + i=n + while i: + r = r * 0x100 + ord(f.read(1)) + i-=1 + if signed: + r-=(2**(7*n+7)-1) + else: + if r==2**(7*n+7)-1: + return (-1, n+1) + return (r,n+1) + +def parse_matroska_number(data, pos, unmodified=False, signed=False): + ''' + Parse ebml number from buffer[pos:]. Just like read_matroska_number. 
+ Unmodified means don't clear the length bit (as in Element IDs) + Returns the number plus the new position in input buffer + + Examples: + "\x81" -> (1, pos+1) + "\x40\x01" -> (1, pos+2) + "\x20\x00\x01" -> (1, pos+3) + "\x3F\xFF\xFF" -> (0x1FFFFF, pos+3) + "\x20\x00\x01" unmodified -> (0x200001, pos+3) + "\xBF" signed -> (0, pos+1) + "\xBE" signed -> (-1, pos+1) + "\xC0" signed -> (1, pos+1) + "\x5F\xEF" signed -> (-16, pos+2) + ''' + if unmodified and signed: + raise Exception("Contradictary arguments") + r = ord(data[pos]) + pos+=1 + (n,r2) = get_major_bit_number(r) + if not unmodified: + r=r2 + # from now "signed" means "negative" + i=n + while i: + r = r * 0x100 + ord(data[pos]) + pos+=1 + i-=1 + if signed: + r-=(2**(7*n+6)-1) + else: + if r==2**(7*n+7)-1: + return (-1, pos) + return (r,pos) + +def parse_xiph_number(data, pos): + ''' + Parse the Xiph lacing number from data[pos:] + Returns the number plus the new position + + Examples: + "\x01" -> (1, pos+1) + "\x55" -> (0x55, pos+1) + "\xFF\x04" -> (0x103, pos+2) + "\xFF\xFF\x04" -> (0x202, pos+3) + "\xFF\xFF\x00" -> (0x1FE, pos+3) + ''' + v = ord(data[pos]) + pos+=1 + + r=0 + while v==255: + r+=v + v = ord(data[pos]) + pos+=1 + + r+=v + return (r, pos) + + +def parse_fixedlength_number(data, pos, length, signed=False): + ''' + Read the big-endian number from data[pos:pos+length] + Returns the number plus the new position + + Examples: + "\x01" -> (0x1, pos+1) + "\x55" -> (0x55, pos+1) + "\x55" signed -> (0x55, pos+1) + "\xFF\x04" -> (0xFF04, pos+2) + "\xFF\x04" signed -> (-0x00FC, pos+2) + ''' + r=0 + for i in range(length): + r=r*0x100+ord(data[pos+i]) + if signed: + if ord(data[pos]) & 0x80: + r-=2**(8*length) + return (r, pos+length) + +def read_fixedlength_number(f, length, signed=False): + """ Read length bytes and parse (parse_fixedlength_number) it. 
+ Returns only the number""" + buf = f.read(length) + (r, pos) = parse_fixedlength_number(buf, 0, length, signed) + return r + +def read_ebml_element_header(f): + ''' + Read Element ID and size + Returns id, element size and this header size + ''' + (id_, n) = read_matroska_number(f, unmodified=True) + (size, n2) = read_matroska_number(f) + return (id_, size, n+n2) + +class EbmlElementType: + VOID=0 + MASTER=1 # read all subelements and return tree. Don't use this too large things like Segment + UNSIGNED=2 + SIGNED=3 + TEXTA=4 + TEXTU=5 + BINARY=6 + FLOAT=7 + DATE=8 + + JUST_GO_ON=10 # For "Segment". + # Actually MASTER, but don't build the tree for all subelements, + # interpreting all child elements as if they were top-level elements + + +EET=EbmlElementType + +# lynx -width=10000 -dump http://matroska.org/technical/specs/index.html +# | sed 's/not 0/not0/g; s/> 0/>0/g; s/Sampling Frequency/SamplingFrequency/g' +# | awk '{print $1 " " $3 " " $8}' +# | grep '\[..\]' +# | perl -ne '/(\S+) (\S+) (.)/; +# $name=$1; $id=$2; $type=$3; +# $id=~s/\[|\]//g; +# %types = (m=>"EET.MASTER", +# u=>"EET.UNSIGNED", +# i=>"EET.SIGNED", +# 8=>"EET.TEXTU", +# s=>"EET.TEXTA", +# b=>"EET.BINARY", +# f=>"EET.FLOAT", +# d=>"EET.DATE"); +# $t=$types{$type}; +# next unless $t; +# $t="EET.JUST_GO_ON" if $name eq "Segment" or $name eq "Cluster"; +# print "\t0x$id: ($t, \"$name\"),\n";' + +element_types_names = { + 0x1A45DFA3: (EET.MASTER, "EBML"), + 0x4286: (EET.UNSIGNED, "EBMLVersion"), + 0x42F7: (EET.UNSIGNED, "EBMLReadVersion"), + 0x42F2: (EET.UNSIGNED, "EBMLMaxIDLength"), + 0x42F3: (EET.UNSIGNED, "EBMLMaxSizeLength"), + 0x4282: (EET.TEXTA, "DocType"), + 0x4287: (EET.UNSIGNED, "DocTypeVersion"), + 0x4285: (EET.UNSIGNED, "DocTypeReadVersion"), + 0xEC: (EET.BINARY, "Void"), + 0xBF: (EET.BINARY, "CRC-32"), + 0x1B538667: (EET.MASTER, "SignatureSlot"), + 0x7E8A: (EET.UNSIGNED, "SignatureAlgo"), + 0x7E9A: (EET.UNSIGNED, "SignatureHash"), + 0x7EA5: (EET.BINARY, "SignaturePublicKey"), + 0x7EB5: 
(EET.BINARY, "Signature"), + 0x7E5B: (EET.MASTER, "SignatureElements"), + 0x7E7B: (EET.MASTER, "SignatureElementList"), + 0x6532: (EET.BINARY, "SignedElement"), + 0x18538067: (EET.JUST_GO_ON, "Segment"), + 0x114D9B74: (EET.MASTER, "SeekHead"), + 0x4DBB: (EET.MASTER, "Seek"), + 0x53AB: (EET.BINARY, "SeekID"), + 0x53AC: (EET.UNSIGNED, "SeekPosition"), + 0x1549A966: (EET.MASTER, "Info"), + 0x73A4: (EET.BINARY, "SegmentUID"), + 0x7384: (EET.TEXTU, "SegmentFilename"), + 0x3CB923: (EET.BINARY, "PrevUID"), + 0x3C83AB: (EET.TEXTU, "PrevFilename"), + 0x3EB923: (EET.BINARY, "NextUID"), + 0x3E83BB: (EET.TEXTU, "NextFilename"), + 0x4444: (EET.BINARY, "SegmentFamily"), + 0x6924: (EET.MASTER, "ChapterTranslate"), + 0x69FC: (EET.UNSIGNED, "ChapterTranslateEditionUID"), + 0x69BF: (EET.UNSIGNED, "ChapterTranslateCodec"), + 0x69A5: (EET.BINARY, "ChapterTranslateID"), + 0x2AD7B1: (EET.UNSIGNED, "TimecodeScale"), + 0x4489: (EET.FLOAT, "Duration"), + 0x4461: (EET.DATE, "DateUTC"), + 0x7BA9: (EET.TEXTU, "Title"), + 0x4D80: (EET.TEXTU, "MuxingApp"), + 0x5741: (EET.TEXTU, "WritingApp"), + 0x1F43B675: (EET.JUST_GO_ON, "Cluster"), + 0xE7: (EET.UNSIGNED, "Timecode"), + 0x5854: (EET.MASTER, "SilentTracks"), + 0x58D7: (EET.UNSIGNED, "SilentTrackNumber"), + 0xA7: (EET.UNSIGNED, "Position"), + 0xAB: (EET.UNSIGNED, "PrevSize"), + 0xA3: (EET.BINARY, "SimpleBlock"), + 0xA0: (EET.MASTER, "BlockGroup"), + 0xA1: (EET.BINARY, "Block"), + 0xA2: (EET.BINARY, "BlockVirtual"), + 0x75A1: (EET.MASTER, "BlockAdditions"), + 0xA6: (EET.MASTER, "BlockMore"), + 0xEE: (EET.UNSIGNED, "BlockAddID"), + 0xA5: (EET.BINARY, "BlockAdditional"), + 0x9B: (EET.UNSIGNED, "BlockDuration"), + 0xFA: (EET.UNSIGNED, "ReferencePriority"), + 0xFB: (EET.SIGNED, "ReferenceBlock"), + 0xFD: (EET.SIGNED, "ReferenceVirtual"), + 0xA4: (EET.BINARY, "CodecState"), + 0x8E: (EET.MASTER, "Slices"), + 0xE8: (EET.MASTER, "TimeSlice"), + 0xCC: (EET.UNSIGNED, "LaceNumber"), + 0xCD: (EET.UNSIGNED, "FrameNumber"), + 0xCB: (EET.UNSIGNED, 
"BlockAdditionID"), + 0xCE: (EET.UNSIGNED, "Delay"), + 0xCF: (EET.UNSIGNED, "SliceDuration"), + 0xC8: (EET.MASTER, "ReferenceFrame"), + 0xC9: (EET.UNSIGNED, "ReferenceOffset"), + 0xCA: (EET.UNSIGNED, "ReferenceTimeCode"), + 0xAF: (EET.BINARY, "EncryptedBlock"), + 0x1654AE6B: (EET.MASTER, "Tracks"), + 0xAE: (EET.MASTER, "TrackEntry"), + 0xD7: (EET.UNSIGNED, "TrackNumber"), + 0x73C5: (EET.UNSIGNED, "TrackUID"), + 0x83: (EET.UNSIGNED, "TrackType"), + 0xB9: (EET.UNSIGNED, "FlagEnabled"), + 0x88: (EET.UNSIGNED, "FlagDefault"), + 0x55AA: (EET.UNSIGNED, "FlagForced"), + 0x9C: (EET.UNSIGNED, "FlagLacing"), + 0x6DE7: (EET.UNSIGNED, "MinCache"), + 0x6DF8: (EET.UNSIGNED, "MaxCache"), + 0x23E383: (EET.UNSIGNED, "DefaultDuration"), + 0x23314F: (EET.FLOAT, "TrackTimecodeScale"), + 0x537F: (EET.SIGNED, "TrackOffset"), + 0x55EE: (EET.UNSIGNED, "MaxBlockAdditionID"), + 0x536E: (EET.TEXTU, "Name"), + 0x22B59C: (EET.TEXTA, "Language"), + 0x86: (EET.TEXTA, "CodecID"), + 0x63A2: (EET.BINARY, "CodecPrivate"), + 0x258688: (EET.TEXTU, "CodecName"), + 0x7446: (EET.UNSIGNED, "AttachmentLink"), + 0x3A9697: (EET.TEXTU, "CodecSettings"), + 0x3B4040: (EET.TEXTA, "CodecInfoURL"), + 0x26B240: (EET.TEXTA, "CodecDownloadURL"), + 0xAA: (EET.UNSIGNED, "CodecDecodeAll"), + 0x6FAB: (EET.UNSIGNED, "TrackOverlay"), + 0x6624: (EET.MASTER, "TrackTranslate"), + 0x66FC: (EET.UNSIGNED, "TrackTranslateEditionUID"), + 0x66BF: (EET.UNSIGNED, "TrackTranslateCodec"), + 0x66A5: (EET.BINARY, "TrackTranslateTrackID"), + 0xE0: (EET.MASTER, "Video"), + 0x9A: (EET.UNSIGNED, "FlagInterlaced"), + 0x53B8: (EET.UNSIGNED, "StereoMode"), + 0x53B9: (EET.UNSIGNED, "OldStereoMode"), + 0xB0: (EET.UNSIGNED, "PixelWidth"), + 0xBA: (EET.UNSIGNED, "PixelHeight"), + 0x54AA: (EET.UNSIGNED, "PixelCropBottom"), + 0x54BB: (EET.UNSIGNED, "PixelCropTop"), + 0x54CC: (EET.UNSIGNED, "PixelCropLeft"), + 0x54DD: (EET.UNSIGNED, "PixelCropRight"), + 0x54B0: (EET.UNSIGNED, "DisplayWidth"), + 0x54BA: (EET.UNSIGNED, "DisplayHeight"), + 0x54B2: 
(EET.UNSIGNED, "DisplayUnit"), + 0x54B3: (EET.UNSIGNED, "AspectRatioType"), + 0x2EB524: (EET.BINARY, "ColourSpace"), + 0x2FB523: (EET.FLOAT, "GammaValue"), + 0x2383E3: (EET.FLOAT, "FrameRate"), + 0xE1: (EET.MASTER, "Audio"), + 0xB5: (EET.FLOAT, "SamplingFrequency"), + 0x78B5: (EET.FLOAT, "OutputSamplingFrequency"), + 0x9F: (EET.UNSIGNED, "Channels"), + 0x7D7B: (EET.BINARY, "ChannelPositions"), + 0x6264: (EET.UNSIGNED, "BitDepth"), + 0xE2: (EET.MASTER, "TrackOperation"), + 0xE3: (EET.MASTER, "TrackCombinePlanes"), + 0xE4: (EET.MASTER, "TrackPlane"), + 0xE5: (EET.UNSIGNED, "TrackPlaneUID"), + 0xE6: (EET.UNSIGNED, "TrackPlaneType"), + 0xE9: (EET.MASTER, "TrackJoinBlocks"), + 0xED: (EET.UNSIGNED, "TrackJoinUID"), + 0xC0: (EET.UNSIGNED, "TrickTrackUID"), + 0xC1: (EET.BINARY, "TrickTrackSegmentUID"), + 0xC6: (EET.UNSIGNED, "TrickTrackFlag"), + 0xC7: (EET.UNSIGNED, "TrickMasterTrackUID"), + 0xC4: (EET.BINARY, "TrickMasterTrackSegmentUID"), + 0x6D80: (EET.MASTER, "ContentEncodings"), + 0x6240: (EET.MASTER, "ContentEncoding"), + 0x5031: (EET.UNSIGNED, "ContentEncodingOrder"), + 0x5032: (EET.UNSIGNED, "ContentEncodingScope"), + 0x5033: (EET.UNSIGNED, "ContentEncodingType"), + 0x5034: (EET.MASTER, "ContentCompression"), + 0x4254: (EET.UNSIGNED, "ContentCompAlgo"), + 0x4255: (EET.BINARY, "ContentCompSettings"), + 0x5035: (EET.MASTER, "ContentEncryption"), + 0x47E1: (EET.UNSIGNED, "ContentEncAlgo"), + 0x47E2: (EET.BINARY, "ContentEncKeyID"), + 0x47E3: (EET.BINARY, "ContentSignature"), + 0x47E4: (EET.BINARY, "ContentSigKeyID"), + 0x47E5: (EET.UNSIGNED, "ContentSigAlgo"), + 0x47E6: (EET.UNSIGNED, "ContentSigHashAlgo"), + 0x1C53BB6B: (EET.MASTER, "Cues"), + 0xBB: (EET.MASTER, "CuePoint"), + 0xB3: (EET.UNSIGNED, "CueTime"), + 0xB7: (EET.MASTER, "CueTrackPositions"), + 0xF7: (EET.UNSIGNED, "CueTrack"), + 0xF1: (EET.UNSIGNED, "CueClusterPosition"), + 0x5378: (EET.UNSIGNED, "CueBlockNumber"), + 0xEA: (EET.UNSIGNED, "CueCodecState"), + 0xDB: (EET.MASTER, "CueReference"), + 0x96: 
(EET.UNSIGNED, "CueRefTime"), + 0x97: (EET.UNSIGNED, "CueRefCluster"), + 0x535F: (EET.UNSIGNED, "CueRefNumber"), + 0xEB: (EET.UNSIGNED, "CueRefCodecState"), + 0x1941A469: (EET.MASTER, "Attachments"), + 0x61A7: (EET.MASTER, "AttachedFile"), + 0x467E: (EET.TEXTU, "FileDescription"), + 0x466E: (EET.TEXTU, "FileName"), + 0x4660: (EET.TEXTA, "FileMimeType"), + 0x465C: (EET.BINARY, "FileData"), + 0x46AE: (EET.UNSIGNED, "FileUID"), + 0x4675: (EET.BINARY, "FileReferral"), + 0x4661: (EET.UNSIGNED, "FileUsedStartTime"), + 0x4662: (EET.UNSIGNED, "FileUsedEndTime"), + 0x1043A770: (EET.MASTER, "Chapters"), + 0x45B9: (EET.MASTER, "EditionEntry"), + 0x45BC: (EET.UNSIGNED, "EditionUID"), + 0x45BD: (EET.UNSIGNED, "EditionFlagHidden"), + 0x45DB: (EET.UNSIGNED, "EditionFlagDefault"), + 0x45DD: (EET.UNSIGNED, "EditionFlagOrdered"), + 0xB6: (EET.MASTER, "ChapterAtom"), + 0x73C4: (EET.UNSIGNED, "ChapterUID"), + 0x91: (EET.UNSIGNED, "ChapterTimeStart"), + 0x92: (EET.UNSIGNED, "ChapterTimeEnd"), + 0x98: (EET.UNSIGNED, "ChapterFlagHidden"), + 0x4598: (EET.UNSIGNED, "ChapterFlagEnabled"), + 0x6E67: (EET.BINARY, "ChapterSegmentUID"), + 0x6EBC: (EET.UNSIGNED, "ChapterSegmentEditionUID"), + 0x63C3: (EET.UNSIGNED, "ChapterPhysicalEquiv"), + 0x8F: (EET.MASTER, "ChapterTrack"), + 0x89: (EET.UNSIGNED, "ChapterTrackNumber"), + 0x80: (EET.MASTER, "ChapterDisplay"), + 0x85: (EET.TEXTU, "ChapString"), + 0x437C: (EET.TEXTA, "ChapLanguage"), + 0x437E: (EET.TEXTA, "ChapCountry"), + 0x6944: (EET.MASTER, "ChapProcess"), + 0x6955: (EET.UNSIGNED, "ChapProcessCodecID"), + 0x450D: (EET.BINARY, "ChapProcessPrivate"), + 0x6911: (EET.MASTER, "ChapProcessCommand"), + 0x6922: (EET.UNSIGNED, "ChapProcessTime"), + 0x6933: (EET.BINARY, "ChapProcessData"), + 0x1254C367: (EET.MASTER, "Tags"), + 0x7373: (EET.MASTER, "Tag"), + 0x63C0: (EET.MASTER, "Targets"), + 0x68CA: (EET.UNSIGNED, "TargetTypeValue"), + 0x63CA: (EET.TEXTA, "TargetType"), + 0x63C5: (EET.UNSIGNED, "TagTrackUID"), + 0x63C9: (EET.UNSIGNED, "TagEditionUID"), 
+ 0x63C4: (EET.UNSIGNED, "TagChapterUID"), + 0x63C6: (EET.UNSIGNED, "TagAttachmentUID"), + 0x67C8: (EET.MASTER, "SimpleTag"), + 0x45A3: (EET.TEXTU, "TagName"), + 0x447A: (EET.TEXTA, "TagLanguage"), + 0x4484: (EET.UNSIGNED, "TagDefault"), + 0x4487: (EET.TEXTU, "TagString"), + 0x4485: (EET.BINARY, "TagBinary"), + 0x56AA: (EET.UNSIGNED, "CodecDelay"), + 0x56BB: (EET.UNSIGNED, "SeekPreRoll"), + 0xF0: (EET.UNSIGNED, "CueRelativePosition"), + 0x53C0: (EET.UNSIGNED, "AlphaMode"), + 0x55B2: (EET.UNSIGNED, "BitsPerChannel"), + 0x55B5: (EET.UNSIGNED, "CbSubsamplingHorz"), + 0x55B6: (EET.UNSIGNED, "CbSubsamplingVert"), + 0x5654: (EET.TEXTU, "ChapterStringUID"), + 0x55B7: (EET.UNSIGNED, "ChromaSitingHorz"), + 0x55B8: (EET.UNSIGNED, "ChromaSitingVert"), + 0x55B3: (EET.UNSIGNED, "ChromaSubsamplingHorz"), + 0x55B4: (EET.UNSIGNED, "ChromaSubsamplingVert"), + 0x55B0: (EET.MASTER, "Colour"), + 0x234E7A: (EET.UNSIGNED, "DefaultDecodedFieldDuration"), + 0x75A2: (EET.SIGNED, "DiscardPadding"), + 0x9D: (EET.UNSIGNED, "FieldOrder"), + 0x55D9: (EET.FLOAT, "LuminanceMax"), + 0x55DA: (EET.FLOAT, "LuminanceMin"), + 0x55D0: (EET.MASTER, "MasteringMetadata"), + 0x55B1: (EET.UNSIGNED, "MatrixCoefficients"), + 0x55BC: (EET.UNSIGNED, "MaxCLL"), + 0x55BD: (EET.UNSIGNED, "MaxFALL"), + 0x55BB: (EET.UNSIGNED, "Primaries"), + 0x55D5: (EET.FLOAT, "PrimaryBChromaticityX"), + 0x55D6: (EET.FLOAT, "PrimaryBChromaticityY"), + 0x55D3: (EET.FLOAT, "PrimaryGChromaticityX"), + 0x55D4: (EET.FLOAT, "PrimaryGChromaticityY"), + 0x55D1: (EET.FLOAT, "PrimaryRChromaticityX"), + 0x55D2: (EET.FLOAT, "PrimaryRChromaticityY"), + 0x55B9: (EET.UNSIGNED, "Range"), + 0x55BA: (EET.UNSIGNED, "TransferCharacteristics"), + 0x55D7: (EET.FLOAT, "WhitePointChromaticityX"), + 0x55D8: (EET.FLOAT, "WhitePointChromaticityY"), +} + +def read_simple_element(f, type_, size): + date = None + if size==0: + return "" + + if type_==EET.UNSIGNED: + data=read_fixedlength_number(f, size, False) + elif type_==EET.SIGNED: + 
data=read_fixedlength_number(f, size, True) + elif type_==EET.TEXTA: + data=f.read(size) + data = data.replace(b"\x00", b"") # filter out \0, for gstreamer + data = data.decode("ascii") + elif type_==EET.TEXTU: + data=f.read(size) + data = data.replace(b"\x00", b"") # filter out \0, for gstreamer + data = data.decode("UTF-8") + elif type_==EET.MASTER: + data=read_ebml_element_tree(f, size) + elif type_==EET.DATE: + data=read_fixedlength_number(f, size, True) + data*= 1e-9 + data+= (datetime.datetime(2001, 1, 1) - datetime.datetime(1970, 1, 1)).total_seconds() + # now should be UNIX date + elif type_==EET.FLOAT: + if size==4: + data = f.read(4) + data = unpack(">f", data)[0] + elif size==8: + data = f.read(8) + data = unpack(">d", data)[0] + else: + data=read_fixedlength_number(f, size, False) + sys.stderr.write("mkvparse: Floating point of size %d is not supported\n" % size) + data = None + else: + data=f.read(size) + return data + +def read_ebml_element_tree(f, total_size): + ''' + Build tree of elements, reading f until total_size reached + Don't use for the whole segment, it's not Haskell + + Returns list of pairs (element_name, element_value). + element_value can also be list of pairs + ''' + childs=[] + while(total_size>0): + (id_, size, hsize) = read_ebml_element_header(f) + if size == -1: + sys.stderr.write("mkvparse: Element %x without size? Damaged data? Skipping %d bytes\n" % (id_, size, total_size)) + f.read(total_size); + break; + if size>total_size: + sys.stderr.write("mkvparse: Element %x with size %d? Damaged data? 
Skipping %d bytes\n" % (id_, size, total_size)) + f.read(total_size); + break + type_ = EET.BINARY + name = "unknown_%x"%id_ + if id_ in element_types_names: + (type_, name) = element_types_names[id_] + data = read_simple_element(f, type_, size) + total_size-=(size+hsize) + childs.append((name, (type_, data))) + return childs + + +class MatroskaHandler: + """ User for mkvparse should override these methods """ + def tracks_available(self): + pass + def segment_info_available(self): + pass + def frame(self, track_id, timestamp, data, more_laced_frames, duration, keyframe, invisible, discardable): + pass + def ebml_top_element(self, id_, name_, type_, data_): + pass + def before_handling_an_element(self): + pass + def begin_handling_ebml_element(self, id_, name, type_, headersize, datasize): + return type_ + def element_data_available(self, id_, name, type_, headersize, data): + pass + +def handle_block(buffer, buffer_pos, handler, cluster_timecode, timecode_scale=1000000, duration=None, header_removal_headers_for_tracks={}): + ''' + Decode a block, handling all lacings, send it to handler with appropriate timestamp, track number + ''' + pos=0 + (tracknum, pos) = parse_matroska_number(buffer, pos, signed=False) + (tcode, pos) = parse_fixedlength_number(buffer, pos, 2, signed=True) + flags = ord(buffer[pos]); pos+=1 + f_keyframe = (flags&0x80 == 0x80) + f_invisible = (flags&0x08 == 0x08) + f_discardable = (flags&0x01 == 0x01) + laceflags=flags&0x06 + + block_timecode = (cluster_timecode + tcode)*(timecode_scale*0.000000001) + + header_removal_prefix = b"" + if tracknum in header_removal_headers_for_tracks: + # header_removal_prefix = header_removal_headers_for_tracks[tracknum] + raise NotImplementedError + + if laceflags == 0x00: # no lacing + # buf = buffer[pos:] + handler.frame(tracknum, block_timecode, buffer_pos+pos, len(buffer)-pos, + 0, duration, f_keyframe, f_invisible, f_discardable) + return + + numframes = ord(buffer[pos]); pos+=1 + numframes+=1 + + 
lengths=[] + + if laceflags == 0x02: # Xiph lacing + accumlength=0 + for i in range(numframes-1): + (l, pos) = parse_xiph_number(buffer, pos) + lengths.append(l) + accumlength+=l + lengths.append(len(buffer)-pos-accumlength) + elif laceflags == 0x06: # EBML lacing + accumlength=0 + if numframes: + (flength, pos) = parse_matroska_number(buffer, pos, signed=False) + lengths.append(flength) + accumlength+=flength + for i in range(numframes-2): + (l, pos) = parse_matroska_number(buffer, pos, signed=True) + flength+=l + lengths.append(flength) + accumlength+=flength + lengths.append(len(buffer)-pos-accumlength) + elif laceflags==0x04: # Fixed size lacing + fl=int((len(buffer)-pos)/numframes) + for i in range(numframes): + lengths.append(fl) + + more_laced_frames=numframes-1 + for i in lengths: + # buf = buffer[pos:pos+i] + handler.frame(tracknum, block_timecode, buffer_pos+pos, i, more_laced_frames, duration, + f_keyframe, f_invisible, f_discardable) + pos+=i + more_laced_frames-=1 + + +def resync(f): + sys.stderr.write("mvkparse: Resyncing\n") + while True: + b = f.read(1); + if b == b"": return (None, None); + if b == b"\x1F": + b2 = f.read(3); + if b2 == b"\x43\xB6\x75": + (seglen, x) = read_matroska_number(f) + return (0x1F43B675, seglen, x+4) # cluster + if b == b"\x18": + b2 = f.read(3) + if b2 == b"\x53\x80\x67": + (seglen, x) = read_matroska_number(f) + return (0x18538067, seglen, x+4) # segment + if b == b"\x16": + b2 = f.read(3) + if b2 == b"\x54\xAE\x6B": + (seglen ,x )= read_matroska_number(f) + return (0x1654AE6B, seglen, x+4) # tracks + + + + +def mkvparse(f, handler): + ''' + Read mkv file f and call handler methods when track or segment information is ready or when frame is read. 
+ Handles lacing, timecodes (except of per-track scaling) + ''' + timecode_scale = 1000000 + current_cluster_timecode = 0 + resync_element_id = None + resync_element_size = None + resync_element_headersize = None + header_removal_headers_for_tracks = {} + while f: + (id_, size, hsize) = (None, None, None) + tree = None + data = None + (type_, name) = (None, None) + try: + if not resync_element_id: + try: + handler.before_handling_an_element() + (id_, size, hsize) = read_ebml_element_header(f) + except StopIteration: + break; + if not (id_ in element_types_names): + sys.stderr.write("mkvparse: Unknown element with id %x and size %d\n"%(id_, size)) + (resync_element_id, resync_element_size, resync_element_headersize) = resync(f) + if resync_element_id: + continue; + else: + break; + else: + id_ = resync_element_id + size=resync_element_size + hsize=resync_element_headersize + resync_element_id = None + resync_element_size = None + resync_element_headersize = None + + (type_, name) = element_types_names[id_] + (type_, name) = element_types_names[id_] + type_ = handler.begin_handling_ebml_element(id_, name, type_, hsize, size) + + if type_ == EET.MASTER: + tree = read_ebml_element_tree(f, size) + data = tree + + except Exception: + traceback.print_exc() + handler.before_handling_an_element() + (resync_element_id, resync_element_size, resync_element_headersize) = resync(f) + if resync_element_id: + continue; + else: + break; + + if name=="EBML" and type(data) == list: + d = dict(tree) + if 'EBMLReadVersion' in d: + if d['EBMLReadVersion'][1]>1: sys.stderr.write("mkvparse: Warning: EBMLReadVersion too big\n") + if 'DocTypeReadVersion' in d: + if d['DocTypeReadVersion'][1]>2: sys.stderr.write("mkvparse: Warning: DocTypeReadVersion too big\n") + dt = d['DocType'][1] + if dt != "matroska" and dt != "webm": + sys.stderr.write("mkvparse: Warning: EBML DocType is not \"matroska\" or \"webm\"") + elif name=="Info" and type(data) == list: + handler.segment_info = tree + 
handler.segment_info_available() + + d = dict(tree) + if "TimecodeScale" in d: + timecode_scale = d["TimecodeScale"][1] + elif name=="Tracks" and type(data) == list: + handler.tracks={} + for (ten, (_t, track)) in tree: + if ten != "TrackEntry": continue + d = dict(track) + n = d['TrackNumber'][1] + handler.tracks[n]=d + tt = d['TrackType'][1] + if tt==0x01: d['type']='video' + elif tt==0x02: d['type']='audio' + elif tt==0x03: d['type']='complex' + elif tt==0x10: d['type']='logo' + elif tt==0x11: d['type']='subtitle' + elif tt==0x12: d['type']='button' + elif tt==0x20: d['type']='control' + if 'TrackTimecodeScale' in d: + sys.stderr.write("mkvparse: Warning: TrackTimecodeScale is not supported\n") + if 'ContentEncodings' in d: + try: + compr = dict(d["ContentEncodings"][1][0][1][1][0][1][1]) + if compr["ContentCompAlgo"][1] == 3: + header_removal_headers_for_tracks[n] = compr["ContentCompSettings"][1] + else: + sys.stderr.write("mkvparse: Warning: compression other than " \ + "header removal is not supported\n") + except: + sys.stderr.write("mkvparse: Warning: unsuccessfully tried " \ + "to handle header removal compression\n") + handler.tracks_available() + # cluster contents: + elif name=="Timecode" and type_ == EET.UNSIGNED: + data=read_fixedlength_number(f, size, False) + current_cluster_timecode = data; + elif name=="SimpleBlock" and type_ == EET.BINARY: + pos = f.tell() + data=f.read(size) + handle_block(data, pos, handler, current_cluster_timecode, timecode_scale, None, header_removal_headers_for_tracks) + elif name=="BlockGroup" and type_ == EET.MASTER: + d2 = dict(tree) + duration=None + raise NotImplementedError + # if 'BlockDuration' in d2: + # duration = d2['BlockDuration'][1] + # duration = duration*0.000000001*timecode_scale + # if 'Block' in d2: + # handle_block(d2['Block'][1], None, handler, current_cluster_timecode, timecode_scale, duration, header_removal_headers_for_tracks) + else: + if type_!=EET.JUST_GO_ON and type_!=EET.MASTER: + data = 
read_simple_element(f, type_, size) + + handler.ebml_top_element(id_, name, type_, data); + + + +if __name__ == '__main__': + print("Run mkvuser.py for the example") diff --git a/tools/lib/pollable_queue.py b/tools/lib/pollable_queue.py new file mode 100644 index 00000000000000..9ef2db62b23ede --- /dev/null +++ b/tools/lib/pollable_queue.py @@ -0,0 +1,107 @@ +import os +import select +import fcntl +from itertools import count +from collections import deque + +Empty = OSError +Full = OSError +ExistentialError = OSError + +class PollableQueue(object): + """A Queue that you can poll(). + Only works with a single producer. + """ + def __init__(self, maxlen=None): + with open("/proc/sys/fs/pipe-max-size") as f: + max_maxlen = int(f.read().rstrip()) + + if maxlen is None: + maxlen = max_maxlen + else: + maxlen = min(maxlen, max_maxlen) + + self._maxlen = maxlen + self._q = deque() + self._get_fd, self._put_fd = os.pipe() + fcntl.fcntl(self._get_fd, fcntl.F_SETFL, os.O_NONBLOCK) + fcntl.fcntl(self._put_fd, fcntl.F_SETFL, os.O_NONBLOCK) + + fcntl.fcntl(self._get_fd, fcntl.F_SETLEASE + 7, self._maxlen) + fcntl.fcntl(self._put_fd, fcntl.F_SETLEASE + 7, self._maxlen) + + get_poller = select.epoll() + put_poller = select.epoll() + get_poller.register(self._get_fd, select.EPOLLIN) + put_poller.register(self._put_fd, select.EPOLLOUT) + + self._get_poll = get_poller.poll + self._put_poll = put_poller.poll + + + def get_fd(self): + return self._get_fd + + def put_fd(self): + return self._put_fd + + def put(self, item, block=True, timeout=None): + if block: + while self._put_poll(timeout if timeout is not None else -1): + try: + # TODO: This is broken for multiple push threads when the queue is full. 
+ return self.put_nowait(item) + except OSError as e: + if e.errno != 11: + raise + + raise Full() + else: + return self.put_nowait(item) + + def put_nowait(self, item): + self._q.appendleft(item) + os.write(self._put_fd, b"\x00") + + def get(self, block=True, timeout=None): + if block: + while self._get_poll(timeout if timeout is not None else -1): + try: + return self.get_nowait() + except OSError as e: + if e.errno != 11: + raise + + raise Empty() + else: + return self.get_nowait() + + def get_nowait(self): + os.read(self._get_fd, 1) + return self._q.pop() + + def get_multiple(self, block=True, timeout=None): + if block: + if self._get_poll(timeout if timeout is not None else -1): + return self.get_multiple_nowait() + else: + raise Empty() + else: + return self.get_multiple_nowait() + + def get_multiple_nowait(self, max_messages=None): + num_read = len(os.read(self._get_fd, max_messages or self._maxlen)) + return [self._q.pop() for _ in range(num_read)] + + def empty(self): + return len(self._q) == 0 + + def full(self): + return len(self._q) >= self._maxlen + + def close(self): + os.close(self._get_fd) + os.close(self._put_fd) + + def __len__(self): + return len(self._q) \ No newline at end of file diff --git a/tools/lib/route.py b/tools/lib/route.py new file mode 100644 index 00000000000000..2a5ab5e4ac61ec --- /dev/null +++ b/tools/lib/route.py @@ -0,0 +1,128 @@ +import os +import re +from urllib.parse import urlparse +from collections import defaultdict +from itertools import chain + +from tools.lib.auth_config import get_token +from tools.lib.api import CommaApi + +SEGMENT_NAME_RE = r'[a-z0-9]{16}[|_][0-9]{4}-[0-9]{2}-[0-9]{2}--[0-9]{2}-[0-9]{2}-[0-9]{2}--[0-9]+' +EXPLORER_FILE_RE = r'^({})--([a-z]+\.[a-z0-9]+)$'.format(SEGMENT_NAME_RE) +OP_SEGMENT_DIR_RE = r'^({})$'.format(SEGMENT_NAME_RE) + +LOG_FILENAMES = ['rlog.bz2', 'raw_log.bz2'] +CAMERA_FILENAMES = ['fcamera.hevc', 'video.hevc'] + +class Route(object): + def __init__(self, route_name, data_dir=None): 
+ self.route_name = route_name.replace('_', '|') + if data_dir is not None: + self._segments = self._get_segments_local(data_dir) + else: + self._segments = self._get_segments_remote() + + @property + def segments(self): + return self._segments + + def log_paths(self): + max_seg_number = self._segments[-1].canonical_name.segment_num + log_path_by_seg_num = {s.canonical_name.segment_num: s.log_path for s in self._segments} + return [log_path_by_seg_num.get(i, None) for i in range(max_seg_number+1)] + + def camera_paths(self): + max_seg_number = self._segments[-1].canonical_name.segment_num + camera_path_by_seg_num = {s.canonical_name.segment_num: s.camera_path for s in self._segments} + return [camera_path_by_seg_num.get(i, None) for i in range(max_seg_number+1)] + + def _get_segments_remote(self): + api = CommaApi(get_token()) + route_files = api.get('v1/route/' + self.route_name + '/files') + + segments = {} + for url in chain.from_iterable(route_files.values()): + _, dongle_id, time_str, segment_num, fn = urlparse(url).path.rsplit('/', maxsplit=4) + segment_name = f'{dongle_id}|{time_str}--{segment_num}' + if segments.get(segment_name): + segments[segment_name] = RouteSegment( + segment_name, + url if fn in LOG_FILENAMES else segments[segment_name].log_path, + url if fn in CAMERA_FILENAMES else segments[segment_name].camera_path + ) + else: + segments[segment_name] = RouteSegment( + segment_name, + url if fn in LOG_FILENAMES else None, + url if fn in CAMERA_FILENAMES else None + ) + + return sorted(segments.values(), key=lambda seg: seg.canonical_name.segment_num) + + def _get_segments_local(self, data_dir): + files = os.listdir(data_dir) + segment_files = defaultdict(list) + + for f in files: + fullpath = os.path.join(data_dir, f) + explorer_match = re.match(EXPLORER_FILE_RE, f) + op_match = re.match(OP_SEGMENT_DIR_RE, f) + + if explorer_match: + segment_name, fn = explorer_match.groups() + if segment_name.replace('_', '|').startswith(self.route_name): + 
segment_files[segment_name].append((fullpath, fn)) + elif op_match and os.path.isdir(fullpath): + segment_name, = op_match.groups() + if segment_name.startswith(self.route_name): + for seg_f in os.listdir(fullpath): + segment_files[segment_name].append((os.path.join(fullpath, seg_f), seg_f)) + elif f == self.route_name: + for seg_num in os.listdir(fullpath): + if not seg_num.isdigit(): + continue + + segment_name = '{}--{}'.format(self.route_name, seg_num) + for seg_f in os.listdir(os.path.join(fullpath, seg_num)): + segment_files[segment_name].append((os.path.join(fullpath, seg_num, seg_f), seg_f)) + + segments = [] + for segment, files in segment_files.items(): + try: + log_path = next(path for path, filename in files if filename in LOG_FILENAMES) + except StopIteration: + log_path = None + + try: + camera_path = next(path for path, filename in files if filename in CAMERA_FILENAMES) + except StopIteration: + camera_path = None + + segments.append(RouteSegment(segment, log_path, camera_path)) + + if len(segments) == 0: + raise ValueError('Could not find segments for route {} in data directory {}'.format(self.route_name, data_dir)) + return sorted(segments, key=lambda seg: seg.canonical_name.segment_num) + +class RouteSegment(object): + def __init__(self, name, log_path, camera_path): + self._name = RouteSegmentName(name) + self.log_path = log_path + self.camera_path = camera_path + + @property + def name(self): return str(self._name) + + @property + def canonical_name(self): return self._name + +class RouteSegmentName(object): + def __init__(self, name_str): + self._segment_name_str = name_str + self._route_name_str, num_str = self._segment_name_str.rsplit("--", 1) + self._num = int(num_str) + + @property + def segment_num(self): return self._num + + def __str__(self): return self._segment_name_str diff --git a/tools/lib/route_framereader.py b/tools/lib/route_framereader.py new file mode 100644 index 00000000000000..47250383c528c6 --- /dev/null +++ 
b/tools/lib/route_framereader.py @@ -0,0 +1,86 @@ +"""RouteFrameReader indexes and reads frames across routes, by frameId or segment indices.""" +from tools.lib.framereader import FrameReader + +class _FrameReaderDict(dict): + def __init__(self, camera_paths, cache_paths, framereader_kwargs, *args, **kwargs): + super(_FrameReaderDict, self).__init__(*args, **kwargs) + + if cache_paths is None: + cache_paths = {} + if not isinstance(cache_paths, dict): + cache_paths = { k: v for k, v in enumerate(cache_paths) } + + self._camera_paths = camera_paths + self._cache_paths = cache_paths + self._framereader_kwargs = framereader_kwargs + + def __missing__(self, key): + if key < len(self._camera_paths) and self._camera_paths[key] is not None: + frame_reader = FrameReader(self._camera_paths[key], + self._cache_paths.get(key), **self._framereader_kwargs) + self[key] = frame_reader + return frame_reader + else: + raise KeyError("Segment index out of bounds: {}".format(key)) + + +class RouteFrameReader(object): + """Reads frames across routes and route segments by frameId.""" + def __init__(self, camera_paths, cache_paths, frame_id_lookup, **kwargs): + """Create a route framereader. + + Inputs: + TODO + + kwargs: Forwarded to the FrameReader function. If cache_prefix is included, that path + will also be used for frame position indices. + """ + self._first_camera_idx = next(i for i in range(len(camera_paths)) if camera_paths[i] is not None) + self._frame_readers = _FrameReaderDict(camera_paths, cache_paths, kwargs) + self._frame_id_lookup = frame_id_lookup + + @property + def w(self): + """Width of each frame in pixels.""" + return self._frame_readers[self._first_camera_idx].w + + @property + def h(self): + """Height of each frame in pixels.""" + return self._frame_readers[self._first_camera_idx].h + + def get(self, frame_id, **kwargs): + """Get a frame for a route based on frameId. + + Inputs: + frame_id: The frameId of the returned frame. 
+ kwargs: Forwarded to BaseFrameReader.get. "count" is not implemented. + """ + segment_num, segment_id = self._frame_id_lookup.get(frame_id, (None, None)) + if segment_num is None or segment_num == -1 or segment_id == -1: + return None + else: + return self.get_from_segment(segment_num, segment_id, **kwargs) + + def get_from_segment(self, segment_num, segment_id, **kwargs): + """Get a frame from a specific segment with a specific index in that segment (segment_id). + + Inputs: + segment_num: The number of the segment. + segment_id: The index of the return frame within that segment. + kwargs: Forwarded to BaseFrameReader.get. "count" is not implemented. + """ + if "count" in kwargs: + raise NotImplementedError("count") + + return self._frame_readers[segment_num].get(segment_id, **kwargs)[0] + + + def close(self): + frs = self._frame_readers + self._frame_readers.clear() + for fr in frs: + fr.close() + + def __enter__(self): return self + def __exit__(self, type, value, traceback): self.close() diff --git a/tools/lib/tests/test_readers.py b/tools/lib/tests/test_readers.py new file mode 100755 index 00000000000000..c8688a4d850704 --- /dev/null +++ b/tools/lib/tests/test_readers.py @@ -0,0 +1,67 @@ +#!/usr/bin/env python +import unittest +import requests +import tempfile + +from collections import defaultdict +import numpy as np +from tools.lib.framereader import FrameReader +from tools.lib.logreader import LogReader + +class TestReaders(unittest.TestCase): + def test_logreader(self): + def _check_data(lr): + hist = defaultdict(int) + for l in lr: + hist[l.which()] += 1 + + self.assertEqual(hist['carControl'], 6000) + self.assertEqual(hist['logMessage'], 6857) + + + with tempfile.NamedTemporaryFile(suffix=".bz2") as fp: + r = requests.get("https://github.com/commaai/comma2k19/blob/master/Example_1/b0c9d2329ad1606b%7C2018-08-02--08-34-47/40/raw_log.bz2?raw=true") + fp.write(r.content) + fp.flush() + + lr_file = LogReader(fp.name) + _check_data(lr_file) + + lr_url = 
LogReader("https://github.com/commaai/comma2k19/blob/master/Example_1/b0c9d2329ad1606b%7C2018-08-02--08-34-47/40/raw_log.bz2?raw=true") + _check_data(lr_url) + + def test_framereader(self): + def _check_data(f): + self.assertEqual(f.frame_count, 1200) + self.assertEqual(f.w, 1164) + self.assertEqual(f.h, 874) + + + frame_first_30 = f.get(0, 30) + self.assertEqual(len(frame_first_30), 30) + + + print(frame_first_30[15]) + + print("frame_0") + frame_0 = f.get(0, 1) + frame_15 = f.get(15, 1) + + print(frame_15[0]) + + assert np.all(frame_first_30[0] == frame_0[0]) + assert np.all(frame_first_30[15] == frame_15[0]) + + with tempfile.NamedTemporaryFile(suffix=".hevc") as fp: + r = requests.get("https://github.com/commaai/comma2k19/blob/master/Example_1/b0c9d2329ad1606b%7C2018-08-02--08-34-47/40/video.hevc?raw=true") + fp.write(r.content) + fp.flush() + + fr_file = FrameReader(fp.name) + _check_data(fr_file) + + fr_url = FrameReader("https://github.com/commaai/comma2k19/blob/master/Example_1/b0c9d2329ad1606b%7C2018-08-02--08-34-47/40/video.hevc?raw=true") + _check_data(fr_url) + +if __name__ == "__main__": + unittest.main() diff --git a/tools/lib/vidindex/.gitignore b/tools/lib/vidindex/.gitignore new file mode 100644 index 00000000000000..a77a06e97d7d3c --- /dev/null +++ b/tools/lib/vidindex/.gitignore @@ -0,0 +1 @@ +vidindex diff --git a/tools/lib/vidindex/Makefile b/tools/lib/vidindex/Makefile new file mode 100644 index 00000000000000..f6526db212d23a --- /dev/null +++ b/tools/lib/vidindex/Makefile @@ -0,0 +1,6 @@ +CC := gcc + +vidindex: bitstream.c bitstream.h vidindex.c + $(eval $@_TMP := $(shell mktemp)) + $(CC) -std=c99 bitstream.c vidindex.c -o $($@_TMP) + mv $($@_TMP) $@ diff --git a/tools/lib/vidindex/bitstream.c b/tools/lib/vidindex/bitstream.c new file mode 100644 index 00000000000000..d174ffa8a7dfa6 --- /dev/null +++ b/tools/lib/vidindex/bitstream.c @@ -0,0 +1,118 @@ +#include +#include + +#include "bitstream.h" + +static const uint32_t BS_MASKS[33] = { + 0, 
0x1L, 0x3L, 0x7L, 0xFL, 0x1FL, + 0x3FL, 0x7FL, 0xFFL, 0x1FFL, 0x3FFL, 0x7FFL, + 0xFFFL, 0x1FFFL, 0x3FFFL, 0x7FFFL, 0xFFFFL, 0x1FFFFL, + 0x3FFFFL, 0x7FFFFL, 0xFFFFFL, 0x1FFFFFL, 0x3FFFFFL, 0x7FFFFFL, + 0xFFFFFFL, 0x1FFFFFFL, 0x3FFFFFFL, 0x7FFFFFFL, 0xFFFFFFFL, 0x1FFFFFFFL, + 0x3FFFFFFFL, 0x7FFFFFFFL, 0xFFFFFFFFL}; + +void bs_init(struct bitstream* bs, const uint8_t* buffer, size_t input_size) { + bs->buffer_ptr = buffer; + bs->buffer_end = buffer + input_size; + bs->value = 0; + bs->pos = 0; + bs->shift = 8; + bs->size = input_size * 8; +} + +uint32_t bs_get(struct bitstream* bs, int n) { + if (n > 32) + return 0; + + bs->pos += n; + bs->shift += n; + while (bs->shift > 8) { + if (bs->buffer_ptr < bs->buffer_end) { + bs->value <<= 8; + bs->value |= *bs->buffer_ptr++; + bs->shift -= 8; + } else { + bs_seek(bs, bs->pos - n); + return 0; + // bs->value <<= 8; + // bs->shift -= 8; + } + } + return (bs->value >> (8 - bs->shift)) & BS_MASKS[n]; +} + +void bs_seek(struct bitstream* bs, size_t new_pos) { + bs->pos = (new_pos / 32) * 32; + bs->shift = 8; + bs->value = 0; + bs_get(bs, new_pos % 32); +} + +uint32_t bs_peek(struct bitstream* bs, int n) { + struct bitstream bak = *bs; + return bs_get(&bak, n); +} + +size_t bs_remain(struct bitstream* bs) { + return bs->size - bs->pos; +} + +int bs_eof(struct bitstream* bs) { + return bs_remain(bs) == 0; +} + +uint32_t bs_ue(struct bitstream* bs) { + static const uint8_t exp_golomb_bits[256] = { + 8, 7, 6, 6, 5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + uint32_t bits, read = 0; + int bits_left; + uint8_t coded; + int done = 0; + bits = 0; + // we want to read 8 bits at a time - if we don't have 8 bits, + // read what's left, and shift. The exp_golomb_bits calc remains the + // same. + while (!done) { + bits_left = bs_remain(bs); + if (bits_left < 8) { + read = bs_peek(bs, bits_left) << (8 - bits_left); + done = 1; + } else { + read = bs_peek(bs, 8); + if (read == 0) { + bs_get(bs, 8); + bits += 8; + } else { + done = 1; + } + } + } + coded = exp_golomb_bits[read]; + bs_get(bs, coded); + bits += coded; + + // printf("ue - bits %d\n", bits); + return bs_get(bs, bits + 1) - 1; +} + +int32_t bs_se(struct bitstream* bs) { + uint32_t ret; + ret = bs_ue(bs); + if ((ret & 0x1) == 0) { + ret >>= 1; + int32_t temp = 0 - ret; + return temp; + } + return (ret + 1) >> 1; +} diff --git a/tools/lib/vidindex/bitstream.h b/tools/lib/vidindex/bitstream.h new file mode 100644 index 00000000000000..0f538a59ab7d00 --- /dev/null +++ b/tools/lib/vidindex/bitstream.h @@ -0,0 +1,26 @@ +#ifndef bitstream_H +#define bitstream_H + + +#include +#include + +struct bitstream { + const uint8_t *buffer_ptr; + const uint8_t *buffer_end; + uint64_t value; + uint32_t pos; + uint32_t shift; + size_t size; +}; + +void bs_init(struct bitstream *bs, const uint8_t *buffer, size_t input_size); +void bs_seek(struct bitstream *bs, size_t new_pos); +uint32_t bs_get(struct bitstream *bs, int n); +uint32_t bs_peek(struct bitstream *bs, int n); +size_t bs_remain(struct bitstream *bs); +int bs_eof(struct bitstream *bs); +uint32_t bs_ue(struct bitstream *bs); +int32_t bs_se(struct bitstream *bs); + +#endif diff --git a/tools/lib/vidindex/vidindex.c b/tools/lib/vidindex/vidindex.c new file mode 100644 index 
00000000000000..a8d53d947ebbe9 --- /dev/null +++ b/tools/lib/vidindex/vidindex.c @@ -0,0 +1,307 @@ +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include "bitstream.h" + +#define START_CODE 0x000001 + +static uint32_t read24be(const uint8_t* ptr) { + return (ptr[0] << 16) | (ptr[1] << 8) | ptr[2]; +} +static void write32le(FILE *of, uint32_t v) { + uint8_t va[4] = { + v & 0xff, (v >> 8) & 0xff, (v >> 16) & 0xff, (v >> 24) & 0xff + }; + fwrite(va, 1, sizeof(va), of); +} + +// Table 7-1 +enum hevc_nal_type { + HEVC_NAL_TYPE_TRAIL_N = 0, + HEVC_NAL_TYPE_TRAIL_R = 1, + HEVC_NAL_TYPE_TSA_N = 2, + HEVC_NAL_TYPE_TSA_R = 3, + HEVC_NAL_TYPE_STSA_N = 4, + HEVC_NAL_TYPE_STSA_R = 5, + HEVC_NAL_TYPE_RADL_N = 6, + HEVC_NAL_TYPE_RADL_R = 7, + HEVC_NAL_TYPE_RASL_N = 8, + HEVC_NAL_TYPE_RASL_R = 9, + HEVC_NAL_TYPE_BLA_W_LP = 16, + HEVC_NAL_TYPE_BLA_W_RADL = 17, + HEVC_NAL_TYPE_BLA_N_LP = 18, + HEVC_NAL_TYPE_IDR_W_RADL = 19, + HEVC_NAL_TYPE_IDR_N_LP = 20, + HEVC_NAL_TYPE_CRA_NUT = 21, + HEVC_NAL_TYPE_RSV_IRAP_VCL23 = 23, + HEVC_NAL_TYPE_VPS_NUT = 32, + HEVC_NAL_TYPE_SPS_NUT = 33, + HEVC_NAL_TYPE_PPS_NUT = 34, + HEVC_NAL_TYPE_AUD_NUT = 35, + HEVC_NAL_TYPE_EOS_NUT = 36, + HEVC_NAL_TYPE_EOB_NUT = 37, + HEVC_NAL_TYPE_FD_NUT = 38, + HEVC_NAL_TYPE_PREFIX_SEI_NUT = 39, + HEVC_NAL_TYPE_SUFFIX_SEI_NUT = 40, +}; + +// Table 7-7 +enum hevc_slice_type { + HEVC_SLICE_B = 0, + HEVC_SLICE_P = 1, + HEVC_SLICE_I = 2, +}; + +static void hevc_index(const uint8_t *data, size_t file_size, FILE *of_prefix, FILE *of_index) { + const uint8_t* ptr = data; + const uint8_t* ptr_end = data + file_size; + + assert(ptr[0] == 0); + ptr++; + assert(read24be(ptr) == START_CODE); + + // pps. 
ignore for now + uint32_t num_extra_slice_header_bits = 0; + uint32_t dependent_slice_segments_enabled_flag = 0; + + while (ptr < ptr_end) { + const uint8_t* next = ptr+1; + for (; next < ptr_end-4; next++) { + if (read24be(next) == START_CODE) break; + } + size_t nal_size = next - ptr; + if (nal_size < 6) { + break; + } + + { + struct bitstream bs = {0}; + bs_init(&bs, ptr, nal_size); + + uint32_t start_code = bs_get(&bs, 24); + assert(start_code == 0x000001); + + // nal_unit_header + uint32_t forbidden_zero_bit = bs_get(&bs, 1); + uint32_t nal_unit_type = bs_get(&bs, 6); + uint32_t nuh_layer_id = bs_get(&bs, 6); + uint32_t nuh_temporal_id_plus1 = bs_get(&bs, 3); + + // if (nal_unit_type != 1) printf("%3d -- %3d %10d %lu\n", nal_unit_type, frame_num, (uint32_t)(ptr-data), nal_size); + + switch (nal_unit_type) { + case HEVC_NAL_TYPE_VPS_NUT: + case HEVC_NAL_TYPE_SPS_NUT: + case HEVC_NAL_TYPE_PPS_NUT: + fwrite(ptr, 1, nal_size, of_prefix); + break; + case HEVC_NAL_TYPE_TRAIL_N: + case HEVC_NAL_TYPE_TRAIL_R: + case HEVC_NAL_TYPE_TSA_N: + case HEVC_NAL_TYPE_TSA_R: + case HEVC_NAL_TYPE_STSA_N: + case HEVC_NAL_TYPE_STSA_R: + case HEVC_NAL_TYPE_RADL_N: + case HEVC_NAL_TYPE_RADL_R: + case HEVC_NAL_TYPE_RASL_N: + case HEVC_NAL_TYPE_RASL_R: + case HEVC_NAL_TYPE_BLA_W_LP: + case HEVC_NAL_TYPE_BLA_W_RADL: + case HEVC_NAL_TYPE_BLA_N_LP: + case HEVC_NAL_TYPE_IDR_W_RADL: + case HEVC_NAL_TYPE_IDR_N_LP: + case HEVC_NAL_TYPE_CRA_NUT: { + // slice_segment_header + uint32_t first_slice_segment_in_pic_flag = bs_get(&bs, 1); + if (nal_unit_type >= HEVC_NAL_TYPE_BLA_W_LP && nal_unit_type <= HEVC_NAL_TYPE_RSV_IRAP_VCL23) { + uint32_t no_output_of_prior_pics_flag = bs_get(&bs, 1); + } + uint32_t slice_pic_parameter_set_id = bs_get(&bs, 1); + if (!first_slice_segment_in_pic_flag) { + // ... 
+ break; + } + + if (!dependent_slice_segments_enabled_flag) { + for (int i=0; i 4); + + const uint8_t* data = (const uint8_t*)mmap(NULL, file_size, PROT_READ, MAP_PRIVATE, fd, 0); + assert(data != MAP_FAILED); + + if (strcmp(file_type, "hevc") == 0) { + hevc_index(data, file_size, of_prefix, of_index); + } else if (strcmp(file_type, "h264") == 0) { + h264_index(data, file_size, of_prefix, of_index); + } else { + assert(false); + } + + munmap((void*)data, file_size); + close(fd); + + return 0; +} diff --git a/tools/livedm/helpers.py b/tools/livedm/helpers.py new file mode 100644 index 00000000000000..47a79a67cbebf8 --- /dev/null +++ b/tools/livedm/helpers.py @@ -0,0 +1,30 @@ +import numpy as np +import cv2 + +def rot_matrix(roll, pitch, yaw): + cr, sr = np.cos(roll), np.sin(roll) + cp, sp = np.cos(pitch), np.sin(pitch) + cy, sy = np.cos(yaw), np.sin(yaw) + rr = np.array([[1,0,0],[0, cr,-sr],[0, sr, cr]]) + rp = np.array([[cp,0,sp],[0, 1,0],[-sp, 0, cp]]) + ry = np.array([[cy,-sy,0],[sy, cy,0],[0, 0, 1]]) + return ry.dot(rp.dot(rr)) + +def draw_pose(img, pose, loc, W=160, H=320, xyoffset=(0,0), faceprob=0): + rcmat = np.zeros((3,4)) + rcmat[:,:3] = rot_matrix(*pose[0:3]) * 0.5 + rcmat[0,3] = (loc[0]+0.5) * W + rcmat[1,3] = (loc[1]+0.5) * H + rcmat[2,3] = 1.0 + # draw nose + p1 = np.dot(rcmat, [0,0,0,1])[0:2] + p2 = np.dot(rcmat, [0,0,100,1])[0:2] + tr = tuple([int(round(x + xyoffset[i])) for i,x in enumerate(p1)]) + pr = tuple([int(round(x + xyoffset[i])) for i,x in enumerate(p2)]) + if faceprob > 0.4: + color = (255,255,0) + cv2.line(img, tr, pr, color=(255,255,0), thickness=3) + else: + color = (64,64,64) + cv2.circle(img, tr, 7, color=color) + \ No newline at end of file diff --git a/tools/livedm/livedm.py b/tools/livedm/livedm.py new file mode 100644 index 00000000000000..5125f0e2c71c0c --- /dev/null +++ b/tools/livedm/livedm.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python3 +import os +import argparse +import pygame +import numpy as np +import cv2 + +from cereal 
import log +import cereal.messaging as messaging + +from helpers import draw_pose + +if __name__ == "__main__": + + os.environ["ZMQ"] = "1" + + parser = argparse.ArgumentParser(description='Sniff a communcation socket') + parser.add_argument('--addr', default='192.168.5.11') + args = parser.parse_args() + + messaging.context = messaging.Context() + + poller = messaging.Poller() + + m = 'driverMonitoring' + sock = messaging.sub_sock(m, poller, addr=args.addr) + + pygame.init() + pygame.display.set_caption('livedm') + screen = pygame.display.set_mode((320,640), pygame.DOUBLEBUF) + camera_surface = pygame.surface.Surface((160,320), 0, 24).convert() + + while 1: + polld = poller.poll(1000) + for sock in polld: + msg = sock.receive() + evt = log.Event.from_bytes(msg) + + faceProb = np.array(evt.driverMonitoring.faceProb) + faceOrientation = np.array(evt.driverMonitoring.faceOrientation) + facePosition = np.array(evt.driverMonitoring.facePosition) + + print(faceProb) + # print(faceOrientation) + # print(facePosition) + faceOrientation[1] *= -1 + facePosition[0] *= -1 + + img = np.zeros((320,160,3)) + if faceProb > 0.4: + cv2.putText(img, 'you', (int(facePosition[0]*160+40), int(facePosition[1]*320+110)), cv2.FONT_ITALIC, 0.5, (255,255,0)) + cv2.rectangle(img, (int(facePosition[0]*160+40), int(facePosition[1]*320+120)),\ + (int(facePosition[0]*160+120), int(facePosition[1]*320+200)), (255,255,0), 1) + + not_blink = evt.driverMonitoring.leftBlinkProb + evt.driverMonitoring.rightBlinkProb < 1 + + if evt.driverMonitoring.leftEyeProb > 0.6: + cv2.line(img, (int(facePosition[0]*160+95), int(facePosition[1]*320+140)),\ + (int(facePosition[0]*160+105), int(facePosition[1]*320+140)), (255,255,0), 2) + if not_blink: + cv2.line(img, (int(facePosition[0]*160+99), int(facePosition[1]*320+143)),\ + (int(facePosition[0]*160+101), int(facePosition[1]*320+143)), (255,255,0), 2) + + if evt.driverMonitoring.rightEyeProb > 0.6: + cv2.line(img, (int(facePosition[0]*160+55), 
int(facePosition[1]*320+140)),\ + (int(facePosition[0]*160+65), int(facePosition[1]*320+140)), (255,255,0), 2) + if not_blink: + cv2.line(img, (int(facePosition[0]*160+59), int(facePosition[1]*320+143)),\ + (int(facePosition[0]*160+61), int(facePosition[1]*320+143)), (255,255,0), 2) + + else: + cv2.putText(img, 'you not found', (int(facePosition[0]*160+40), int(facePosition[1]*320+110)), cv2.FONT_ITALIC, 0.5, (64,64,64)) + draw_pose(img, faceOrientation, facePosition, + W = 160, H = 320, xyoffset = (0, 0), faceprob=faceProb) + + pygame.surfarray.blit_array(camera_surface, img.swapaxes(0,1)) + camera_surface_2x = pygame.transform.scale2x(camera_surface) + screen.blit(camera_surface_2x, (0, 0)) + pygame.display.flip() diff --git a/tools/misc/save_ubloxraw_stream.py b/tools/misc/save_ubloxraw_stream.py new file mode 100644 index 00000000000000..518c4ecaf635ee --- /dev/null +++ b/tools/misc/save_ubloxraw_stream.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python +import argparse +import os +import sys +from common.basedir import BASEDIR +from tools.lib.logreader import MultiLogIterator +from tools.lib.route import Route + +os.environ['BASEDIR'] = BASEDIR + + +def get_arg_parser(): + parser = argparse.ArgumentParser( + description="Unlogging and save to file", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + + parser.add_argument("data_dir", nargs='?', + help="Path to directory in which log and camera files are located.") + parser.add_argument("route_name", type=(lambda x: x.replace("#", "|")), nargs="?", + help="The route whose messages will be published.") + parser.add_argument("--out_path", nargs='?', default='/data/ubloxRaw.stream', + help="Output pickle file path") + return parser + + +def main(argv): + args = get_arg_parser().parse_args(sys.argv[1:]) + if not args.data_dir: + print('Data directory invalid.') + return + + if not args.route_name: + # Extract route name from path + args.route_name = os.path.basename(args.data_dir) + args.data_dir = 
os.path.dirname(args.data_dir) + + route = Route(args.route_name, args.data_dir) + lr = MultiLogIterator(route.log_paths(), wraparound=False) + + with open(args.out_path, 'wb') as f: + try: + done = False + i = 0 + while not done: + msg = next(lr) + if not msg: + break + smsg = msg.as_builder() + typ = smsg.which() + if typ == 'ubloxRaw': + f.write(smsg.to_bytes()) + i += 1 + except StopIteration: + print('All done') + print('Writed {} msgs'.format(i)) + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/tools/nui/.gitignore b/tools/nui/.gitignore new file mode 100644 index 00000000000000..9341b87ec5f0a6 --- /dev/null +++ b/tools/nui/.gitignore @@ -0,0 +1,9 @@ +Makefile +.*.swp +*.o +_nui +moc_* +.qmake.stash +nui.app/* +routes.json + diff --git a/tools/nui/FileReader.cpp b/tools/nui/FileReader.cpp new file mode 100644 index 00000000000000..d52305b10e61fc --- /dev/null +++ b/tools/nui/FileReader.cpp @@ -0,0 +1,139 @@ +#include "FileReader.hpp" +#include "FrameReader.hpp" + +#include + +FileReader::FileReader(const QString& file_) : file(file_) { +} + +void FileReader::process() { + timer.start(); + QString str = file.simplified(); + str.replace(" ", ""); + startRequest(QUrl(str)); +} + +void FileReader::startRequest(const QUrl &url) { + qnam = new QNetworkAccessManager; + reply = qnam->get(QNetworkRequest(url)); + connect(reply, &QNetworkReply::finished, this, &FileReader::httpFinished); + connect(reply, &QIODevice::readyRead, this, &FileReader::readyRead); + qDebug() << "requesting" << url; +} + +void FileReader::httpFinished() { + if (reply->error()) { + qWarning() << reply->errorString(); + } + + const QVariant redirectionTarget = reply->attribute(QNetworkRequest::RedirectionTargetAttribute); + if (!redirectionTarget.isNull()) { + const QUrl redirectedUrl = redirectionTarget.toUrl(); + //qDebug() << "redirected to" << redirectedUrl; + startRequest(redirectedUrl); + } else { + qDebug() << "done in" << timer.elapsed() << "ms"; + done(); + } 
+} + +void FileReader::readyRead() { + QByteArray dat = reply->readAll(); + printf("got http ready read: %d\n", dat.size()); +} + +FileReader::~FileReader() { + +} + +LogReader::LogReader(const QString& file, Events *events_, QReadWriteLock* events_lock_, QMap > *eidx_) : + FileReader(file), events(events_), events_lock(events_lock_), eidx(eidx_) { + bStream.next_in = NULL; + bStream.avail_in = 0; + bStream.bzalloc = NULL; + bStream.bzfree = NULL; + bStream.opaque = NULL; + + int ret = BZ2_bzDecompressInit(&bStream, 0, 0); + if (ret != BZ_OK) qWarning() << "bz2 init failed"; + + // start with 64MB buffer + raw.resize(1024*1024*64); + + // auto increment? + bStream.next_out = raw.data(); + bStream.avail_out = raw.size(); + + // parsed no events yet + event_offset = 0; + + parser = new std::thread([&]() { + while (1) { + mergeEvents(cdled.get()); + } + }); +} + +void LogReader::mergeEvents(int dled) { + auto amsg = kj::arrayPtr((const capnp::word*)(raw.data() + event_offset), (dled-event_offset)/sizeof(capnp::word)); + Events events_local; + QMap > eidx_local; + + while (amsg.size() > 0) { + try { + capnp::FlatArrayMessageReader cmsg = capnp::FlatArrayMessageReader(amsg); + + // this needed? 
it is + capnp::FlatArrayMessageReader *tmsg = + new capnp::FlatArrayMessageReader(kj::arrayPtr(amsg.begin(), cmsg.getEnd())); + + amsg = kj::arrayPtr(cmsg.getEnd(), amsg.end()); + + cereal::Event::Reader event = tmsg->getRoot(); + events_local.insert(event.getLogMonoTime(), event); + + // hack + // TODO: rewrite with callback + if (event.which() == cereal::Event::ENCODE_IDX) { + auto ee = event.getEncodeIdx(); + eidx_local.insert(ee.getFrameId(), qMakePair(ee.getSegmentNum(), ee.getSegmentId())); + } + + // increment + event_offset = (char*)cmsg.getEnd() - raw.data(); + } catch (const kj::Exception& e) { + // partial messages trigger this + //qDebug() << e.getDescription().cStr(); + break; + } + } + + // merge in events + // TODO: add lock + events_lock->lockForWrite(); + *events += events_local; + eidx->unite(eidx_local); + events_lock->unlock(); + + printf("parsed %d into %d events with offset %d\n", dled, events->size(), event_offset); +} + +void LogReader::readyRead() { + QByteArray dat = reply->readAll(); + + bStream.next_in = dat.data(); + bStream.avail_in = dat.size(); + + while (bStream.avail_in > 0) { + int ret = BZ2_bzDecompress(&bStream); + if (ret != BZ_OK && ret != BZ_STREAM_END) { + qWarning() << "bz2 decompress failed"; + break; + } + qDebug() << "got" << dat.size() << "with" << bStream.avail_out << "size" << raw.size(); + } + + int dled = raw.size() - bStream.avail_out; + cdled.put(dled); +} + diff --git a/tools/nui/FileReader.hpp b/tools/nui/FileReader.hpp new file mode 100644 index 00000000000000..6d53bdee8e2450 --- /dev/null +++ b/tools/nui/FileReader.hpp @@ -0,0 +1,68 @@ +#ifndef FILEREADER_HPP +#define FILEREADER_HPP + +#include +#include +#include +#include +#include +#include +#include + +#include + +#include +#include + +#include "cereal/gen/cpp/log.capnp.h" + +#include +#include "channel.hpp" + +class FileReader : public QObject { +Q_OBJECT +public: + FileReader(const QString& file_); + void startRequest(const QUrl &url); + ~FileReader(); + 
virtual void readyRead(); + void httpFinished(); + virtual void done() {}; +public slots: + void process(); +protected: + QNetworkReply *reply; +private: + QNetworkAccessManager *qnam; + QElapsedTimer timer; + QString file; +}; + +typedef QMultiMap Events; + +class LogReader : public FileReader { +Q_OBJECT +public: + LogReader(const QString& file, Events *, QReadWriteLock* events_lock_, QMap > *eidx_); + void readyRead(); + void done() { is_done = true; }; + bool is_done = false; +private: + bz_stream bStream; + + // backing store + QByteArray raw; + + std::thread *parser; + int event_offset; + channel cdled; + + // global + void mergeEvents(int dled); + Events *events; + QReadWriteLock* events_lock; + QMap > *eidx; +}; + +#endif + diff --git a/tools/nui/README b/tools/nui/README new file mode 100644 index 00000000000000..3033bcadc0c0cf --- /dev/null +++ b/tools/nui/README @@ -0,0 +1,9 @@ +== Ubuntu == + +sudo apt-get install capnproto libyaml-cpp-dev qt5-default + +== Mac == + +brew install qt5 ffmpeg capnp yaml-cpp zmq +brew link qt5 --force + diff --git a/tools/nui/Unlogger.cpp b/tools/nui/Unlogger.cpp new file mode 100644 index 00000000000000..843ef51e3fc636 --- /dev/null +++ b/tools/nui/Unlogger.cpp @@ -0,0 +1,182 @@ +#include +#include +#include +#include +#include + +// include the dynamic struct +#include "cereal/gen/cpp/car.capnp.c++" +#include "cereal/gen/cpp/log.capnp.c++" + +#include "Unlogger.hpp" + +#include +#include + +static inline uint64_t nanos_since_boot() { + struct timespec t; + clock_gettime(CLOCK_BOOTTIME, &t); + return t.tv_sec * 1000000000ULL + t.tv_nsec; +} + + +Unlogger::Unlogger(Events *events_, QReadWriteLock* events_lock_, QMap *frs_, int seek) + : events(events_), events_lock(events_lock_), frs(frs_) { + ctx = Context::create(); + YAML::Node service_list = YAML::LoadFile("../../cereal/service_list.yaml"); + + seek_request = seek*1e9; + + QStringList block = QString(getenv("BLOCK")).split(","); + qDebug() << "blocklist" << block; + + 
QStringList allow = QString(getenv("ALLOW")).split(","); + qDebug() << "allowlist" << allow; + + for (const auto& it : service_list) { + auto name = it.first.as(); + + if (allow[0].size() > 0 && !allow.contains(name.c_str())) { + qDebug() << "not allowing" << name.c_str(); + continue; + } + + if (block.contains(name.c_str())) { + qDebug() << "blocking" << name.c_str(); + continue; + } + + PubSocket *sock = PubSocket::create(ctx, name); + if (sock == NULL) { + qDebug() << "FAILED" << name.c_str(); + continue; + } + + qDebug() << name.c_str(); + + for (auto field: capnp::Schema::from().getFields()) { + std::string tname = field.getProto().getName(); + + if (tname == name) { + // TODO: I couldn't figure out how to get the which, only the index, hence this hack + int type = field.getIndex(); + if (type > 67) type--; // valid + type--; // logMonoTime + + //qDebug() << "here" << tname.c_str() << type << cereal::Event::CONTROLS_STATE; + socks.insert(type, sock); + } + } + } +} + +void Unlogger::process() { + qDebug() << "hello from unlogger thread"; + while (events->size() == 0) { + qDebug() << "waiting for events"; + QThread::sleep(1); + } + qDebug() << "got events"; + + // TODO: hack + if (seek_request != 0) { + seek_request += events->begin().key(); + while (events->lowerBound(seek_request) == events->end()) { + qDebug() << "waiting for desired time"; + QThread::sleep(1); + } + } + + QElapsedTimer timer; + timer.start(); + + uint64_t last_elapsed = 0; + + // loops + while (1) { + uint64_t t0 = (events->begin()+1).key(); + uint64_t t0r = timer.nsecsElapsed(); + qDebug() << "unlogging at" << t0; + + auto eit = events->lowerBound(t0); + while (eit != events->end()) { + while (paused) { + QThread::usleep(1000); + t0 = eit->getLogMonoTime(); + t0r = timer.nsecsElapsed(); + } + + if (seek_request != 0) { + t0 = seek_request; + qDebug() << "seeking to" << t0; + t0r = timer.nsecsElapsed(); + eit = events->lowerBound(t0); + seek_request = 0; + if (eit == events->end()) { + 
qWarning() << "seek off end"; + break; + } + } + + if (abs(((long long)tc-(long long)last_elapsed)) > 50e6) { + //qDebug() << "elapsed"; + emit elapsed(); + last_elapsed = tc; + } + + auto e = *eit; + auto type = e.which(); + uint64_t tm = e.getLogMonoTime(); + auto it = socks.find(type); + tc = tm; + if (it != socks.end()) { + long etime = tm-t0; + long rtime = timer.nsecsElapsed() - t0r; + long us_behind = ((etime-rtime)*1e-3)+0.5; + if (us_behind > 0) { + if (us_behind > 1e6) { + qWarning() << "OVER ONE SECOND BEHIND, HACKING" << us_behind; + us_behind = 0; + t0 = tm; + t0r = timer.nsecsElapsed(); + } + QThread::usleep(us_behind); + //qDebug() << "sleeping" << us_behind << etime << timer.nsecsElapsed(); + } + + capnp::MallocMessageBuilder msg; + msg.setRoot(e); + + auto ee = msg.getRoot(); + ee.setLogMonoTime(nanos_since_boot()); + + if (e.which() == cereal::Event::FRAME) { + auto fr = msg.getRoot().getFrame(); + + // TODO: better way? + auto it = eidx.find(fr.getFrameId()); + if (it != eidx.end()) { + auto pp = *it; + //qDebug() << fr.getFrameId() << pp; + + if (frs->find(pp.first) != frs->end()) { + auto frm = (*frs)[pp.first]; + auto data = frm->get(pp.second); + if (data != NULL) { + fr.setImage(kj::arrayPtr(data, frm->getRGBSize())); + } + } + } + } + + auto words = capnp::messageToFlatArray(msg); + auto bytes = words.asBytes(); + + // TODO: Can PubSocket take a const char? 
+ (*it)->send((char*)bytes.begin(), bytes.size()); + } + ++eit; + } + } +} + diff --git a/tools/nui/Unlogger.hpp b/tools/nui/Unlogger.hpp new file mode 100644 index 00000000000000..577632268e3e3f --- /dev/null +++ b/tools/nui/Unlogger.hpp @@ -0,0 +1,36 @@ +#ifndef UNLOGGER_HPP +#define UNLOGGER_HPP + +#include +#include +#include "messaging.hpp" +#include "FileReader.hpp" +#include "FrameReader.hpp" + +class Unlogger : public QObject { +Q_OBJECT + public: + Unlogger(Events *events_, QReadWriteLock* events_lock_, QMap *frs_, int seek); + uint64_t getCurrentTime() { return tc; } + void setSeekRequest(uint64_t seek_request_) { seek_request = seek_request_; } + void setPause(bool pause) { paused = pause; } + void togglePause() { paused = !paused; } + QMap > eidx; + public slots: + void process(); + signals: + void elapsed(); + void finished(); + private: + Events *events; + QReadWriteLock *events_lock; + QMap *frs; + QMap socks; + Context *ctx; + uint64_t tc = 0; + uint64_t seek_request = 0; + bool paused = false; +}; + +#endif + diff --git a/tools/nui/build.sh b/tools/nui/build.sh new file mode 100755 index 00000000000000..d859d6f1d06c73 --- /dev/null +++ b/tools/nui/build.sh @@ -0,0 +1,4 @@ +#!/bin/bash +qmake +make -j + diff --git a/tools/nui/get_files_comma_api.py b/tools/nui/get_files_comma_api.py new file mode 100644 index 00000000000000..83fb73561ffbed --- /dev/null +++ b/tools/nui/get_files_comma_api.py @@ -0,0 +1,14 @@ +import json +import os +import sys + +from tools.lib.route import Route + +route_name = sys.argv[1] +routes = Route(route_name) +data_dump = { + "camera":routes.camera_paths(), + "logs":routes.log_paths() +} + +json.dump(data_dump, open("routes.json", "w")) \ No newline at end of file diff --git a/tools/nui/main.cpp b/tools/nui/main.cpp new file mode 100644 index 00000000000000..2f3d18c0ff785b --- /dev/null +++ b/tools/nui/main.cpp @@ -0,0 +1,261 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include 
+#include +#include +#include +#include +#include +#include +#include +#include + +#include "FileReader.hpp" +#include "Unlogger.hpp" +#include "FrameReader.hpp" + +class Window : public QWidget { + public: + Window(QString route_, int seek, int use_api); + bool addSegment(int i); + QJsonArray camera_paths; + QJsonArray log_paths; + int use_api; + protected: + void keyPressEvent(QKeyEvent *event) override; + void mousePressEvent(QMouseEvent *event) override; + void paintEvent(QPaintEvent *event) override; + uint64_t ct; + Unlogger *unlogger; + private: + int timeToPixel(uint64_t ns); + uint64_t pixelToTime(int px); + QString route; + + QReadWriteLock events_lock; + Events events; + int last_event_size = 0; + + QMap lrs; + QMap frs; + + + // cache the bar + QPixmap *px = NULL; + int seg_add = 0; + + QLineEdit *timeLE; +}; + +Window::Window(QString route_, int seek, int use_api_) : route(route_), use_api(use_api_) { + timeLE = new QLineEdit(this); + timeLE->setPlaceholderText("Placeholder Text"); + timeLE->move(50, 650); + + QThread* thread = new QThread; + unlogger = new Unlogger(&events, &events_lock, &frs, seek); + unlogger->moveToThread(thread); + connect(thread, SIGNAL (started()), unlogger, SLOT (process())); + connect(unlogger, SIGNAL (elapsed()), this, SLOT (update())); + thread->start(); + + if (use_api != 0){ + QString settings; + QFile file; + file.setFileName("routes.json"); + file.open(QIODevice::ReadOnly | QIODevice::Text); + settings = file.readAll(); + file.close(); + + QJsonDocument sd = QJsonDocument::fromJson(settings.toUtf8()); + qWarning() << sd.isNull(); // <- print false :) + QJsonObject sett2 = sd.object(); + + this->camera_paths = sett2.value("camera").toArray(); + this->log_paths = sett2.value("logs").toArray(); + } + + this->setFocusPolicy(Qt::StrongFocus); + + // add the first segment + addSegment(seek/60); +} + +bool Window::addSegment(int i) { + if (lrs.find(i) == lrs.end()) { + QString fn = 
QString("http://data.comma.life/%1/%2/rlog.bz2").arg(route).arg(i); + + + QThread* thread = new QThread; + if (use_api != 0) + lrs.insert(i, new LogReader(fn, &events, &events_lock, &unlogger->eidx)); + else { + QString log_fn = this->log_paths.at(i).toString(); + lrs.insert(i, new LogReader(log_fn, &events, &events_lock, &unlogger->eidx)); + + } + + lrs[i]->moveToThread(thread); + connect(thread, SIGNAL (started()), lrs[i], SLOT (process())); + thread->start(); + //connect(lrs[i], SIGNAL (finished()), this, SLOT (update())); + + QString frn = QString("http://data.comma.life/%1/%2/fcamera.hevc").arg(route).arg(i); + + if (use_api != 0) + frs.insert(i, new FrameReader(qPrintable(frn))); + else{ + QString camera_fn = this->camera_paths.at(i).toString(); + frs.insert(i, new FrameReader(qPrintable(camera_fn))); + } + + + return true; + } + return false; +} + +#define PIXELS_PER_SEC 0.5 + +int Window::timeToPixel(uint64_t ns) { + // TODO: make this dynamic + return int(ns*1e-9*PIXELS_PER_SEC+0.5); +} + +uint64_t Window::pixelToTime(int px) { + // TODO: make this dynamic + //printf("%d\n", px); + return ((px+0.5)/PIXELS_PER_SEC) * 1e9; +} + +void Window::keyPressEvent(QKeyEvent *event) { + printf("keypress: %x\n", event->key()); + if (event->key() == Qt::Key_Space) unlogger->togglePause(); +} + +void Window::mousePressEvent(QMouseEvent *event) { + //printf("mouse event\n"); + if (event->button() == Qt::LeftButton) { + uint64_t t0 = events.begin().key(); + uint64_t tt = pixelToTime(event->x()); + int seg = int((tt*1e-9)/60); + printf("segment %d\n", seg); + addSegment(seg); + + //printf("seek to %lu\n", t0+tt); + unlogger->setSeekRequest(t0+tt); + } + this->update(); +} + +void Window::paintEvent(QPaintEvent *event) { + if (events.size() == 0) return; + + QElapsedTimer timer; + timer.start(); + + uint64_t t0 = events.begin().key(); + uint64_t t1 = (events.end()-1).key(); + + //p.drawRect(0, 0, 600, 100); + + // TODO: we really don't have to redraw this every time, only on 
updates to events + int this_event_size = events.size(); + if (last_event_size != this_event_size) { + if (px != NULL) delete px; + px = new QPixmap(1920, 600); + px->fill(QColor(0xd8, 0xd8, 0xd8)); + + QPainter tt(px); + tt.setBrush(Qt::cyan); + + int lt = -1; + int lvv = 0; + for (auto e : events) { + auto type = e.which(); + //printf("%lld %d\n", e.getLogMonoTime()-t0, type); + if (type == cereal::Event::CONTROLS_STATE) { + auto controlsState = e.getControlsState(); + uint64_t t = (e.getLogMonoTime()-t0); + float vEgo = controlsState.getVEgo(); + int enabled = controlsState.getState() == cereal::ControlsState::OpenpilotState::ENABLED; + int rt = timeToPixel(t); // 250 ms per pixel + if (rt != lt) { + int vv = vEgo*8.0; + if (lt != -1) { + tt.setPen(Qt::red); + tt.drawLine(lt, 300-lvv, rt, 300-vv); + + if (enabled) { + tt.setPen(Qt::green); + } else { + tt.setPen(Qt::blue); + } + + tt.drawLine(rt, 300, rt, 600); + } + lt = rt; + lvv = vv; + } + } + } + tt.end(); + last_event_size = this_event_size; + if (lrs.find(seg_add) != lrs.end() && lrs[seg_add]->is_done) { + while (!addSegment(++seg_add)); + } + } + + QPainter p(this); + if (px != NULL) p.drawPixmap(0, 0, 1920, 600, *px); + + p.setBrush(Qt::cyan); + + uint64_t ct = unlogger->getCurrentTime(); + if (ct != 0) { + addSegment((((ct-t0)*1e-9)/60)+1); + int rrt = timeToPixel(ct-t0); + p.drawRect(rrt-1, 0, 2, 600); + + timeLE->setText(QString("%1").arg((ct-t0)*1e-9, '8', 'f', 2)); + } + + p.end(); + + if (timer.elapsed() > 50) { + qDebug() << "paint in" << timer.elapsed() << "ms"; + } +} + +int main(int argc, char *argv[]) { + QApplication app(argc, argv); + + QString route(argv[1]); + + int use_api = QString::compare(QString("use_api"), route, Qt::CaseInsensitive); + int seek = QString(argv[2]).toInt(); + printf("seek: %d\n", seek); + route = route.replace("|", "/"); + if (route == "") { + printf("usage %s: \n", argv[0]); + exit(0); + //route = "3a5d6ac1c23e5536/2019-10-29--10-06-58"; + //route = 
"0006c839f32a6f99/2019-02-18--06-21-29"; + //route = "02ec6bea180a4d36/2019-10-25--10-18-09"; + } + + Window window(route, seek, use_api); + + window.resize(1920, 800); + window.setWindowTitle("nui unlogger"); + window.show(); + + return app.exec(); +} + diff --git a/tools/nui/nui b/tools/nui/nui new file mode 100755 index 00000000000000..35d3cfa68f7867 --- /dev/null +++ b/tools/nui/nui @@ -0,0 +1,12 @@ +#!/bin/sh + +if [ $# -gt 0 ]; then + if [ "$INTERNAL" = 1 ]; then + ./_nui "$1" + else + python get_files_comma_api.py $1 && ./_nui use_api + fi +else + echo "Please Enter a Route" +fi + \ No newline at end of file diff --git a/tools/nui/nui.pro b/tools/nui/nui.pro new file mode 100644 index 00000000000000..4249283680efe6 --- /dev/null +++ b/tools/nui/nui.pro @@ -0,0 +1,37 @@ +###################################################################### +# Automatically generated by qmake (3.0) Tue Oct 29 11:15:25 2019 +###################################################################### + +TEMPLATE = app +TARGET = _nui +INCLUDEPATH += . 
+ +# Input +SOURCES += main.cpp FileReader.cpp Unlogger.cpp ../clib/FrameReader.cpp +HEADERS = FileReader.hpp Unlogger.hpp ../clib/FrameReader.hpp + +CONFIG += c++14 +CONFIG += debug + +QT += widgets network core + +BASEDIR = "../../" +PHONELIBS = $$BASEDIR"phonelibs" + +INCLUDEPATH = $$PHONELIBS/capnp-cpp/include $$PHONELIBS/yaml-cpp/include ../clib/ + +unix:!macx { + LIBS += -L$$PHONELIBS/capnp-cpp/x64/lib -L$$PHONELIBS/yaml-cpp/x64/lib -Wl,-rpath=$$PHONELIBS/capnp-cpp/x64/lib +} + +macx: { + LIBS += -L$$PHONELIBS/capnp-cpp/mac/lib -L$$PHONELIBS/yaml-cpp/mac/lib +} + +LIBS += -lcapnp -lkj -lyaml-cpp + +INCLUDEPATH += /usr/local/include +INCLUDEPATH += $$PHONELIBS/capnp-cpp/include $$BASEDIR $$BASEDIR/cereal/messaging $$PHONELIBS/yaml-cpp/include +LIBS += -L/usr/local/lib -lavformat -lavcodec -lavutil -lswscale +LIBS += -lbz2 $$BASEDIR/cereal/libmessaging.a -lzmq + diff --git a/tools/nui/test/.gitignore b/tools/nui/test/.gitignore new file mode 100644 index 00000000000000..9daeafb9864cf4 --- /dev/null +++ b/tools/nui/test/.gitignore @@ -0,0 +1 @@ +test diff --git a/tools/nui/test/TestFrameReader.cpp b/tools/nui/test/TestFrameReader.cpp new file mode 100644 index 00000000000000..55327ac312be30 --- /dev/null +++ b/tools/nui/test/TestFrameReader.cpp @@ -0,0 +1,14 @@ +#include "../../clib/FrameReader.hpp" +#include "TestFrameReader.hpp" + +void TestFrameReader::frameread() { + QElapsedTimer t; + t.start(); + FrameReader fr("3a5d6ac1c23e5536/2019-10-29--10-06-58/2/fcamera.hevc"); + fr.get(2); + //QThread::sleep(10); + qDebug() << t.nsecsElapsed()*1e-9 << "seconds"; +} + +QTEST_MAIN(TestFrameReader) + diff --git a/tools/nui/test/TestFrameReader.hpp b/tools/nui/test/TestFrameReader.hpp new file mode 100644 index 00000000000000..59c07a3ebdfd30 --- /dev/null +++ b/tools/nui/test/TestFrameReader.hpp @@ -0,0 +1,8 @@ +#include + +class TestFrameReader : public QObject { +Q_OBJECT +private slots: + void frameread(); +}; + diff --git a/tools/nui/test/test.pro 
b/tools/nui/test/test.pro new file mode 100644 index 00000000000000..2e345f66d41cf2 --- /dev/null +++ b/tools/nui/test/test.pro @@ -0,0 +1,16 @@ +###################################################################### +# Automatically generated by qmake (3.0) Thu Oct 31 16:05:48 2019 +###################################################################### + +QT += testlib +TEMPLATE = app +TARGET = test +INCLUDEPATH += . ../ + +# Input +SOURCES += TestFrameReader.cpp ../../clib/FrameReader.cpp +HEADERS = TestFrameReader.hpp ../../clib/FrameReader.hpp + +CONFIG += c++14 + +LIBS += -lavformat -lavcodec -lavutil -lswscale diff --git a/tools/openpilot_env.sh b/tools/openpilot_env.sh new file mode 100644 index 00000000000000..ccf76db2bc2f21 --- /dev/null +++ b/tools/openpilot_env.sh @@ -0,0 +1,20 @@ +if [ -z "$OPENPILOT_ENV" ]; then + export PYTHONPATH="$HOME/openpilot" + + unamestr=`uname` + if [[ "$unamestr" == 'Linux' ]]; then + export PATH="$HOME/.pyenv/bin:$PATH" + eval "$(pyenv init -)" + eval "$(pyenv virtualenv-init -)" + + export PATH="$PATH:$HOME/openpilot/external/capnp/bin" + export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$HOME/openpilot/external/capnp/lib" + elif [[ "$unamestr" == 'Darwin' ]]; then + # msgq doesn't work on mac + export ZMQ=1 + export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES + fi + + export OPENPILOT_ENV=1 +fi + diff --git a/tools/replay/__init__.py b/tools/replay/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tools/replay/camera.py b/tools/replay/camera.py new file mode 100755 index 00000000000000..a48c006407d8a4 --- /dev/null +++ b/tools/replay/camera.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python +import os + +from common.basedir import BASEDIR +os.environ['BASEDIR'] = BASEDIR +SCALE = 3 + +import argparse +import zmq +import pygame +import numpy as np +import cv2 +import sys +import traceback +from collections import namedtuple +from cereal import car +from common.params import Params +from tools.lib.lazy_property 
import lazy_property +from cereal.messaging import sub_sock, recv_one_or_none, recv_one +from cereal.services import service_list + +_BB_OFFSET = 0, 0 +_BB_TO_FULL_FRAME = np.asarray([[1., 0., _BB_OFFSET[0]], [0., 1., _BB_OFFSET[1]], + [0., 0., 1.]]) +_FULL_FRAME_TO_BB = np.linalg.inv(_BB_TO_FULL_FRAME) +_FULL_FRAME_SIZE = 1164, 874 + + + +def pygame_modules_have_loaded(): + return pygame.display.get_init() and pygame.font.get_init() + + +def ui_thread(addr, frame_address): + context = zmq.Context.instance() + + pygame.init() + pygame.font.init() + assert pygame_modules_have_loaded() + + size = (_FULL_FRAME_SIZE[0] * SCALE, _FULL_FRAME_SIZE[1] * SCALE) + pygame.display.set_caption("comma one debug UI") + screen = pygame.display.set_mode(size, pygame.DOUBLEBUF) + + camera_surface = pygame.surface.Surface((_FULL_FRAME_SIZE[0] * SCALE, _FULL_FRAME_SIZE[1] * SCALE), 0, 24).convert() + + frame = context.socket(zmq.SUB) + frame.connect(frame_address or "tcp://%s:%d" % (addr, service_list['frame'].port)) + frame.setsockopt(zmq.SUBSCRIBE, "") + + img = np.zeros((_FULL_FRAME_SIZE[1], _FULL_FRAME_SIZE[0], 3), dtype='uint8') + imgff = np.zeros((_FULL_FRAME_SIZE[1], _FULL_FRAME_SIZE[0], 3), dtype=np.uint8) + + while 1: + list(pygame.event.get()) + screen.fill((64, 64, 64)) + + # ***** frame ***** + fpkt = recv_one(frame) + yuv_img = fpkt.frame.image + + if fpkt.frame.transform: + yuv_transform = np.array(fpkt.frame.transform).reshape(3, 3) + else: + # assume frame is flipped + yuv_transform = np.array([[-1.0, 0.0, _FULL_FRAME_SIZE[0] - 1], + [0.0, -1.0, _FULL_FRAME_SIZE[1] - 1], [0.0, 0.0, 1.0]]) + + if yuv_img and len(yuv_img) == _FULL_FRAME_SIZE[0] * _FULL_FRAME_SIZE[1] * 3 // 2: + yuv_np = np.frombuffer( + yuv_img, dtype=np.uint8).reshape(_FULL_FRAME_SIZE[1] * 3 // 2, -1) + cv2.cvtColor(yuv_np, cv2.COLOR_YUV2RGB_I420, dst=imgff) + cv2.warpAffine( + imgff, + np.dot(yuv_transform, _BB_TO_FULL_FRAME)[:2], (img.shape[1], img.shape[0]), + dst=img, + flags=cv2.WARP_INVERSE_MAP) + 
else: + img.fill(0) + + height, width = img.shape[:2] + img_resized = cv2.resize( + img, (SCALE * width, SCALE * height), interpolation=cv2.INTER_CUBIC) + # *** blits *** + pygame.surfarray.blit_array(camera_surface, img_resized.swapaxes(0, 1)) + screen.blit(camera_surface, (0, 0)) + + # this takes time...vsync or something + pygame.display.flip() + + +def get_arg_parser(): + parser = argparse.ArgumentParser( + description="Show replay data in a UI.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + + parser.add_argument( + "ip_address", + nargs="?", + default="127.0.0.1", + help="The ip address on which to receive zmq messages.") + + parser.add_argument( + "--frame-address", + default=None, + help="The ip address on which to receive zmq messages.") + return parser + + +if __name__ == "__main__": + args = get_arg_parser().parse_args(sys.argv[1:]) + ui_thread(args.ip_address, args.frame_address) diff --git a/tools/replay/lib/__init__.py b/tools/replay/lib/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tools/replay/lib/ui_helpers.py b/tools/replay/lib/ui_helpers.py new file mode 100644 index 00000000000000..242e5a0c25a47b --- /dev/null +++ b/tools/replay/lib/ui_helpers.py @@ -0,0 +1,314 @@ +import platform +from collections import namedtuple + +import matplotlib +import matplotlib.pyplot as plt +import numpy as np +import pygame + +from tools.lib.lazy_property import lazy_property +from selfdrive.config import UIParams as UP +from selfdrive.config import RADAR_TO_CAMERA +from selfdrive.controls.lib.lane_planner import (compute_path_pinv, + model_polyfit) + +RED = (255, 0, 0) +GREEN = (0, 255, 0) +BLUE = (0, 0, 255) +YELLOW = (255, 255, 0) +BLACK = (0, 0, 0) +WHITE = (255, 255, 255) + +_PATH_X = np.arange(192.) +_PATH_XD = np.arange(192.) +_PATH_PINV = compute_path_pinv(50) +#_BB_OFFSET = 290, 332 +_BB_OFFSET = 0,0 +_BB_SCALE = 1164/640. 
+_BB_TO_FULL_FRAME = np.asarray([ + [_BB_SCALE, 0., _BB_OFFSET[0]], + [0., _BB_SCALE, _BB_OFFSET[1]], + [0., 0., 1.]]) +_FULL_FRAME_TO_BB = np.linalg.inv(_BB_TO_FULL_FRAME) + +METER_WIDTH = 20 + +ModelUIData = namedtuple("ModelUIData", ["cpath", "lpath", "rpath", "lead", "lead_future"]) + +_COLOR_CACHE = {} +def find_color(lidar_surface, color): + if color in _COLOR_CACHE: + return _COLOR_CACHE[color] + tcolor = 0 + ret = 255 + for x in lidar_surface.get_palette(): + #print tcolor, x + if x[0:3] == color: + ret = tcolor + break + tcolor += 1 + _COLOR_CACHE[color] = ret + return ret + +def warp_points(pt_s, warp_matrix): + # pt_s are the source points, nxm array. + pt_d = np.dot(warp_matrix[:, :-1], pt_s.T) + warp_matrix[:, -1, None] + + # Divide by last dimension for representation in image space. + return (pt_d[:-1, :] / pt_d[-1, :]).T + +def to_lid_pt(y, x): + px, py = -x * UP.lidar_zoom + UP.lidar_car_x, -y * UP.lidar_zoom + UP.lidar_car_y + if px > 0 and py > 0 and px < UP.lidar_x and py < UP.lidar_y: + return int(px), int(py) + return -1, -1 + + +def draw_path(y, x, color, img, calibration, top_down, lid_color=None): + # TODO: Remove big box. 
+ uv_model_real = warp_points(np.column_stack((x, y)), calibration.car_to_model) + uv_model = np.round(uv_model_real).astype(int) + + uv_model_dots = uv_model[np.logical_and.reduce((np.all( # pylint: disable=no-member + uv_model > 0, axis=1), uv_model[:, 0] < img.shape[1] - 1, uv_model[:, 1] < + img.shape[0] - 1))] + + for i, j in ((-1, 0), (0, -1), (0, 0), (0, 1), (1, 0)): + img[uv_model_dots[:, 1] + i, uv_model_dots[:, 0] + j] = color + + # draw lidar path point on lidar + # find color in 8 bit + if lid_color is not None and top_down is not None: + tcolor = find_color(top_down[0], lid_color) + for i in range(len(x)): + px, py = to_lid_pt(x[i], y[i]) + if px != -1: + top_down[1][px, py] = tcolor + +def draw_steer_path(speed_ms, curvature, color, img, + calibration, top_down, VM, lid_color=None): + path_x = np.arange(101.) + path_y = np.multiply(path_x, np.tan(np.arcsin(np.clip(path_x * curvature, -0.999, 0.999)) / 2.)) + + draw_path(path_y, path_x, color, img, calibration, top_down, lid_color) + +def draw_lead_car(closest, top_down): + if closest != None: + closest_y = int(round(UP.lidar_car_y - closest * UP.lidar_zoom)) + if closest_y > 0: + top_down[1][int(round(UP.lidar_car_x - METER_WIDTH * 2)):int( + round(UP.lidar_car_x + METER_WIDTH * 2)), closest_y] = find_color( + top_down[0], (255, 0, 0)) + +def draw_lead_on(img, closest_x_m, closest_y_m, calibration, color, sz=10, img_offset=(0, 0)): + uv = warp_points(np.asarray([closest_x_m, closest_y_m]), calibration.car_to_bb)[0] + u, v = int(uv[0] + img_offset[0]), int(uv[1] + img_offset[1]) + if u > 0 and u < 640 and v > 0 and v < 480 - 5: + img[v - 5 - sz:v - 5 + sz, u] = color + img[v - 5, u - sz:u + sz] = color + return u, v + + +if platform.system() != 'Darwin': + matplotlib.use('QT4Agg') + + +def init_plots(arr, name_to_arr_idx, plot_xlims, plot_ylims, plot_names, plot_colors, plot_styles, bigplots=False): + color_palette = { "r": (1,0,0), + "g": (0,1,0), + "b": (0,0,1), + "k": (0,0,0), + "y": (1,1,0), + "p": 
(0,1,1), + "m": (1,0,1) } + + if bigplots == True: + fig = plt.figure(figsize=(6.4, 7.0)) + elif bigplots == False: + fig = plt.figure() + else: + fig = plt.figure(figsize=bigplots) + + fig.set_facecolor((0.2,0.2,0.2)) + + axs = [] + for pn in range(len(plot_ylims)): + ax = fig.add_subplot(len(plot_ylims),1,len(axs)+1) + ax.set_xlim(plot_xlims[pn][0], plot_xlims[pn][1]) + ax.set_ylim(plot_ylims[pn][0], plot_ylims[pn][1]) + ax.patch.set_facecolor((0.4, 0.4, 0.4)) + axs.append(ax) + + plots = [] ;idxs = [] ;plot_select = [] + for i, pl_list in enumerate(plot_names): + for j, item in enumerate(pl_list): + plot, = axs[i].plot(arr[:, name_to_arr_idx[item]], + label=item, + color=color_palette[plot_colors[i][j]], + linestyle=plot_styles[i][j]) + plots.append(plot) + idxs.append(name_to_arr_idx[item]) + plot_select.append(i) + axs[i].set_title(", ".join("%s (%s)" % (nm, cl) + for (nm, cl) in zip(pl_list, plot_colors[i])), fontsize=10) + if i < len(plot_ylims) - 1: + axs[i].set_xticks([]) + + fig.canvas.draw() + + renderer = fig.canvas.get_renderer() + + if matplotlib.get_backend() == "MacOSX": + fig.draw(renderer) + + def draw_plots(arr): + for ax in axs: + ax.draw_artist(ax.patch) + for i in range(len(plots)): + plots[i].set_ydata(arr[:, idxs[i]]) + axs[plot_select[i]].draw_artist(plots[i]) + + if matplotlib.get_backend() == "QT4Agg": + fig.canvas.update() + fig.canvas.flush_events() + + raw_data = renderer.tostring_rgb() + #print fig.canvas.get_width_height() + plot_surface = pygame.image.frombuffer(raw_data, fig.canvas.get_width_height(), "RGB").convert() + return plot_surface + + return draw_plots + + +def draw_mpc(liveMpc, top_down): + mpc_color = find_color(top_down[0], (0, 255, 0)) + for p in zip(liveMpc.x, liveMpc.y): + px, py = to_lid_pt(*p) + top_down[1][px, py] = mpc_color + + + +class CalibrationTransformsForWarpMatrix(object): + def __init__(self, model_to_full_frame, K, E): + self._model_to_full_frame = model_to_full_frame + self._K = K + self._E = E + + 
@property + def model_to_bb(self): + return _FULL_FRAME_TO_BB.dot(self._model_to_full_frame) + + @lazy_property + def model_to_full_frame(self): + return self._model_to_full_frame + + @lazy_property + def car_to_model(self): + return np.linalg.inv(self._model_to_full_frame).dot(self._K).dot( + self._E[:, [0, 1, 3]]) + + @lazy_property + def car_to_bb(self): + return _BB_TO_FULL_FRAME.dot(self._K).dot(self._E[:, [0, 1, 3]]) + + +def pygame_modules_have_loaded(): + return pygame.display.get_init() and pygame.font.get_init() + +def draw_var(y, x, var, color, img, calibration, top_down): + # otherwise drawing gets stupid + var = max(1e-1, min(var, 0.7)) + + varcolor = tuple(np.array(color)*0.5) + draw_path(y - var, x, varcolor, img, calibration, top_down) + draw_path(y + var, x, varcolor, img, calibration, top_down) + + +class ModelPoly(object): + def __init__(self, model_path): + if len(model_path.points) == 0 and len(model_path.poly) == 0: + self.valid = False + return + + if len(model_path.poly): + self.poly = np.array(model_path.poly) + else: + self.poly = model_polyfit(model_path.points, _PATH_PINV) + + self.prob = model_path.prob + self.std = model_path.std + self.y = np.polyval(self.poly, _PATH_XD) + self.valid = True + +def extract_model_data(md): + return ModelUIData( + cpath=ModelPoly(md.path), + lpath=ModelPoly(md.leftLane), + rpath=ModelPoly(md.rightLane), + lead=md.lead, + lead_future=md.leadFuture, + ) + +def plot_model(m, VM, v_ego, curvature, imgw, calibration, top_down, d_poly, top_down_color=216): + if calibration is None or top_down is None: + return + + for lead in [m.lead, m.lead_future]: + if lead.prob < 0.5: + continue + + lead_dist_from_radar = lead.dist - RADAR_TO_CAMERA + _, py_top = to_lid_pt(lead_dist_from_radar + lead.std, lead.relY) + px, py_bottom = to_lid_pt(lead_dist_from_radar - lead.std, lead.relY) + top_down[1][int(round(px - 4)):int(round(px + 4)), py_top:py_bottom] = top_down_color + + color = (0, int(255 * m.lpath.prob), 0) + for 
path in [m.cpath, m.lpath, m.rpath]: + if path.valid: + draw_path(path.y, _PATH_XD, color, imgw, calibration, top_down, YELLOW) + draw_var(path.y, _PATH_XD, path.std, color, imgw, calibration, top_down) + + if d_poly is not None: + dpath_y = np.polyval(d_poly, _PATH_X) + draw_path(dpath_y, _PATH_X, RED, imgw, calibration, top_down, RED) + + # draw user path from curvature + draw_steer_path(v_ego, curvature, BLUE, imgw, calibration, top_down, VM, BLUE) + + +def maybe_update_radar_points(lt, lid_overlay): + ar_pts = [] + if lt is not None: + ar_pts = {} + for track in lt: + ar_pts[track.trackId] = [track.dRel, track.yRel, track.vRel, track.aRel, track.oncoming, track.stationary] + for ids, pt in ar_pts.items(): + px, py = to_lid_pt(pt[0], pt[1]) + if px != -1: + if pt[-1]: + color = 240 + elif pt[-2]: + color = 230 + else: + color = 255 + if int(ids) == 1: + lid_overlay[px - 2:px + 2, py - 10:py + 10] = 100 + else: + lid_overlay[px - 2:px + 2, py - 2:py + 2] = color + +def get_blank_lid_overlay(UP): + lid_overlay = np.zeros((UP.lidar_x, UP.lidar_y), 'uint8') + # Draw the car. 
+ lid_overlay[int(round(UP.lidar_car_x - UP.car_hwidth)):int( + round(UP.lidar_car_x + UP.car_hwidth)), int(round(UP.lidar_car_y - + UP.car_front))] = UP.car_color + lid_overlay[int(round(UP.lidar_car_x - UP.car_hwidth)):int( + round(UP.lidar_car_x + UP.car_hwidth)), int(round(UP.lidar_car_y + + UP.car_back))] = UP.car_color + lid_overlay[int(round(UP.lidar_car_x - UP.car_hwidth)), int( + round(UP.lidar_car_y - UP.car_front)):int(round( + UP.lidar_car_y + UP.car_back))] = UP.car_color + lid_overlay[int(round(UP.lidar_car_x + UP.car_hwidth)), int( + round(UP.lidar_car_y - UP.car_front)):int(round( + UP.lidar_car_y + UP.car_back))] = UP.car_color + return lid_overlay diff --git a/tools/replay/mapd.py b/tools/replay/mapd.py new file mode 100755 index 00000000000000..9b5e0bccd54f23 --- /dev/null +++ b/tools/replay/mapd.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python + +import matplotlib +matplotlib.use('TkAgg') +import matplotlib.pyplot as plt + +import numpy as np +import zmq +from cereal.services import service_list +from selfdrive.config import Conversions as CV +import cereal.messaging as messaging + + +if __name__ == "__main__": + live_map_sock = messaging.sub_sock(service_list['liveMapData'].port, conflate=True) + plan_sock = messaging.sub_sock(service_list['plan'].port, conflate=True) + + plt.ion() + fig = plt.figure(figsize=(8, 16)) + ax = fig.add_subplot(2, 1, 1) + ax.set_title('Map') + + SCALE = 1000 + ax.set_xlim([-SCALE, SCALE]) + ax.set_ylim([-SCALE, SCALE]) + ax.set_xlabel('x [m]') + ax.set_ylabel('y [m]') + ax.grid(True) + + points_plt, = ax.plot([0.0], [0.0], "--xk") + cur, = ax.plot([0.0], [0.0], "xr") + + speed_txt = ax.text(-500, 900, '') + curv_txt = ax.text(-500, 775, '') + + ax = fig.add_subplot(2, 1, 2) + ax.set_title('Curvature') + curvature_plt, = ax.plot([0.0], [0.0], "--xk") + ax.set_xlim([0, 500]) + ax.set_ylim([0, 1e-2]) + ax.set_xlabel('Distance along path [m]') + ax.set_ylabel('Curvature [1/m]') + ax.grid(True) + + plt.show() + + while True: + 
m = messaging.recv_one_or_none(live_map_sock) + p = messaging.recv_one_or_none(plan_sock) + if p is not None: + v = p.plan.vCurvature * CV.MS_TO_MPH + speed_txt.set_text('Desired curvature speed: %.2f mph' % v) + + if m is not None: + print("Current way id: %d" % m.liveMapData.wayId) + curv_txt.set_text('Curvature valid: %s Dist: %03.0f m\nSpeedlimit valid: %s Speed: %.0f mph' % + (str(m.liveMapData.curvatureValid), + m.liveMapData.distToTurn, + str(m.liveMapData.speedLimitValid), + m.liveMapData.speedLimit * CV.MS_TO_MPH)) + + points_plt.set_xdata(m.liveMapData.roadX) + points_plt.set_ydata(m.liveMapData.roadY) + curvature_plt.set_xdata(m.liveMapData.roadCurvatureX) + curvature_plt.set_ydata(m.liveMapData.roadCurvature) + + fig.canvas.draw() + fig.canvas.flush_events() diff --git a/tools/replay/rqplot.py b/tools/replay/rqplot.py new file mode 100755 index 00000000000000..32e9a349817c28 --- /dev/null +++ b/tools/replay/rqplot.py @@ -0,0 +1,83 @@ +#!/usr/bin/env python +import os +import sys +import matplotlib.pyplot as plt +import numpy as np +import cereal.messaging as messaging +import time + + +# tool to plot one or more signals live. Call ex: +#./rqplot.py log.carState.vEgo log.carState.aEgo + +# TODO: can this tool consume 10x less cpu? 
+ +def recursive_getattr(x, name): + l = name.split('.') + if len(l) == 1: + return getattr(x, name) + else: + return recursive_getattr(getattr(x, l[0]), ".".join(l[1:]) ) + + +if __name__ == "__main__": + poller = messaging.Poller() + + services = [] + fields = [] + subs = [] + values = [] + + plt.ion() + fig, ax = plt.subplots() + #fig = plt.figure(figsize=(10, 15)) + #ax = fig.add_subplot(111) + ax.grid(True) + fig.canvas.draw() + + subs_name = sys.argv[1:] + lines = [] + x, y = [], [] + LEN = 500 + + for i, sub in enumerate(subs_name): + sub_split = sub.split(".") + services.append(sub_split[0]) + fields.append(".".join(sub_split[1:])) + subs.append(messaging.sub_sock(sub_split[0], poller)) + + x.append(np.ones(LEN)*np.nan) + y.append(np.ones(LEN)*np.nan) + lines.append(ax.plot(x[i], y[i])[0]) + + for l in lines: + l.set_marker("*") + + cur_t = 0. + ax.legend(subs_name) + ax.set_xlabel('time [s]') + + while 1: + print(1./(time.time() - cur_t)) + cur_t = time.time() + for i, s in enumerate(subs): + msg = messaging.recv_sock(s) + #msg = messaging.recv_one_or_none(s) + if msg is not None: + x[i] = np.append(x[i], getattr(msg, 'logMonoTime') / float(1e9)) + x[i] = np.delete(x[i], 0) + y[i] = np.append(y[i], recursive_getattr(msg, subs_name[i])) + y[i] = np.delete(y[i], 0) + + lines[i].set_xdata(x[i]) + lines[i].set_ydata(y[i]) + + ax.relim() + ax.autoscale_view(True, scaley=True, scalex=True) + + fig.canvas.blit(ax.bbox) + fig.canvas.flush_events() + + # just a bit of wait to avoid 100% CPU usage + time.sleep(0.001) + diff --git a/tools/replay/ui.py b/tools/replay/ui.py new file mode 100755 index 00000000000000..61f01dc4e26c3e --- /dev/null +++ b/tools/replay/ui.py @@ -0,0 +1,278 @@ +#!/usr/bin/env python3 +import argparse +import os +import sys + +os.environ["OMP_NUM_THREADS"] = "1" + +import cv2 +import numpy as np +import pygame + +from common.basedir import BASEDIR +from common.transformations.camera import FULL_FRAME_SIZE, eon_intrinsics +from 
common.transformations.model import (MODEL_CX, MODEL_CY, MODEL_INPUT_SIZE, + get_camera_frame_from_model_frame) +from selfdrive.car.toyota.interface import CarInterface as ToyotaInterface +from selfdrive.config import UIParams as UP +from selfdrive.controls.lib.vehicle_model import VehicleModel +import cereal.messaging as messaging +from tools.replay.lib.ui_helpers import (_BB_TO_FULL_FRAME, BLACK, BLUE, GREEN, + YELLOW, RED, + CalibrationTransformsForWarpMatrix, + draw_lead_car, draw_lead_on, draw_mpc, + extract_model_data, + get_blank_lid_overlay, init_plots, + maybe_update_radar_points, plot_model, + pygame_modules_have_loaded, + warp_points) + +os.environ['BASEDIR'] = BASEDIR + +ANGLE_SCALE = 5.0 +HOR = os.getenv("HORIZONTAL") is not None + + +def ui_thread(addr, frame_address): + # TODO: Detect car from replay and use that to select carparams + CP = ToyotaInterface.get_params("TOYOTA PRIUS 2017") + VM = VehicleModel(CP) + + CalP = np.asarray([[0, 0], [MODEL_INPUT_SIZE[0], 0], [MODEL_INPUT_SIZE[0], MODEL_INPUT_SIZE[1]], [0, MODEL_INPUT_SIZE[1]]]) + vanishing_point = np.asarray([[MODEL_CX, MODEL_CY]]) + + pygame.init() + pygame.font.init() + assert pygame_modules_have_loaded() + + if HOR: + size = (640+384+640, 960) + write_x = 5 + write_y = 680 + else: + size = (640+384, 960+300) + write_x = 645 + write_y = 970 + + pygame.display.set_caption("openpilot debug UI") + screen = pygame.display.set_mode(size, pygame.DOUBLEBUF) + + alert1_font = pygame.font.SysFont("arial", 30) + alert2_font = pygame.font.SysFont("arial", 20) + info_font = pygame.font.SysFont("arial", 15) + + camera_surface = pygame.surface.Surface((640, 480), 0, 24).convert() + cameraw_surface = pygame.surface.Surface(MODEL_INPUT_SIZE, 0, 24).convert() + cameraw_test_surface = pygame.surface.Surface(MODEL_INPUT_SIZE, 0, 24) + top_down_surface = pygame.surface.Surface((UP.lidar_x, UP.lidar_y),0,8) + + frame = messaging.sub_sock('frame', addr=addr, conflate=True) + sm = messaging.SubMaster(['carState', 
'plan', 'carControl', 'radarState', 'liveCalibration', 'controlsState', 'liveTracks', 'model', 'liveMpc', 'liveParameters', 'pathPlan'], addr=addr) + + calibration = None + img = np.zeros((480, 640, 3), dtype='uint8') + imgff = np.zeros((FULL_FRAME_SIZE[1], FULL_FRAME_SIZE[0], 3), dtype=np.uint8) + imgw = np.zeros((160, 320, 3), dtype=np.uint8) # warped image + lid_overlay_blank = get_blank_lid_overlay(UP) + + # plots + name_to_arr_idx = { "gas": 0, + "computer_gas": 1, + "user_brake": 2, + "computer_brake": 3, + "v_ego": 4, + "v_pid": 5, + "angle_steers_des": 6, + "angle_steers": 7, + "angle_steers_k": 8, + "steer_torque": 9, + "v_override": 10, + "v_cruise": 11, + "a_ego": 12, + "a_target": 13, + "accel_override": 14} + + plot_arr = np.zeros((100, len(name_to_arr_idx.values()))) + + plot_xlims = [(0, plot_arr.shape[0]), (0, plot_arr.shape[0]), (0, plot_arr.shape[0]), (0, plot_arr.shape[0])] + plot_ylims = [(-0.1, 1.1), (-ANGLE_SCALE, ANGLE_SCALE), (0., 75.), (-3.0, 2.0)] + plot_names = [["gas", "computer_gas", "user_brake", "computer_brake", "accel_override"], + ["angle_steers", "angle_steers_des", "angle_steers_k", "steer_torque"], + ["v_ego", "v_override", "v_pid", "v_cruise"], + ["a_ego", "a_target"]] + plot_colors = [["b", "b", "g", "r", "y"], + ["b", "g", "y", "r"], + ["b", "g", "r", "y"], + ["b", "r"]] + plot_styles = [["-", "-", "-", "-", "-"], + ["-", "-", "-", "-"], + ["-", "-", "-", "-"], + ["-", "-"]] + + draw_plots = init_plots(plot_arr, name_to_arr_idx, plot_xlims, plot_ylims, plot_names, plot_colors, plot_styles, bigplots=True) + + counter = 0 + while 1: + list(pygame.event.get()) + + screen.fill((64,64,64)) + lid_overlay = lid_overlay_blank.copy() + top_down = top_down_surface, lid_overlay + + # ***** frame ***** + fpkt = messaging.recv_one(frame) + rgb_img_raw = fpkt.frame.image + + if fpkt.frame.transform: + img_transform = np.array(fpkt.frame.transform).reshape(3,3) + else: + # assume frame is flipped + img_transform = np.array([ + [-1.0, 0.0, 
FULL_FRAME_SIZE[0]-1], + [ 0.0, -1.0, FULL_FRAME_SIZE[1]-1], + [ 0.0, 0.0, 1.0] + ]) + + + if rgb_img_raw and len(rgb_img_raw) == FULL_FRAME_SIZE[0] * FULL_FRAME_SIZE[1] * 3: + imgff = np.frombuffer(rgb_img_raw, dtype=np.uint8).reshape((FULL_FRAME_SIZE[1], FULL_FRAME_SIZE[0], 3)) + imgff = imgff[:, :, ::-1] # Convert BGR to RGB + cv2.warpAffine(imgff, np.dot(img_transform, _BB_TO_FULL_FRAME)[:2], + (img.shape[1], img.shape[0]), dst=img, flags=cv2.WARP_INVERSE_MAP) + + intrinsic_matrix = eon_intrinsics + else: + img.fill(0) + intrinsic_matrix = np.eye(3) + + if calibration is not None: + transform = np.dot(img_transform, calibration.model_to_full_frame) + imgw = cv2.warpAffine(imgff, transform[:2], (MODEL_INPUT_SIZE[0], MODEL_INPUT_SIZE[1]), flags=cv2.WARP_INVERSE_MAP) + else: + imgw.fill(0) + + sm.update() + + w = sm['controlsState'].lateralControlState.which() + if w == 'lqrState': + angle_steers_k = sm['controlsState'].lateralControlState.lqrState.steerAngle + elif w == 'indiState': + angle_steers_k = sm['controlsState'].lateralControlState.indiState.steerAngle + else: + angle_steers_k = np.inf + + plot_arr[:-1] = plot_arr[1:] + plot_arr[-1, name_to_arr_idx['angle_steers']] = sm['controlsState'].angleSteers + plot_arr[-1, name_to_arr_idx['angle_steers_des']] = sm['carControl'].actuators.steerAngle + plot_arr[-1, name_to_arr_idx['angle_steers_k']] = angle_steers_k + plot_arr[-1, name_to_arr_idx['gas']] = sm['carState'].gas + plot_arr[-1, name_to_arr_idx['computer_gas']] = sm['carControl'].actuators.gas + plot_arr[-1, name_to_arr_idx['user_brake']] = sm['carState'].brake + plot_arr[-1, name_to_arr_idx['steer_torque']] = sm['carControl'].actuators.steer * ANGLE_SCALE + plot_arr[-1, name_to_arr_idx['computer_brake']] = sm['carControl'].actuators.brake + plot_arr[-1, name_to_arr_idx['v_ego']] = sm['controlsState'].vEgo + plot_arr[-1, name_to_arr_idx['v_pid']] = sm['controlsState'].vPid + plot_arr[-1, name_to_arr_idx['v_override']] = 
sm['carControl'].cruiseControl.speedOverride + plot_arr[-1, name_to_arr_idx['v_cruise']] = sm['carState'].cruiseState.speed + plot_arr[-1, name_to_arr_idx['a_ego']] = sm['carState'].aEgo + plot_arr[-1, name_to_arr_idx['a_target']] = sm['plan'].aTarget + plot_arr[-1, name_to_arr_idx['accel_override']] = sm['carControl'].cruiseControl.accelOverride + + # ***** model **** + if len(sm['model'].path.poly) > 0: + model_data = extract_model_data(sm['model']) + plot_model(model_data, VM, sm['controlsState'].vEgo, sm['controlsState'].curvature, imgw, calibration, + top_down, np.array(sm['pathPlan'].dPoly)) + + # MPC + if sm.updated['liveMpc']: + draw_mpc(sm['liveMpc'], top_down) + + # draw all radar points + maybe_update_radar_points(sm['liveTracks'], top_down[1]) + + + if sm.updated['liveCalibration']: + extrinsic_matrix = np.asarray(sm['liveCalibration'].extrinsicMatrix).reshape(3, 4) + ke = intrinsic_matrix.dot(extrinsic_matrix) + warp_matrix = get_camera_frame_from_model_frame(ke) + calibration = CalibrationTransformsForWarpMatrix(warp_matrix, intrinsic_matrix, extrinsic_matrix) + + # draw red pt for lead car in the main img + for lead in [sm['radarState'].leadOne, sm['radarState'].leadTwo]: + if lead.status: + if calibration is not None: + draw_lead_on(img, lead.dRel, lead.yRel, calibration, color=(192,0,0)) + + draw_lead_car(lead.dRel, top_down) + + # *** blits *** + pygame.surfarray.blit_array(camera_surface, img.swapaxes(0,1)) + screen.blit(camera_surface, (0, 0)) + + # display alerts + alert_line1 = alert1_font.render(sm['controlsState'].alertText1, True, (255,0,0)) + alert_line2 = alert2_font.render(sm['controlsState'].alertText2, True, (255,0,0)) + screen.blit(alert_line1, (180, 150)) + screen.blit(alert_line2, (180, 190)) + + if calibration is not None and img is not None: + cpw = warp_points(CalP, calibration.model_to_bb) + vanishing_pointw = warp_points(vanishing_point, calibration.model_to_bb) + pygame.draw.polygon(screen, BLUE, tuple(map(tuple, cpw)), 1) + 
pygame.draw.circle(screen, BLUE, list(map(int, map(round, vanishing_pointw[0]))), 2) + + if HOR: + screen.blit(draw_plots(plot_arr), (640+384, 0)) + else: + screen.blit(draw_plots(plot_arr), (0, 600)) + + pygame.surfarray.blit_array(cameraw_surface, imgw.swapaxes(0, 1)) + screen.blit(cameraw_surface, (320, 480)) + + pygame.surfarray.blit_array(*top_down) + screen.blit(top_down[0], (640,0)) + + i = 0 + SPACING = 25 + + lines = [ + info_font.render("ENABLED", True, GREEN if sm['controlsState'].enabled else BLACK), + info_font.render("BRAKE LIGHTS", True, RED if sm['carState'].brakeLights else BLACK), + info_font.render("SPEED: " + str(round(sm['carState'].vEgo, 1)) + " m/s", True, YELLOW), + info_font.render("LONG CONTROL STATE: " + str(sm['controlsState'].longControlState), True, YELLOW), + info_font.render("LONG MPC SOURCE: " + str(sm['plan'].longitudinalPlanSource), True, YELLOW), + None, + info_font.render("ANGLE OFFSET (AVG): " + str(round(sm['liveParameters'].angleOffsetAverage, 2)) + " deg", True, YELLOW), + info_font.render("ANGLE OFFSET (INSTANT): " + str(round(sm['liveParameters'].angleOffset, 2)) + " deg", True, YELLOW), + info_font.render("STIFFNESS: " + str(round(sm['liveParameters'].stiffnessFactor * 100., 2)) + " %", True, YELLOW), + info_font.render("STEER RATIO: " + str(round(sm['liveParameters'].steerRatio, 2)), True, YELLOW) + ] + + for i, line in enumerate(lines): + if line is not None: + screen.blit(line, (write_x, write_y + i * SPACING)) + + # this takes time...vsync or something + pygame.display.flip() + +def get_arg_parser(): + parser = argparse.ArgumentParser( + description="Show replay data in a UI.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + + parser.add_argument("ip_address", nargs="?", default="127.0.0.1", + help="The ip address on which to receive zmq messages.") + + parser.add_argument("--frame-address", default=None, + help="The frame address (fully qualified ZMQ endpoint for frames) on which to receive zmq 
messages.") + return parser + +if __name__ == "__main__": + args = get_arg_parser().parse_args(sys.argv[1:]) + + if args.ip_address != "127.0.0.1": + os.environ["ZMQ"] = "1" + messaging.context = messaging.Context() + + ui_thread(args.ip_address, args.frame_address) diff --git a/tools/replay/unlogger.py b/tools/replay/unlogger.py new file mode 100755 index 00000000000000..227091fb298ee2 --- /dev/null +++ b/tools/replay/unlogger.py @@ -0,0 +1,447 @@ +#!/usr/bin/env python3 +import argparse +import os +import sys +import zmq +import time +import gc +import signal +from threading import Thread +import numpy as np +from uuid import uuid4 +from collections import namedtuple +from collections import deque +from multiprocessing import Process, TimeoutError +from datetime import datetime + +# strat 1: script to copy files +# strat 2: build pip packages around these +# could be its own pip package, which we'd need to build and release +from cereal import log as capnp_log +from cereal.services import service_list +from cereal.messaging import pub_sock, MultiplePublishersError +from common import realtime + +from tools.lib.file_helpers import mkdirs_exists_ok +from tools.lib.kbhit import KBHit +from tools.lib.logreader import MultiLogIterator +from tools.lib.route import Route +from tools.lib.route_framereader import RouteFrameReader + +# Commands. 
+SetRoute = namedtuple("SetRoute", ("name", "start_time", "data_dir")) +SeekAbsoluteTime = namedtuple("SeekAbsoluteTime", ("secs",)) +SeekRelativeTime = namedtuple("SeekRelativeTime", ("secs",)) +TogglePause = namedtuple("TogglePause", ()) +StopAndQuit = namedtuple("StopAndQuit", ()) + + +class UnloggerWorker(object): + def __init__(self): + self._frame_reader = None + self._cookie = None + self._readahead = deque() + + def run(self, commands_address, data_address, pub_types): + zmq.Context._instance = None + commands_socket = zmq.Context.instance().socket(zmq.PULL) + commands_socket.connect(commands_address) + + data_socket = zmq.Context.instance().socket(zmq.PUSH) + data_socket.connect(data_address) + + poller = zmq.Poller() + poller.register(commands_socket, zmq.POLLIN) + + # We can't publish frames without encodeIdx, so add when it's missing. + if "frame" in pub_types: + pub_types["encodeIdx"] = None + + # gc.set_debug(gc.DEBUG_LEAK | gc.DEBUG_OBJECTS | gc.DEBUG_STATS | gc.DEBUG_SAVEALL | + # gc.DEBUG_UNCOLLECTABLE) + + # TODO: WARNING pycapnp leaks memory all over the place after unlogger runs for a while, gc + # pauses become huge because there are so many tracked objects solution will be to switch to new + # cython capnp + try: + route = None + while True: + while poller.poll(0.) 
or route is None: + cookie, cmd = commands_socket.recv_pyobj() + route = self._process_commands(cmd, route, pub_types) + + # **** get message **** + self._read_logs(cookie, pub_types) + self._send_logs(data_socket) + finally: + if self._frame_reader is not None: + self._frame_reader.close() + data_socket.close() + commands_socket.close() + + def _read_logs(self, cookie, pub_types): + fullHEVC = capnp_log.EncodeIndex.Type.fullHEVC + lr = self._lr + while len(self._readahead) < 1000: + route_time = lr.tell() + msg = next(lr) + typ = msg.which() + if typ not in pub_types: + continue + + # **** special case certain message types **** + if typ == "encodeIdx" and msg.encodeIdx.type == fullHEVC: + # this assumes the encodeIdx always comes before the frame + self._frame_id_lookup[ + msg.encodeIdx.frameId] = msg.encodeIdx.segmentNum, msg.encodeIdx.segmentId + #print "encode", msg.encodeIdx.frameId, len(self._readahead), route_time + self._readahead.appendleft((typ, msg, route_time, cookie)) + + def _send_logs(self, data_socket): + while len(self._readahead) > 500: + typ, msg, route_time, cookie = self._readahead.pop() + smsg = msg.as_builder() + + if typ == "frame": + frame_id = msg.frame.frameId + + # Frame exists, make sure we have a framereader. 
+ # load the frame readers as needed + s1 = time.time() + img = self._frame_reader.get(frame_id, pix_fmt="rgb24") + fr_time = time.time() - s1 + if fr_time > 0.05: + print("FRAME(%d) LAG -- %.2f ms" % (frame_id, fr_time*1000.0)) + + if img is not None: + img = img[:, :, ::-1] # Convert RGB to BGR, which is what the camera outputs + img = img.flatten() + smsg.frame.image = img.tobytes() + + data_socket.send_pyobj((cookie, typ, msg.logMonoTime, route_time), flags=zmq.SNDMORE) + data_socket.send(smsg.to_bytes(), copy=False) + + def _process_commands(self, cmd, route, pub_types): + seek_to = None + if route is None or (isinstance(cmd, SetRoute) and route.name != cmd.name): + seek_to = cmd.start_time + route = Route(cmd.name, cmd.data_dir) + self._lr = MultiLogIterator(route.log_paths(), wraparound=True) + if self._frame_reader is not None: + self._frame_reader.close() + if "frame" in pub_types or "encodeIdx" in pub_types: + # reset frames for a route + self._frame_id_lookup = {} + self._frame_reader = RouteFrameReader( + route.camera_paths(), None, self._frame_id_lookup, readahead=True) + + # always reset this on a seek + if isinstance(cmd, SeekRelativeTime): + seek_to = self._lr.tell() + cmd.secs + elif isinstance(cmd, SeekAbsoluteTime): + seek_to = cmd.secs + elif isinstance(cmd, StopAndQuit): + exit() + + if seek_to is not None: + print("seeking", seek_to) + if not self._lr.seek(seek_to): + print("Can't seek: time out of bounds") + else: + next(self._lr) # ignore one + return route + +def _get_address_send_func(address): + sock = pub_sock(address) + return sock.send + + +def unlogger_thread(command_address, forward_commands_address, data_address, run_realtime, + address_mapping, publish_time_length, bind_early, no_loop): + # Clear context to avoid problems with multiprocessing. 
+ zmq.Context._instance = None + context = zmq.Context.instance() + + command_sock = context.socket(zmq.PULL) + command_sock.bind(command_address) + + forward_commands_socket = context.socket(zmq.PUSH) + forward_commands_socket.bind(forward_commands_address) + + data_socket = context.socket(zmq.PULL) + data_socket.bind(data_address) + + # Set readahead to a reasonable number. + data_socket.setsockopt(zmq.RCVHWM, 10000) + + poller = zmq.Poller() + poller.register(command_sock, zmq.POLLIN) + poller.register(data_socket, zmq.POLLIN) + + if bind_early: + send_funcs = { + typ: _get_address_send_func(address) + for typ, address in address_mapping.items() + } + + # Give subscribers a chance to connect. + time.sleep(0.1) + else: + send_funcs = {} + + start_time = float("inf") + printed_at = 0 + generation = 0 + paused = False + reset_time = True + prev_msg_time = None + while True: + evts = dict(poller.poll()) + if command_sock in evts: + cmd = command_sock.recv_pyobj() + if isinstance(cmd, TogglePause): + paused = not paused + if paused: + poller.modify(data_socket, 0) + else: + poller.modify(data_socket, zmq.POLLIN) + else: + # Forward the command to the log data thread. + # TODO: Remove everything on data_socket. + generation += 1 + forward_commands_socket.send_pyobj((generation, cmd)) + if isinstance(cmd, StopAndQuit): + return + + reset_time = True + elif data_socket in evts: + msg_generation, typ, msg_time, route_time = data_socket.recv_pyobj(flags=zmq.RCVMORE) + msg_bytes = data_socket.recv() + if msg_generation < generation: + # Skip packets. 
+ continue + + if no_loop and prev_msg_time is not None and prev_msg_time > msg_time + 1e9: + generation += 1 + forward_commands_socket.send_pyobj((generation, StopAndQuit())) + return + prev_msg_time = msg_time + + msg_time_seconds = msg_time * 1e-9 + if reset_time: + msg_start_time = msg_time_seconds + real_start_time = realtime.sec_since_boot() + start_time = min(start_time, msg_start_time) + reset_time = False + + if publish_time_length and msg_time_seconds - start_time > publish_time_length: + generation += 1 + forward_commands_socket.send_pyobj((generation, StopAndQuit())) + return + + # Print time. + if abs(printed_at - route_time) > 5.: + print("at", route_time) + printed_at = route_time + + if typ not in send_funcs: + if typ in address_mapping: + # Remove so we don't keep printing warnings. + address = address_mapping.pop(typ) + try: + print("binding", typ) + send_funcs[typ] = _get_address_send_func(address) + except Exception as e: + print("couldn't replay {}: {}".format(typ, e)) + continue + else: + # Skip messages that we are not registered to publish. + continue + + # Sleep as needed for real time playback. + if run_realtime: + msg_time_offset = msg_time_seconds - msg_start_time + real_time_offset = realtime.sec_since_boot() - real_start_time + lag = msg_time_offset - real_time_offset + if lag > 0 and lag < 30: # a large jump is OK, likely due to an out of order segment + if lag > 1: + print("sleeping for", lag) + time.sleep(lag) + elif lag < -1: + # Relax the real time schedule when we slip far behind. + reset_time = True + + # Send message. 
+ try: + send_funcs[typ](msg_bytes) + except MultiplePublishersError: + del send_funcs[typ] + +def timestamp_to_s(tss): + return time.mktime(datetime.strptime(tss, '%Y-%m-%d--%H-%M-%S').timetuple()) + +def absolute_time_str(s, start_time): + try: + # first try if it's a float + return float(s) + except ValueError: + # now see if it's a timestamp + return timestamp_to_s(s) - start_time + +def _get_address_mapping(args): + if args.min is not None: + services_to_mock = [ + 'thermal', 'can', 'health', 'sensorEvents', 'gpsNMEA', 'frame', 'encodeIdx', + 'model', 'features', 'liveLocation', 'gpsLocation' + ] + elif args.enabled is not None: + services_to_mock = args.enabled + else: + services_to_mock = service_list.keys() + + address_mapping = {service_name: service_name for service_name in services_to_mock} + address_mapping.update(dict(args.address_mapping)) + + for k in args.disabled: + address_mapping.pop(k, None) + + non_services = set(address_mapping) - set(service_list) + if non_services: + print("WARNING: Unknown services {}".format(list(non_services))) + + return address_mapping + +def keyboard_controller_thread(q, route_start_time): + print("keyboard waiting for input") + kb = KBHit() + while 1: + c = kb.getch() + if c=='m': # Move forward by 1m + q.send_pyobj(SeekRelativeTime(60)) + elif c=='M': # Move backward by 1m + q.send_pyobj(SeekRelativeTime(-60)) + elif c=='s': # Move forward by 10s + q.send_pyobj(SeekRelativeTime(10)) + elif c=='S': # Move backward by 10s + q.send_pyobj(SeekRelativeTime(-10)) + elif c=='G': # Go back to the beginning of the route + q.send_pyobj(SeekAbsoluteTime(0.)) + elif c=="\x20": # Space bar. 
+ q.send_pyobj(TogglePause()) + elif c=="\n": + try: + seek_time_input = input('time: ') + seek_time = absolute_time_str(seek_time_input, route_start_time) + + q.send_pyobj(SeekAbsoluteTime(seek_time)) + except Exception as e: + print("Time not understood: {}".format(e)) + +def get_arg_parser(): + parser = argparse.ArgumentParser( + description="Mock openpilot components by publishing logged messages.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + + parser.add_argument("route_name", type=(lambda x: x.replace("#", "|")), nargs="?", + help="The route whose messages will be published.") + parser.add_argument("data_dir", nargs='?', default=os.getenv('UNLOGGER_DATA_DIR'), + help="Path to directory in which log and camera files are located.") + + parser.add_argument("--no-loop", action="store_true", help="Stop at the end of the replay.") + + key_value_pair = lambda x: x.split("=") + parser.add_argument("address_mapping", nargs="*", type=key_value_pair, + help="Pairs = to publish on .") + + comma_list = lambda x: x.split(",") + to_mock_group = parser.add_mutually_exclusive_group() + to_mock_group.add_argument("--min", action="store_true", default=os.getenv("MIN")) + to_mock_group.add_argument("--enabled", default=os.getenv("ENABLED"), type=comma_list) + + parser.add_argument("--disabled", type=comma_list, default=os.getenv("DISABLED") or ()) + + parser.add_argument( + "--tl", dest="publish_time_length", type=float, default=None, + help="Length of interval in event time for which messages should be published.") + + parser.add_argument( + "--no-realtime", dest="realtime", action="store_false", default=True, + help="Publish messages as quickly as possible instead of realtime.") + + parser.add_argument( + "--no-interactive", dest="interactive", action="store_false", default=True, + help="Disable interactivity.") + + parser.add_argument( + "--bind-early", action="store_true", default=False, + help="Bind early to avoid dropping messages.") + + return parser + 
+def main(argv): + args = get_arg_parser().parse_args(sys.argv[1:]) + + command_address = "ipc:///tmp/{}".format(uuid4()) + forward_commands_address = "ipc:///tmp/{}".format(uuid4()) + data_address = "ipc:///tmp/{}".format(uuid4()) + + address_mapping = _get_address_mapping(args) + + command_sock = zmq.Context.instance().socket(zmq.PUSH) + command_sock.connect(command_address) + + if args.route_name is not None: + route_name_split = args.route_name.split("|") + if len(route_name_split) > 1: + route_start_time = timestamp_to_s(route_name_split[1]) + else: + route_start_time = 0 + command_sock.send_pyobj( + SetRoute(args.route_name, 0, args.data_dir)) + else: + print("waiting for external command...") + route_start_time = 0 + + subprocesses = {} + try: + subprocesses["data"] = Process( + target=UnloggerWorker().run, + args=(forward_commands_address, data_address, address_mapping.copy())) + + subprocesses["control"] = Process( + target=unlogger_thread, + args=(command_address, forward_commands_address, data_address, args.realtime, + _get_address_mapping(args), args.publish_time_length, args.bind_early, args.no_loop)) + + for p in subprocesses.values(): + p.daemon = True + + subprocesses["data"].start() + subprocesses["control"].start() + + # Exit if any of the children die. + def exit_if_children_dead(*_): + for name, p in subprocesses.items(): + if not p.is_alive(): + [p.terminate() for p in subprocesses.values()] + exit() + signal.signal(signal.SIGCHLD, signal.SIGIGN) + signal.signal(signal.SIGCHLD, exit_if_children_dead) + + if args.interactive: + keyboard_controller_thread(command_sock, route_start_time) + else: + # Wait forever for children. + while True: + time.sleep(10000.) + finally: + for p in subprocesses.values(): + if p.is_alive(): + try: + p.join(3.) 
+ except TimeoutError: + p.terminate() + continue + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/tools/requirements.txt b/tools/requirements.txt new file mode 100644 index 00000000000000..03afaa573d80e5 --- /dev/null +++ b/tools/requirements.txt @@ -0,0 +1,11 @@ +aenum +atomicwrites +futures +libarchive +lru-dict +matplotlib +numpy +opencv-python +pygame +hexdump +av==0.5.0 diff --git a/tools/sim/.gitignore b/tools/sim/.gitignore new file mode 100644 index 00000000000000..b9442630ef79de --- /dev/null +++ b/tools/sim/.gitignore @@ -0,0 +1,4 @@ +CARLA_*.tar.gz +carla +carla_tmp + diff --git a/tools/sim/README.md b/tools/sim/README.md new file mode 100644 index 00000000000000..8277ab1f10b2ce --- /dev/null +++ b/tools/sim/README.md @@ -0,0 +1,40 @@ +openpilot in simulator +===================== +Needs Ubuntu 16.04 + +## Checkout openpilot +``` +cd ~/ +git clone https://github.com/commaai/openpilot.git + +# Add export PYTHONPATH=$HOME/openpilot to your bashrc +# Have a working tensorflow+keras in python3.7.3 (with [packages] in openpilot/Pipfile) +``` +## Install (in tab 1) +``` +cd ~/openpilot/tools/sim +./start_carla.sh # install CARLA 0.9.7 and start the server +``` +## openpilot (in tab 2) +``` +cd ~/openpilot/selfdrive/ +PASSIVE=0 NOBOARD=1 ./manager.py +``` +## bridge (in tab 3) +``` +# links carla to openpilot, will "start the car" according to manager +cd ~/openpilot/tools/sim +./bridge.py +``` +## Controls +Now you can control the simulator with the keys: + +1: Cruise up 5 mph + +2: Cruise down 5 mph + +3: Cruise cancel + +q: Exit all + + diff --git a/tools/sim/bridge.py b/tools/sim/bridge.py new file mode 100755 index 00000000000000..6979e44f81f7a0 --- /dev/null +++ b/tools/sim/bridge.py @@ -0,0 +1,252 @@ +#!/usr/bin/env python3 +import carla +import os +import time, termios, tty, sys +import math +import atexit +import numpy as np +import threading +import random +import cereal.messaging as messaging +import argparse +import queue 
+from common.params import Params +from common.realtime import Ratekeeper +from lib.can import can_function, sendcan_function +from lib.helpers import FakeSteeringWheel +from selfdrive.car.honda.values import CruiseButtons + +parser = argparse.ArgumentParser(description='Bridge between CARLA and openpilot.') +parser.add_argument('--autopilot', action='store_true') +parser.add_argument('--joystick', action='store_true') +parser.add_argument('--realmonitoring', action='store_true') +args = parser.parse_args() + +pm = messaging.PubMaster(['frame', 'sensorEvents', 'can']) + +W,H = 1164, 874 + +def cam_callback(image): + img = np.frombuffer(image.raw_data, dtype=np.dtype("uint8")) + img = np.reshape(img, (H, W, 4)) + img = img[:, :, [0,1,2]].copy() + + dat = messaging.new_message('frame') + dat.frame = { + "frameId": image.frame, + "image": img.tostring(), + "transform": [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0] + } + pm.send('frame', dat) + +def imu_callback(imu): + #print(imu, imu.accelerometer) + + dat = messaging.new_message('sensorEvents', 2) + dat.sensorEvents[0].sensor = 4 + dat.sensorEvents[0].type = 0x10 + dat.sensorEvents[0].init('acceleration') + dat.sensorEvents[0].acceleration.v = [imu.accelerometer.x, imu.accelerometer.y, imu.accelerometer.z] + # copied these numbers from locationd + dat.sensorEvents[1].sensor = 5 + dat.sensorEvents[1].type = 0x10 + dat.sensorEvents[1].init('gyroUncalibrated') + dat.sensorEvents[1].gyroUncalibrated.v = [imu.gyroscope.x, imu.gyroscope.y, imu.gyroscope.z] + pm.send('sensorEvents', dat) + +def health_function(): + pm = messaging.PubMaster(['health']) + rk = Ratekeeper(1.0) + while 1: + dat = messaging.new_message('health') + dat.valid = True + dat.health = { + 'ignitionLine': True, + 'hwType': "whitePanda", + 'controlsAllowed': True + } + pm.send('health', dat) + rk.keep_time() + +def fake_driver_monitoring(): + if args.realmonitoring: + return + pm = messaging.PubMaster(['driverState']) + while 1: + dat = 
messaging.new_message('driverState') + dat.driverState.faceProb = 1.0 + pm.send('driverState', dat) + time.sleep(0.1) + +def go(q): + threading.Thread(target=health_function).start() + threading.Thread(target=fake_driver_monitoring).start() + + import carla + client = carla.Client("127.0.0.1", 2000) + client.set_timeout(5.0) + world = client.load_world('Town04') + settings = world.get_settings() + settings.fixed_delta_seconds = 0.05 + world.apply_settings(settings) + + weather = carla.WeatherParameters( + cloudyness=0.1, + precipitation=0.0, + precipitation_deposits=0.0, + wind_intensity=0.0, + sun_azimuth_angle=15.0, + sun_altitude_angle=75.0) + world.set_weather(weather) + + blueprint_library = world.get_blueprint_library() + """ + for blueprint in blueprint_library.filter('sensor.*'): + print(blueprint.id) + exit(0) + """ + + world_map = world.get_map() + vehicle_bp = random.choice(blueprint_library.filter('vehicle.tesla.*')) + vehicle = world.spawn_actor(vehicle_bp, world_map.get_spawn_points()[16]) + + # make tires less slippery + wheel_control = carla.WheelPhysicsControl(tire_friction=5) + physics_control = vehicle.get_physics_control() + physics_control.mass = 1326 + physics_control.wheels = [wheel_control]*4 + physics_control.torque_curve = [[20.0, 500.0], [5000.0, 500.0]] + physics_control.gear_switch_time = 0.0 + vehicle.apply_physics_control(physics_control) + + if args.autopilot: + vehicle.set_autopilot(True) + # print(vehicle.get_speed_limit()) + + blueprint = blueprint_library.find('sensor.camera.rgb') + blueprint.set_attribute('image_size_x', str(W)) + blueprint.set_attribute('image_size_y', str(H)) + blueprint.set_attribute('fov', '70') + blueprint.set_attribute('sensor_tick', '0.05') + transform = carla.Transform(carla.Location(x=0.8, z=1.45)) + camera = world.spawn_actor(blueprint, transform, attach_to=vehicle) + camera.listen(cam_callback) + + # reenable IMU + imu_bp = blueprint_library.find('sensor.other.imu') + imu = world.spawn_actor(imu_bp, 
transform, attach_to=vehicle) + imu.listen(imu_callback) + + def destroy(): + print("clean exit") + imu.destroy() + camera.destroy() + vehicle.destroy() + print("done") + atexit.register(destroy) + + # can loop + sendcan = messaging.sub_sock('sendcan') + rk = Ratekeeper(100, print_delay_threshold=0.05) + + # init + A_throttle = 2. + A_brake = 2. + A_steer_torque = 1. + fake_wheel = FakeSteeringWheel() + is_openpilot_engaged = False + in_reverse = False + + throttle_out = 0 + brake_out = 0 + steer_angle_out = 0 + + while 1: + cruise_button = 0 + + # check for a input message, this will not block + if not q.empty(): + print("here") + message = q.get() + + m = message.split('_') + if m[0] == "steer": + steer_angle_out = float(m[1]) + fake_wheel.set_angle(steer_angle_out) # touching the wheel overrides fake wheel angle + # print(" === steering overriden === ") + if m[0] == "throttle": + throttle_out = float(m[1]) / 100. + if throttle_out > 0.3: + cruise_button = CruiseButtons.CANCEL + is_openpilot_engaged = False + if m[0] == "brake": + brake_out = float(m[1]) / 100. + if brake_out > 0.3: + cruise_button = CruiseButtons.CANCEL + is_openpilot_engaged = False + if m[0] == "reverse": + in_reverse = not in_reverse + cruise_button = CruiseButtons.CANCEL + is_openpilot_engaged = False + if m[0] == "cruise": + if m[1] == "down": + cruise_button = CruiseButtons.DECEL_SET + is_openpilot_engaged = True + if m[1] == "up": + cruise_button = CruiseButtons.RES_ACCEL + is_openpilot_engaged = True + if m[1] == "cancel": + cruise_button = CruiseButtons.CANCEL + is_openpilot_engaged = False + + vel = vehicle.get_velocity() + speed = math.sqrt(vel.x**2 + vel.y**2 + vel.z**2) * 3.6 + can_function(pm, speed, fake_wheel.angle, rk.frame, cruise_button=cruise_button, is_engaged=is_openpilot_engaged) + + if rk.frame%1 == 0: # 20Hz? 
+ throttle_op, brake_op, steer_torque_op = sendcan_function(sendcan) + # print(" === torq, ",steer_torque_op, " ===") + if is_openpilot_engaged: + fake_wheel.response(steer_torque_op * A_steer_torque, speed) + throttle_out = throttle_op * A_throttle + brake_out = brake_op * A_brake + steer_angle_out = fake_wheel.angle + # print(steer_torque_op) + # print(steer_angle_out) + vc = carla.VehicleControl(throttle=throttle_out, steer=steer_angle_out / 3.14, brake=brake_out, reverse=in_reverse) + vehicle.apply_control(vc) + + rk.keep_time() + +if __name__ == "__main__": + params = Params() + params.delete("Offroad_ConnectivityNeeded") + from selfdrive.version import terms_version, training_version + params.put("HasAcceptedTerms", terms_version) + params.put("CompletedTrainingVersion", training_version) + params.put("CommunityFeaturesToggle", "1") + params.put("CalibrationParams", '{"vanishing_point": [582.06, 442.78], "valid_blocks": 20}') + + # no carla, still run + try: + import carla + except ImportError: + print("WARNING: NO CARLA") + while 1: + time.sleep(1) + + from multiprocessing import Process, Queue + q = Queue() + p = Process(target=go, args=(q,)) + p.daemon = True + p.start() + + if args.joystick: + # start input poll for joystick + from lib.manual_ctrl import wheel_poll_thread + wheel_poll_thread(q) + else: + # start input poll for keyboard + from lib.keyboard_ctrl import keyboard_poll_thread + keyboard_poll_thread(q) + diff --git a/tools/sim/lib/__init__.py b/tools/sim/lib/__init__.py new file mode 100644 index 00000000000000..e69de29bb2d1d6 diff --git a/tools/sim/lib/can.py b/tools/sim/lib/can.py new file mode 100755 index 00000000000000..509bb30198e94c --- /dev/null +++ b/tools/sim/lib/can.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python3 +import time +import cereal.messaging as messaging +from opendbc.can.parser import CANParser +from opendbc.can.packer import CANPacker +from selfdrive.boardd.boardd_api_impl import can_list_to_can_capnp +from 
selfdrive.car.honda.values import FINGERPRINTS, CAR +from selfdrive.car import crc8_pedal +import math + +from selfdrive.test.longitudinal_maneuvers.plant import get_car_can_parser +cp = get_car_can_parser() +#cp = CANParser("honda_civic_touring_2016_can_generated") + +packer = CANPacker("honda_civic_touring_2016_can_generated") +rpacker = CANPacker("acura_ilx_2016_nidec") + +SR = 7.5 + +def angle_to_sangle(angle): + return - math.degrees(angle) * SR + +def can_function(pm, speed, angle, idx, cruise_button=0, is_engaged=False): + msg = [] + msg.append(packer.make_can_msg("ENGINE_DATA", 0, {"XMISSION_SPEED": speed}, idx)) + msg.append(packer.make_can_msg("WHEEL_SPEEDS", 0, + {"WHEEL_SPEED_FL": speed, + "WHEEL_SPEED_FR": speed, + "WHEEL_SPEED_RL": speed, + "WHEEL_SPEED_RR": speed}, -1)) + + msg.append(packer.make_can_msg("SCM_BUTTONS", 0, {"CRUISE_BUTTONS": cruise_button}, idx)) + + values = {"COUNTER_PEDAL": idx&0xF} + checksum = crc8_pedal(packer.make_can_msg("GAS_SENSOR", 0, {"COUNTER_PEDAL": idx&0xF}, -1)[2][:-1]) + values["CHECKSUM_PEDAL"] = checksum + msg.append(packer.make_can_msg("GAS_SENSOR", 0, values, -1)) + + msg.append(packer.make_can_msg("GEARBOX", 0, {"GEAR": 4, "GEAR_SHIFTER": 8}, idx)) + msg.append(packer.make_can_msg("GAS_PEDAL_2", 0, {}, idx)) + msg.append(packer.make_can_msg("SEATBELT_STATUS", 0, {"SEATBELT_DRIVER_LATCHED": 1}, idx)) + msg.append(packer.make_can_msg("STEER_STATUS", 0, {}, idx)) + msg.append(packer.make_can_msg("STEERING_SENSORS", 0, {"STEER_ANGLE":angle_to_sangle(angle)}, idx)) + msg.append(packer.make_can_msg("POWERTRAIN_DATA", 0, {}, idx)) + msg.append(packer.make_can_msg("VSA_STATUS", 0, {}, idx)) + msg.append(packer.make_can_msg("STANDSTILL", 0, {}, idx)) + msg.append(packer.make_can_msg("STEER_MOTOR_TORQUE", 0, {}, idx)) + msg.append(packer.make_can_msg("EPB_STATUS", 0, {}, idx)) + msg.append(packer.make_can_msg("DOORS_STATUS", 0, {}, idx)) + msg.append(packer.make_can_msg("CRUISE_PARAMS", 0, {}, idx)) + 
msg.append(packer.make_can_msg("CRUISE", 0, {}, idx)) + msg.append(packer.make_can_msg("SCM_FEEDBACK", 0, {"MAIN_ON": 1}, idx)) + msg.append(packer.make_can_msg("POWERTRAIN_DATA", 0, {"ACC_STATUS": int(is_engaged)}, idx)) + #print(msg) + + # cam bus + msg.append(packer.make_can_msg("STEERING_CONTROL", 2, {}, idx)) + msg.append(packer.make_can_msg("ACC_HUD", 2, {}, idx)) + msg.append(packer.make_can_msg("BRAKE_COMMAND", 2, {}, idx)) + + # radar + if idx%5 == 0: + msg.append(rpacker.make_can_msg("RADAR_DIAGNOSTIC", 1, {"RADAR_STATE": 0x79}, -1)) + for i in range(16): + msg.append(rpacker.make_can_msg("TRACK_%d" % i, 1, {"LONG_DIST": 255.5}, -1)) + + # fill in the rest for fingerprint + done = set([x[0] for x in msg]) + for k,v in FINGERPRINTS[CAR.CIVIC][0].items(): + if k not in done and k not in [0xE4, 0x194]: + msg.append([k, 0, b'\x00'*v, 0]) + pm.send('can', can_list_to_can_capnp(msg)) + +def sendcan_function(sendcan): + sc = messaging.drain_sock_raw(sendcan) + cp.update_strings(sc, sendcan=True) + + if cp.vl[0x1fa]['COMPUTER_BRAKE_REQUEST']: + brake = cp.vl[0x1fa]['COMPUTER_BRAKE'] * 0.003906248 + else: + brake = 0.0 + + if cp.vl[0x200]['GAS_COMMAND'] > 0: + gas = cp.vl[0x200]['GAS_COMMAND'] / 256.0 + else: + gas = 0.0 + + if cp.vl[0xe4]['STEER_TORQUE_REQUEST']: + steer_torque = cp.vl[0xe4]['STEER_TORQUE']*1.0/0x1000 + else: + steer_torque = 0.0 + + return (gas, brake, steer_torque) + +if __name__ == "__main__": + pm = messaging.PubMaster(['can']) + sendcan = messaging.sub_sock('sendcan') + idx = 0 + while 1: + sendcan_function(sendcan) + can_function(pm, 10.0, idx) + time.sleep(0.01) + idx += 1 + diff --git a/tools/sim/lib/helpers.py b/tools/sim/lib/helpers.py new file mode 100644 index 00000000000000..2156e7bc4a305b --- /dev/null +++ b/tools/sim/lib/helpers.py @@ -0,0 +1,36 @@ +import numpy as np + +class FakeSteeringWheel(): + def __init__(self, dt=0.01): + # physical params + self.DAC = 4. 
/ 0.625 # convert torque value from can to Nm + self.k = 0.035 + self.centering_k = 4.1 * 0.9 + self.b = 0.1 * 0.8 + self.I = 1 * 1.36 * (0.175**2) + self.dt = dt + # ... + + self.angle = 0. # start centered + # self.omega = 0. + + def response(self, torque, vego=0): + exerted_torque = torque * self.DAC + # centering_torque = np.clip(-(self.centering_k * self.angle), -1.1, 1.1) + # damping_torque = -(self.b * self.omega) + # self.omega += self.dt * (exerted_torque + centering_torque + damping_torque) / self.I + # self.omega = np.clip(self.omega, -1.1, 1.1) + # self.angle += self.dt * self.omega + self.angle += self.dt * self.k * exerted_torque # aristotle + + # print(" ========== ") + # print("angle,", self.angle) + # print("omega,", self.omega) + # print("torque,", exerted_torque) + # print("centering torque,", centering_torque) + # print("damping torque,", damping_torque) + # print(" ========== ") + + def set_angle(self, target): + self.angle = target + # self.omega = 0. diff --git a/tools/sim/lib/keyboard_ctrl.py b/tools/sim/lib/keyboard_ctrl.py new file mode 100644 index 00000000000000..8be3ca0ddaf2d3 --- /dev/null +++ b/tools/sim/lib/keyboard_ctrl.py @@ -0,0 +1,61 @@ +import time +import sys +import termios +from termios import * + +# Indexes for termios list. 
+IFLAG = 0 +OFLAG = 1 +CFLAG = 2 +LFLAG = 3 +ISPEED = 4 +OSPEED = 5 +CC = 6 + +def getch(): + fd = sys.stdin.fileno() + old_settings = termios.tcgetattr(fd) + try: + # set + mode = termios.tcgetattr(fd) + mode[IFLAG] = mode[IFLAG] & ~(BRKINT | ICRNL | INPCK | ISTRIP | IXON) + #mode[OFLAG] = mode[OFLAG] & ~(OPOST) + mode[CFLAG] = mode[CFLAG] & ~(CSIZE | PARENB) + mode[CFLAG] = mode[CFLAG] | CS8 + mode[LFLAG] = mode[LFLAG] & ~(ECHO | ICANON | IEXTEN | ISIG) + mode[CC][VMIN] = 1 + mode[CC][VTIME] = 0 + termios.tcsetattr(fd, termios.TCSAFLUSH, mode) + + ch = sys.stdin.read(1) + finally: + termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) + return ch + +def keyboard_poll_thread(q): + while True: + c = getch() + print("got %s" % c) + if c == '1': + q.put(str("cruise_up")) + if c == '2': + q.put(str("cruise_down")) + if c == '3': + q.put(str("cruise_cancel")) + if c == 'q': + exit(0) + +def test(q): + while 1: + print("hello") + time.sleep(1.0) + +if __name__ == '__main__': + from multiprocessing import Process, Queue + q = Queue() + p = Process(target=test, args=(q,)) + p.daemon = True + p.start() + + keyboard_poll_thread(q) + diff --git a/tools/sim/lib/manual_ctrl.py b/tools/sim/lib/manual_ctrl.py new file mode 100755 index 00000000000000..a32d606d432e5b --- /dev/null +++ b/tools/sim/lib/manual_ctrl.py @@ -0,0 +1,187 @@ +#!/usr/bin/env python3 +# set up wheel +import os, struct, array +from fcntl import ioctl + +# Iterate over the joystick devices. +print('Available devices:') +for fn in os.listdir('/dev/input'): + if fn.startswith('js'): + print(' /dev/input/%s' % (fn)) + +# We'll store the states here. 
+axis_states = {} +button_states = {} + +# These constants were borrowed from linux/input.h +axis_names = { + 0x00 : 'x', + 0x01 : 'y', + 0x02 : 'z', + 0x03 : 'rx', + 0x04 : 'ry', + 0x05 : 'rz', + 0x06 : 'trottle', + 0x07 : 'rudder', + 0x08 : 'wheel', + 0x09 : 'gas', + 0x0a : 'brake', + 0x10 : 'hat0x', + 0x11 : 'hat0y', + 0x12 : 'hat1x', + 0x13 : 'hat1y', + 0x14 : 'hat2x', + 0x15 : 'hat2y', + 0x16 : 'hat3x', + 0x17 : 'hat3y', + 0x18 : 'pressure', + 0x19 : 'distance', + 0x1a : 'tilt_x', + 0x1b : 'tilt_y', + 0x1c : 'tool_width', + 0x20 : 'volume', + 0x28 : 'misc', +} + +button_names = { + 0x120 : 'trigger', + 0x121 : 'thumb', + 0x122 : 'thumb2', + 0x123 : 'top', + 0x124 : 'top2', + 0x125 : 'pinkie', + 0x126 : 'base', + 0x127 : 'base2', + 0x128 : 'base3', + 0x129 : 'base4', + 0x12a : 'base5', + 0x12b : 'base6', + 0x12f : 'dead', + 0x130 : 'a', + 0x131 : 'b', + 0x132 : 'c', + 0x133 : 'x', + 0x134 : 'y', + 0x135 : 'z', + 0x136 : 'tl', + 0x137 : 'tr', + 0x138 : 'tl2', + 0x139 : 'tr2', + 0x13a : 'select', + 0x13b : 'start', + 0x13c : 'mode', + 0x13d : 'thumbl', + 0x13e : 'thumbr', + + 0x220 : 'dpad_up', + 0x221 : 'dpad_down', + 0x222 : 'dpad_left', + 0x223 : 'dpad_right', + + # XBox 360 controller uses these codes. + 0x2c0 : 'dpad_left', + 0x2c1 : 'dpad_right', + 0x2c2 : 'dpad_up', + 0x2c3 : 'dpad_down', +} + +axis_map = [] +button_map = [] + +def wheel_poll_thread(q): + # Open the joystick device. + fn = '/dev/input/js0' + print('Opening %s...' % fn) + jsdev = open(fn, 'rb') + + # Get the device name. + #buf = bytearray(63) + buf = array.array('B', [0] * 64) + ioctl(jsdev, 0x80006a13 + (0x10000 * len(buf)), buf) # JSIOCGNAME(len) + js_name = buf.tobytes().rstrip(b'\x00').decode('utf-8') + print('Device name: %s' % js_name) + + # Get number of axes and buttons. 
+ buf = array.array('B', [0]) + ioctl(jsdev, 0x80016a11, buf) # JSIOCGAXES + num_axes = buf[0] + + buf = array.array('B', [0]) + ioctl(jsdev, 0x80016a12, buf) # JSIOCGBUTTONS + num_buttons = buf[0] + + # Get the axis map. + buf = array.array('B', [0] * 0x40) + ioctl(jsdev, 0x80406a32, buf) # JSIOCGAXMAP + + for axis in buf[:num_axes]: + axis_name = axis_names.get(axis, 'unknown(0x%02x)' % axis) + axis_map.append(axis_name) + axis_states[axis_name] = 0.0 + + # Get the button map. + buf = array.array('H', [0] * 200) + ioctl(jsdev, 0x80406a34, buf) # JSIOCGBTNMAP + + for btn in buf[:num_buttons]: + btn_name = button_names.get(btn, 'unknown(0x%03x)' % btn) + button_map.append(btn_name) + button_states[btn_name] = 0 + + print('%d axes found: %s' % (num_axes, ', '.join(axis_map))) + print('%d buttons found: %s' % (num_buttons, ', '.join(button_map))) + + # Enable FF + import evdev + from evdev import ecodes, InputDevice, ff + device = evdev.list_devices()[0] + evtdev = InputDevice(device) + val = 24000 + evtdev.write(ecodes.EV_FF, ecodes.FF_AUTOCENTER, val) + + while True: + evbuf = jsdev.read(8) + time, value, mtype, number = struct.unpack('IhBB', evbuf) + # print(mtype, number, value) + if mtype & 0x02: # wheel & paddles + axis = axis_map[number] + + if axis == "z": # gas + fvalue = value / 32767.0 + axis_states[axis] = fvalue + normalized = (1 - fvalue) * 50 + q.put(str("throttle_%f" % normalized)) + + if axis == "rz": # brake + fvalue = value / 32767.0 + axis_states[axis] = fvalue + normalized = (1 - fvalue) * 50 + q.put(str("brake_%f" % normalized)) + + if axis == "x": # steer angle + fvalue = value / 32767.0 + axis_states[axis] = fvalue + normalized = fvalue + q.put(str("steer_%f" % normalized)) + + if mtype & 0x01: # buttons + if number in [0,19]: # X + if value == 1: # press down + q.put(str("cruise_down")) + + if number in [3,18]: # triangle + if value == 1: # press down + q.put(str("cruise_up")) + + if number in [1,6]: # square + if value == 1: # press down + 
q.put(str("cruise_cancel")) + + if number in [10,21]: # R3 + if value == 1: # press down + q.put(str("reverse_switch")) + +if __name__ == '__main__': + from multiprocessing import Process + p = Process(target=wheel_poll_thread) + p.start() diff --git a/tools/sim/lib/replay.sh b/tools/sim/lib/replay.sh new file mode 100755 index 00000000000000..38c21400962d68 --- /dev/null +++ b/tools/sim/lib/replay.sh @@ -0,0 +1,5 @@ +#!/bin/bash +cd ~/openpilot/tools/nui +# vision, boardd, sensorsd, gpsd +ALLOW=frame,can,ubloxRaw,health,sensorEvents,gpsNMEA,gpsLocation ./nui "02ec6bea180a4d36/2019-10-25--10-18-09" + diff --git a/tools/sim/start_carla.sh b/tools/sim/start_carla.sh new file mode 100755 index 00000000000000..42c956c2f7d841 --- /dev/null +++ b/tools/sim/start_carla.sh @@ -0,0 +1,18 @@ +#!/bin/bash -e + +FILE=CARLA_0.9.7.tar.gz +if [ ! -f $FILE ]; then + curl -O http://carla-assets-internal.s3.amazonaws.com/Releases/Linux/$FILE +fi +if [ ! -d carla ]; then + rm -rf carla_tmp + mkdir -p carla_tmp + cd carla_tmp + tar xvf ../$FILE + easy_install PythonAPI/carla/dist/carla-0.9.7-py3.5-linux-x86_64.egg || true + cd ../ + mv carla_tmp carla +fi + +cd carla +./CarlaUE4.sh diff --git a/tools/ssh/config b/tools/ssh/config new file mode 100644 index 00000000000000..a58ed61b472704 --- /dev/null +++ b/tools/ssh/config @@ -0,0 +1,9 @@ +Host EON-smays + HostName 192.168.5.11 + Port 8022 + IdentityFile key/id_rsa + +Host EON-wifi + HostName 192.168.43.1 + Port 8022 + IdentityFile key/id_rsa diff --git a/tools/ssh/key/id_rsa b/tools/ssh/key/id_rsa new file mode 100644 index 00000000000000..6a8ecfcce9c06a --- /dev/null +++ b/tools/ssh/key/id_rsa @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQC+iXXq30Tq+J5N +Kat3KWHCzcmwZ55nGh6WggAqECa5CasBlM9VeROpVu3beA+5h0MibRgbD4DMtVXB +t6gEvZ8nd04E7eLA9LTZyFDZ7SkSOVj4oXOQsT0GnJmKrASW5KslTWqVzTfo2XCt +Z+004ikLxmyFeBO8NOcErW1pa8gFdQDToH9FrA7kgysic/XVESTOoe7XlzRoe/eZ 
+acEQ+jtnmFd21A4aEADkk00Ahjr0uKaJiLUAPatxs2icIXWpgYtfqqtaKF23wSt6 +1OTu6cAwXbOWr3m+IUSRUO0IRzEIQS3z1jfd1svgzSgSSwZ1Lhj4AoKxIEAIc8qJ +rO4uymCJAgMBAAECggEBAISFevxHGdoL3Z5xkw6oO5SQKO2GxEeVhRzNgmu/HA+q +x8OryqD6O1CWY4037kft6iWxlwiLOdwna2P25ueVM3LxqdQH2KS4DmlCx+kq6FwC +gv063fQPMhC9LpWimvaQSPEC7VUPjQlo4tPY6sTTYBUOh0A1ihRm/x7juKuQCWix +Cq8C/DVnB1X4mGj+W3nJc5TwVJtgJbbiBrq6PWrhvB/3qmkxHRL7dU2SBb2iNRF1 +LLY30dJx/cD73UDKNHrlrsjk3UJc29Mp4/MladKvUkRqNwlYxSuAtJV0nZ3+iFkL +s3adSTHdJpClQer45R51rFDlVsDz2ZBpb/hRNRoGDuECgYEA6A1EixLq7QYOh3cb +Xhyh3W4kpVvA/FPfKH1OMy3ONOD/Y9Oa+M/wthW1wSoRL2n+uuIW5OAhTIvIEivj +6bAZsTT3twrvOrvYu9rx9aln4p8BhyvdjeW4kS7T8FP5ol6LoOt2sTP3T1LOuJPO +uQvOjlKPKIMh3c3RFNWTnGzMPa0CgYEA0jNiPLxP3A2nrX0keKDI+VHuvOY88gdh +0W5BuLMLovOIDk9aQFIbBbMuW1OTjHKv9NK+Lrw+YbCFqOGf1dU/UN5gSyE8lX/Q +FsUGUqUZx574nJZnOIcy3ONOnQLcvHAQToLFAGUd7PWgP3CtHkt9hEv2koUwL4vo +ikTP1u9Gkc0CgYEA2apoWxPZrY963XLKBxNQecYxNbLFaWq67t3rFnKm9E8BAICi +4zUaE5J1tMVi7Vi9iks9Ml9SnNyZRQJKfQ+kaebHXbkyAaPmfv+26rqHKboA0uxA +nDOZVwXX45zBkp6g1sdHxJx8JLoGEnkC9eyvSi0C//tRLx86OhLErXwYcNkCf1it +VMRKrWYoXJTUNo6tRhvodM88UnnIo3u3CALjhgU4uC1RTMHV4ZCGBwiAOb8GozSl +s5YD1E1iKwEULloHnK6BIh6P5v8q7J6uf/xdqoKMjlWBHgq6/roxKvkSPA1DOZ3l +jTadcgKFnRUmc+JT9p/ZbCxkA/ALFg8++G+0ghECgYA8vG3M/utweLvq4RI7l7U7 +b+i2BajfK2OmzNi/xugfeLjY6k2tfQGRuv6ppTjehtji2uvgDWkgjJUgPfZpir3I +RsVMUiFgloWGHETOy0Qvc5AwtqTJFLTD1Wza2uBilSVIEsg6Y83Gickh+ejOmEsY +6co17RFaAZHwGfCFFjO76Q== +-----END PRIVATE KEY----- diff --git a/tools/ssh/via-smays.sh b/tools/ssh/via-smays.sh new file mode 100755 index 00000000000000..bf903cb86c9baf --- /dev/null +++ b/tools/ssh/via-smays.sh @@ -0,0 +1,3 @@ +# enp5s0 is the smays network name. 
Change it appropriately if you are using an ethernet adapter (type ifconfig to get the proper network name) +sudo ifconfig enp5s0 192.168.5.1 netmask 255.255.255.0 +ssh -F config EON-smays diff --git a/tools/ssh/via-wifi.sh b/tools/ssh/via-wifi.sh new file mode 100755 index 00000000000000..4a43ca2d83f8e6 --- /dev/null +++ b/tools/ssh/via-wifi.sh @@ -0,0 +1 @@ +ssh -F config EON-wifi diff --git a/tools/steer.gif b/tools/steer.gif new file mode 100644 index 00000000000000..8331b50de982c1 Binary files /dev/null and b/tools/steer.gif differ diff --git a/tools/stream.gif b/tools/stream.gif new file mode 100644 index 00000000000000..cd266fb6a84621 Binary files /dev/null and b/tools/stream.gif differ diff --git a/tools/streamer/streamerd.py b/tools/streamer/streamerd.py new file mode 100755 index 00000000000000..8788eafd29eba7 --- /dev/null +++ b/tools/streamer/streamerd.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python +import os +import sys +import argparse +import zmq +import json +import cv2 +import numpy as np +from hexdump import hexdump +import scipy.misc +import struct +from collections import deque + +# sudo pip install git+git://github.com/mikeboers/PyAV.git +import av + +import cereal.messaging as messaging +from cereal.services import service_list + +PYGAME = os.getenv("PYGAME") is not None +if PYGAME: + import pygame + imgff = np.zeros((874, 1164, 3), dtype=np.uint8) + +# first 74 bytes in any stream +start = "0000000140010c01ffff016000000300b0000003000003005dac5900000001420101016000000300b0000003000003005da0025080381c5c665aee4c92ec80000000014401c0f1800420" + +def receiver_thread(): + if PYGAME: + pygame.init() + pygame.display.set_caption("vnet debug UI") + screen = pygame.display.set_mode((1164,874), pygame.DOUBLEBUF) + camera_surface = pygame.surface.Surface((1164,874), 0, 24).convert() + + addr = "192.168.5.11" + if len(sys.argv) >= 2: + addr = sys.argv[1] + + context = zmq.Context() + s = messaging.sub_sock(context, 9002, addr=addr) + frame_sock = 
messaging.pub_sock(context, service_list['frame'].port) + + ctx = av.codec.codec.Codec('hevc', 'r').create() + ctx.decode(av.packet.Packet(start.decode("hex"))) + + import time + while 1: + t1 = time.time() + ts, raw = s.recv_multipart() + ts = struct.unpack('q', ts)[0] * 1000 + t1, t2 = time.time(), t1 + #print 'ms to get frame:', (t1-t2)*1000 + + pkt = av.packet.Packet(raw) + f = ctx.decode(pkt) + if not f: + continue + f = f[0] + t1, t2 = time.time(), t1 + #print 'ms to decode:', (t1-t2)*1000 + + y_plane = np.frombuffer(f.planes[0], np.uint8).reshape((874, 1216))[:, 0:1164] + u_plane = np.frombuffer(f.planes[1], np.uint8).reshape((437, 608))[:, 0:582] + v_plane = np.frombuffer(f.planes[2], np.uint8).reshape((437, 608))[:, 0:582] + yuv_img = y_plane.tobytes() + u_plane.tobytes() + v_plane.tobytes() + t1, t2 = time.time(), t1 + #print 'ms to make yuv:', (t1-t2)*1000 + #print 'tsEof:', ts + + dat = messaging.new_message('frame') + dat.frame.image = yuv_img + dat.frame.timestampEof = ts + dat.frame.transform = map(float, list(np.eye(3).flatten())) + frame_sock.send(dat.to_bytes()) + + if PYGAME: + yuv_np = np.frombuffer(yuv_img, dtype=np.uint8).reshape(874 * 3 // 2, -1) + cv2.cvtColor(yuv_np, cv2.COLOR_YUV2RGB_I420, dst=imgff) + #print yuv_np.shape, imgff.shape + + #scipy.misc.imsave("tmp.png", imgff) + + pygame.surfarray.blit_array(camera_surface, imgff.swapaxes(0,1)) + screen.blit(camera_surface, (0, 0)) + pygame.display.flip() + + +def main(gctx=None): + receiver_thread() + +if __name__ == "__main__": + main() diff --git a/tools/ubuntu_setup.sh b/tools/ubuntu_setup.sh new file mode 100755 index 00000000000000..ccd5b9e93dba91 --- /dev/null +++ b/tools/ubuntu_setup.sh @@ -0,0 +1,96 @@ +#!/bin/bash -e + +# NOTE: ubuntu_setup.sh doesn't run! 
only for reading now + +sudo apt-get update && sudo apt-get install -y \ + autoconf \ + build-essential \ + bzip2 \ + clang \ + cmake \ + curl \ + ffmpeg \ + git \ + libavformat-dev libavcodec-dev libavdevice-dev libavutil-dev libswscale-dev libavresample-dev libavfilter-dev \ + libarchive-dev \ + libbz2-dev \ + libcurl4-openssl-dev \ + libeigen3-dev \ + libffi-dev \ + libglew-dev \ + libgles2-mesa-dev \ + libglfw3-dev \ + libglib2.0-0 \ + liblzma-dev \ + libmysqlclient-dev \ + libomp-dev \ + libopencv-dev \ + libpng16-16 \ + libssl-dev \ + libstdc++-arm-none-eabi-newlib \ + libsqlite3-dev \ + libtool \ + libusb-1.0-0-dev \ + libzmq5-dev \ + locales \ + ocl-icd-libopencl1 \ + ocl-icd-opencl-dev \ + opencl-headers \ + python-dev \ + python-pip \ + screen \ + sudo \ + vim \ + wget \ + gcc-arm-none-eabi + +# install git lfs +if ! command -v "git-lfs" > /dev/null 2>&1; then + curl -s https://packagecloud.io/install/repositories/github/git-lfs/script.deb.sh | sudo bash + sudo apt-get install git-lfs +fi + +# install pyenv +if ! 
command -v "pyenv" > /dev/null 2>&1; then + curl -L https://github.com/pyenv/pyenv-installer/raw/master/bin/pyenv-installer | bash +fi + +# install bashrc +source ~/.bashrc +if [ -z "$OPENPILOT_ENV" ]; then + echo "source $HOME/openpilot/tools/openpilot_env.sh" >> ~/.bashrc + source ~/.bashrc + echo "added openpilot_env to bashrc" +fi + +# in the openpilot repo +cd $HOME/openpilot + +# do the rest of the git checkout +git lfs pull +git submodule init +git submodule update + +# install python 3.7.3 globally (you should move to python3 anyway) +pyenv install -s 3.7.3 +pyenv global 3.7.3 +pyenv rehash + +# **** in python env **** + +# install pipenv +pip install pipenv==2018.11.26 + +# pipenv setup (in openpilot dir) +pipenv install --system --deploy + +# to make tools work +pip install -r tools/requirements.txt + +# to make modeld work on PC with nvidia GPU +pip install tensorflow-gpu==2.1 + +# for loggerd to work on ubuntu +# TODO: PC should log somewhere else +#sudo mkdir -p /data/media/0/realdata +#sudo chown $USER /data/media/0/realdata diff --git a/tools/webcam/README.md b/tools/webcam/README.md new file mode 100644 index 00000000000000..14574c16c63ac1 --- /dev/null +++ b/tools/webcam/README.md @@ -0,0 +1,49 @@ +Run openpilot with webcam on PC/laptop +===================== +What's needed: +- Ubuntu 16.04 +- Python 3.7.3 +- GPU (recommended) +- Two USB webcams, at least 720p and 78 degrees FOV (e.g. Logitech C920/C615) +- [Car harness](https://comma.ai/shop/products/comma-car-harness) w/ black panda (or the outdated grey panda/giraffe combo) to connect to your car +- [Panda paw](https://comma.ai/shop/products/panda-paw) (or USB-A to USB-A cable) to connect panda to your computer +- Tape, Charger, ... +That's it! 
+
+## Clone openpilot and install the requirements
+```
+cd ~
+git clone https://github.com/commaai/openpilot.git
+```
+- Follow [this readme](https://github.com/commaai/openpilot/tree/master/tools) to install the requirements
+- Add the line "export PYTHONPATH=$HOME/openpilot" to your ~/.bashrc
+- You also need to install tensorflow-gpu 2.1.0 and nvidia drivers: nvidia-xxx/cuda10.0/cudnn7.6.5
+- Install [OpenCL Driver](http://registrationcenter-download.intel.com/akdlm/irc_nas/12556/opencl_runtime_16.1.2_x64_rh_6.4.0.37.tgz)
+- (Note: the code assumes the cl platform order to be 0.GPU/1.CPU when running clinfo; if reversed, change the -1 to -2 in selfdrive/modeld/modeld.cc#L130; helping us refactor this mess is encouraged)
+- Install [OpenCV4](https://www.pyimagesearch.com/2018/08/15/how-to-install-opencv-4-on-ubuntu/)
+
+## Build openpilot for webcam
+```
+cd ~/openpilot
+```
+- Review selfdrive/camerad/cameras/camera_webcam.cc lines 72 and 146 before building if any camera is upside down
+```
+scons use_webcam=1
+touch prebuilt
+```
+
+## Connect the hardware
+- Connect the road-facing camera first, then the driver-facing camera
+- (default indexes are 1 and 2; can be modified in selfdrive/camerad/cameras/camera_webcam.cc)
+- Connect your computer to the panda
+
+## GO
+```
+cd ~/openpilot/tools/webcam
+./accept_terms.py # accept the user terms so that thermald can detect the car started
+cd ~/openpilot/selfdrive
+PASSIVE=0 NOSENSOR=1 WEBCAM=1 ./manager.py
+```
+- Start the car, then the UI should show the road webcam's view
+- Adjust and secure the webcams (you can run tools/webcam/front_mount_helper.py to help mount the driver camera)
+- Finish calibration and engage!
diff --git a/tools/webcam/accept_terms.py b/tools/webcam/accept_terms.py new file mode 100755 index 00000000000000..e7757511d8314d --- /dev/null +++ b/tools/webcam/accept_terms.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python +from common.params import Params +from selfdrive.version import terms_version, training_version + +if __name__ == '__main__': + params = Params() + params.put("HasAcceptedTerms", str(terms_version, 'utf-8')) + params.put("CompletedTrainingVersion", str(training_version, 'utf-8')) + print("Terms Accepted!") \ No newline at end of file diff --git a/tools/webcam/front_mount_helper.py b/tools/webcam/front_mount_helper.py new file mode 100755 index 00000000000000..0ac32acc25e0ae --- /dev/null +++ b/tools/webcam/front_mount_helper.py @@ -0,0 +1,35 @@ +#!/usr/bin/env python3.6 +import numpy as np + +# copied from common.transformations/camera.py +eon_dcam_focal_length = 860.0 # pixels +webcam_focal_length = 908.0 # pixels + +eon_dcam_intrinsics = np.array([ + [eon_dcam_focal_length, 0, 1152/2.], + [ 0, eon_dcam_focal_length, 864/2.], + [ 0, 0, 1]]) + +webcam_intrinsics = np.array([ + [webcam_focal_length, 0., 1280/2.], + [ 0., webcam_focal_length, 720/2.], + [ 0., 0., 1.]]) + +cam_id = 1 + +if __name__ == "__main__": + import cv2 + + trans_webcam_to_eon_front = np.dot(eon_dcam_intrinsics,np.linalg.inv(webcam_intrinsics)) + + cap = cv2.VideoCapture(cam_id) + cap.set(cv2.CAP_PROP_FRAME_WIDTH, 1280) + cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 720) + + while (True): + ret, img = cap.read() + if ret: + img = cv2.warpPerspective(img, trans_webcam_to_eon_front, (1152,864), borderMode=cv2.BORDER_CONSTANT, borderValue=0) + img = img[:,-864//2:,:] + cv2.imshow('preview', img) + cv2.waitKey(10) diff --git a/tools/webcam/warp_vis.py b/tools/webcam/warp_vis.py new file mode 100755 index 00000000000000..71d9b7749f0dc6 --- /dev/null +++ b/tools/webcam/warp_vis.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python3.6 +import numpy as np +import time + +# copied from 
# camera intrinsics adapted from common/transformations/camera.py
eon_focal_length = 910.0       # pixels (EON road camera)
eon_dcam_focal_length = 860.0  # pixels (EON driver camera)


###############################################
# user-tunable capture settings
want_width = 800
want_height = 600
want_fps = 30
is_front_cam = False
want_hdw_change = 0  # set > 0 to also ask the capture device for want_fps
###############################################

# webcam focal lengths, scaled from the EON sensors to the requested width
webcam_focal_length_f = 860 * want_width / 1152  # pixels (front/driver cam)
webcam_focal_length_r = 910 * want_width / 1164  # pixels (rear/road cam)

fac = 1.
eon_intrinsics = np.array([
  [eon_focal_length,  0.,               1164/fac],
  [0.,                eon_focal_length, 874/fac],
  [0.,                0.,               1.]])

eon_dcam_intrinsics = np.array([
  [eon_dcam_focal_length, 0,                     1152/fac],
  [0,                     eon_dcam_focal_length, 864/fac],
  [0,                     0,                     1]])

webcam_dcam_intrinsics = np.array([
  [webcam_focal_length_f, 0.,                    want_width/fac],
  [0.,                    webcam_focal_length_f, want_height/fac],
  [0.,                    0.,                    1.]])

webcam_intrinsics = np.array([
  [webcam_focal_length_r, 0.,                    want_width/fac],
  [0.,                    webcam_focal_length_r, want_height/fac],
  [0.,                    0.,                    1.]])

# Homographies mapping webcam pixels onto the EON camera models.
# Pure numpy, so they are computed at module level and can be
# imported/tested without OpenCV or a camera attached.
trans_webcam_to_eon_rear = np.dot(eon_intrinsics, np.linalg.inv(webcam_intrinsics))
trans_webcam_to_eon_front = np.dot(eon_dcam_intrinsics, np.linalg.inv(webcam_dcam_intrinsics))

if __name__ == "__main__":
    import cv2
    if is_front_cam:
        print("trans_webcam_to_eon_front:\n", trans_webcam_to_eon_front)
    else:
        print("trans_webcam_to_eon_rear:\n", trans_webcam_to_eon_rear)
    cap = cv2.VideoCapture(1)
    cap.set(cv2.CAP_PROP_FRAME_WIDTH, want_width)
    cap.set(cv2.CAP_PROP_FRAME_HEIGHT, want_height)
    if want_hdw_change > 0:
        # BUG FIX: the original used cv2.CV_CAP_PROP_FPS, which does not exist
        # in the cv2 python module (AttributeError); the correct videoio
        # property constant is cv2.CAP_PROP_FPS.
        cap.set(cv2.CAP_PROP_FPS, want_fps)
    startTime = time.time() * 100  # centiseconds
    while True:
        ret, img = cap.read()
        if ret:
            nowTime = time.time() * 100
            # software throttle to ~want_fps (interval compared in centiseconds)
            if (nowTime - startTime) > 100 / want_fps:
                startTime = nowTime
                if is_front_cam:
                    img2 = cv2.warpPerspective(img, trans_webcam_to_eon_front, (1152, 864),
                                               borderMode=cv2.BORDER_CONSTANT, borderValue=0)
                else:
                    img2 = cv2.warpPerspective(img, trans_webcam_to_eon_rear, (1164, 874),
                                               borderMode=cv2.BORDER_CONSTANT, borderValue=0)
                print(img2.shape, end='\r')
                cv2.imshow('preview', img2)
                cv2.waitKey(10)