camera_suite_view_camera.c

#include "../camera_suite.h"
#include <furi.h>
#include <furi_hal.h>
#include <input/input.h>
#include <gui/elements.h>
#include <dolphin/dolphin.h>
#include "../helpers/camera_suite_haptic.h"
#include "../helpers/camera_suite_speaker.h"
#include "../helpers/camera_suite_led.h"

static CameraSuiteViewStyle1* current_instance = NULL;

// Dithering type:
// 0 = Floyd Steinberg (default)
// 1 = Atkinson
static int current_dithering = 0;

struct CameraSuiteViewStyle1 {
    CameraSuiteViewStyle1Callback callback;
    FuriStreamBuffer* rx_stream;
    FuriThread* worker_thread;
    View* view;
    void* context;
};

void camera_suite_view_camera_set_callback(
    CameraSuiteViewStyle1* instance,
    CameraSuiteViewStyle1Callback callback,
    void* context) {
    furi_assert(instance);
    furi_assert(callback);
    instance->callback = callback;
    instance->context = context;
}

static void camera_suite_view_camera_draw(Canvas* canvas, UartDumpModel* model) {
    // Clear the screen.
    canvas_set_color(canvas, ColorBlack);

    // Draw the frame.
    canvas_draw_frame(canvas, 0, 0, FRAME_WIDTH, FRAME_HEIGHT);

    CameraSuite* app = current_instance->context;

    // Draw the pixels with rotation.
    for(size_t p = 0; p < FRAME_BUFFER_LENGTH; ++p) {
        uint8_t x = p % ROW_BUFFER_LENGTH; // 0 .. 15
        uint8_t y = p / ROW_BUFFER_LENGTH; // 0 .. 63

        // Apply rotation
        int16_t rotated_x, rotated_y;
        switch(app->orientation) {
        case 1: // 90 degrees
            rotated_x = y;
            rotated_y = FRAME_WIDTH - 1 - x;
            break;
        case 2: // 180 degrees
            rotated_x = FRAME_WIDTH - 1 - x;
            rotated_y = FRAME_HEIGHT - 1 - y;
            break;
        case 3: // 270 degrees
            rotated_x = FRAME_HEIGHT - 1 - y;
            rotated_y = x;
            break;
        case 0: // 0 degrees
        default:
            rotated_x = x;
            rotated_y = y;
            break;
        }

        for(uint8_t i = 0; i < 8; ++i) {
            if((model->pixels[p] & (1 << i)) != 0) {
                // Adjust the coordinates based on the new screen dimensions
                uint16_t screen_x, screen_y;
                switch(app->orientation) {
                case 1: // 90 degrees
                    screen_x = rotated_x;
                    screen_y = FRAME_HEIGHT - 8 + (rotated_y * 8) + i;
                    break;
                case 2: // 180 degrees
                    screen_x = FRAME_WIDTH - 8 + (rotated_x * 8) + i;
                    screen_y = FRAME_HEIGHT - 1 - rotated_y;
                    break;
                case 3: // 270 degrees
                    screen_x = FRAME_WIDTH - 1 - rotated_x;
                    screen_y = rotated_y * 8 + i;
                    break;
                case 0: // 0 degrees
                default:
                    screen_x = rotated_x * 8 + i;
                    screen_y = rotated_y;
                    break;
                }
                canvas_draw_dot(canvas, screen_x, screen_y);
            }
        }
    }

    // Draw the guide if the camera is not initialized.
    if(!model->initialized) {
        canvas_draw_icon(canvas, 74, 16, &I_DolphinCommon_56x48);
        canvas_set_font(canvas, FontSecondary);
        canvas_draw_str(canvas, 8, 12, "Connect the ESP32-CAM");
        canvas_draw_str(canvas, 20, 24, "VCC - 3V3");
        canvas_draw_str(canvas, 20, 34, "GND - GND");
        canvas_draw_str(canvas, 20, 44, "U0R - TX");
        canvas_draw_str(canvas, 20, 54, "U0T - RX");
    }
}

static void camera_suite_view_camera_model_init(UartDumpModel* const model) {
    for(size_t i = 0; i < FRAME_BUFFER_LENGTH; i++) {
        model->pixels[i] = 0;
    }
}

static bool camera_suite_view_camera_input(InputEvent* event, void* context) {
    furi_assert(context);
    CameraSuiteViewStyle1* instance = context;
    if(event->type == InputTypeRelease) {
        switch(event->key) {
        default: // Stop all sounds, reset the LED.
            with_view_model(
                instance->view,
                UartDumpModel * model,
                {
                    UNUSED(model);
                    camera_suite_play_bad_bump(instance->context);
                    camera_suite_stop_all_sound(instance->context);
                    camera_suite_led_set_rgb(instance->context, 0, 0, 0);
                },
                true);
            break;
        }
        // Send `data` to the ESP32-CAM
    } else if(event->type == InputTypePress) {
        uint8_t data[1];
        switch(event->key) {
        case InputKeyBack:
            // Stop the camera stream.
            data[0] = 's';
            // Go back to the main menu.
            with_view_model(
                instance->view,
                UartDumpModel * model,
                {
                    UNUSED(model);
                    instance->callback(CameraSuiteCustomEventSceneStyle1Back, instance->context);
                },
                true);
            break;
        case InputKeyLeft:
            // Camera: Invert.
            data[0] = '<';
            with_view_model(
                instance->view,
                UartDumpModel * model,
                {
                    UNUSED(model);
                    camera_suite_play_happy_bump(instance->context);
                    camera_suite_play_input_sound(instance->context);
                    camera_suite_led_set_rgb(instance->context, 0, 0, 255);
                    instance->callback(CameraSuiteCustomEventSceneStyle1Left, instance->context);
                },
                true);
            break;
        case InputKeyRight:
            // Camera: Enable/disable dithering.
            data[0] = '>';
            with_view_model(
                instance->view,
                UartDumpModel * model,
                {
                    UNUSED(model);
                    camera_suite_play_happy_bump(instance->context);
                    camera_suite_play_input_sound(instance->context);
                    camera_suite_led_set_rgb(instance->context, 0, 0, 255);
                    instance->callback(CameraSuiteCustomEventSceneStyle1Right, instance->context);
                },
                true);
            break;
        case InputKeyUp:
            // Camera: Increase contrast.
            data[0] = 'C';
            with_view_model(
                instance->view,
                UartDumpModel * model,
                {
                    UNUSED(model);
                    camera_suite_play_happy_bump(instance->context);
                    camera_suite_play_input_sound(instance->context);
                    camera_suite_led_set_rgb(instance->context, 0, 0, 255);
                    instance->callback(CameraSuiteCustomEventSceneStyle1Up, instance->context);
                },
                true);
            break;
        case InputKeyDown:
            // Camera: Reduce contrast.
            data[0] = 'c';
            with_view_model(
                instance->view,
                UartDumpModel * model,
                {
                    UNUSED(model);
                    camera_suite_play_happy_bump(instance->context);
                    camera_suite_play_input_sound(instance->context);
                    camera_suite_led_set_rgb(instance->context, 0, 0, 255);
                    instance->callback(CameraSuiteCustomEventSceneStyle1Down, instance->context);
                },
                true);
            break;
        case InputKeyOk:
            // Toggle the dithering type (0 = Floyd Steinberg, 1 = Atkinson;
            // see the mapping at the top of this file).
            if(current_dithering == 0) {
                data[0] = 'd';
                current_dithering = 1; // Switch to Atkinson dithering.
            } else {
                data[0] = 'D';
                current_dithering = 0; // Switch back to Floyd Steinberg dithering.
            }
            with_view_model(
                instance->view,
                UartDumpModel * model,
                {
                    UNUSED(model);
                    camera_suite_play_happy_bump(instance->context);
                    camera_suite_play_input_sound(instance->context);
                    camera_suite_led_set_rgb(instance->context, 0, 0, 255);
                    instance->callback(CameraSuiteCustomEventSceneStyle1Ok, instance->context);
                },
                true);
            break;
        case InputKeyMAX:
            // No command is mapped to this key, so don't send an uninitialized byte.
            return true;
        }
        // Send `data` to the ESP32-CAM
        furi_hal_uart_tx(FuriHalUartIdUSART1, data, 1);
    }
    return true;
}

static void camera_suite_view_camera_exit(void* context) {
    furi_assert(context);
}

static void camera_suite_view_camera_enter(void* context) {
    // Check `context` for null. If it is null, abort program, else continue.
    furi_assert(context);
    // Cast `context` to `CameraSuiteViewStyle1*` and store it in `instance`.
    CameraSuiteViewStyle1* instance = (CameraSuiteViewStyle1*)context;
    // Assign the current instance to the global variable.
    current_instance = instance;

    uint8_t data[1];
    data[0] = 'S'; // Uppercase `S` to start the camera.
    // Send `data` to the ESP32-CAM.
    furi_hal_uart_tx(FuriHalUartIdUSART1, data, 1);

    with_view_model(
        instance->view,
        UartDumpModel * model,
        { camera_suite_view_camera_model_init(model); },
        true);
}

static void camera_on_irq_cb(UartIrqEvent uartIrqEvent, uint8_t data, void* context) {
    // Check `context` for null. If it is null, abort program, else continue.
    furi_assert(context);
    // Cast `context` to `CameraSuiteViewStyle1*` and store it in `instance`.
    CameraSuiteViewStyle1* instance = context;
    // If `uartIrqEvent` is `UartIrqEventRXNE`, send the data to the
    // `rx_stream` and set the `WorkerEventRx` flag.
    if(uartIrqEvent == UartIrqEventRXNE) {
        furi_stream_buffer_send(instance->rx_stream, &data, 1, 0);
        furi_thread_flags_set(furi_thread_get_id(instance->worker_thread), WorkerEventRx);
    }
}

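// Note on the row packet layout (inferred from the parsing logic below, not
// from the ESP32-CAM firmware itself): each frame row is expected to arrive as
//   'Y' ':' <row index> <ROW_BUFFER_LENGTH bytes of packed 1-bit pixels>
// for a total of RING_BUFFER_LENGTH bytes. Anything that does not start with
// the "Y:" header is discarded until the next header candidate is seen.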
static void process_ringbuffer(UartDumpModel* model, uint8_t byte) {
    // The first char in the buffer has to be 'Y'.
    if(model->ringbuffer_index == 0 && byte != 'Y') {
        return;
    }
    // The second char has to be ':', otherwise reset and re-check this byte.
    if(model->ringbuffer_index == 1 && byte != ':') {
        model->ringbuffer_index = 0;
        process_ringbuffer(model, byte);
        return;
    }
    // Assign the current byte to the ringbuffer.
    model->row_ringbuffer[model->ringbuffer_index] = byte;
    // Increment the ringbuffer index.
    ++model->ringbuffer_index;
    // Wait until the buffer fills.
    if(model->ringbuffer_index < RING_BUFFER_LENGTH) {
        return;
    }

    // Flush the ringbuffer to the framebuffer.
    model->ringbuffer_index = 0; // Reset the ringbuffer.
    model->initialized = true; // The connection was established successfully.
    size_t row_start_index =
        model->row_ringbuffer[2] * ROW_BUFFER_LENGTH; // The third char determines the row number.
    if(row_start_index > LAST_ROW_INDEX) { // Failsafe.
        row_start_index = 0;
    }
    for(size_t i = 0; i < ROW_BUFFER_LENGTH; ++i) {
        model->pixels[row_start_index + i] =
            model->row_ringbuffer[i + 3]; // Write the remaining 16 bytes into the frame buffer.
    }
}

static int32_t camera_worker(void* context) {
    furi_assert(context);
    CameraSuiteViewStyle1* instance = context;
    while(1) {
        uint32_t events =
            furi_thread_flags_wait(WORKER_EVENTS_MASK, FuriFlagWaitAny, FuriWaitForever);
        furi_check((events & FuriFlagError) == 0);
        if(events & WorkerEventStop) {
            break;
        } else if(events & WorkerEventRx) {
            size_t length = 0;
            do {
                size_t intended_data_size = 64;
                uint8_t data[intended_data_size];
                length =
                    furi_stream_buffer_receive(instance->rx_stream, data, intended_data_size, 0);
                if(length > 0) {
                    with_view_model(
                        instance->view,
                        UartDumpModel * model,
                        {
                            for(size_t i = 0; i < length; i++) {
                                process_ringbuffer(model, data[i]);
                            }
                        },
                        false);
                }
            } while(length > 0);
        }
    }
    return 0;
}

CameraSuiteViewStyle1* camera_suite_view_camera_alloc() {
    CameraSuiteViewStyle1* instance = malloc(sizeof(CameraSuiteViewStyle1));
    instance->view = view_alloc();
    instance->rx_stream = furi_stream_buffer_alloc(2048, 1);

    // Set up views
    view_allocate_model(instance->view, ViewModelTypeLocking, sizeof(UartDumpModel));
    view_set_context(instance->view, instance); // furi_assert crashes in events without this
    view_set_draw_callback(instance->view, (ViewDrawCallback)camera_suite_view_camera_draw);
    view_set_input_callback(instance->view, camera_suite_view_camera_input);
    view_set_enter_callback(instance->view, camera_suite_view_camera_enter);
    view_set_exit_callback(instance->view, camera_suite_view_camera_exit);

    with_view_model(
        instance->view,
        UartDumpModel * model,
        { camera_suite_view_camera_model_init(model); },
        true);

    instance->worker_thread = furi_thread_alloc_ex("UsbUartWorker", 2048, camera_worker, instance);
    furi_thread_start(instance->worker_thread);

    // Enable uart listener
    furi_hal_console_disable();
    furi_hal_uart_set_br(FuriHalUartIdUSART1, 230400);
    furi_hal_uart_set_irq_cb(FuriHalUartIdUSART1, camera_on_irq_cb, instance);

    return instance;
}

void camera_suite_view_camera_free(CameraSuiteViewStyle1* instance) {
    furi_assert(instance);

    // Tear down in reverse order of the setup done in alloc: detach the UART
    // callback, restore the console, stop the worker thread, and free the
    // stream buffer before releasing the view.
    furi_hal_uart_set_irq_cb(FuriHalUartIdUSART1, NULL, NULL);
    furi_hal_console_enable();
    furi_thread_flags_set(furi_thread_get_id(instance->worker_thread), WorkerEventStop);
    furi_thread_join(instance->worker_thread);
    furi_thread_free(instance->worker_thread);
    furi_stream_buffer_free(instance->rx_stream);

    with_view_model(
        instance->view, UartDumpModel * model, { UNUSED(model); }, true);
    view_free(instance->view);
    free(instance);
}

View* camera_suite_view_camera_get_view(CameraSuiteViewStyle1* instance) {
    furi_assert(instance);
    return instance->view;
}
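
// Illustrative usage (a sketch, not taken from this file): the View returned by
// camera_suite_view_camera_get_view() would typically be registered with the
// app's ViewDispatcher. The `app` fields and the view id below are assumptions.
//
//   app->camera_suite_view_camera = camera_suite_view_camera_alloc();
//   view_dispatcher_add_view(
//       app->view_dispatcher,
//       CameraSuiteViewIdStyle1,
//       camera_suite_view_camera_get_view(app->camera_suite_view_camera));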