|
FFmpeg
|
#include "libavformat/avio.h"
#include "libavutil/avassert.h"
#include "libavutil/avstring.h"
#include "libavutil/cpu.h"
#include "libavutil/mem.h"
#include "libavutil/opt.h"
#include "libavcodec/defs.h"
#include "dnn_io_proc.h"
#include "dnn_backend_common.h"
#include "safe_queue.h"
#include <tensorflow/c/c_api.h>
Go to the source code of this file.
Data Structures | |
| struct | TFModel |
| struct | TFInferRequest |
| Stores execution parameters for single call to the TensorFlow C API. More... | |
| struct | TFRequestItem |
Macros | |
| #define | OFFSET(x) offsetof(TFOptions, x) |
| #define | FLAGS AV_OPT_FLAG_FILTERING_PARAM |
| #define | SPACE_CHARS " \t\r\n" |
Functions | |
| static int | execute_model_tf (TFRequestItem *request, Queue *lltask_queue) |
| static void | infer_completion_callback (void *args) |
| static void | destroy_request_item (TFRequestItem **arg) |
| Free the TFRequestItem completely. More... | |
| static void | free_buffer (void *data, size_t length) |
| static void | tf_free_request (TFInferRequest *request) |
| Free the contents of TensorFlow inference request. More... | |
| static TFInferRequest * | tf_create_inference_request (void) |
| Create a TensorFlow inference request. More... | |
| static int | tf_start_inference (void *args) |
| Start synchronous inference for the TensorFlow model. More... | |
| static int | extract_lltask_from_task (TaskItem *task, Queue *lltask_queue) |
| static TF_Buffer * | read_graph (const char *model_filename) |
| static TF_Tensor * | allocate_input_tensor (const DNNData *input) |
| static int | get_input_tf (DNNModel *model, DNNData *input, const char *input_name) |
| static int | get_output_tf (DNNModel *model, const char *input_name, int input_width, int input_height, const char *output_name, int *output_width, int *output_height) |
| static int | hex_to_data (uint8_t *data, const char *p) |
| static int | load_tf_model (TFModel *tf_model, const char *model_filename) |
| static void | dnn_free_model_tf (DNNModel **model) |
| static DNNModel * | dnn_load_model_tf (DnnContext *ctx, DNNFunctionType func_type, AVFilterContext *filter_ctx) |
| static int | fill_model_input_tf (TFModel *tf_model, TFRequestItem *request) |
| static int | dnn_execute_model_tf (const DNNModel *model, DNNExecBaseParams *exec_params) |
| static DNNAsyncStatusType | dnn_get_result_tf (const DNNModel *model, AVFrame **in, AVFrame **out) |
| static int | dnn_flush_tf (const DNNModel *model) |
Variables | |
| static const AVOption | dnn_tensorflow_options [] |
| const DNNModule | ff_dnn_backend_tf |
DNN tensorflow backend implementation.
Definition in file dnn_backend_tf.c.
| #define OFFSET(x) offsetof(TFOptions, x) |
Definition at line 67 of file dnn_backend_tf.c.
| #define FLAGS AV_OPT_FLAG_FILTERING_PARAM |
Definition at line 68 of file dnn_backend_tf.c.
| #define SPACE_CHARS " \t\r\n" |
Definition at line 356 of file dnn_backend_tf.c.
|
static |
Definition at line 756 of file dnn_backend_tf.c.
Referenced by dnn_execute_model_tf(), and get_output_tf().
|
static |
Definition at line 693 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf(), and execute_model_tf().
|
inline static
Free the TFRequestItem completely.
| arg | Address of the TFInferRequest instance. |
Definition at line 170 of file dnn_backend_tf.c.
Referenced by dnn_flush_tf(), dnn_free_model_tf(), dnn_load_model_tf(), execute_model_tf(), and infer_completion_callback().
|
static |
Definition at line 79 of file dnn_backend_tf.c.
Referenced by read_graph().
|
static |
Free the contents of TensorFlow inference request.
It does not free the TFInferRequest instance.
| request | pointer to TFInferRequest instance. NULL pointer is allowed. |
Definition at line 91 of file dnn_backend_tf.c.
Referenced by destroy_request_item(), execute_model_tf(), fill_model_input_tf(), and infer_completion_callback().
|
static |
Create a TensorFlow inference request.
All properties are initially unallocated and set as NULL.
Definition at line 119 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf().
|
static |
Start synchronous inference for the TensorFlow model.
| request | pointer to the TFRequestItem for inference |
| 0 | if execution is successful |
| AVERROR(EINVAL) | if request is NULL |
| DNN_GENERIC_ERROR | if execution fails |
Definition at line 140 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf(), and execute_model_tf().
Definition at line 184 of file dnn_backend_tf.c.
Referenced by dnn_execute_model_tf(), and get_output_tf().
|
static |
Definition at line 204 of file dnn_backend_tf.c.
Referenced by load_tf_model().
|
static |
Definition at line 237 of file dnn_backend_tf.c.
Referenced by fill_model_input_tf().
Definition at line 264 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf(), and fill_model_input_tf().
|
static |
Definition at line 312 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf().
|
static |
Definition at line 357 of file dnn_backend_tf.c.
Referenced by load_tf_model().
|
static |
Definition at line 386 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf().
|
static |
Definition at line 481 of file dnn_backend_tf.c.
Referenced by dnn_load_model_tf().
|
static |
Definition at line 523 of file dnn_backend_tf.c.
|
static |
Definition at line 599 of file dnn_backend_tf.c.
Referenced by dnn_flush_tf(), and execute_model_tf().
|
static |
Definition at line 802 of file dnn_backend_tf.c.
|
static |
Definition at line 848 of file dnn_backend_tf.c.
|
static |
Definition at line 854 of file dnn_backend_tf.c.
|
static |
Definition at line 69 of file dnn_backend_tf.c.
| const DNNModule ff_dnn_backend_tf |
Definition at line 884 of file dnn_backend_tf.c.
1.8.17