
Searched refs:context — results 1 – 25 of 104, sorted by relevance


/dpdk/lib/acl/
acl_bld.c
171 context->num_nodes++; in acl_alloc_node()
172 node->id = context->node_id++; in acl_alloc_node()
200 acl_build_free(context, in acl_free_node()
207 context->num_nodes--; in acl_free_node()
703 acl_deref_ptr(context, in acl_merge_trie()
1324 context->tries[n].count = 0; in build_one_trie()
1327 context->data_indexes[n]); in build_one_trie()
1328 context->tries[n].data_index = context->data_indexes[n]; in build_one_trie()
1332 context->bld_tries[n].trie = build_trie(context, rule_sets[n], in build_one_trie()
1366 last = build_one_trie(context, rule_sets, n, context->node_max); in acl_build_tries()
[all …]
/dpdk/drivers/net/mlx4/
mlx4_glue.c
28 mlx4_glue_get_async_event(struct ibv_context *context, in mlx4_glue_get_async_event() argument
31 return ibv_get_async_event(context, event); in mlx4_glue_get_async_event()
41 mlx4_glue_alloc_pd(struct ibv_context *context) in mlx4_glue_alloc_pd() argument
43 return ibv_alloc_pd(context); in mlx4_glue_alloc_pd()
71 mlx4_glue_close_device(struct ibv_context *context) in mlx4_glue_close_device() argument
73 return ibv_close_device(context); in mlx4_glue_close_device()
86 return ibv_query_device(context, device_attr); in mlx4_glue_query_device()
94 return ibv_query_device_ex(context, input, attr); in mlx4_glue_query_device_ex()
113 return ibv_create_comp_channel(context); in mlx4_glue_create_comp_channel()
211 mlx4_glue_create_wq(struct ibv_context *context, in mlx4_glue_create_wq() argument
[all …]
mlx4_glue.h
29 int (*get_async_event)(struct ibv_context *context,
32 struct ibv_pd *(*alloc_pd)(struct ibv_context *context);
37 int (*close_device)(struct ibv_context *context);
39 int (*query_device)(struct ibv_context *context,
41 int (*query_device_ex)(struct ibv_context *context,
44 int (*query_port)(struct ibv_context *context, uint8_t port_num,
48 (struct ibv_context *context);
64 (struct ibv_context *context,
73 (struct ibv_context *context,
76 struct ibv_wq *(*create_wq)(struct ibv_context *context,
[all …]
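The mlx4 glue layer above (and the mlx5 one further down) routes every libibverbs call through a table of function pointers, each wrapper simply forwarding its struct ibv_context * handle so the PMD can resolve the verbs library at run time instead of linking it directly. A minimal sketch of that pattern, assuming only the standard verbs calls ibv_alloc_pd() and ibv_close_device(); the table and wrapper names are illustrative, not the real mlx4_glue definitions:

#include <infiniband/verbs.h>

/* Illustrative glue table: one function pointer per verbs call. */
struct example_glue {
	struct ibv_pd *(*alloc_pd)(struct ibv_context *context);
	int (*close_device)(struct ibv_context *context);
};

/* Thin wrappers that simply forward the ibv_context handle. */
static struct ibv_pd *
example_glue_alloc_pd(struct ibv_context *context)
{
	return ibv_alloc_pd(context);
}

static int
example_glue_close_device(struct ibv_context *context)
{
	return ibv_close_device(context);
}

/* Consumers call through the table instead of calling verbs directly. */
static const struct example_glue example_glue = {
	.alloc_pd = example_glue_alloc_pd,
	.close_device = example_glue_close_device,
};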
/dpdk/drivers/common/mlx5/linux/
mlx5_glue.c
43 (void)context; in mlx5_glue_import_pd()
305 (void)context; in mlx5_glue_create_counter_set()
330 (void)context; in mlx5_glue_describe_counter_set()
357 (void)context; in mlx5_glue_create_counters()
577 (void)context; in mlx5_glue_dv_create_wq()
615 (void)context; in mlx5_glue_dv_create_qp()
630 (void)context; in mlx5_glue_dv_create_flow_matcher()
639 (void)context; in mlx5_glue_dv_create_flow_matcher()
1087 (void)context; in mlx5_glue_devx_umem_reg()
1346 (void)context; in mlx5_glue_devx_alloc_uar()
[all …]
mlx5_glue.h
172 (struct ibv_context *context);
183 (struct ibv_context *context,
197 (struct ibv_context *context,
210 (struct ibv_context *context,
214 (struct ibv_context *context,
220 (struct ibv_context *context,
250 (struct ibv_context *context,
254 (struct ibv_context *context,
264 (struct ibv_context *context,
268 (struct ibv_context *context,
[all …]
/dpdk/lib/jobstats/
rte_jobstats.c
128 if (unlikely(ctx == NULL || job == NULL || job->context != NULL)) in rte_jobstats_start()
132 job->context = ctx; in rte_jobstats_start()
148 if (unlikely(job == NULL || job->context == NULL)) in rte_jobstats_abort()
151 ctx = job->context; in rte_jobstats_abort()
156 job->context = NULL; in rte_jobstats_abort()
169 if (unlikely(job == NULL || job->context == NULL)) in rte_jobstats_finish()
177 ctx = job->context; in rte_jobstats_finish()
192 job->context = NULL; in rte_jobstats_finish()
242 job->context = NULL; in rte_jobstats_init()
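The rte_jobstats hits above all implement one ownership guard: rte_jobstats_start() rejects a job whose context pointer is already set, while abort/finish clear it again, so a job is accounted against at most one context at a time. A small sketch of that guard with hypothetical types, not the real rte_jobstats structures or signatures:

#include <stddef.h>

/* Hypothetical stand-ins for the jobstats context and job objects. */
struct stats_ctx { unsigned long started; unsigned long finished; };
struct job { struct stats_ctx *context; };

/* Start: reject a job that is already bound to a context. */
static int
job_start(struct stats_ctx *ctx, struct job *job)
{
	if (ctx == NULL || job == NULL || job->context != NULL)
		return -1;
	job->context = ctx;
	ctx->started++;
	return 0;
}

/* Finish: only valid for a started job; releases the binding. */
static int
job_finish(struct job *job)
{
	if (job == NULL || job->context == NULL)
		return -1;
	job->context->finished++;
	job->context = NULL;
	return 0;
}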
/dpdk/app/test-pmd/
cmdline_flow.c
805 struct context { struct
6379 pop_args(struct context *ctx) in pop_args()
10165 static struct context cmd_flow_context;
10173 cmd_flow_context_init(struct context *ctx) in cmd_flow_context_init()
10193 struct context *ctx = &cmd_flow_context; in cmd_flow_parse()
10293 struct context *ctx = &cmd_flow_context; in cmd_flow_complete_get_nb()
10326 struct context *ctx = &cmd_flow_context; in cmd_flow_complete_get_elt()
10362 struct context *ctx = &cmd_flow_context; in cmd_flow_get_help()
10393 struct context *ctx = &cmd_flow_context; in cmd_flow_tok()
11114 struct context *ctx = &cmd_flow_context; in cmd_set_raw_get_help()
[all …]
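cmdline_flow.c keeps a single static struct context (cmd_flow_context) that every entry point fetches and that is re-initialized before each command is parsed. A toy sketch of that module-level parser-context pattern; all names below are hypothetical:

#include <stddef.h>

/* Hypothetical parser context, reset before every command. */
struct parser_ctx { const char *cur; int nb_args; };

static struct parser_ctx g_parser_ctx;

static void
parser_ctx_init(struct parser_ctx *ctx)
{
	ctx->cur = NULL;
	ctx->nb_args = 0;
}

static int
parse_cmd(const char *line)
{
	struct parser_ctx *ctx = &g_parser_ctx;

	parser_ctx_init(ctx); /* start from a clean context for each command */
	ctx->cur = line;
	/* ... tokenize and fill ctx ... */
	return 0;
}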
/dpdk/drivers/raw/dpaa2_cmdif/
dpaa2_cmdif.c
54 rte_rawdev_obj_t context) in dpaa2_cmdif_enqueue_bufs() argument
78 cmdif_send_cnxt = (struct rte_dpaa2_cmdif_context *)(context); in dpaa2_cmdif_enqueue_bufs()
118 rte_rawdev_obj_t context) in dpaa2_cmdif_dequeue_bufs() argument
143 cmdif_rcv_cnxt = (struct rte_dpaa2_cmdif_context *)(context); in dpaa2_cmdif_dequeue_bufs()
/dpdk/doc/guides/prog_guide/
packet_classif_access_ctrl.rst
14 * Create a new Access Control (AC) context.
16 * Add rules into the context.
346 * populated with rules AC context and cfg filled properly.
349 /* try to build AC context, with RT structures less than 8MB. */
354 * RT structures can't fit into 8MB for given context.
425 /* AC context creation parameters. */
434 .max_rule_num = 8, /* maximum number of rules in the AC context. */
484 /* create an empty AC context */
488 /* handle context create failure. */
492 /* add rules to the context */
[all …]
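The programmer's guide excerpt above outlines the AC workflow: create a context, add rules, then build the runtime structures under a size cap. A condensed sketch of that flow based on the public rte_acl API; the field definitions (ipv4_defs) and the rule array are placeholders:

#include <string.h>
#include <rte_memory.h>
#include <rte_acl.h>

/* ipv4_defs describes the rule fields, defined as in the guide (placeholder). */
extern struct rte_acl_field_def ipv4_defs[5];

static struct rte_acl_ctx *
setup_acl(const struct rte_acl_rule *acl_rules, uint32_t acl_num)
{
	/* AC context creation parameters (values mirror the guide's example). */
	struct rte_acl_param prm = {
		.name = "example_acl",
		.socket_id = SOCKET_ID_ANY,
		.rule_size = RTE_ACL_RULE_SZ(5),
		.max_rule_num = 8, /* maximum number of rules in the AC context */
	};
	struct rte_acl_config cfg;
	struct rte_acl_ctx *ctx;

	/* create an empty AC context */
	ctx = rte_acl_create(&prm);
	if (ctx == NULL)
		return NULL; /* handle context create failure */

	/* add rules to the context */
	if (rte_acl_add_rules(ctx, acl_rules, acl_num) != 0)
		goto err;

	/* build runtime structures; max_size caps them (the 8MB mentioned above) */
	memset(&cfg, 0, sizeof(cfg));
	cfg.num_categories = 1;
	cfg.num_fields = 5;
	memcpy(cfg.defs, ipv4_defs, sizeof(ipv4_defs));
	cfg.max_size = 0x800000;
	if (rte_acl_build(ctx, &cfg) != 0)
		goto err;

	return ctx;
err:
	rte_acl_free(ctx);
	return NULL;
}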
bpf_lib.rst
20 * Create a new BPF execution context and load user provided eBPF code into it.
22 * Destroy a BPF execution context and its runtime structures and free the associated memory.
26 * Provide information about natively compiled code for given BPF context.
35 These instructions can only be used when execution context is a pointer to
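The bpf_lib excerpt lists the lifecycle of a BPF execution context: load user code, optionally query the natively compiled (JIT) image, execute, destroy. A rough sketch of that lifecycle using the librte_bpf calls as recalled; treat the exact signatures as assumptions to check against rte_bpf.h:

#include <rte_bpf.h>

/* Load, run once and destroy a BPF execution context (illustrative only). */
static int
run_bpf_once(const struct rte_bpf_prm *prm, void *pkt)
{
	struct rte_bpf_jit jit;
	uint64_t rc;

	/* create a new BPF execution context from user-provided code */
	struct rte_bpf *bpf = rte_bpf_load(prm);
	if (bpf == NULL)
		return -1;

	/* query information about the natively compiled (JIT) image */
	if (rte_bpf_get_jit(bpf, &jit) == 0 && jit.func != NULL)
		rc = jit.func(pkt);          /* run natively compiled code */
	else
		rc = rte_bpf_exec(bpf, pkt); /* fall back to the interpreter */

	/* destroy the context and free its runtime structures */
	rte_bpf_destroy(bpf);
	return (int)rc;
}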
toeplitz_hash_lib.rst
98 * Create the thash context.
100 * Create the thash helper, associated with a context.
106 Thash context
109 The function ``rte_thash_init_ctx()`` initializes the context struct
146 associated with a given context and a part of a target tuple of interest which
148 calculated bit sequence into the RSS hash key which is stored in the context
153 * A pointer to the Thash context to be associated with.
162 Adding a helper changes the key stored in the corresponding context. So the
172 context initialization) to be xored with N least significant bits of the
197 * A Thash context and helper.
[all …]
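The Toeplitz hash library excerpt describes creating a thash context with rte_thash_init_ctx() and attaching helpers that rewrite the RSS key stored in the context. A tentative sketch of that setup; the signatures and the units of the helper's length/offset arguments are written from memory and should be verified against rte_thash.h:

#include <rte_thash.h>

/* Create a thash context and attach one helper (illustrative values). */
static struct rte_thash_ctx *
setup_thash(uint32_t tuple_len, uint32_t tuple_offset)
{
	/* 40-byte RSS key, 2^7-entry RETA, key generated by the library (NULL) */
	struct rte_thash_ctx *ctx =
		rte_thash_init_ctx("thash_example", 40, 7, NULL, 0);
	if (ctx == NULL)
		return NULL;

	/* Attach a helper for the subtuple of interest; adding it rewrites
	 * the key stored in the context (check len/offset units in rte_thash.h). */
	if (rte_thash_add_helper(ctx, "dst_ip", tuple_len, tuple_offset) != 0) {
		rte_thash_free_ctx(ctx);
		return NULL;
	}
	return ctx;
}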
gpudev.rst
24 (e.g. CUDA Driver context or CUDA Streams in case of NVIDIA GPUs).
53 through an ``uint64_t`` generic handler (e.g. CUDA Driver context)
146 /* Initialize CUDA objects (cstream, context, etc..). */
147 /* Use gpudev library to register a new CUDA context if any. */
149 /* Let's assume the application wants to use the default context of the GPU device 0. */
/dpdk/drivers/raw/cnxk_bphy/
cnxk_bphy_cgx.c
155 rte_rawdev_obj_t context) in cnxk_bphy_cgx_enqueue_bufs() argument
158 unsigned int queue = (size_t)context; in cnxk_bphy_cgx_enqueue_bufs()
177 rte_rawdev_obj_t context) in cnxk_bphy_cgx_dequeue_bufs() argument
180 unsigned int queue = (size_t)context; in cnxk_bphy_cgx_dequeue_bufs()
cnxk_bphy.c
169 rte_rawdev_obj_t context) in cnxk_bphy_irq_enqueue_bufs() argument
174 unsigned int queue = (size_t)context; in cnxk_bphy_irq_enqueue_bufs()
249 rte_rawdev_obj_t context) in cnxk_bphy_irq_dequeue_bufs() argument
252 unsigned int queue = (size_t)context; in cnxk_bphy_irq_dequeue_bufs()
/dpdk/lib/rawdev/
rte_rawdev.h
413 rte_rawdev_obj_t context);
444 rte_rawdev_obj_t context);
rte_rawdev_pmd.h
305 rte_rawdev_obj_t context);
330 rte_rawdev_obj_t context);
rte_rawdev.c
211 rte_rawdev_obj_t context) in rte_rawdev_enqueue_buffers() argument
219 return (*dev->dev_ops->enqueue_bufs)(dev, buffers, count, context); in rte_rawdev_enqueue_buffers()
226 rte_rawdev_obj_t context) in rte_rawdev_dequeue_buffers() argument
234 return (*dev->dev_ops->dequeue_bufs)(dev, buffers, count, context); in rte_rawdev_dequeue_buffers()
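rte_rawdev passes the opaque rte_rawdev_obj_t context straight through to the driver's enqueue/dequeue hooks, and each driver above interprets it differently: dpaa2_cmdif casts it to its own descriptor, cnxk_bphy packs a queue number into the pointer value, and the skeleton driver dereferences it as an int queue id. A hedged sketch of a caller following the skeleton-style convention:

#include <rte_rawdev.h>

/* Enqueue a burst on a raw device whose driver, like the skeleton PMD,
 * interprets the opaque context as a pointer to the target queue id. */
static int
enqueue_on_queue(uint16_t dev_id, struct rte_rawdev_buf **bufs,
		 unsigned int count, int queue_id)
{
	/* The context is driver defined; here it must point at an int.
	 * Taking the address of the parameter is fine: the call is synchronous. */
	rte_rawdev_obj_t context = &queue_id;

	return rte_rawdev_enqueue_buffers(dev_id, bufs, count, context);
}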
/dpdk/drivers/crypto/bcmfs/hw/
bcmfs5_rm.c
461 unsigned long context = 0; in bcmfs5_dequeue_qp() local
508 context = qp->ctx_pool[reqid]; in bcmfs5_dequeue_qp()
509 if (context == 0) in bcmfs5_dequeue_qp()
516 *ops = (void *)context; in bcmfs5_dequeue_qp()
bcmfs4_rm.c
530 unsigned long context = 0; in bcmfs4_dequeue_qp() local
577 context = qp->ctx_pool[reqid]; in bcmfs4_dequeue_qp()
578 if (context == 0) in bcmfs4_dequeue_qp()
585 *ops = (void *)context; in bcmfs4_dequeue_qp()
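In the bcmfs ring-manager dequeue paths, the request id reported by the hardware indexes a per-queue pool holding the saved operation context, which is then handed back to the caller. A generic sketch of that lookup; the structure layout and the clearing of the slot are assumptions, not the actual bcmfs code:

#include <stddef.h>

/* Hypothetical queue pair: ctx_pool maps request ids to saved op pointers. */
struct qp { unsigned long *ctx_pool; };

static void *
complete_request(struct qp *qp, unsigned int reqid)
{
	unsigned long context = qp->ctx_pool[reqid];

	if (context == 0)
		return NULL;            /* no operation recorded for this id */

	qp->ctx_pool[reqid] = 0;        /* slot can be reused */
	return (void *)context;         /* hand the original op back to the caller */
}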
/dpdk/drivers/raw/skeleton/
skeleton_rawdev.c
413 rte_rawdev_obj_t context) in skeleton_rawdev_enqueue_bufs() argument
424 q_id = *((int *)context); in skeleton_rawdev_enqueue_bufs()
435 rte_rawdev_obj_t context) in skeleton_rawdev_dequeue_bufs() argument
446 q_id = *((int *)context); in skeleton_rawdev_dequeue_bufs()
/dpdk/drivers/gpu/cuda/
cuda.c
618 input_ctx = (CUcontext)((uintptr_t)dev->mpshared->info.context); in cuda_dev_info_get()
728 input_ctx = (CUcontext)((uintptr_t)dev->mpshared->info.context); in cuda_mem_alloc()
780 mem_alloc_list_tail->ctx = (CUcontext)((uintptr_t)dev->mpshared->info.context); in cuda_mem_alloc()
822 input_ctx = (CUcontext)((uintptr_t)dev->mpshared->info.context); in cuda_mem_register()
903 mem_alloc_list_tail->ctx = (CUcontext)((uintptr_t)dev->mpshared->info.context); in cuda_mem_register()
1139 input_ctx = (CUcontext)((uintptr_t)dev->mpshared->info.context); in cuda_wmb()
1288 dev->mpshared->info.context = (uint64_t)pctx; in cuda_gpu_probe()
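Every cuda.c hit revolves around the same trick: the driver publishes its CUcontext in a shared uint64_t field (info.context) and recovers the pointer through uintptr_t casts. A standalone sketch of that round trip with a generic opaque handle, so no CUDA headers are required:

#include <stdint.h>

/* Generic stand-in for the shared info struct: the handle travels as a uint64_t. */
struct gpu_info { uint64_t context; };

/* Store an opaque driver handle (e.g. a CUcontext) into the shared field. */
static void
info_set_ctx(struct gpu_info *info, void *ctx)
{
	info->context = (uint64_t)(uintptr_t)ctx;
}

/* Recover the handle; the double cast avoids pointer-size warnings. */
static void *
info_get_ctx(const struct gpu_info *info)
{
	return (void *)(uintptr_t)info->context;
}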
/dpdk/doc/guides/sample_app_ug/
cmd_line.rst
90 A cmdline context is a list of commands that are listed in a NULL-terminated table, for example:
94 :start-after: Cmdline context list of commands in NULL-terminated table. 8<
95 :end-before: >8 End of context list.
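The cmd_line sample-application excerpt defines the cmdline context as a NULL-terminated table of command instances. A minimal sketch of such a table, assuming two command instances cmd_help and cmd_quit defined elsewhere (hypothetical names):

#include <cmdline_parse.h>

/* Command instances defined elsewhere with cmdline_parse_inst_t. */
extern cmdline_parse_inst_t cmd_help;
extern cmdline_parse_inst_t cmd_quit;

/* Cmdline context: NULL-terminated list of commands. */
cmdline_parse_ctx_t main_ctx[] = {
	(cmdline_parse_inst_t *)&cmd_help,
	(cmdline_parse_inst_t *)&cmd_quit,
	NULL, /* terminator marks the end of the context */
};

Such a table is typically handed to cmdline_stdin_new() together with a prompt string to start the interactive loop.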
/dpdk/drivers/vdpa/sfc/
sfc_vdpa_ops.h
62 sfc_vdpa_device_init(void *adapter, enum sfc_vdpa_context context);
/dpdk/lib/node/
meson.build
22 # Strict-aliasing rules are violated by uint8_t[] to context size casts.
/dpdk/doc/guides/nics/
af_xdp.rst
105 The context refers to the netdev,qid tuple.
131 can significantly improve single-core performance in this context.
141 kernel will attempt to process in the netdev's NAPI context. You can change
156 NAPI context from a watchdog timer instead of from softirqs. More information
