/dpdk/lib/eal/x86/include/

rte_atomic_64.h
    61: v->cnt = 0;    in rte_atomic64_init()
    67: return v->cnt;    in rte_atomic64_read()
    82: : [cnt] "=m" (v->cnt) /* output */    in rte_atomic64_add()
    84: "m" (v->cnt)    in rte_atomic64_add()
    94: : [cnt] "=m" (v->cnt) /* output */    in rte_atomic64_sub()
    106: : [cnt] "=m" (v->cnt) /* output */    in rte_atomic64_inc()
    117: : [cnt] "=m" (v->cnt) /* output */    in rte_atomic64_dec()
    131: [cnt] "=m" (v->cnt)    in rte_atomic64_add_return()
    151: : [cnt] "+m" (v->cnt), /* output */    in rte_atomic64_inc_and_test()
    166: : [cnt] "+m" (v->cnt), /* output */    in rte_atomic64_dec_and_test()
    [all …]
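These hits are the x86 inline-assembly backends of the legacy rte_atomic64 counter API (lock add/xadd behind the public wrappers). A minimal usage sketch of those wrappers, assuming the legacy rte_atomic API is available in the build; the counter name and the printf are illustrative:

```c
#include <stdio.h>
#include <rte_atomic.h>

/* A shared packet counter protected by the legacy rte_atomic64 API. */
static rte_atomic64_t pkt_cnt;

static void
stats_init(void)
{
        rte_atomic64_init(&pkt_cnt);            /* v->cnt = 0 */
}

static void
stats_update(unsigned int burst)
{
        rte_atomic64_add(&pkt_cnt, burst);      /* lock add on x86 */
        rte_atomic64_inc(&pkt_cnt);

        /* dec-and-test returns non-zero when the counter hits zero */
        if (rte_atomic64_dec_and_test(&pkt_cnt))
                printf("counter drained\n");
}

static int64_t
stats_read(void)
{
        return rte_atomic64_read(&pkt_cnt);
}
```

Newer DPDK code increasingly maps the same operations onto compiler atomics; the ppc implementation further down, for instance, backs this API with __atomic builtins.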
rte_atomic.h
    142: : [cnt] "=m" (v->cnt) /* output */    in rte_atomic16_inc()
    143: : "m" (v->cnt) /* input */    in rte_atomic16_inc()
    153: : [cnt] "=m" (v->cnt) /* output */    in rte_atomic16_dec()
    154: : "m" (v->cnt) /* input */    in rte_atomic16_dec()
    166: : [cnt] "+m" (v->cnt), /* output */    in rte_atomic16_inc_and_test()
    179: : [cnt] "+m" (v->cnt), /* output */    in rte_atomic16_dec_and_test()
    228: : [cnt] "=m" (v->cnt) /* output */    in rte_atomic32_inc()
    229: : "m" (v->cnt) /* input */    in rte_atomic32_inc()
    239: : [cnt] "=m" (v->cnt) /* output */    in rte_atomic32_dec()
    252: : [cnt] "+m" (v->cnt), /* output */    in rte_atomic32_inc_and_test()
    [all …]

rte_atomic_32.h
    90: tmp = v->cnt;    in rte_atomic64_init()
    91: success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,    in rte_atomic64_init()
    103: tmp = v->cnt;    in rte_atomic64_read()
    105: success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,    in rte_atomic64_read()
    118: tmp = v->cnt;    in rte_atomic64_set()
    119: success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,    in rte_atomic64_set()
    131: tmp = v->cnt;    in rte_atomic64_add()
    132: success = rte_atomic64_cmpset((volatile uint64_t *)&v->cnt,    in rte_atomic64_add()
    144: tmp = v->cnt;    in rte_atomic64_sub()
    169: tmp = v->cnt;    in rte_atomic64_add_return()
    [all …]

rte_rwlock.h
    18: if (likely(rte_try_tm(&rwl->cnt)))    in rte_rwlock_read_lock_tm()
    26: if (unlikely(rwl->cnt))    in rte_rwlock_read_unlock_tm()
    35: if (likely(rte_try_tm(&rwl->cnt)))    in rte_rwlock_write_lock_tm()
    43: if (unlikely(rwl->cnt))    in rte_rwlock_write_unlock_tm()
/dpdk/lib/eal/include/generic/

rte_atomic.h
    207: v->cnt = 0;    in rte_atomic16_init()
    221: return v->cnt;    in rte_atomic16_read()
    407: v->cnt = 0;    in rte_atomic16_clear()
    490: v->cnt = 0;    in rte_atomic32_init()
    504: return v->cnt;    in rte_atomic32_read()
    690: v->cnt = 0;    in rte_atomic32_clear()
    777: v->cnt = 0;    in rte_atomic64_init()
    783: tmp = v->cnt;    in rte_atomic64_init()
    807: return v->cnt;    in rte_atomic64_read()
    813: tmp = v->cnt;    in rte_atomic64_read()
    [all …]
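The generic header provides the portable definitions behind the same counter API. One common pattern built directly on these counters is a one-shot guard around initialization; a sketch using rte_atomic32_test_and_set()/rte_atomic32_clear() from this header, with made-up function names:

```c
#include <rte_atomic.h>

static rte_atomic32_t init_once;        /* statically zeroed: "not taken" */

static void
do_one_time_setup(void)
{
        /*
         * rte_atomic32_test_and_set() returns non-zero only for the caller
         * that atomically flipped the value from 0 to 1.
         */
        if (rte_atomic32_test_and_set(&init_once)) {
                /* ... perform the setup exactly once ... */
        }
}

static void
teardown(void)
{
        rte_atomic32_clear(&init_once); /* v->cnt = 0, allow setup again */
}
```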
rte_rwlock.h
    51: rwl->cnt = 0;    in rte_rwlock_init()
    67: x = __atomic_load_n(&rwl->cnt, __ATOMIC_RELAXED);    in rte_rwlock_read_lock()
    73: success = __atomic_compare_exchange_n(&rwl->cnt, &x, x + 1, 1,    in rte_rwlock_read_lock()
    99: x = __atomic_load_n(&rwl->cnt, __ATOMIC_RELAXED);    in rte_rwlock_read_trylock()
    103: success = __atomic_compare_exchange_n(&rwl->cnt, &x, x + 1, 1,    in rte_rwlock_read_trylock()
    119: __atomic_fetch_sub(&rwl->cnt, 1, __ATOMIC_RELEASE);    in rte_rwlock_read_unlock()
    141: x = __atomic_load_n(&rwl->cnt, __ATOMIC_RELAXED);    in rte_rwlock_write_trylock()
    142: if (x != 0 || __atomic_compare_exchange_n(&rwl->cnt, &x, -1, 1,    in rte_rwlock_write_trylock()
    162: x = __atomic_load_n(&rwl->cnt, __ATOMIC_RELAXED);    in rte_rwlock_write_lock()
    168: success = __atomic_compare_exchange_n(&rwl->cnt, &x, -1, 1,    in rte_rwlock_write_lock()
    [all …]
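Here cnt encodes the lock state: positive values count active readers, -1 marks an exclusive writer, and the CAS loops above move between those states. A minimal sketch of the public rwlock API protecting a shared value (the cfg_* names are illustrative):

```c
#include <rte_rwlock.h>

static rte_rwlock_t cfg_lock;
static int cfg_value;

static void
cfg_init(void)
{
        rte_rwlock_init(&cfg_lock);             /* rwl->cnt = 0 */
}

static int
cfg_read(void)
{
        int v;

        rte_rwlock_read_lock(&cfg_lock);        /* cnt > 0: shared readers */
        v = cfg_value;
        rte_rwlock_read_unlock(&cfg_lock);
        return v;
}

static void
cfg_write(int v)
{
        rte_rwlock_write_lock(&cfg_lock);       /* cnt == -1: exclusive */
        cfg_value = v;
        rte_rwlock_write_unlock(&cfg_lock);
}
```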
/dpdk/lib/eal/ppc/include/

rte_atomic.h
    64: __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);    in rte_atomic16_inc()
    70: __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);    in rte_atomic16_dec()
    106: __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);    in rte_atomic32_inc()
    112: __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);    in rte_atomic32_dec()
    143: v->cnt = 0;    in rte_atomic64_init()
    149: return v->cnt;    in rte_atomic64_read()
    155: v->cnt = new_value;    in rte_atomic64_set()
    161: __atomic_add_fetch(&v->cnt, inc, __ATOMIC_ACQUIRE);    in rte_atomic64_add()
    173: __atomic_add_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);    in rte_atomic64_inc()
    179: __atomic_sub_fetch(&v->cnt, 1, __ATOMIC_ACQUIRE);    in rte_atomic64_dec()
    [all …]
/dpdk/drivers/net/mlx4/

mlx4_utils.c
    54: unsigned int cnt, int zero, int socket)    in mlx4_mallocv_inline() (argument)
    65: for (i = 0; i < cnt; ++i) {    in mlx4_mallocv_inline()
    93: for (i = 0; i != cnt; ++i)    in mlx4_mallocv_inline()
    139: unsigned int cnt)    in mlx4_mallocv() (argument)
    141: return mlx4_mallocv_inline(type, vec, cnt, 0, SOCKET_ID_ANY);    in mlx4_mallocv()
    152: unsigned int cnt)    in mlx4_zmallocv() (argument)
    154: return mlx4_mallocv_inline(type, vec, cnt, 1, SOCKET_ID_ANY);    in mlx4_zmallocv()
    171: unsigned int cnt, int socket)    in mlx4_mallocv_socket() (argument)
    173: return mlx4_mallocv_inline(type, vec, cnt, 0, socket);    in mlx4_mallocv_socket()
    185: unsigned int cnt, int socket)    in mlx4_zmallocv_socket() (argument)
    [all …]
mlx4_utils.h
    104: unsigned int cnt);
    106: unsigned int cnt);
    108: unsigned int cnt, int socket);
    110: unsigned int cnt, int socket);
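The prototypes above (and the mlx4_mallocv*() hits in mlx4_utils.c) describe helpers that allocate a whole vector of buffers and roll back on the first failure. A rough, hypothetical equivalent built on the public rte_malloc API; struct alloc_req and allocv() are made-up names, not the mlx4 definitions:

```c
#include <rte_malloc.h>

/* Hypothetical request vector; the real mlx4 code uses its own struct. */
struct alloc_req {
        size_t size;
        void **addr;    /* where to store the allocated pointer */
};

static int
allocv(const char *type, struct alloc_req *vec, unsigned int cnt,
       int zero, int socket)
{
        unsigned int i;

        for (i = 0; i < cnt; ++i) {
                void *p = zero ?
                        rte_zmalloc_socket(type, vec[i].size, 0, socket) :
                        rte_malloc_socket(type, vec[i].size, 0, socket);

                if (p == NULL)
                        goto rollback;
                *vec[i].addr = p;
        }
        return 0;
rollback:
        /* Free everything allocated so far and clear the output slots. */
        while (i-- > 0) {
                rte_free(*vec[i].addr);
                *vec[i].addr = NULL;
        }
        return -1;
}
```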
/dpdk/drivers/crypto/nitrox/

nitrox_sym_reqmgr.c
    260: sglist[cnt].len = len;    in fill_sglist()
    264: cnt++;    in fill_sglist()
    293: cnt++;    in create_sglist_from_mbuf()
    302: cnt++;    in create_sglist_from_mbuf()
    351: int err, cnt = 0;    in create_cipher_outbuf() (local)
    360: cnt++;    in create_cipher_outbuf()
    376: cnt++;    in create_cipher_outbuf()
    554: int i, cnt;    in create_aead_inplace_outbuf() (local)
    577: int cnt = 0;    in create_aead_outbuf() (local)
    584: cnt++;    in create_aead_outbuf()
    [all …]
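create_sglist_from_mbuf() walks an mbuf chain and appends one scatter-gather entry per segment, bumping cnt as it goes. A simplified, hypothetical sketch of that pattern using the public mbuf accessors; struct sg_entry and the helper name are illustrative, not the nitrox descriptor layout:

```c
#include <errno.h>
#include <rte_common.h>
#include <rte_mbuf.h>

/* Hypothetical SG entry; the real driver uses its own descriptor format. */
struct sg_entry {
        rte_iova_t iova;
        uint32_t len;
};

static int
sglist_from_mbuf(struct sg_entry *sgl, int max, struct rte_mbuf *m,
                 uint32_t off, uint32_t datalen)
{
        int cnt = 0;

        /* Skip segments fully covered by the starting offset. */
        while (m != NULL && off >= rte_pktmbuf_data_len(m)) {
                off -= rte_pktmbuf_data_len(m);
                m = m->next;
        }
        for (; m != NULL && datalen != 0; m = m->next) {
                uint32_t len = RTE_MIN(datalen,
                                       rte_pktmbuf_data_len(m) - off);

                if (cnt == max)
                        return -ENOMEM;
                sgl[cnt].iova = rte_pktmbuf_iova_offset(m, off);
                sgl[cnt].len = len;
                cnt++;
                datalen -= len;
                off = 0;
        }
        return cnt;     /* number of entries filled */
}
```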
nitrox_hal.h
    24: uint64_t cnt : 32;    (member)
    26: uint64_t cnt : 32;
    45: uint64_t cnt : 32;    (member)
    47: uint64_t cnt : 32;
    123: uint64_t cnt : 32;    (member)
    125: uint64_t cnt : 32;
/dpdk/app/test/

test_ipsec_perf.c
    62: struct stats_counter cnt;    (member)
    297: sa->cnt.nb_prepare_call++;    in packet_prepare()
    298: sa->cnt.nb_prepare_pkt += k;    in packet_prepare()
    325: sa->cnt.nb_process_call++;    in packet_process()
    326: sa->cnt.nb_process_pkt += k;    in packet_process()
    392: sa->cnt.nb_prepare_call = 0;    in fill_ipsec_sa_out()
    393: sa->cnt.nb_prepare_pkt = 0;    in fill_ipsec_sa_out()
    394: sa->cnt.nb_process_call = 0;    in fill_ipsec_sa_out()
    395: sa->cnt.nb_process_pkt = 0;    in fill_ipsec_sa_out()
    415: sa->cnt.nb_prepare_pkt = 0;    in fill_ipsec_sa_in()
    [all …]
test_reorder.c
    236: unsigned i, cnt;    in test_reorder_drain() (local)
    251: cnt = rte_reorder_drain(b, robufs, 1);    in test_reorder_drain()
    252: if (cnt != 0) {    in test_reorder_drain()
    273: cnt = rte_reorder_drain(b, robufs, 1);    in test_reorder_drain()
    274: if (cnt != 1) {    in test_reorder_drain()
    276: __func__, __LINE__, cnt);    in test_reorder_drain()
    306: cnt = rte_reorder_drain(b, robufs, 4);    in test_reorder_drain()
    307: if (cnt != 3) {    in test_reorder_drain()
    309: __func__, __LINE__, cnt);    in test_reorder_drain()
    322: if (cnt != 0) {    in test_reorder_drain()
    [all …]
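The test exercises the reorder library: packets are inserted with a sequence number and rte_reorder_drain() returns however many in-order packets are ready, which is what cnt is compared against above. A minimal sketch, assuming a DPDK release that exposes the rte_reorder_seqn() dynamic-field accessor; the buffer size and function names are illustrative:

```c
#include <rte_lcore.h>
#include <rte_mbuf.h>
#include <rte_reorder.h>

#define REORDER_BUF_SIZE 512

static struct rte_reorder_buffer *rob;

static int
reorder_setup(void)
{
        rob = rte_reorder_create("ro_buf", rte_socket_id(),
                                 REORDER_BUF_SIZE);
        return rob == NULL ? -1 : 0;
}

/* Called per received packet with the sequence number it carried. */
static void
reorder_rx(struct rte_mbuf *m, uint32_t seq)
{
        *rte_reorder_seqn(m) = seq;
        if (rte_reorder_insert(rob, m) != 0)
                rte_pktmbuf_free(m);    /* outside the reorder window */
}

/* Pull out whatever is in sequence and pass it downstream. */
static unsigned int
reorder_drain(struct rte_mbuf *out[], unsigned int max)
{
        unsigned int cnt = rte_reorder_drain(rob, out, max);

        return cnt;     /* 0 when nothing is ready yet */
}
```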
/dpdk/examples/ipsec-secgw/

ipsec_process.c
    183: unsigned int cnt)    in ipsec_prepare_crypto_group() (argument)
    186: struct rte_crypto_op *cop[cnt];    in ipsec_prepare_crypto_group()
    193: for (j = 0; j != cnt; j++) {    in ipsec_prepare_crypto_group()
    222: for (j = 0; j != cnt; j++) {    in prep_process_group()
    266: prep_process_group(sa, mb, cnt);    in ipsec_process_inline_group()
    285: prep_process_group(sa, mb, cnt);    in ipsec_process_cpu_group()
    336: pg->m, pg->cnt);    in ipsec_process()
    341: trf, pg->m, pg->cnt);    in ipsec_process()
    345: trf, pg->m, pg->cnt);    in ipsec_process()
    353: if (k != pg->cnt)    in ipsec_process()
    [all …]
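ipsec-secgw batches packets per SA into groups and hands each group's mbuf array (pg->m) and count (pg->cnt) to librte_ipsec. A rough sketch of handling one such group with the public API, assuming the session is already initialized and that packets rejected by rte_ipsec_pkt_process() are left past the returned count:

```c
#include <rte_ipsec.h>
#include <rte_mbuf.h>

/* Process one per-SA group of packets; drop whatever the session rejects. */
static uint16_t
process_group(struct rte_ipsec_session *ss, struct rte_ipsec_group *pg)
{
        uint16_t k;
        uint32_t i;

        k = rte_ipsec_pkt_process(ss, pg->m, pg->cnt);

        /* Packets at index k and beyond failed processing; free them. */
        for (i = k; i != pg->cnt; i++)
                rte_pktmbuf_free(pg->m[i]);

        return k;
}
```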
/dpdk/drivers/net/bnxt/tf_core/

tf_em_common.c
    444: while (num_entries > cnt && cnt <= TF_EM_MAX_ENTRIES)    in tf_em_size_table()
    445: cnt *= 2;    in tf_em_size_table()
    446: num_entries = cnt;    in tf_em_size_table()
    505: uint32_t cnt;    in tf_em_validate_num_entries() (local)
    521: cnt = TF_EM_MIN_ENTRIES;    in tf_em_validate_num_entries()
    524: cnt *= 2;    in tf_em_validate_num_entries()
    550: cnt = TF_EM_MIN_ENTRIES;    in tf_em_validate_num_entries()
    553: cnt *= 2;    in tf_em_validate_num_entries()
    578: cnt = TF_EM_MIN_ENTRIES;    in tf_em_validate_num_entries()
    581: cnt *= 2;    in tf_em_validate_num_entries()
    [all …]
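tf_em_validate_num_entries() repeatedly doubles cnt from TF_EM_MIN_ENTRIES until it covers the requested table size, i.e. it rounds the request up to a power of two within a fixed range. A generic illustration of that rounding; the helper name and parameters are hypothetical:

```c
#include <stdint.h>

/*
 * Round num_entries up to the next power of two between min and max
 * (both assumed to be powers of two). Returns 0 if the request does
 * not fit within max.
 */
static uint32_t
round_up_pow2_range(uint32_t num_entries, uint32_t min, uint32_t max)
{
        uint32_t cnt = min;

        while (num_entries > cnt && cnt <= max)
                cnt *= 2;

        return (cnt <= max) ? cnt : 0;
}
```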
/dpdk/examples/vm_power_manager/guest_cli/

main.c
    56: int i, cnt, idx;    in parse_args() (local)
    80: cnt = parse_set(optarg, hours, MAX_HOURS);    in parse_args()
    81: if (cnt < 0) {    in parse_args()
    102: cnt = parse_set(optarg, cores,    in parse_args()
    104: if (cnt < 0) {    in parse_args()
    120: cnt = parse_set(optarg, ports,    in parse_args()
    122: if (cnt < 0) {    in parse_args()
/dpdk/drivers/dma/cnxk/

cnxk_dmadev.c
    353: int cnt;    in cnxk_dmadev_completed() (local)
    356: for (cnt = 0; cnt < nb_cpls; cnt++) {    in cnxk_dmadev_completed()
    367: *last_idx = cnt - 1;    in cnxk_dmadev_completed()
    368: dpivf->conf.c_desc.tail = cnt;    in cnxk_dmadev_completed()
    369: dpivf->stats.completed += cnt;    in cnxk_dmadev_completed()
    371: return cnt;    in cnxk_dmadev_completed()
    380: int cnt;    in cnxk_dmadev_completed_status() (local)
    384: for (cnt = 0; cnt < nb_cpls; cnt++) {    in cnxk_dmadev_completed_status()
    392: *last_idx = cnt - 1;    in cnxk_dmadev_completed_status()
    394: dpivf->stats.completed += cnt;    in cnxk_dmadev_completed_status()
    [all …]
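These driver callbacks back the generic dmadev completion API, where cnt becomes the number of finished operations reported to the caller. A minimal polling sketch using the public rte_dma_completed()/rte_dma_completed_status() calls; the device id, vchan and batch size are illustrative, and the vchan is assumed to be configured and started:

```c
#include <stdbool.h>
#include <rte_dmadev.h>

#define BATCH 32

/* Poll one vchan and return how many copies finished since the last call. */
static uint16_t
poll_completions(int16_t dev_id, uint16_t vchan)
{
        uint16_t last_idx = 0;
        bool has_error = false;
        uint16_t cnt;

        cnt = rte_dma_completed(dev_id, vchan, BATCH, &last_idx, &has_error);
        if (has_error) {
                enum rte_dma_status_code status[BATCH];

                /* Re-check with per-operation status codes. */
                cnt = rte_dma_completed_status(dev_id, vchan, BATCH,
                                               &last_idx, status);
        }
        return cnt;
}
```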
/dpdk/lib/rcu/

rte_rcu_qsbr.h
    76: uint64_t cnt;    (member)
    318: __atomic_store_n(&v->qsbr_cnt[thread_id].cnt,    in rte_rcu_qsbr_thread_online()
    364: __atomic_store_n(&v->qsbr_cnt[thread_id].cnt,    in rte_rcu_qsbr_thread_offline()
    502: if (t != __atomic_load_n(&v->qsbr_cnt[thread_id].cnt, __ATOMIC_RELAXED))    in rte_rcu_qsbr_quiescent()
    503: __atomic_store_n(&v->qsbr_cnt[thread_id].cnt,    in rte_rcu_qsbr_quiescent()
    537: &v->qsbr_cnt[id + j].cnt,    in __rte_rcu_qsbr_check_selective()
    591: struct rte_rcu_qsbr_cnt *cnt;    in __rte_rcu_qsbr_check_all() (local)
    595: for (i = 0, cnt = v->qsbr_cnt; i < v->max_threads; i++, cnt++) {    in __rte_rcu_qsbr_check_all()
    600: c = __atomic_load_n(&cnt->cnt, __ATOMIC_ACQUIRE);    in __rte_rcu_qsbr_check_all()
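Here cnt is each reader thread's last reported quiescent-state counter, which rte_rcu_qsbr_check() compares against the writer's token. A minimal sketch of the surrounding public API; the thread-id handling and loop bodies are illustrative:

```c
#include <rte_common.h>
#include <rte_malloc.h>
#include <rte_rcu_qsbr.h>

static struct rte_rcu_qsbr *qs;

static int
rcu_setup(uint32_t max_threads)
{
        size_t sz = rte_rcu_qsbr_get_memsize(max_threads);

        qs = rte_zmalloc(NULL, sz, RTE_CACHE_LINE_SIZE);
        if (qs == NULL)
                return -1;
        return rte_rcu_qsbr_init(qs, max_threads);
}

/* Reader thread: register once, then report quiescence between lookups. */
static void
reader_loop(uint32_t thread_id)
{
        rte_rcu_qsbr_thread_register(qs, thread_id);
        rte_rcu_qsbr_thread_online(qs, thread_id);      /* stores to ...cnt */

        for (;;) {
                /* ... read-side critical section (e.g. table lookup) ... */
                rte_rcu_qsbr_quiescent(qs, thread_id);  /* updates cnt */
        }
}

/* Writer: remove an element, then wait until all readers have moved on. */
static void
writer_sync(void)
{
        uint64_t token = rte_rcu_qsbr_start(qs);

        rte_rcu_qsbr_check(qs, token, true);    /* waits until cnt >= token */
        /* now safe to free the removed element */
}
```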
/dpdk/lib/ipsec/

crypto.h
    21: uint32_t cnt;    (member)
    33: uint32_t cnt;    (member)
    63: uint32_t cnt;    (member)
    102: uint32_t cnt;    (member)
    131: ctr->cnt = rte_cpu_to_be_32(1);    in aes_ctr_cnt_blk_fill()
    141: chacha20_poly1305->cnt = rte_cpu_to_be_32(1);    in aead_chacha20_poly1305_iv_fill()
    149: gcm->cnt = rte_cpu_to_be_32(1);    in aead_gcm_iv_fill()
    163: ccm->cnt = rte_cpu_to_be_32(1);    in aead_ccm_iv_fill()
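Each of these structures ends in a 32-bit cnt field preset to big-endian 1: the initial block-counter value used when building the per-packet IV/counter block for AES-CTR, AES-GCM/CCM and ChaCha20-Poly1305. A hypothetical layout in the same spirit; the struct and field names are illustrative, not the librte_ipsec definitions:

```c
#include <stdint.h>
#include <rte_common.h>
#include <rte_byteorder.h>

/* Hypothetical 16-byte AES-CTR style counter block: salt | IV | counter. */
struct ctr_blk {
        uint32_t salt;          /* per-SA salt from key material */
        uint64_t iv;            /* per-packet IV */
        uint32_t cnt;           /* block counter, starts at 1 */
} __rte_packed;

static void
ctr_blk_fill(struct ctr_blk *blk, uint64_t iv, uint32_t salt)
{
        blk->salt = salt;
        blk->iv = iv;
        blk->cnt = rte_cpu_to_be_32(1); /* first counter value is 1 */
}
```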
/dpdk/lib/ring/

rte_ring_rts_elem_pvt.h
    41: if (++nt.val.cnt == h.val.cnt)    in __rte_ring_rts_update_tail()
    109: nh.val.cnt = oh.val.cnt + 1;    in __rte_ring_rts_move_prod_head()
    164: nh.val.cnt = oh.val.cnt + 1;    in __rte_ring_rts_move_cons_head()
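In the RTS (relaxed tail sync) ring, val.cnt is an update counter carried next to the head/tail position so that stalled updates can be detected. From an application's point of view the mode is selected purely by creation flags; a sketch with an illustrative ring name and size:

```c
#include <rte_lcore.h>
#include <rte_ring.h>

static struct rte_ring *r;

static int
ring_setup(void)
{
        /* Multi-producer/multi-consumer ring using the RTS sync mode. */
        r = rte_ring_create("rts_ring", 1024, rte_socket_id(),
                            RING_F_MP_RTS_ENQ | RING_F_MC_RTS_DEQ);
        return r == NULL ? -1 : 0;
}

static unsigned int
ring_xfer(void *objs[], unsigned int n)
{
        unsigned int cnt;

        /* Same enqueue/dequeue calls as the default sync mode. */
        cnt = rte_ring_enqueue_burst(r, objs, n, NULL);
        cnt = rte_ring_dequeue_burst(r, objs, cnt, NULL);
        return cnt;
}
```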
/dpdk/drivers/net/i40e/base/

i40e_lan_hmc.c
    100: full_obj->cnt = 0;    in i40e_init_lan_hmc()
    107: obj->cnt = txq_num;    in i40e_init_lan_hmc()
    122: full_obj->cnt += obj->cnt;    in i40e_init_lan_hmc()
    127: obj->cnt = rxq_num;    in i40e_init_lan_hmc()
    129: (hw->hmc.hmc_obj[I40E_HMC_LAN_TX].cnt *    in i40e_init_lan_hmc()
    145: full_obj->cnt += obj->cnt;    in i40e_init_lan_hmc()
    150: obj->cnt = fcoe_cntx_num;    in i40e_init_lan_hmc()
    152: (hw->hmc.hmc_obj[I40E_HMC_LAN_RX].cnt *    in i40e_init_lan_hmc()
    168: full_obj->cnt += obj->cnt;    in i40e_init_lan_hmc()
    173: obj->cnt = fcoe_filt_num;    in i40e_init_lan_hmc()
    [all …]
/dpdk/drivers/net/hns3/

hns3_stats.c
    424: uint64_t cnt;    in hns3_update_port_rpu_drop_stats() (local)
    498: uint64_t cnt;    in hns3_update_port_rx_ssu_drop_stats() (local)
    523: uint64_t cnt;    in hns3_update_port_tx_ssu_drop_stats() (local)
    591: uint32_t cnt;    in hns3_rcb_rx_ring_stats_get() (local)
    602: uint32_t cnt;    in hns3_rcb_tx_ring_stats_get() (local)
    1010: int cnt = *count;    in hns3_imissed_stats_get() (local)
    1020: xstats[cnt].id = cnt;    in hns3_imissed_stats_get()
    1021: cnt++;    in hns3_imissed_stats_get()
    1024: *count = cnt;    in hns3_imissed_stats_get()
    1196: cnt++;    in hns3_imissed_stats_name_get()
    [all …]
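The hns3 code above fills per-queue and imissed counters into the driver's extended statistics; applications read the same values back through the generic xstats calls. A minimal dump routine, assuming the port is already started; the formatting is illustrative:

```c
#include <stdio.h>
#include <stdlib.h>
#include <inttypes.h>
#include <rte_ethdev.h>

static void
dump_xstats(uint16_t port_id)
{
        int cnt, i;
        struct rte_eth_xstat *vals;
        struct rte_eth_xstat_name *names;

        cnt = rte_eth_xstats_get(port_id, NULL, 0);     /* query count only */
        if (cnt <= 0)
                return;

        vals = calloc(cnt, sizeof(*vals));
        names = calloc(cnt, sizeof(*names));
        if (vals == NULL || names == NULL)
                goto out;

        if (rte_eth_xstats_get(port_id, vals, cnt) == cnt &&
            rte_eth_xstats_get_names(port_id, names, cnt) == cnt) {
                for (i = 0; i < cnt; i++)
                        printf("%s: %" PRIu64 "\n",
                               names[vals[i].id].name, vals[i].value);
        }
out:
        free(vals);
        free(names);
}
```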
/dpdk/drivers/net/mlx5/

mlx5_flow_verbs.c
    231: if (cnt)    in flow_verbs_counter_new()
    234: if (!cnt) {    in flow_verbs_counter_new()
    264: cnt = MLX5_POOL_GET_CNT(pool, 0);    in flow_verbs_counter_new()
    270: i = MLX5_CNT_ARRAY_IDX(pool, cnt);    in flow_verbs_counter_new()
    275: cnt->dcs_when_active = cnt->dcs_when_free;    in flow_verbs_counter_new()
    276: cnt->hits = 0;    in flow_verbs_counter_new()
    277: cnt->bytes = 0;    in flow_verbs_counter_new()
    298: struct mlx5_flow_counter *cnt;    in flow_verbs_counter_release() (local)
    333: cnt->dcs_when_active,    in flow_verbs_counter_query()
    359: cnt->hits = counters[0];    in flow_verbs_counter_query()
    [all …]
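The verbs backend keeps hits/bytes in these per-flow counters; an application reads them back by querying the flow's COUNT action. A sketch using the generic rte_flow API, assuming the flow was created with a COUNT action and that the port and flow handles come from elsewhere:

```c
#include <stdio.h>
#include <inttypes.h>
#include <rte_flow.h>

static int
query_flow_counter(uint16_t port_id, struct rte_flow *flow)
{
        struct rte_flow_query_count cnt = { .reset = 1 };  /* clear on read */
        const struct rte_flow_action action = {
                .type = RTE_FLOW_ACTION_TYPE_COUNT,
        };
        struct rte_flow_error err;

        if (rte_flow_query(port_id, flow, &action, &cnt, &err) != 0)
                return -1;

        printf("hits=%" PRIu64 " bytes=%" PRIu64 "\n", cnt.hits, cnt.bytes);
        return 0;
}
```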
/dpdk/lib/stack/

rte_stack_lf_c11.h
    51: new_head.cnt = old_head.cnt + 1;    in __rte_stack_lf_push_elems()
    142: new_head.cnt = old_head.cnt + 1;    in __rte_stack_lf_pop_elems()
rte_stack_lf_generic.h
    58: new_head.cnt = old_head.cnt + 1;    in __rte_stack_lf_push_elems()
    137: new_head.cnt = old_head.cnt + 1;    in __rte_stack_lf_pop_elems()
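In both lock-free stack variants, cnt is the modification counter paired with the list head so the compare-and-swap can detect ABA. Applications only select this behaviour via the RTE_STACK_F_LF creation flag; a sketch with illustrative names and sizes:

```c
#include <rte_lcore.h>
#include <rte_stack.h>

static struct rte_stack *s;

static int
stack_setup(void)
{
        /* Lock-free (head + cnt, ABA-protected) variant of the stack. */
        s = rte_stack_create("lf_stack", 1024, rte_socket_id(),
                             RTE_STACK_F_LF);
        return s == NULL ? -1 : 0;
}

static unsigned int
stack_roundtrip(void *objs[], unsigned int n)
{
        unsigned int cnt;

        cnt = rte_stack_push(s, objs, n);
        cnt = rte_stack_pop(s, objs, cnt);
        return cnt;
}
```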
|