Searched refs:vlan_mask (Results 1 – 22 of 22) sorted by relevance

/f-stack/dpdk/app/test/
test_acl.h 29 uint16_t vlan_mask; /**< VLAN ID mask. */ member
216 .vlan_mask = 0x7f,
227 .vlan_mask = 0x5,
238 .vlan_mask = 0xffff,
407 .vlan_mask = 0xffff,
425 .vlan_mask = 0xffff,
443 .vlan_mask = 0xffff,
459 .vlan_mask = 0xffff,
test_acl.c 102 ro->field[RTE_ACL_IPV4VLAN_VLAN1_FIELD].mask_range.u16 = ri->vlan_mask; in acl_ipv4vlan_convert_rule()
603 ro->field[RTE_ACL_IPV4VLAN_VLAN1_FIELD].mask_range.u16 = ri->vlan_mask; in convert_rule()
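
The test_acl.c hits show the harness copying its rule's vlan_mask straight into the mask_range.u16 of the VLAN field when converting to the ACL library's per-field layout. A minimal sketch of that conversion step, using simplified hypothetical structs rather than the real rte_acl types:

#include <stdint.h>

/* Hypothetical, simplified mirror of what acl_ipv4vlan_convert_rule() does:
 * each field carries a value and a mask, and the VLAN field's mask is taken
 * verbatim from the rule's vlan_mask. */
struct ex_acl_field {
    uint16_t value;       /* VLAN TCI value to match */
    uint16_t mask_range;  /* bit mask applied to the value */
};

struct ex_ipv4vlan_rule {
    uint16_t vlan;
    uint16_t vlan_mask;   /* e.g. 0xffff for exact match, 0x5 for a sparse mask */
};

static void
ex_convert_vlan_field(const struct ex_ipv4vlan_rule *ri, struct ex_acl_field *fo)
{
    fo->value = ri->vlan;
    fo->mask_range = ri->vlan_mask;
}
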
/f-stack/dpdk/app/test-flow-perf/
items_gen.c 42 static struct rte_flow_item_vlan vlan_mask; in add_vlan() local
47 memset(&vlan_mask, 0, sizeof(struct rte_flow_item_vlan)); in add_vlan()
50 vlan_mask.tci = RTE_BE16(0xffff); in add_vlan()
54 items[items_counter].mask = &vlan_mask; in add_vlan()
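
In items_gen.c the generator zeroes a static struct rte_flow_item_vlan and sets tci to RTE_BE16(0xffff), so the generated pattern matches the whole TCI. A hedged sketch of building such a VLAN pattern item; the function name and use of file-scope statics are illustrative, while the rte_flow types and macros are the real API:

#include <string.h>
#include <rte_flow.h>
#include <rte_byteorder.h>

static struct rte_flow_item_vlan vlan_spec;
static struct rte_flow_item_vlan vlan_mask;

/* Fill one pattern item that matches a given VLAN ID with a full-TCI mask. */
static void
fill_vlan_item(struct rte_flow_item *item, uint16_t vlan_id)
{
    memset(&vlan_spec, 0, sizeof(vlan_spec));
    memset(&vlan_mask, 0, sizeof(vlan_mask));
    vlan_spec.tci = rte_cpu_to_be_16(vlan_id);
    vlan_mask.tci = RTE_BE16(0xffff);   /* match PCP, DEI and VID bits */

    item->type = RTE_FLOW_ITEM_TYPE_VLAN;
    item->spec = &vlan_spec;
    item->last = NULL;
    item->mask = &vlan_mask;
}
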
/f-stack/dpdk/drivers/net/i40e/
i40e_flow.c 2558 vlan_mask = item->mask; in i40e_flow_parse_fdir_pattern()
2562 if (vlan_mask->tci != in i40e_flow_parse_fdir_pattern()
2564 vlan_mask->tci != in i40e_flow_parse_fdir_pattern()
2566 vlan_mask->tci != in i40e_flow_parse_fdir_pattern()
2568 vlan_mask->tci != in i40e_flow_parse_fdir_pattern()
2579 if (vlan_spec && vlan_mask && vlan_mask->inner_type) { in i40e_flow_parse_fdir_pattern()
3721 vlan_mask = item->mask; in i40e_flow_parse_vxlan_pattern()
3732 if (vlan_mask->tci == in i40e_flow_parse_vxlan_pattern()
3952 vlan_mask = item->mask; in i40e_flow_parse_nvgre_pattern()
3963 if (vlan_mask->tci == in i40e_flow_parse_nvgre_pattern()
[all …]
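
The i40e fdir/VXLAN/NVGRE parsers above compare vlan_mask->tci against a small set of masks the hardware can program (the exact constants are truncated in this listing). A hedged sketch of that style of check; the accepted masks shown here (VID-only 0x0fff and full-TCI 0xffff) are assumptions for illustration, not the driver's exact list:

#include <errno.h>
#include <rte_flow.h>
#include <rte_byteorder.h>

/* Reject VLAN item masks that the (hypothetical) hardware cannot express. */
static int
check_vlan_mask(const struct rte_flow_item *item, struct rte_flow_error *error)
{
    const struct rte_flow_item_vlan *vlan_mask = item->mask;

    if (vlan_mask == NULL)
        return 0;   /* no mask supplied: nothing to validate */

    if (vlan_mask->tci != RTE_BE16(0x0fff) &&
        vlan_mask->tci != RTE_BE16(0xffff))
        return rte_flow_error_set(error, EINVAL,
                                  RTE_FLOW_ERROR_TYPE_ITEM, item,
                                  "unsupported VLAN TCI mask");
    return 0;
}
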
i40e_ethdev.c 10395 if (mirror_conf->vlan.vlan_mask & (1ULL << i)) { in i40e_mirror_rule_set()
/f-stack/dpdk/drivers/net/ice/
ice_switch_filter.c 506 const struct rte_flow_item_vlan *vlan_spec, *vlan_mask; in ice_switch_inset_get() local
1052 vlan_mask = item->mask; in ice_switch_inset_get()
1057 if ((!vlan_spec && vlan_mask) || in ice_switch_inset_get()
1058 (vlan_spec && !vlan_mask)) { in ice_switch_inset_get()
1065 if (vlan_spec && vlan_mask) { in ice_switch_inset_get()
1067 if (vlan_mask->tci) { in ice_switch_inset_get()
1071 vlan_mask->tci; in ice_switch_inset_get()
1075 if (vlan_mask->inner_type) { in ice_switch_inset_get()
1079 vlan_mask->inner_type; in ice_switch_inset_get()
/f-stack/dpdk/drivers/net/bnxt/
bnxt_flow.c 136 const struct rte_flow_item_vlan *vlan_spec, *vlan_mask; in bnxt_validate_and_parse_flow_type() local
284 vlan_mask = item->mask; in bnxt_validate_and_parse_flow_type()
293 if (vlan_mask->tci && in bnxt_validate_and_parse_flow_type()
294 vlan_mask->tci == RTE_BE16(0x0fff)) { in bnxt_validate_and_parse_flow_type()
308 if (vlan_mask->inner_type && in bnxt_validate_and_parse_flow_type()
309 vlan_mask->inner_type != RTE_BE16(0xffff)) { in bnxt_validate_and_parse_flow_type()
317 if (vlan_mask->inner_type) { in bnxt_validate_and_parse_flow_type()
bnxt_ethdev.c 1255 int vlan_mask = 0; in bnxt_dev_start_op() local
1298 vlan_mask |= ETH_VLAN_FILTER_MASK; in bnxt_dev_start_op()
1300 vlan_mask |= ETH_VLAN_STRIP_MASK; in bnxt_dev_start_op()
1301 rc = bnxt_vlan_offload_set_op(eth_dev, vlan_mask); in bnxt_dev_start_op()
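
In bnxt_dev_start_op() the local vlan_mask is not a flow mask at all but a bitmap of VLAN offload types (ETH_VLAN_FILTER_MASK, ETH_VLAN_STRIP_MASK) handed to the PMD's vlan_offload_set handler. The application-side counterpart goes through rte_eth_dev_get_vlan_offload()/rte_eth_dev_set_vlan_offload(); a minimal sketch, assuming the port is already configured:

#include <rte_ethdev.h>

/* Enable VLAN filtering and stripping on a port; the PMD then receives a
 * mask of the offload types that actually changed. */
static int
enable_vlan_offloads(uint16_t port_id)
{
    int vlan_offload = rte_eth_dev_get_vlan_offload(port_id);

    if (vlan_offload < 0)
        return vlan_offload;

    vlan_offload |= ETH_VLAN_FILTER_OFFLOAD | ETH_VLAN_STRIP_OFFLOAD;
    return rte_eth_dev_set_vlan_offload(port_id, vlan_offload);
}
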
/f-stack/dpdk/drivers/net/mlx5/
mlx5_trigger.c 1299 struct rte_flow_item_vlan vlan_mask = in mlx5_traffic_enable() local
1303 &vlan_spec, &vlan_mask); in mlx5_traffic_enable()
1308 &vlan_spec, &vlan_mask); in mlx5_traffic_enable()
1337 struct rte_flow_item_vlan vlan_mask = in mlx5_traffic_enable() local
1343 &vlan_mask); in mlx5_traffic_enable()
mlx5_rxtx_vec_neon.h 278 const uint32x4_t vlan_mask = in rxq_cq_decompress_v() local
288 vlan_mask); in rxq_cq_decompress_v()
290 vandq_u32(vlan_mask, in rxq_cq_decompress_v()
mlx5_rxtx_vec_sse.h 262 const __m128i vlan_mask = in rxq_cq_decompress_v() local
274 vlan_mask); in rxq_cq_decompress_v()
277 cv_mask), vlan_mask)); in rxq_cq_decompress_v()
mlx5_rxtx_vec_altivec.h 376 const vector unsigned char vlan_mask = in rxq_cq_decompress_v()
399 (vector unsigned long)vlan_mask); in rxq_cq_decompress_v()
403 vec_and((vector unsigned long)vlan_mask, in rxq_cq_decompress_v()
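
In the mlx5 vectorized Rx paths (NEON, SSE, Altivec) vlan_mask is a vector constant holding the VLAN-related ol_flags bits; it is ANDed with a per-descriptor condition so the flags are set only for completions whose VLAN bit is present. A standalone SSE sketch of that idea, with hypothetical flag values instead of the real DPDK/mlx5 constants:

#include <stdint.h>
#include <emmintrin.h>

/* Hypothetical ol_flags bits; DPDK's real ones are PKT_RX_VLAN and
 * PKT_RX_VLAN_STRIPPED. */
#define EX_RX_VLAN          (1u << 0)
#define EX_RX_VLAN_STRIPPED (1u << 1)

/* For four packets at once: lanes of cqe_flags that have the (single-bit)
 * vlan_bit set get EX_RX_VLAN | EX_RX_VLAN_STRIPPED, other lanes get 0. */
static inline __m128i
vlan_flags_x4(__m128i cqe_flags, uint32_t vlan_bit)
{
    const __m128i cv_bit = _mm_set1_epi32((int)vlan_bit);
    const __m128i vlan_mask = _mm_set1_epi32(EX_RX_VLAN | EX_RX_VLAN_STRIPPED);
    /* all-ones in lanes where the bit is set, all-zeros elsewhere */
    __m128i hit = _mm_cmpeq_epi32(_mm_and_si128(cqe_flags, cv_bit), cv_bit);

    return _mm_and_si128(hit, vlan_mask);
}
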
mlx5.h 1225 struct rte_flow_item_vlan *vlan_mask);
mlx5_flow.c 5933 struct rte_flow_item_vlan *vlan_mask) in mlx5_ctrl_flow_vlan() argument
5952 .mask = vlan_mask, in mlx5_ctrl_flow_vlan()
/f-stack/dpdk/drivers/net/ixgbe/
ixgbe_flow.c 191 const struct rte_flow_item_vlan *vlan_mask; in cons_parse_ntuple_filter() local
302 vlan_mask = item->mask; in cons_parse_ntuple_filter()
315 memcmp(vlan_mask, &vlan_null, in cons_parse_ntuple_filter()
1612 const struct rte_flow_item_vlan *vlan_mask; in ixgbe_parse_fdir_filter_normal() local
1791 vlan_mask = item->mask; in ixgbe_parse_fdir_filter_normal()
1795 rule->mask.vlan_tci_mask = vlan_mask->tci; in ixgbe_parse_fdir_filter_normal()
2316 const struct rte_flow_item_vlan *vlan_mask; in ixgbe_parse_fdir_filter_tunnel() local
2723 vlan_mask = item->mask; in ixgbe_parse_fdir_filter_tunnel()
2727 rule->mask.vlan_tci_mask = vlan_mask->tci; in ixgbe_parse_fdir_filter_tunnel()
ixgbe_ethdev.c 5747 uint64_t vlan_mask = 0; in ixgbe_mirror_rule_set() local
5779 if (mirror_conf->vlan.vlan_mask & (1ULL << i)) { in ixgbe_mirror_rule_set()
5794 vlan_mask |= (1ULL << reg_index); in ixgbe_mirror_rule_set()
5801 mv_lsb = vlan_mask & 0xFFFFFFFF; in ixgbe_mirror_rule_set()
5802 mv_msb = vlan_mask >> vlan_mask_offset; in ixgbe_mirror_rule_set()
5804 mr_info->mr_conf[rule_id].vlan.vlan_mask = in ixgbe_mirror_rule_set()
5805 mirror_conf->vlan.vlan_mask; in ixgbe_mirror_rule_set()
5807 if (mirror_conf->vlan.vlan_mask & (1ULL << i)) in ixgbe_mirror_rule_set()
5814 mr_info->mr_conf[rule_id].vlan.vlan_mask = 0; in ixgbe_mirror_rule_set()
/f-stack/dpdk/drivers/net/bnxt/tf_ulp/
ulp_rte_parser.c 722 const struct rte_flow_item_vlan *vlan_mask = item->mask; in ulp_rte_vlan_hdr_handler() local
754 if (vlan_mask) { in ulp_rte_vlan_hdr_handler()
755 vlan_tag = ntohs(vlan_mask->tci); in ulp_rte_vlan_hdr_handler()
781 ulp_rte_prsr_mask_copy(params, &idx, &vlan_mask->inner_type, in ulp_rte_vlan_hdr_handler()
782 sizeof(vlan_mask->inner_type)); in ulp_rte_vlan_hdr_handler()
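
ulp_rte_vlan_hdr_handler() converts the mask's tci to host order with ntohs() before splitting the tag into its sub-fields. An 802.1Q TCI (and likewise a TCI mask) decomposes into 3 priority bits (PCP), 1 DEI bit and a 12-bit VLAN ID, which is why a VID-only match uses the mask 0x0fff; a small sketch of that split:

#include <stdint.h>

struct vlan_tci_fields {
    uint8_t  pcp;   /* priority code point, bits 15..13 */
    uint8_t  dei;   /* drop eligible indicator, bit 12 */
    uint16_t vid;   /* VLAN identifier, bits 11..0 */
};

/* tci must already be in host byte order (e.g. after ntohs()). */
static struct vlan_tci_fields
split_tci(uint16_t tci)
{
    struct vlan_tci_fields f = {
        .pcp = (uint8_t)(tci >> 13),
        .dei = (uint8_t)((tci >> 12) & 0x1),
        .vid = (uint16_t)(tci & 0x0fff),
    };
    return f;
}
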
/f-stack/dpdk/drivers/net/hns3/
hns3_flow.c 484 const struct rte_flow_item_vlan *vlan_mask; in hns3_parse_vlan() local
502 vlan_mask = item->mask; in hns3_parse_vlan()
503 if (vlan_mask->tci) { in hns3_parse_vlan()
508 rte_be_to_cpu_16(vlan_mask->tci); in hns3_parse_vlan()
513 rte_be_to_cpu_16(vlan_mask->tci); in hns3_parse_vlan()
/f-stack/dpdk/drivers/net/mlx4/
mlx4_flow.c 1311 const struct rte_flow_item_vlan vlan_mask = { in mlx4_flow_internal() local
1385 .mask = &vlan_mask, in mlx4_flow_internal()
/f-stack/dpdk/lib/librte_ethdev/
rte_ethdev.h 822 uint64_t vlan_mask; /**< mask for valid VLAN ID. */ member
rte_ethdev.c 4269 mirror_conf->vlan.vlan_mask == 0) { in rte_eth_mirror_rule_set()
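
Here vlan_mask is the 64-bit validity bitmap inside struct rte_eth_vlan_mirror: each set bit marks the corresponding vlan_id[] entry as valid, and rte_eth_mirror_rule_set() rejects a VLAN mirror rule whose bitmap is empty (it is also the bitmap the i40e and ixgbe mirror code above iterates over). A hedged application-side sketch of filling such a rule; port, rule id, destination pool and VLAN IDs are illustrative values:

#include <string.h>
#include <rte_ethdev.h>

/* Mirror traffic tagged with VLAN 100 or 200 to pool 1 (illustrative). */
static int
mirror_two_vlans(uint16_t port_id)
{
    struct rte_eth_mirror_conf mr_conf;

    memset(&mr_conf, 0, sizeof(mr_conf));
    mr_conf.rule_type = ETH_MIRROR_VLAN;
    mr_conf.dst_pool = 1;
    mr_conf.vlan.vlan_id[0] = 100;
    mr_conf.vlan.vlan_id[1] = 200;
    mr_conf.vlan.vlan_mask = (1ULL << 0) | (1ULL << 1);  /* slots 0 and 1 valid */

    return rte_eth_mirror_rule_set(port_id, &mr_conf, 0 /* rule_id */, 1 /* on */);
}
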
/f-stack/dpdk/app/test-pmd/
cmdline.c 9319 mr_conf.vlan.vlan_mask |= 1ULL << i; in cmd_set_mirror_mask_parsed()
10313 uint16_t vlan_mask; member
10354 mask->vlan_tci_mask = rte_cpu_to_be_16(res->vlan_mask); in cmd_flow_director_mask_parsed()
10361 mask->vlan_tci_mask = rte_cpu_to_be_16(res->vlan_mask); in cmd_flow_director_mask_parsed()
10371 mask->vlan_tci_mask = rte_cpu_to_be_16(res->vlan_mask); in cmd_flow_director_mask_parsed()
10394 vlan_mask, RTE_UINT16);
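
The testpmd flow_director_mask handler stores the parsed vlan_mask with rte_cpu_to_be_16() because the flow director mask structure keeps the TCI mask in network byte order. A hedged sketch of the resulting assignment into a port's configuration; the surrounding config handling is illustrative, and the field path follows this DPDK version's legacy fdir API:

#include <rte_ethdev.h>
#include <rte_byteorder.h>

/* Store a CPU-order VLAN TCI mask (e.g. 0x0fff for VID-only matching) into
 * the port's flow director mask in big-endian order. */
static void
set_fdir_vlan_mask(struct rte_eth_conf *dev_conf, uint16_t vlan_mask)
{
    dev_conf->fdir_conf.mask.vlan_tci_mask = rte_cpu_to_be_16(vlan_mask);
}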