
Searched refs:sent_seq (Results 1 – 25 of 26) sorted by relevance


/dpdk/lib/gro/
gro_tcp4.h 66 uint32_t sent_seq; member
208 uint32_t sent_seq, in merge_two_tcp4_packets() argument
245 item->sent_seq = sent_seq; in merge_two_tcp4_packets()
263 uint32_t sent_seq, in check_seq_option() argument
294 if ((sent_seq == item->sent_seq + len) && (is_atomic || in check_seq_option()
298 else if ((sent_seq + tcp_dl == item->sent_seq) && (is_atomic || in check_seq_option()
gro_tcp4.c 103 uint32_t sent_seq, in insert_new_item() argument
117 tbl->items[item_idx].sent_seq = sent_seq; in insert_new_item()
199 uint32_t sent_seq; in gro_tcp4_reassemble() local
243 sent_seq = rte_be_to_cpu_32(tcp_hdr->sent_seq); in gro_tcp4_reassemble()
273 INVALID_ARRAY_INDEX, sent_seq, ip_id, in gro_tcp4_reassemble()
297 sent_seq, ip_id, pkt->l4_len, tcp_dl, 0, in gro_tcp4_reassemble()
301 pkt, cmp, sent_seq, ip_id, 0)) in gro_tcp4_reassemble()
309 sent_seq, ip_id, is_atomic) == in gro_tcp4_reassemble()
319 if (insert_new_item(tbl, pkt, start_time, prev_idx, sent_seq, in gro_tcp4_reassemble()
gro_vxlan_tcp4.c 102 uint32_t sent_seq, in insert_new_item() argument
118 tbl->items[item_idx].inner_item.sent_seq = sent_seq; in insert_new_item()
211 uint32_t sent_seq, in check_vxlan_seq_option() argument
247 uint32_t sent_seq, in merge_two_vxlan_tcp4_packets() argument
297 uint32_t sent_seq; in gro_vxlan_tcp4_reassemble() local
358 sent_seq = rte_be_to_cpu_32(tcp_hdr->sent_seq); in gro_vxlan_tcp4_reassemble()
397 INVALID_ARRAY_INDEX, sent_seq, outer_ip_id, in gro_vxlan_tcp4_reassemble()
418 sent_seq, outer_ip_id, ip_id, pkt->l4_len, in gro_vxlan_tcp4_reassemble()
422 pkt, cmp, sent_seq, in gro_vxlan_tcp4_reassemble()
431 sent_seq, outer_ip_id, in gro_vxlan_tcp4_reassemble()
[all …]
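
The gro_tcp4 and gro_vxlan_tcp4 hits above all revolve around the same test: check_seq_option() only merges a packet into a stored item when its sequence number is contiguous with the stored payload, either starting right after it (sent_seq == item->sent_seq + len) or ending right where it begins (sent_seq + tcp_dl == item->sent_seq). A minimal sketch of that contiguity check, with the IP ID, TCP option and length conditions left out (hypothetical helper, not the DPDK function):

#include <stdint.h>

/*
 * Return 1 if the new packet appends to the stored item, -1 if it
 * prepends, 0 if the payloads are not contiguous and cannot be merged.
 * The real check_seq_option() additionally checks IP ID continuity,
 * TCP options and the merged packet length.
 */
static int
tcp_seq_is_neighbor(uint32_t item_seq, uint32_t item_payload_len,
		    uint32_t pkt_seq, uint32_t pkt_payload_len)
{
	if (pkt_seq == item_seq + item_payload_len)
		return 1;	/* new payload follows the stored one */
	if (pkt_seq + pkt_payload_len == item_seq)
		return -1;	/* new payload precedes the stored one */
	return 0;
}
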
/dpdk/lib/gso/
gso_tcp4.c 14 uint32_t sent_seq; in update_ipv4_tcp_headers() local
23 sent_seq = rte_be_to_cpu_32(tcp_hdr->sent_seq); in update_ipv4_tcp_headers()
28 update_tcp_header(segs[i], l4_offset, sent_seq, i < tail_idx); in update_ipv4_tcp_headers()
30 sent_seq += (segs[i]->pkt_len - segs[i]->data_len); in update_ipv4_tcp_headers()
gso_tunnel_tcp4.c 14 uint32_t sent_seq; in update_tunnel_ipv4_tcp_headers() local
36 sent_seq = rte_be_to_cpu_32(tcp_hdr->sent_seq); in update_tunnel_ipv4_tcp_headers()
47 update_tcp_header(segs[i], tcp_offset, sent_seq, i < tail_idx); in update_tunnel_ipv4_tcp_headers()
50 sent_seq += (segs[i]->pkt_len - segs[i]->data_len); in update_tunnel_ipv4_tcp_headers()
gso_common.h 76 update_tcp_header(struct rte_mbuf *pkt, uint16_t l4_offset, uint32_t sent_seq, in update_tcp_header() argument
83 tcp_hdr->sent_seq = rte_cpu_to_be_32(sent_seq); in update_tcp_header()
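
On the GSO side the pattern is the inverse of GRO: update_ipv4_tcp_headers() reads the original sent_seq into host order once, then each output segment is stamped with a sequence number advanced by the payload carried in the previous segments. A rough sketch of that loop, assuming the DPDK GSO layout in which the first (direct) mbuf of each output segment holds only headers, so pkt_len - data_len is the payload size:

#include <rte_byteorder.h>
#include <rte_mbuf.h>
#include <rte_tcp.h>

/*
 * Sketch only: stamp ascending sequence numbers onto GSO output
 * segments. Checksum, IP ID and FIN/PSH fixups are omitted.
 */
static void
stamp_segment_seq(struct rte_mbuf **segs, uint16_t nb_segs,
		  uint16_t l4_offset, uint32_t first_seq)
{
	uint32_t sent_seq = first_seq;
	uint16_t i;

	for (i = 0; i < nb_segs; i++) {
		struct rte_tcp_hdr *tcp_hdr = rte_pktmbuf_mtod_offset(
			segs[i], struct rte_tcp_hdr *, l4_offset);

		tcp_hdr->sent_seq = rte_cpu_to_be_32(sent_seq);
		/* first mbuf carries only headers, so this is the payload */
		sent_seq += segs[i]->pkt_len - segs[i]->data_len;
	}
}
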
/dpdk/drivers/net/sfc/
sfc_tso.c 101 uint32_t sent_seq; in sfc_efx_tso_do() local
162 rte_memcpy(&sent_seq, &th->sent_seq, sizeof(uint32_t)); in sfc_efx_tso_do()
163 sent_seq = rte_be_to_cpu_32(sent_seq); in sfc_efx_tso_do()
165 efx_tx_qdesc_tso2_create(txq->common, packet_id, 0, sent_seq, in sfc_efx_tso_do()
sfc_ef10_tx.c 384 uint32_t sent_seq; in sfc_ef10_xmit_tso_pkt() local
517 rte_memcpy(&sent_seq, &th->sent_seq, sizeof(uint32_t)); in sfc_ef10_xmit_tso_pkt()
518 sent_seq = rte_be_to_cpu_32(sent_seq); in sfc_ef10_xmit_tso_pkt()
521 sent_seq, first_m_seg->tso_segsz); in sfc_ef10_xmit_tso_pkt()
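
The two sfc TSO hits read the field indirectly: the TCP header may sit at an unaligned offset inside the mbuf, so sent_seq is first copied out with rte_memcpy() and only then byte-swapped. A minimal sketch of that access pattern (hypothetical helper name):

#include <rte_byteorder.h>
#include <rte_memcpy.h>
#include <rte_tcp.h>

/* Copy sent_seq out of a possibly unaligned TCP header, then convert
 * from network (big-endian) to host byte order. */
static inline uint32_t
read_sent_seq(const struct rte_tcp_hdr *th)
{
	uint32_t seq_be;

	rte_memcpy(&seq_be, &th->sent_seq, sizeof(seq_be));
	return rte_be_to_cpu_32(seq_be);
}
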
/dpdk/lib/net/
rte_tcp.h 31 rte_be32_t sent_seq; /**< TX data sequence number. */ member
/dpdk/lib/flow_classify/
rte_flow_classify_parse.c 345 if (tcp_mask->hdr.sent_seq || in classify_parse_ntuple_filter()
/dpdk/drivers/net/e1000/
igb_flow.c 244 if (tcp_mask->hdr.sent_seq || in cons_parse_ntuple_filter()
879 tcp_mask->hdr.sent_seq || in cons_parse_syn_filter()
/dpdk/drivers/net/igc/
igc_flow.c 516 if (mask->hdr.sent_seq || in igc_parse_pattern_tcp()
/dpdk/drivers/net/bnxt/tf_ulp/
ulp_rte_parser.c 1477 size = sizeof(((struct rte_flow_item_tcp *)NULL)->hdr.sent_seq); in ulp_rte_tcp_hdr_handler()
1479 ulp_deference_struct(tcp_spec, hdr.sent_seq), in ulp_rte_tcp_hdr_handler()
1480 ulp_deference_struct(tcp_mask, hdr.sent_seq), in ulp_rte_tcp_hdr_handler()
/dpdk/drivers/net/txgbe/
txgbe_flow.c 393 if (tcp_mask->hdr.sent_seq || in cons_parse_ntuple_filter()
1004 tcp_mask->hdr.sent_seq || in cons_parse_syn_filter()
1891 if (tcp_mask->hdr.sent_seq || in txgbe_parse_fdir_filter_normal()
/dpdk/drivers/net/ixgbe/
ixgbe_flow.c 433 if (tcp_mask->hdr.sent_seq || in cons_parse_ntuple_filter()
1045 tcp_mask->hdr.sent_seq || in cons_parse_syn_filter()
2000 if (tcp_mask->hdr.sent_seq || in ixgbe_parse_fdir_filter_normal()
/dpdk/drivers/net/hinic/
hinic_pmd_flow.c 632 if (tcp_mask->hdr.sent_seq || in hinic_ntuple_item_check_l4()
1094 if (tcp_mask->hdr.sent_seq || in hinic_tcam_normal_item_check_l4()
1282 if (tcp_mask->hdr.sent_seq || in hinic_tunnel_inner_item_check_l4()
/dpdk/drivers/net/ice/
ice_acl_filter.c 771 if (tcp_mask->hdr.sent_seq || in ice_acl_parse_pattern()
ice_switch_filter.c 895 if (tcp_mask->hdr.sent_seq || in ice_switch_parse_pattern()
ice_fdir_filter.c 2180 if (tcp_mask->hdr.sent_seq || in ice_fdir_parse_pattern()
/dpdk/drivers/net/cxgbe/
cxgbe_flow.c 378 if (mask->hdr.sent_seq || mask->hdr.recv_ack || mask->hdr.data_off || in ch_rte_parsetype_tcp()
/dpdk/drivers/net/iavf/
iavf_fdir.c 1094 if (tcp_mask->hdr.sent_seq || in iavf_fdir_parse_pattern()
/dpdk/drivers/net/mvpp2/
mrvl_flow.c 1116 if (mask->hdr.sent_seq || in mrvl_parse_tcp()
/dpdk/drivers/net/bnxt/
bnxt_flow.c 467 if (tcp_mask->hdr.sent_seq || in bnxt_validate_and_parse_flow_type()
/dpdk/drivers/net/hns3/
hns3_flow.c 700 if (tcp_mask->hdr.sent_seq || tcp_mask->hdr.recv_ack || in hns3_check_tcp_mask_supported()
/dpdk/drivers/net/i40e/
i40e_flow.c 2779 if (tcp_mask->hdr.sent_seq || in i40e_flow_parse_fdir_pattern()
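
All of the flow-parser hits, from rte_flow_classify_parse.c down to i40e_flow.c, are the same guard: ntuple, SYN and flow-director filters can only match a few TCP header fields, so the parsers reject any rte_flow TCP item whose mask sets sent_seq (and usually recv_ack, data_off, rx_win, cksum or tcp_urp as well). A hedged sketch of that validation, not tied to any particular driver:

#include <errno.h>
#include <rte_flow.h>
#include <rte_tcp.h>

/*
 * Sketch: reject TCP items whose mask covers fields the hardware
 * filter cannot match on; only the port fields are accepted here.
 */
static int
check_tcp_mask_supported(const struct rte_flow_item_tcp *tcp_mask,
			 struct rte_flow_error *error)
{
	if (tcp_mask == NULL)
		return 0;

	if (tcp_mask->hdr.sent_seq || tcp_mask->hdr.recv_ack ||
	    tcp_mask->hdr.data_off || tcp_mask->hdr.tcp_flags ||
	    tcp_mask->hdr.rx_win || tcp_mask->hdr.cksum ||
	    tcp_mask->hdr.tcp_urp)
		return rte_flow_error_set(error, EINVAL,
				RTE_FLOW_ERROR_TYPE_ITEM_MASK, NULL,
				"only L4 port matching is supported");

	return 0;
}
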
