
Searched refs:sent_seq (Results 1 – 25 of 25) sorted by relevance

/f-stack/dpdk/lib/librte_gro/
gro_tcp4.h
   68: uint32_t sent_seq;   [member]
  210: uint32_t sent_seq,   [merge_two_tcp4_packets() argument]
  247: item->sent_seq = sent_seq;   [in merge_two_tcp4_packets()]
  265: uint32_t sent_seq,   [check_seq_option() argument]
  296: if ((sent_seq == item->sent_seq + len) && (is_atomic ||   [in check_seq_option()]
  300: else if ((sent_seq + tcp_dl == item->sent_seq) && (is_atomic ||   [in check_seq_option()]
gro_tcp4.c
  104: uint32_t sent_seq,   [insert_new_item() argument]
  118: tbl->items[item_idx].sent_seq = sent_seq;   [in insert_new_item()]
  200: uint32_t sent_seq;   [gro_tcp4_reassemble() local]
  244: sent_seq = rte_be_to_cpu_32(tcp_hdr->sent_seq);   [in gro_tcp4_reassemble()]
  274: INVALID_ARRAY_INDEX, sent_seq, ip_id,   [in gro_tcp4_reassemble()]
  298: sent_seq, ip_id, pkt->l4_len, tcp_dl, 0,   [in gro_tcp4_reassemble()]
  302: pkt, cmp, sent_seq, ip_id, 0))   [in gro_tcp4_reassemble()]
  310: sent_seq, ip_id, is_atomic) ==   [in gro_tcp4_reassemble()]
  320: if (insert_new_item(tbl, pkt, start_time, prev_idx, sent_seq,   [in gro_tcp4_reassemble()]
gro_vxlan_tcp4.c
  103: uint32_t sent_seq,   [insert_new_item() argument]
  119: tbl->items[item_idx].inner_item.sent_seq = sent_seq;   [in insert_new_item()]
  212: uint32_t sent_seq,   [check_vxlan_seq_option() argument]
  248: uint32_t sent_seq,   [merge_two_vxlan_tcp4_packets() argument]
  298: uint32_t sent_seq;   [gro_vxlan_tcp4_reassemble() local]
  359: sent_seq = rte_be_to_cpu_32(tcp_hdr->sent_seq);   [in gro_vxlan_tcp4_reassemble()]
  398: INVALID_ARRAY_INDEX, sent_seq, outer_ip_id,   [in gro_vxlan_tcp4_reassemble()]
  419: sent_seq, outer_ip_id, ip_id, pkt->l4_len,   [in gro_vxlan_tcp4_reassemble()]
  423: pkt, cmp, sent_seq,   [in gro_vxlan_tcp4_reassemble()]
  432: sent_seq, outer_ip_id,   [in gro_vxlan_tcp4_reassemble()]
  [all …]
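
The gro_tcp4 hits above all serve one test: a stored flow item and an incoming segment may only be merged when their sequence numbers are contiguous, i.e. the new segment either starts exactly where the stored data ends or ends exactly where the stored data starts (the two comparisons at gro_tcp4.h lines 296 and 300). A minimal sketch of that neighbor check, with illustrative names (item_seq, item_len, pkt_seq, pkt_len are not the library's identifiers):

    #include <stdint.h>

    /* Sketch of the append/prepend test performed by check_seq_option(),
     * operating on host-order sequence numbers and payload lengths. */
    static int
    tcp_seq_is_neighbor(uint32_t item_seq, uint32_t item_len,
                        uint32_t pkt_seq, uint32_t pkt_len)
    {
            if (pkt_seq == item_seq + item_len)
                    return 1;       /* new data follows the stored packet */
            if (pkt_seq + pkt_len == item_seq)
                    return -1;      /* new data precedes the stored packet */
            return 0;               /* not adjacent, cannot merge */
    }
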
/f-stack/dpdk/lib/librte_gso/
gso_tcp4.c
   14: uint32_t sent_seq;   [update_ipv4_tcp_headers() local]
   23: sent_seq = rte_be_to_cpu_32(tcp_hdr->sent_seq);   [in update_ipv4_tcp_headers()]
   28: update_tcp_header(segs[i], l4_offset, sent_seq, i < tail_idx);   [in update_ipv4_tcp_headers()]
   30: sent_seq += (segs[i]->pkt_len - segs[i]->data_len);   [in update_ipv4_tcp_headers()]
gso_tunnel_tcp4.c
   14: uint32_t sent_seq;   [update_tunnel_ipv4_tcp_headers() local]
   36: sent_seq = rte_be_to_cpu_32(tcp_hdr->sent_seq);   [in update_tunnel_ipv4_tcp_headers()]
   47: update_tcp_header(segs[i], tcp_offset, sent_seq, i < tail_idx);   [in update_tunnel_ipv4_tcp_headers()]
   50: sent_seq += (segs[i]->pkt_len - segs[i]->data_len);   [in update_tunnel_ipv4_tcp_headers()]
gso_common.h
   71: update_tcp_header(struct rte_mbuf *pkt, uint16_t l4_offset, uint32_t sent_seq,   [update_tcp_header() argument]
   78: tcp_hdr->sent_seq = rte_cpu_to_be_32(sent_seq);   [in update_tcp_header()]
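
Across gso_tcp4.c, gso_tunnel_tcp4.c and gso_common.h the pattern is the same: read sent_seq once in host order, stamp it into each output segment's TCP header in network order via update_tcp_header(), and advance it by the segment's payload size before moving to the next segment. A hedged sketch of that loop, assuming the segments and the L4 offset are already prepared (set_segment_seq_numbers() is an illustrative name; the DPDK calls and types are the ones visible above plus rte_pktmbuf_mtod_offset()):

    #include <rte_byteorder.h>
    #include <rte_mbuf.h>
    #include <rte_tcp.h>

    /* Illustrative per-segment sequence update, modeled on
     * update_ipv4_tcp_headers() and update_tcp_header(). */
    static void
    set_segment_seq_numbers(struct rte_mbuf **segs, uint16_t nb_segs,
                            uint16_t l4_offset, uint32_t first_seq)
    {
            uint32_t sent_seq = first_seq;
            uint16_t i;

            for (i = 0; i < nb_segs; i++) {
                    struct rte_tcp_hdr *tcp_hdr;

                    tcp_hdr = rte_pktmbuf_mtod_offset(segs[i],
                                    struct rte_tcp_hdr *, l4_offset);
                    tcp_hdr->sent_seq = rte_cpu_to_be_32(sent_seq);
                    /* librte_gso keeps the headers in the segment's first
                     * mbuf, so pkt_len - data_len is this segment's TCP
                     * payload size, i.e. the advance for the next segment. */
                    sent_seq += segs[i]->pkt_len - segs[i]->data_len;
            }
    }
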
/f-stack/dpdk/drivers/net/sfc/
sfc_tso.c
  101: uint32_t sent_seq;   [sfc_efx_tso_do() local]
  155: rte_memcpy(&sent_seq, &th->sent_seq, sizeof(uint32_t));   [in sfc_efx_tso_do()]
  156: sent_seq = rte_be_to_cpu_32(sent_seq);   [in sfc_efx_tso_do()]
  158: efx_tx_qdesc_tso2_create(txq->common, packet_id, 0, sent_seq,   [in sfc_efx_tso_do()]
sfc_ef10_tx.c
  383: uint32_t sent_seq;   [sfc_ef10_xmit_tso_pkt() local]
  497: rte_memcpy(&sent_seq, &th->sent_seq, sizeof(uint32_t));   [in sfc_ef10_xmit_tso_pkt()]
  498: sent_seq = rte_be_to_cpu_32(sent_seq);   [in sfc_ef10_xmit_tso_pkt()]
  501: sent_seq, first_m_seg->tso_segsz);   [in sfc_ef10_xmit_tso_pkt()]
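
Both sfc TSO paths deliberately avoid loading th->sent_seq as a 32-bit word: the TCP header inside a transmit mbuf is not guaranteed to be 4-byte aligned, so the field is copied out with rte_memcpy() first and byte-swapped afterwards. A small hedged sketch of that access pattern (read_tcp_sent_seq() is an illustrative name):

    #include <stdint.h>
    #include <rte_byteorder.h>
    #include <rte_memcpy.h>
    #include <rte_tcp.h>

    /* Alignment-safe read of the TCP sequence number, following the
     * rte_memcpy() + rte_be_to_cpu_32() pattern in sfc_tso.c and
     * sfc_ef10_tx.c. */
    static uint32_t
    read_tcp_sent_seq(const struct rte_tcp_hdr *th)
    {
            uint32_t sent_seq;

            /* th may sit at an unaligned offset in the packet buffer, so
             * copy the field byte-wise instead of dereferencing it. */
            rte_memcpy(&sent_seq, &th->sent_seq, sizeof(uint32_t));
            return rte_be_to_cpu_32(sent_seq);
    }
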
/f-stack/dpdk/lib/librte_net/
rte_tcp.h
   31: rte_be32_t sent_seq; /**< TX data sequence number. */   [member]
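
Every hit in this search ultimately touches this one field: the big-endian sequence number of DPDK's TCP header structure, which is why read sites pair it with rte_be_to_cpu_32() and write sites with rte_cpu_to_be_32(). An abridged reconstruction of the surrounding layout for orientation only (the struct is renamed here to avoid clashing with the real type; consult rte_tcp.h for the authoritative definition):

    #include <stdint.h>
    #include <rte_byteorder.h>

    /* Orientation-only copy of the rte_tcp_hdr layout; not the real type. */
    struct tcp_hdr_layout {
            rte_be16_t src_port;   /* TCP source port */
            rte_be16_t dst_port;   /* TCP destination port */
            rte_be32_t sent_seq;   /* TX data sequence number (this search) */
            rte_be32_t recv_ack;   /* RX data acknowledgment sequence number */
            uint8_t    data_off;   /* data offset */
            uint8_t    tcp_flags;  /* TCP flags */
            rte_be16_t rx_win;     /* RX flow control window */
            rte_be16_t cksum;      /* TCP checksum */
            rte_be16_t tcp_urp;    /* TCP urgent pointer, if any */
    };
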
/f-stack/dpdk/drivers/net/bnxt/tf_ulp/
ulp_rte_parser.c
  1296: size = sizeof(tcp_spec->hdr.sent_seq);   [in ulp_rte_tcp_hdr_handler()]
  1298: &tcp_spec->hdr.sent_seq,   [in ulp_rte_tcp_hdr_handler()]
  1336: &tcp_mask->hdr.sent_seq,   [in ulp_rte_tcp_hdr_handler()]
  1337: sizeof(tcp_mask->hdr.sent_seq));   [in ulp_rte_tcp_hdr_handler()]
/f-stack/dpdk/lib/librte_flow_classify/
rte_flow_classify_parse.c
  346: if (tcp_mask->hdr.sent_seq ||   [in classify_parse_ntuple_filter()]
/f-stack/dpdk/drivers/net/igc/
igc_flow.c
  516: if (mask->hdr.sent_seq ||   [in igc_parse_pattern_tcp()]
/f-stack/dpdk/drivers/net/e1000/
igb_flow.c
  244: if (tcp_mask->hdr.sent_seq ||   [in cons_parse_ntuple_filter()]
  879: tcp_mask->hdr.sent_seq ||   [in cons_parse_syn_filter()]
/f-stack/dpdk/drivers/net/ixgbe/
ixgbe_flow.c
  433: if (tcp_mask->hdr.sent_seq ||   [in cons_parse_ntuple_filter()]
  1045: tcp_mask->hdr.sent_seq ||   [in cons_parse_syn_filter()]
  2000: if (tcp_mask->hdr.sent_seq ||   [in ixgbe_parse_fdir_filter_normal()]
/f-stack/dpdk/drivers/net/hinic/
hinic_pmd_flow.c
  632: if (tcp_mask->hdr.sent_seq ||   [in hinic_ntuple_item_check_l4()]
  1094: if (tcp_mask->hdr.sent_seq ||   [in hinic_tcam_normal_item_check_l4()]
  1282: if (tcp_mask->hdr.sent_seq ||   [in hinic_tunnel_inner_item_check_l4()]
/f-stack/dpdk/drivers/net/iavf/
iavf_fdir.c
  701: if (tcp_mask->hdr.sent_seq ||   [in iavf_fdir_parse_pattern()]
/f-stack/dpdk/drivers/net/ice/
ice_acl_filter.c
  760: if (tcp_mask->hdr.sent_seq ||   [in ice_acl_parse_pattern()]
ice_switch_filter.c
  879: if (tcp_mask->hdr.sent_seq ||   [in ice_switch_inset_get()]
ice_fdir_filter.c
  1821: if (tcp_mask->hdr.sent_seq ||   [in ice_fdir_parse_pattern()]
/f-stack/dpdk/drivers/net/cxgbe/
cxgbe_flow.c
  383: if (mask->hdr.sent_seq || mask->hdr.recv_ack || mask->hdr.data_off ||   [in ch_rte_parsetype_tcp()]
/f-stack/dpdk/drivers/net/bnxt/
bnxt_flow.c
  446: if (tcp_mask->hdr.sent_seq ||   [in bnxt_validate_and_parse_flow_type()]
/f-stack/dpdk/drivers/net/mvpp2/
mrvl_flow.c
  1251: if (mask->hdr.sent_seq ||   [in mrvl_parse_tcp()]
/f-stack/dpdk/drivers/net/hns3/
hns3_flow.c
  673: if (tcp_mask->hdr.sent_seq || tcp_mask->hdr.recv_ack ||   [in hns3_parse_tcp()]
/f-stack/dpdk/drivers/net/i40e/
i40e_flow.c
  2786: if (tcp_mask->hdr.sent_seq ||   [in i40e_flow_parse_fdir_pattern()]
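
The long run of driver hits above (flow_classify, igc, e1000, ixgbe, hinic, iavf, ice, cxgbe, bnxt, mvpp2, hns3, i40e) is one recurring validation idiom: the hardware filters being programmed can only match TCP ports and a few other fields, so a flow rule whose TCP item mask selects sent_seq (or recv_ack, data_off, ...) is rejected during parsing. A hedged sketch of that shape, with the function name and error message invented for illustration:

    #include <errno.h>
    #include <rte_flow.h>

    /* Illustrative ntuple-style check: refuse TCP items whose mask covers
     * fields the filter cannot match on, sent_seq among them. */
    static int
    check_tcp_mask_supported(const struct rte_flow_item *item,
                             struct rte_flow_error *error)
    {
            const struct rte_flow_item_tcp *tcp_mask = item->mask;

            if (tcp_mask == NULL)
                    return 0;

            if (tcp_mask->hdr.sent_seq ||
                tcp_mask->hdr.recv_ack ||
                tcp_mask->hdr.data_off ||
                tcp_mask->hdr.rx_win ||
                tcp_mask->hdr.cksum ||
                tcp_mask->hdr.tcp_urp)
                    return rte_flow_error_set(error, EINVAL,
                                    RTE_FLOW_ERROR_TYPE_ITEM, item,
                                    "TCP mask may only cover ports and flags");

            return 0;
    }
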
/f-stack/dpdk/drivers/net/mlx5/
mlx5_flow_dv.c
  862: tcp.hdr.sent_seq = rte_cpu_to_be_32((uint32_t)value);   [in flow_dv_convert_action_modify_tcp_seq()]
  863: tcp_mask.hdr.sent_seq = RTE_BE32(UINT32_MAX);   [in flow_dv_convert_action_modify_tcp_seq()]
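
The mlx5 hits are the odd ones out: instead of matching on the field, flow_dv_convert_action_modify_tcp_seq() translates the rte_flow actions that adjust the TCP sequence number into a header-modify specification, writing the requested value into tcp.hdr.sent_seq in network order and setting a full 32-bit mask on that field alone. A hedged sketch of building just that spec/mask pair (build_tcp_seq_modify() is an illustrative name, not mlx5 code):

    #include <stdint.h>
    #include <string.h>
    #include <rte_byteorder.h>
    #include <rte_flow.h>

    /* Illustrative construction of the spec/mask pair a driver could feed
     * into its header-modification machinery for a TCP-seq modify action. */
    static void
    build_tcp_seq_modify(uint32_t value,
                         struct rte_flow_item_tcp *tcp,
                         struct rte_flow_item_tcp *tcp_mask)
    {
            memset(tcp, 0, sizeof(*tcp));
            memset(tcp_mask, 0, sizeof(*tcp_mask));

            /* only sent_seq is rewritten, so only it carries a mask */
            tcp->hdr.sent_seq = rte_cpu_to_be_32(value);
            tcp_mask->hdr.sent_seq = RTE_BE32(UINT32_MAX);
    }
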