Lines matching refs:cls (references to the classifier handle cls in rte_flow_classify.c, the DPDK flow classification library; each hit is shown as: source line, code, enclosing function)
92 struct rte_flow_classifier *cls, in rte_flow_classify_validate() argument
107 if (cls == NULL) { in rte_flow_classify_validate()
135 memset(&cls->ntuple_filter, 0, sizeof(cls->ntuple_filter)); in rte_flow_classify_validate()
165 ret = parse_filter(attr, items, actions, &cls->ntuple_filter, error); in rte_flow_classify_validate()
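The hits above show rte_flow_classify_validate() clearing cls->ntuple_filter and handing the generic attr/items/actions triplet to parse_filter() to refill it. A minimal sketch of the kind of flow description that parser is fed, assuming the ntuple-style shape used by the flow_classify sample application (ingress attribute, ETH/IPV4/UDP pattern terminated by END, COUNT action followed by END); the spec/mask values a real rule would attach to the IPV4 and UDP items are omitted here.

#include <rte_flow.h>
#include <rte_flow_classify.h>

/* Illustrative only: the shape of rule that the classify parser is assumed
 * to accept. A real rule also sets .spec/.mask on the IPV4 and UDP items. */
static const struct rte_flow_attr attr = { .ingress = 1, .priority = 1 };

static const struct rte_flow_item pattern[] = {
	{ .type = RTE_FLOW_ITEM_TYPE_ETH },
	{ .type = RTE_FLOW_ITEM_TYPE_IPV4 },	/* + .spec/.mask with addresses */
	{ .type = RTE_FLOW_ITEM_TYPE_UDP },	/* + .spec/.mask with ports */
	{ .type = RTE_FLOW_ITEM_TYPE_END },
};

static const struct rte_flow_action actions[] = {
	{ .type = RTE_FLOW_ACTION_TYPE_COUNT },
	{ .type = RTE_FLOW_ACTION_TYPE_END },
};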
262 struct rte_flow_classifier *cls; in rte_flow_classifier_create() local
275 cls = rte_zmalloc_socket("FLOW_CLASSIFIER", in rte_flow_classifier_create()
279 if (cls == NULL) { in rte_flow_classifier_create()
287 strlcpy(cls->name, params->name, RTE_FLOW_CLASSIFIER_MAX_NAME_SZ); in rte_flow_classifier_create()
289 cls->socket_id = params->socket_id; in rte_flow_classifier_create()
291 return cls; in rte_flow_classifier_create()
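The create hits show the classifier taking just a name and a NUMA socket id from its params and copying them into cls->name and cls->socket_id. A minimal sketch, assuming those are the only fields the params struct needs; rte_socket_id() is used here purely as a convenient default.

#include <rte_flow_classify.h>
#include <rte_lcore.h>

/* Minimal sketch: allocate a classifier named "flow_classifier" on the
 * caller's NUMA socket. Returns NULL on failure, as the create hits show. */
static struct rte_flow_classifier *
classifier_init(void)
{
	struct rte_flow_classifier_params cls_params = {
		.name = "flow_classifier",
		.socket_id = rte_socket_id(),
	};

	return rte_flow_classifier_create(&cls_params);
}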
302 rte_flow_classifier_free(struct rte_flow_classifier *cls) in rte_flow_classifier_free() argument
307 if (cls == NULL) { in rte_flow_classifier_free()
315 for (i = 0; i < cls->num_tables; i++) { in rte_flow_classifier_free()
316 struct rte_cls_table *table = &cls->tables[i]; in rte_flow_classifier_free()
322 rte_free(cls); in rte_flow_classifier_free()
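Per the loop above, rte_flow_classifier_free() releases each table in cls->tables[] before freeing the classifier itself, so the caller does not tear tables down separately. A one-call teardown sketch:

#include <stdio.h>
#include <rte_flow_classify.h>

/* Teardown sketch: a single free call releases the attached tables and the
 * classifier; a non-zero return indicates the classifier handle was invalid. */
static void
classifier_fini(struct rte_flow_classifier *cls)
{
	if (rte_flow_classifier_free(cls) != 0)
		fprintf(stderr, "rte_flow_classifier_free failed\n");
}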
328 rte_table_check_params(struct rte_flow_classifier *cls, in rte_table_check_params() argument
331 if (cls == NULL) { in rte_table_check_params()
363 if (cls->num_tables == RTE_FLOW_CLASSIFY_TABLE_MAX) { in rte_table_check_params()
374 rte_flow_classify_table_create(struct rte_flow_classifier *cls, in rte_flow_classify_table_create() argument
383 ret = rte_table_check_params(cls, params); in rte_flow_classify_table_create()
391 h_table = params->ops->f_create(params->arg_create, cls->socket_id, in rte_flow_classify_table_create()
400 table = &cls->tables[cls->num_tables]; in rte_flow_classify_table_create()
402 cls->num_tables++; in rte_flow_classify_table_create()
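rte_flow_classify_table_create() validates its arguments with rte_table_check_params(), creates the backing table on cls->socket_id through params->ops->f_create(), and records the handle in cls->tables[cls->num_tables]. A sketch of attaching an ACL-backed IPv4 5-tuple table, assuming the rte_table_acl backend and the table-params field names (ops, arg_create, type) and enum name used by the flow_classify sample application; the two-argument call is assumed here (early revisions of this API also returned a table id through a third parameter), and the rte_acl_field_def layout is elided.

#include <string.h>
#include <rte_common.h>
#include <rte_table_acl.h>
#include <rte_flow_classify.h>

/* ACL field layout for the IPv4 5-tuple (proto, src/dst IP, src/dst port);
 * the actual rte_acl_field_def entries are elided here. */
extern struct rte_acl_field_def ipv4_defs[5];

static int
attach_acl_table(struct rte_flow_classifier *cls, uint32_t max_rules)
{
	struct rte_table_acl_params acl_params = {
		.name = "table_acl_ipv4_5tuple",
		.n_rules = max_rules,
		.n_rule_fields = RTE_DIM(ipv4_defs),
	};
	struct rte_flow_classify_table_params table_params = {
		.ops = &rte_table_acl_ops,
		.arg_create = &acl_params,
		/* enum name assumed from the sample application */
		.type = RTE_FLOW_CLASSIFY_TABLE_ACL_IP4_5TUPLE,
	};

	memcpy(acl_params.field_format, ipv4_defs, sizeof(ipv4_defs));

	/* f_create() runs on cls->socket_id and the resulting handle lands in
	 * cls->tables[cls->num_tables], per the hits above. */
	return rte_flow_classify_table_create(cls, &table_params);
}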
415 allocate_acl_ipv4_5tuple_rule(struct rte_flow_classifier *cls) in allocate_acl_ipv4_5tuple_rule() argument
428 rule->u.key.key_add.priority = cls->ntuple_filter.priority; in allocate_acl_ipv4_5tuple_rule()
430 cls->ntuple_filter.proto_mask; in allocate_acl_ipv4_5tuple_rule()
432 cls->ntuple_filter.proto; in allocate_acl_ipv4_5tuple_rule()
433 rule->rules.u.ipv4_5tuple.proto = cls->ntuple_filter.proto; in allocate_acl_ipv4_5tuple_rule()
434 rule->rules.u.ipv4_5tuple.proto_mask = cls->ntuple_filter.proto_mask; in allocate_acl_ipv4_5tuple_rule()
437 cls->ntuple_filter.src_ip_mask; in allocate_acl_ipv4_5tuple_rule()
439 cls->ntuple_filter.src_ip; in allocate_acl_ipv4_5tuple_rule()
440 rule->rules.u.ipv4_5tuple.src_ip_mask = cls->ntuple_filter.src_ip_mask; in allocate_acl_ipv4_5tuple_rule()
441 rule->rules.u.ipv4_5tuple.src_ip = cls->ntuple_filter.src_ip; in allocate_acl_ipv4_5tuple_rule()
444 cls->ntuple_filter.dst_ip_mask; in allocate_acl_ipv4_5tuple_rule()
446 cls->ntuple_filter.dst_ip; in allocate_acl_ipv4_5tuple_rule()
447 rule->rules.u.ipv4_5tuple.dst_ip_mask = cls->ntuple_filter.dst_ip_mask; in allocate_acl_ipv4_5tuple_rule()
448 rule->rules.u.ipv4_5tuple.dst_ip = cls->ntuple_filter.dst_ip; in allocate_acl_ipv4_5tuple_rule()
451 cls->ntuple_filter.src_port_mask; in allocate_acl_ipv4_5tuple_rule()
453 cls->ntuple_filter.src_port; in allocate_acl_ipv4_5tuple_rule()
455 cls->ntuple_filter.src_port_mask; in allocate_acl_ipv4_5tuple_rule()
456 rule->rules.u.ipv4_5tuple.src_port = cls->ntuple_filter.src_port; in allocate_acl_ipv4_5tuple_rule()
459 cls->ntuple_filter.dst_port_mask; in allocate_acl_ipv4_5tuple_rule()
461 cls->ntuple_filter.dst_port; in allocate_acl_ipv4_5tuple_rule()
463 cls->ntuple_filter.dst_port_mask; in allocate_acl_ipv4_5tuple_rule()
464 rule->rules.u.ipv4_5tuple.dst_port = cls->ntuple_filter.dst_port; in allocate_acl_ipv4_5tuple_rule()
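allocate_acl_ipv4_5tuple_rule() copies a fixed set of fields out of cls->ntuple_filter: the priority into the ACL add-key (rule->u.key.key_add), and the protocol, source/destination addresses and ports plus their masks into both the add-key and the stored 5-tuple (rule->rules.u.ipv4_5tuple). For reference, an illustrative filter carrying exactly those fields, assuming cls->ntuple_filter uses the ethdev ntuple-filter layout; the values are placeholders and the byte order is whatever parse_filter() produced.

#include <stdint.h>
#include <netinet/in.h>		/* IPPROTO_UDP */
#include <rte_ethdev.h>		/* struct rte_eth_ntuple_filter, assumed to be
				 * the layout behind cls->ntuple_filter */

/* Illustrative only: the fields the allocator reads from cls->ntuple_filter
 * and mirrors into the ACL add-key and the stored 5-tuple. */
static const struct rte_eth_ntuple_filter example_filter = {
	.priority      = 1,
	.proto         = IPPROTO_UDP,
	.proto_mask    = UINT8_MAX,
	.src_ip        = 0xc0a80000,	/* 192.168.0.0 */
	.src_ip_mask   = 0xffffff00,	/* /24 */
	.dst_ip        = 0xc0a80001,	/* 192.168.0.1 */
	.dst_ip_mask   = 0xffffffff,	/* /32 */
	.src_port      = 32,
	.src_port_mask = UINT16_MAX,
	.dst_port      = 33,
	.dst_port_mask = UINT16_MAX,
};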
481 rte_flow_classify_table_entry_add(struct rte_flow_classifier *cls, in rte_flow_classify_table_entry_add() argument
505 ret = rte_flow_classify_validate(cls, attr, pattern, actions, error); in rte_flow_classify_table_entry_add()
511 rule = allocate_acl_ipv4_5tuple_rule(cls); in rte_flow_classify_table_entry_add()
515 cls->table_mask |= table_type; in rte_flow_classify_table_entry_add()
536 for (i = 0; i < cls->num_tables; i++) { in rte_flow_classify_table_entry_add()
537 struct rte_cls_table *table = &cls->tables[i]; in rte_flow_classify_table_entry_add()
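rte_flow_classify_table_entry_add() first re-runs rte_flow_classify_validate(), allocates the ACL 5-tuple rule from the parsed filter, marks the table type in cls->table_mask, and then walks cls->tables[] to install the entry. A caller-side sketch, assuming the six-argument signature (attr, pattern, actions, key_found out-parameter, rte_flow_error) used by the flow_classify sample application; the pattern/actions arrays are built as in the validate sketch above.

#include <stdio.h>
#include <rte_flow.h>
#include <rte_flow_classify.h>

/* Add a 5-tuple rule and return the handle needed later for query/delete. */
static struct rte_flow_classify_rule *
add_5tuple_rule(struct rte_flow_classifier *cls,
		const struct rte_flow_attr *attr,
		const struct rte_flow_item pattern[],
		const struct rte_flow_action actions[])
{
	struct rte_flow_classify_rule *rule;
	struct rte_flow_error error = { .message = NULL };
	int key_found;

	rule = rte_flow_classify_table_entry_add(cls, attr, pattern, actions,
						 &key_found, &error);
	if (rule == NULL)
		fprintf(stderr, "entry add failed: %s\n",
			error.message ? error.message : "(no message)");
	else if (key_found)
		fprintf(stderr, "an identical key already existed\n");
	return rule;
}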
563 rte_flow_classify_table_entry_delete(struct rte_flow_classifier *cls, in rte_flow_classify_table_entry_delete() argument
569 if (!cls || !rule) in rte_flow_classify_table_entry_delete()
573 for (i = 0; i < cls->num_tables; i++) { in rte_flow_classify_table_entry_delete()
574 struct rte_cls_table *table = &cls->tables[i]; in rte_flow_classify_table_entry_delete()
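Deletion only needs the classifier and the rule handle; as the loop above shows, the classifier itself searches cls->tables[] for the entry. A small sketch:

#include <stdio.h>
#include <rte_flow_classify.h>

/* Remove a rule previously returned by rte_flow_classify_table_entry_add(). */
static void
remove_rule(struct rte_flow_classifier *cls,
	    struct rte_flow_classify_rule *rule)
{
	if (rte_flow_classify_table_entry_delete(cls, rule) != 0)
		fprintf(stderr, "entry delete failed\n");
}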
592 flow_classifier_lookup(struct rte_flow_classifier *cls, in flow_classifier_lookup() argument
604 (void **)cls->entries); in flow_classifier_lookup()
607 cls->nb_pkts = nb_pkts; in flow_classifier_lookup()
609 cls->nb_pkts = 0; in flow_classifier_lookup()
615 action_apply(struct rte_flow_classifier *cls, in action_apply() argument
626 for (i = 0; i < cls->nb_pkts; i++) { in action_apply()
627 if (rule->id == cls->entries[i]->rule_id) in action_apply()
641 rte_flow_classifier_query(struct rte_flow_classifier *cls, in rte_flow_classifier_query() argument
651 if (!cls || !rule || !stats || !pkts || nb_pkts == 0) in rte_flow_classifier_query()
655 for (i = 0; i < cls->num_tables; i++) { in rte_flow_classifier_query()
656 struct rte_cls_table *table = &cls->tables[i]; in rte_flow_classifier_query()
659 ret = flow_classifier_lookup(cls, table, in rte_flow_classifier_query()
662 ret = action_apply(cls, rule, stats); in rte_flow_classifier_query()
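rte_flow_classifier_query() loops over cls->tables[], runs flow_classifier_lookup() on the packet burst and then action_apply() for the entries that matched the given rule. A usage sketch, assuming the stats types used by the flow_classify sample application (an rte_flow_classify_stats wrapper whose stats pointer refers to an rte_flow_classify_ipv4_5tuple_stats with a counter1 field):

#include <inttypes.h>
#include <stdio.h>
#include <rte_mbuf.h>
#include <rte_flow_classify.h>

/* Query how many packets in this burst matched the given rule. */
static void
query_rule(struct rte_flow_classifier *cls, struct rte_mbuf **pkts,
	   uint16_t nb_pkts, struct rte_flow_classify_rule *rule)
{
	struct rte_flow_classify_ipv4_5tuple_stats ntuple_stats;
	struct rte_flow_classify_stats stats = {
		.stats = (void *)&ntuple_stats,
	};

	if (rte_flow_classifier_query(cls, pkts, nb_pkts, rule, &stats) == 0)
		printf("rule matched %" PRIu64 " packet(s)\n",
		       ntuple_stats.counter1);
}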