Lines matching refs: cache (DPDK rte_mempool.h)

1270 rte_mempool_cache_free(struct rte_mempool_cache *cache);
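
The listing opens with the declaration of rte_mempool_cache_free(), the counterpart of rte_mempool_cache_create() for user-owned caches. Below is a minimal sketch of that lifecycle, assuming an already-created mempool mp; the cache size of 256 and the caller's-socket placement are illustrative choices, not taken from the listing.

#include <rte_mempool.h>
#include <rte_lcore.h>

static void
user_cache_lifecycle(struct rte_mempool *mp)
{
        /* User-owned cache holding up to 256 objects (illustrative size). */
        struct rte_mempool_cache *cache =
                rte_mempool_cache_create(256, rte_socket_id());
        void *obj;

        if (cache == NULL)
                return;

        /* The generic get/put variants take the cache explicitly. */
        if (rte_mempool_generic_get(mp, &obj, 1, cache) == 0)
                rte_mempool_generic_put(mp, &obj, 1, cache);

        /* Return whatever the cache still holds, then release it;
         * freeing without flushing would lose the cached objects. */
        rte_mempool_cache_flush(cache, mp);
        rte_mempool_cache_free(cache);
}
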
1306 rte_mempool_cache_flush(struct rte_mempool_cache *cache, in rte_mempool_cache_flush() argument
1309 if (cache == NULL) in rte_mempool_cache_flush()
1310 cache = rte_mempool_default_cache(mp, rte_lcore_id()); in rte_mempool_cache_flush()
1311 if (cache == NULL || cache->len == 0) in rte_mempool_cache_flush()
1313 rte_mempool_trace_cache_flush(cache, mp); in rte_mempool_cache_flush()
1314 rte_mempool_ops_enqueue_bulk(mp, cache->objs, cache->len); in rte_mempool_cache_flush()
1315 cache->len = 0; in rte_mempool_cache_flush()
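
The rte_mempool_cache_flush() fragments above show its two early-outs: a NULL cache argument falls back to the calling lcore's default cache, and an empty cache is a no-op. Otherwise all cached objects are pushed back to the backend in a single rte_mempool_ops_enqueue_bulk() call and the length is reset. A minimal sketch of the NULL-argument form, assuming the pool was created with a non-zero cache_size:

static void
drain_local_cache(struct rte_mempool *mp)
{
        /* With cache == NULL the flush targets this lcore's default
         * cache, returning its objects to the common backend so that
         * other lcores can allocate them again. */
        rte_mempool_cache_flush(NULL, mp);
}
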
1332 unsigned int n, struct rte_mempool_cache *cache) in rte_mempool_do_generic_put() argument
1341 if (unlikely(cache == NULL || n > RTE_MEMPOOL_CACHE_MAX_SIZE)) in rte_mempool_do_generic_put()
1344 cache_objs = &cache->objs[cache->len]; in rte_mempool_do_generic_put()
1356 cache->len += n; in rte_mempool_do_generic_put()
1358 if (cache->len >= cache->flushthresh) { in rte_mempool_do_generic_put()
1359 rte_mempool_ops_enqueue_bulk(mp, &cache->objs[cache->size], in rte_mempool_do_generic_put()
1360 cache->len - cache->size); in rte_mempool_do_generic_put()
1361 cache->len = cache->size; in rte_mempool_do_generic_put()
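
The put fast path above appends objects at &cache->objs[cache->len] and only spills to the backend once cache->len reaches cache->flushthresh, after which cache->size objects are kept hot; the guard in rte_mempool_do_generic_put() also shows that a NULL cache or a burst larger than RTE_MEMPOOL_CACHE_MAX_SIZE bypasses the cache entirely. A hedged sketch of driving this path with an explicit cache:

static void
put_burst_cached(struct rte_mempool *mp, void * const *objs, unsigned int n,
                 struct rte_mempool_cache *cache)
{
        /* Bursts up to RTE_MEMPOOL_CACHE_MAX_SIZE stay on the cached
         * path; larger bursts (or cache == NULL) are enqueued straight
         * to the backend, as the fragment above shows. */
        rte_mempool_generic_put(mp, objs, n, cache);
}

Assuming the usual default of a flush threshold at 1.5x the cache size (the multiplier is not visible in this listing), a cache sized 256 spills roughly 128 objects per flush once its length reaches 384.
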
1392 unsigned int n, struct rte_mempool_cache *cache) in rte_mempool_generic_put() argument
1394 rte_mempool_trace_generic_put(mp, obj_table, n, cache); in rte_mempool_generic_put()
1396 rte_mempool_do_generic_put(mp, obj_table, n, cache); in rte_mempool_generic_put()
1417 struct rte_mempool_cache *cache; in rte_mempool_put_bulk() local
1418 cache = rte_mempool_default_cache(mp, rte_lcore_id()); in rte_mempool_put_bulk()
1419 rte_mempool_trace_put_bulk(mp, obj_table, n, cache); in rte_mempool_put_bulk()
1420 rte_mempool_generic_put(mp, obj_table, n, cache); in rte_mempool_put_bulk()
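
rte_mempool_put_bulk() is the convenience wrapper shown above: it looks up the calling lcore's default cache and hands the whole table to rte_mempool_generic_put(). A minimal sketch of the common data-path free; the burst size is an illustrative assumption:

#define FREE_BURST 32

static void
free_burst(struct rte_mempool *mp, void *objs[FREE_BURST], unsigned int n)
{
        /* On an EAL lcore this goes through the per-lcore default
         * cache; on an unregistered non-EAL thread the cache lookup
         * yields NULL and the put falls through to the backend. */
        rte_mempool_put_bulk(mp, objs, n);
}
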
1457 unsigned int n, struct rte_mempool_cache *cache) in rte_mempool_do_generic_get() argument
1464 if (unlikely(cache == NULL || n >= cache->size)) in rte_mempool_do_generic_get()
1467 cache_objs = cache->objs; in rte_mempool_do_generic_get()
1470 if (cache->len < n) { in rte_mempool_do_generic_get()
1472 uint32_t req = n + (cache->size - cache->len); in rte_mempool_do_generic_get()
1476 &cache->objs[cache->len], req); in rte_mempool_do_generic_get()
1487 cache->len += req; in rte_mempool_do_generic_get()
1491 for (index = 0, len = cache->len - 1; index < n; ++index, len--, obj_table++) in rte_mempool_do_generic_get()
1494 cache->len -= n; in rte_mempool_do_generic_get()
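
The get path above mirrors the put path: a request with n >= cache->size skips the cache, smaller requests are served from cache->objs, and when the cache runs short it dequeues n + (cache->size - cache->len) objects from the backend in one call, satisfying the request and refilling the cache at the same time. A sketch of the caller's side, relying on the documented contract that the get calls return 0 on success and a negative value (typically -ENOENT) when the backend cannot supply enough objects:

static int
alloc_burst_cached(struct rte_mempool *mp, void **objs, unsigned int n,
                   struct rte_mempool_cache *cache)
{
        int ret = rte_mempool_generic_get(mp, objs, n, cache);

        if (ret < 0)
                return ret;     /* pool (and cache) could not supply n objects */
        /* On success exactly n object pointers have been filled in. */
        return 0;
}
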
1539 unsigned int n, struct rte_mempool_cache *cache) in rte_mempool_generic_get() argument
1542 ret = rte_mempool_do_generic_get(mp, obj_table, n, cache); in rte_mempool_generic_get()
1545 rte_mempool_trace_generic_get(mp, obj_table, n, cache); in rte_mempool_generic_get()
1574 struct rte_mempool_cache *cache; in rte_mempool_get_bulk() local
1575 cache = rte_mempool_default_cache(mp, rte_lcore_id()); in rte_mempool_get_bulk()
1576 rte_mempool_trace_get_bulk(mp, obj_table, n, cache); in rte_mempool_get_bulk()
1577 return rte_mempool_generic_get(mp, obj_table, n, cache); in rte_mempool_get_bulk()
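
Finally, rte_mempool_get_bulk() pairs with rte_mempool_put_bulk() as the default-cache wrappers. The sketch below ties the listing together: a pool created with a non-zero cache_size (which is what instantiates the per-lcore default caches referenced throughout), and a worker whose get/put bursts mostly stay inside that cache. Pool name, counts and sizes are illustrative assumptions.

#include <rte_mempool.h>
#include <rte_lcore.h>

#define BURST 32

static struct rte_mempool *
make_pool(void)
{
        /* cache_size = 256 enables the per-lcore default caches. */
        return rte_mempool_create("example_pool", 8192, 2048, 256, 0,
                                  NULL, NULL, NULL, NULL,
                                  rte_socket_id(), 0);
}

static int
worker(void *arg)
{
        struct rte_mempool *mp = arg;
        void *objs[BURST];

        if (rte_mempool_get_bulk(mp, objs, BURST) < 0)
                return -1;
        /* ... use the objects ... */
        rte_mempool_put_bulk(mp, objs, BURST);
        return 0;
}
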