#ifndef __INCLUDE_DAO_DMA_H__
#define __INCLUDE_DAO_DMA_H__

#include <rte_dmadev.h>
#include <rte_mempool.h>
#include <rte_prefetch.h>

#include <dao_config.h>

#define DAO_DMA_MAX_POINTER 15u
#define DAO_DMA_MAX_META_POINTER 48
#define DAO_DMA_MAX_VCHAN_PER_LCORE 64
#define DAO_DMA_MAX_INFLIGHT_MDATA 4096
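/*
 * All fast-path helpers below operate on a per-vchan submission state
 * (struct dao_dma_vchan_state), reached through the per-lcore
 * dao_dma_vchan_info pointer declared later in this header. A minimal
 * sketch, assuming the dev2mem[]/mem2dev[] members listed at the end of
 * this header are the per-direction state arrays (vchan_id is illustrative):
 *
 *	struct dao_dma_vchan_info *info = RTE_PER_LCORE(dao_dma_vchan_info);
 *	struct dao_dma_vchan_state *st = &info->dev2mem[vchan_id];
 */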
static __rte_always_inline int
dao_dma_has_stats_feature(void);
static __rte_always_inline bool
dao_dma_op_status(struct dao_dma_vchan_state *vchan, uint16_t op_idx)
{
	uint16_t head = vchan->head;
	uint16_t tail = vchan->tail;

	/* The op at the current tail still has pointers being gathered */
	if (vchan->src_i && (tail == op_idx))
		return false;

	/* Completed if op_idx lies outside the in-flight [head, tail) window */
	return head <= tail ? (op_idx < head || op_idx >= tail) : (op_idx < head && op_idx >= tail);
}
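/*
 * Worked example of the completion-window check above (illustrative
 * indices): with head = 5 and tail = 2 the in-flight window wraps around,
 * so index 3 reports completed (3 < head && 3 >= tail) while indices 1
 * and 6 are still in flight:
 *
 *	dao_dma_op_status(vchan, 3);	// true: completed
 *	dao_dma_op_status(vchan, 6);	// false: still in flight
 *	dao_dma_op_status(vchan, 1);	// false: still in flight
 */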
static __rte_always_inline bool
dao_dma_flush(struct dao_dma_vchan_state *vchan, const uint8_t avail)
{
	int src_avail = vchan->flush_thr - vchan->src_i;
	int dst_avail = vchan->flush_thr - vchan->dst_i;
	/* Bit 3 is RTE_DMA_OP_FLAG_AUTO_FREE */
	uint64_t flags = (uint64_t)vchan->auto_free << 3;
	int rc;

	/* Nothing to submit while 'avail' more pointers still fit or nothing is pending */
	if (likely((src_avail >= (int)avail || !vchan->src_i) &&
		   (dst_avail >= (int)avail || !vchan->dst_i)))
		return true;

	rc = rte_dma_copy_sg(vchan->devid, vchan->vchan, vchan->src, vchan->dst, vchan->src_i,
			     vchan->dst_i, flags);
	if (unlikely(rc < 0)) {
		/* ... error accounting elided ... */
		return false;
	}

	/* ... tail/pointer-index bookkeeping elided ... */
	return true;
}
static __rte_always_inline uint16_t
dao_dma_src_avail(struct dao_dma_vchan_state *vchan);
static __rte_always_inline uint16_t
dao_dma_dst_avail(struct dao_dma_vchan_state *vchan);
static __rte_always_inline struct rte_dma_sge *
dao_dma_sge_src(struct dao_dma_vchan_state *vchan);
static __rte_always_inline struct rte_dma_sge *
dao_dma_sge_dst(struct dao_dma_vchan_state *vchan);
static __rte_always_inline void
dao_dma_enq_x1(struct dao_dma_vchan_state *vchan, rte_iova_t src, uint32_t src_len,
	       rte_iova_t dst, uint32_t dst_len)
{
	uint16_t src_i = vchan->src_i;
	uint16_t dst_i = vchan->dst_i;

	vchan->dst[dst_i].addr = dst;
	vchan->dst[dst_i].length = dst_len;
	vchan->src[src_i].addr = src;
	vchan->src[src_i].length = src_len;

	vchan->src_i = src_i + 1;
	vchan->dst_i = dst_i + 1;
}
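/*
 * Typical single-pointer enqueue pattern, as a minimal sketch: ensure one
 * more src/dst pair still fits (dao_dma_flush() submits the pending batch
 * when it does not), then append the transfer. The mbuf, destination IOVA
 * and length are illustrative:
 *
 *	if (dao_dma_flush(st, 1))
 *		dao_dma_enq_x1(st, rte_pktmbuf_iova(m), m->data_len,
 *			       dst_iova, m->data_len);
 */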
static __rte_always_inline void
dao_dma_enq_dst_x1(struct dao_dma_vchan_state *vchan, rte_iova_t dst, uint32_t dst_len)
{
	uint16_t dst_i = vchan->dst_i;

	vchan->dst[dst_i].addr = dst;
	vchan->dst[dst_i].length = dst_len;

	vchan->dst_i = dst_i + 1;
}
static __rte_always_inline void
dao_dma_enq_src_x1(struct dao_dma_vchan_state *vchan, rte_iova_t src, uint32_t src_len)
{
	uint16_t src_i = vchan->src_i;

	vchan->src[src_i].addr = src;
	vchan->src[src_i].length = src_len;

	vchan->src_i = src_i + 1;
}
static __rte_always_inline uint16_t
dao_dma_enq_x4(struct dao_dma_vchan_state *vchan, uint64x2_t *vsrc, uint64x2_t *vdst)
{
	struct rte_dma_sge *src, *dst;
	uint16_t src_i = vchan->src_i;
	uint16_t dst_i = vchan->dst_i;
	int src_avail = vchan->flush_thr - src_i;
	int i = 0;

	src = vchan->src + src_i;
	dst = vchan->dst + dst_i;
	if (src_avail >= 4) {
		/* Fast path: all four SG entries fit, store them with NEON */
		vst1q_u64((uint64_t *)&src[0], vsrc[0]);
		vst1q_u64((uint64_t *)&src[1], vsrc[1]);
		vst1q_u64((uint64_t *)&src[2], vsrc[2]);
		vst1q_u64((uint64_t *)&src[3], vsrc[3]);

		vst1q_u64((uint64_t *)&dst[0], vdst[0]);
		vst1q_u64((uint64_t *)&dst[1], vdst[1]);
		vst1q_u64((uint64_t *)&dst[2], vdst[2]);
		vst1q_u64((uint64_t *)&dst[3], vdst[3]);

		vchan->src_i = src_i + 4;
		vchan->dst_i = dst_i + 4;
		return 4;
	}

	/* Slow path: fill whatever still fits before the flush threshold */
	while (i < 4 && src_avail > 0) {
		vst1q_u64((uint64_t *)src, vsrc[i]);
		vst1q_u64((uint64_t *)dst, vdst[i]);
		src++;
		dst++;
		i++;
		src_avail--;
	}
	vchan->src_i = src_i + i;
	vchan->dst_i = dst_i + i;

	/* ... submission of the now-full batch elided ... */

	/* Store the remaining entries into the fresh batch */
	src_i = vchan->src_i;
	dst_i = vchan->dst_i;
	src = vchan->src + src_i;
	dst = vchan->dst + dst_i;
	src_avail = vchan->flush_thr - src_i;
	while (i < 4 && src_avail > 0) {
		vst1q_u64((uint64_t *)src, vsrc[i]);
		vst1q_u64((uint64_t *)dst, vdst[i]);
		src++;
		dst++;
		i++;
		src_avail--;
	}
	vchan->src_i = src_i + i;
	vchan->dst_i = dst_i + i;

	return i;
}
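/*
 * Each uint64x2_t passed to dao_dma_enq_x4() mirrors one struct rte_dma_sge:
 * lane 0 carries the 64-bit IOVA, lane 1 the transfer length. A minimal
 * sketch of building the four source/destination vectors from mbufs
 * (mbuf array, destination base and segment size are illustrative):
 *
 *	for (k = 0; k < 4; k++) {
 *		vsrc[k] = vsetq_lane_u64(m[k]->data_len,
 *					 vdupq_n_u64(rte_pktmbuf_iova(m[k])), 1);
 *		vdst[k] = vsetq_lane_u64(m[k]->data_len,
 *					 vdupq_n_u64(dst_base + k * seg_sz), 1);
 *	}
 *	dao_dma_enq_x4(st, vsrc, vdst);
 */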
static __rte_always_inline void
dao_dma_check_compl(struct dao_dma_vchan_state *vchan)
{
	uint16_t cmpl;
	bool has_err = false;

	cmpl = rte_dma_completed(vchan->devid, vchan->vchan, 128, NULL, &has_err);
	if (unlikely(has_err)) {
		/* ... error accounting elided ... */
	}
	/* ... head/pending-op bookkeeping elided ... */
}
static __rte_always_inline uint16_t
dao_dma_ops_avail(struct dao_dma_vchan_state *vchan);
static __rte_always_inline struct rte_dma_op **
dao_dma_ops_get(struct dao_dma_vchan_state *vchan, uint16_t n);
static __rte_always_inline void
dao_dma_ops_put(struct dao_dma_vchan_state *vchan, uint16_t n);

static __rte_always_inline void
dao_dma_ops_release(struct dao_dma_vchan_state *vchan, uint16_t n);
static __rte_always_inline void
dao_dma_op_set_cmpl(struct rte_dma_op *op, uint16_t *ptr, uint16_t val, uint16_t *pend_ptr,
		    uint16_t pend_val)
{
	op->user_meta = (uint64_t)(uintptr_t)ptr;
	op->event_meta = (uint64_t)(uintptr_t)pend_ptr;
	op->rsvd = ((uint32_t)val << 16) | pend_val;
}
static __rte_always_inline void
dao_dma_check_meta_compl(struct dao_dma_vchan_state *vchan, const int mem_order)
{
	uint32_t cmpl, i, j, idx = 0;
	bool has_err = false;

	cmpl = rte_dma_completed(vchan->devid, vchan->vchan, 128, NULL, &has_err);
	if (unlikely(has_err)) {
		/* ... error accounting elided ... */
	}

	/* Walk the completed ops and publish their completion metadata */
	for (i = vchan->head; i < vchan->head + cmpl; i++) {
		/* ... */
		for (j = 0; j < vchan->mdata[idx].cnt; j++) {
			/* ... per-pointer metadata store elided ... */
		}
	}
	/* ... */
}
static __rte_always_inline void
dao_dma_check_meta_compl_ops(struct dao_dma_vchan_state *vchan, const int mem_order)
{
	struct rte_dma_op *deq_ops[DEQ_SZ];
	uint16_t cmpl, i;

	cmpl = rte_dma_dequeue_ops(vchan->devid, vchan->vchan, deq_ops, DEQ_SZ);

	for (i = 0; i < cmpl; i++) {
		struct rte_dma_op *op = deq_ops[i];

		if (unlikely(op->status != RTE_DMA_STATUS_SUCCESSFUL))
			continue;

		/* Unpack the metadata packed by dao_dma_op_set_cmpl() */
		uint16_t *ptr = (uint16_t *)(uintptr_t)op->user_meta;
		uint16_t *pend_ptr = (uint16_t *)(uintptr_t)op->event_meta;
		uint16_t val = op->rsvd >> 16;
		uint16_t pend_val = op->rsvd & 0xFFFF;

		__atomic_store_n(ptr, val, __ATOMIC_RELEASE);

		*pend_ptr -= pend_val;
	}
	/* ... */
}
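/*
 * The two halves of the ops-based completion scheme pair up as a
 * producer/reaper, a minimal sketch (completion flag, pending counter and
 * the filling of the op's transfer descriptors are illustrative/elided;
 * dao_dma_ops_put() is assumed to queue the filled ops for submission):
 *
 *	struct rte_dma_op **ops = dao_dma_ops_get(st, 1);
 *
 *	if (ops != NULL) {
 *		// ... fill ops[0] src/dst entries ...
 *		// On completion: store 1 to compl_flag, subtract 1 from pend_cnt
 *		dao_dma_op_set_cmpl(ops[0], &compl_flag, 1, &pend_cnt, 1);
 *		dao_dma_ops_put(st, 1);
 *	}
 *
 *	// Later, on the same lcore, reap and apply the stored metadata
 *	dao_dma_check_meta_compl_ops(st, __ATOMIC_RELAXED);
 */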
static __rte_always_inline void
dao_dma_update_cmpl_meta(struct dao_dma_vchan_state *vchan, uint16_t *ptr, uint16_t val,
			 uint16_t *pend_ptr, uint16_t pend_val, uint16_t tail);
static __rte_always_inline void
dao_dma_check_meta_compl_v2(struct dao_dma_vchan_state *vchan, const int mem_order)
{
	uint32_t cmpl, i, j, idx = 0;
	bool has_err = false;

	cmpl = rte_dma_completed(vchan->devid, vchan->vchan, 128, NULL, &has_err);
	if (unlikely(has_err)) {
		/* ... error accounting elided ... */
	}

	for (i = vchan->head; i < vchan->head + cmpl; i++) {
		/* ... */
		for (j = 0; j < vchan->mdata[idx].cnt; j++) {
			/* ... per-pointer metadata store elided ... */
		}
	}
	/* ... */
}
static __rte_always_inline void
dao_dma_update_cmpl_meta_v2(struct dao_dma_vchan_state *vchan, uint16_t *ptr, uint16_t val,
			    uint16_t tail);
/*
 * API synopsis
 */

/* Limits (values defined above): DAO_DMA_MAX_POINTER, DAO_DMA_MAX_META_POINTER,
 * DAO_DMA_MAX_VCHAN_PER_LCORE, DAO_DMA_MAX_INFLIGHT_MDATA
 */

/* Per-lcore DMA vchan info */
RTE_DECLARE_PER_LCORE(struct dao_dma_vchan_info *, dao_dma_vchan_info);

/* Control-plane setup */
int dao_dma_ctrl_dev_set(int16_t dev2mem_id, int16_t mem2dev_id);
int16_t dao_dma_ctrl_dev2mem(void);
int16_t dao_dma_ctrl_mem2dev(void);
int dao_dma_lcore_dev2mem_set(int16_t dma_devid, uint16_t nb_vchans, uint16_t flush_thr);
int dao_dma_lcore_mem2dev_set(int16_t dma_devid, uint16_t nb_vchans, uint16_t flush_thr);
int dao_dma_lcore_dev2mem_set_ops(int16_t dma_devid, uint16_t nb_vchans, uint16_t flush_thr, uint16_t nb_ops);
int dao_dma_lcore_mem2dev_set_ops(int16_t dma_devid, uint16_t nb_vchans, uint16_t flush_thr, uint16_t nb_ops);
int dao_dma_lcore_mem2dev_autofree_set(int16_t dma_devid, uint16_t vchan, bool enable);
int dao_dma_stats_get(uint16_t lcore_id, struct dao_dma_stats *stats);

/* Submission and completion wait */
int dao_dma_flush_submit(void);
int dao_dma_flush_submit_v2(void);
int dao_dma_flush_submit_ops(void);
void dao_dma_compl_wait(uint16_t vchan);
void dao_dma_compl_wait_sp(uint16_t vchan);
void dao_dma_compl_wait_ops(uint16_t vchan);
void dao_dma_compl_wait_for_curr_tail(uint16_t vchan);

/* Fast-path inline helpers (defined above) */
static __rte_always_inline int dao_dma_has_stats_feature(void);
static __rte_always_inline bool dao_dma_op_status(struct dao_dma_vchan_state *vchan, uint16_t op_idx);
static __rte_always_inline bool dao_dma_flush(struct dao_dma_vchan_state *vchan, const uint8_t avail);
static __rte_always_inline uint16_t dao_dma_src_avail(struct dao_dma_vchan_state *vchan);
static __rte_always_inline uint16_t dao_dma_dst_avail(struct dao_dma_vchan_state *vchan);
static __rte_always_inline struct rte_dma_sge *dao_dma_sge_src(struct dao_dma_vchan_state *vchan);
static __rte_always_inline struct rte_dma_sge *dao_dma_sge_dst(struct dao_dma_vchan_state *vchan);
static __rte_always_inline void dao_dma_enq_x1(struct dao_dma_vchan_state *vchan, rte_iova_t src, uint32_t src_len, rte_iova_t dst, uint32_t dst_len);
static __rte_always_inline void dao_dma_enq_dst_x1(struct dao_dma_vchan_state *vchan, rte_iova_t dst, uint32_t dst_len);
static __rte_always_inline void dao_dma_enq_src_x1(struct dao_dma_vchan_state *vchan, rte_iova_t src, uint32_t src_len);
static __rte_always_inline uint16_t dao_dma_enq_x4(struct dao_dma_vchan_state *vchan, uint64x2_t *vsrc, uint64x2_t *vdst);
static __rte_always_inline void dao_dma_check_compl(struct dao_dma_vchan_state *vchan);
static __rte_always_inline uint16_t dao_dma_ops_avail(struct dao_dma_vchan_state *vchan);
static __rte_always_inline struct rte_dma_op **dao_dma_ops_get(struct dao_dma_vchan_state *vchan, uint16_t n);
static __rte_always_inline void dao_dma_ops_put(struct dao_dma_vchan_state *vchan, uint16_t n);
static __rte_always_inline void dao_dma_ops_release(struct dao_dma_vchan_state *vchan, uint16_t n);
static __rte_always_inline void dao_dma_op_set_cmpl(struct rte_dma_op *op, uint16_t *ptr, uint16_t val, uint16_t *pend_ptr, uint16_t pend_val);
static __rte_always_inline void dao_dma_check_meta_compl(struct dao_dma_vchan_state *vchan, const int mem_order);
static __rte_always_inline void dao_dma_check_meta_compl_ops(struct dao_dma_vchan_state *vchan, const int mem_order);
static __rte_always_inline void dao_dma_check_meta_compl_v2(struct dao_dma_vchan_state *vchan, const int mem_order);
static __rte_always_inline void dao_dma_update_cmpl_meta(struct dao_dma_vchan_state *vchan, uint16_t *ptr, uint16_t val, uint16_t *pend_ptr, uint16_t pend_val, uint16_t tail);
static __rte_always_inline void dao_dma_update_cmpl_meta_v2(struct dao_dma_vchan_state *vchan, uint16_t *ptr, uint16_t val, uint16_t tail);

/* Structure members from this header, grouped by their likely owning struct */

/* Completion metadata (struct dao_dma_cmpl_mdata) */
uint16_t *ptr[DAO_DMA_MAX_META_POINTER];
uint16_t val[DAO_DMA_MAX_META_POINTER];
uint16_t *pend_ptr[DAO_DMA_MAX_META_POINTER];
uint16_t pend_val[DAO_DMA_MAX_META_POINTER];

/* Per-lcore stats (struct dao_dma_stats) */
struct dao_dma_vchan_stats dev2mem[DAO_DMA_MAX_VCHAN_PER_LCORE];
struct dao_dma_vchan_stats mem2dev[DAO_DMA_MAX_VCHAN_PER_LCORE];

/* Per-lcore vchan info (struct dao_dma_vchan_info) */
struct dao_dma_vchan_state dev2mem[DAO_DMA_MAX_VCHAN_PER_LCORE];
struct dao_dma_vchan_state mem2dev[DAO_DMA_MAX_VCHAN_PER_LCORE];

/* Per-vchan submission state (struct dao_dma_vchan_state) */
struct rte_dma_sge src[DAO_DMA_MAX_POINTER];
struct rte_dma_sge dst[DAO_DMA_MAX_POINTER];
struct rte_dma_op **dma_ops;
struct dao_dma_cmpl_mdata mdata[DAO_DMA_MAX_INFLIGHT_MDATA];
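/*
 * End-to-end usage sketch (illustrative identifiers, error handling
 * omitted): the control plane binds the dev2mem/mem2dev DMA devices, each
 * worker lcore attaches its vchans, and the worker loop enqueues pointer
 * pairs, flushes them and reaps completions:
 *
 *	// Control plane
 *	dao_dma_ctrl_dev_set(dev2mem_dma_devid, mem2dev_dma_devid);
 *
 *	// Per worker lcore, once at startup
 *	dao_dma_lcore_dev2mem_set(dev2mem_dma_devid, nb_vchans, 8);
 *	dao_dma_lcore_mem2dev_set(mem2dev_dma_devid, nb_vchans, 8);
 *
 *	// Worker main loop
 *	while (!quit) {
 *		// ... dao_dma_enq_x1()/dao_dma_enq_x4() on the vchan states ...
 *		dao_dma_flush_submit();
 *		dao_dma_check_compl(st);
 *	}
 */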