summaryrefslogtreecommitdiff
path: root/drivers/net/ethernet/huawei/hinic3/hinic3_wq.h
blob: ab37893efd7e2f79ba8c42da6bb112f2348503d0 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
/* SPDX-License-Identifier: GPL-2.0 */
/* Copyright (c) Huawei Technologies Co., Ltd. 2025. All rights reserved. */

#ifndef _HINIC3_WQ_H_
#define _HINIC3_WQ_H_

#include <linux/io.h>

#include "hinic3_queue_common.h"

/* HW-consumed scatter/gather buffer descriptor placed in a send-queue WQE. */
struct hinic3_sq_bufdesc {
	/* 31-bits Length, L2NIC only uses length[17:0] */
	u32 len;
	/* Reserved, must not be interpreted by driver. */
	u32 rsvd;
	/* High/low 32 bits of the buffer address — presumably the DMA address
	 * split into halves for the HW descriptor layout; confirm endianness
	 * expectations against the WQE-building code in hinic3_wq.c.
	 */
	u32 hi_addr;
	u32 lo_addr;
};

/* Work queue is used to submit elements (tx, rx, cmd) to hw.
 * Driver is the producer that advances prod_idx. cons_idx is advanced when
 * HW reports completions of previously submitted elements.
 */
struct hinic3_wq {
	/* Pages backing the WQEBB ring. */
	struct hinic3_queue_pages qpages;
	/* Unmasked producer/consumer indices that are advanced to natural
	 * integer overflow regardless of queue depth.
	 */
	u16                       cons_idx;
	u16                       prod_idx;

	/* Total number of WQEBBs in the ring. */
	u32                       q_depth;
	/* Mask converting an unmasked index into a ring offset; presumably
	 * q_depth - 1 (q_depth a power of two) — confirm where wq is created.
	 */
	u16                       idx_mask;

	/* Work Queue (logical WQEBB array) is mapped to hw via Chip Logical
	 * Address (CLA) using 1 of 2 levels:
	 *     level 0 - direct mapping of single wq page
	 *     level 1 - indirect mapping of multiple pages via additional page
	 *               table.
	 * When wq uses level 1, wq_block will hold the allocated indirection
	 * table.
	 */
	dma_addr_t                wq_block_paddr;
	__be64                    *wq_block_vaddr;
} ____cacheline_aligned;

/* Get number of elements in work queue that are in-use.
 * Unsigned subtraction yields the correct count even after either index
 * has wrapped past u16 range; READ_ONCE guards against torn reads when
 * the indices are updated concurrently.
 */
static inline u16 hinic3_wq_get_used(const struct hinic3_wq *wq)
{
	u16 pi = READ_ONCE(wq->prod_idx);
	u16 ci = READ_ONCE(wq->cons_idx);

	return pi - ci;
}

static inline u16 hinic3_wq_free_wqebbs(struct hinic3_wq *wq)
{
	/* Don't allow queue to become completely full, report (free - 1). */
	return wq->q_depth - hinic3_wq_get_used(wq) - 1;
}

/* Claim a single WQEBB from the queue.
 * Stores the masked ring position in @pi and returns the address of the
 * corresponding element. Caller must ensure free space beforehand
 * (e.g. via hinic3_wq_free_wqebbs()).
 */
static inline void *hinic3_wq_get_one_wqebb(struct hinic3_wq *wq, u16 *pi)
{
	u16 pos = wq->prod_idx++ & wq->idx_mask;

	*pi = pos;

	return get_q_element(&wq->qpages, pos, NULL);
}

/* Release @num_wqebbs completed elements back to the queue by advancing
 * the consumer index; u16 arithmetic wraps naturally past overflow.
 */
static inline void hinic3_wq_put_wqebbs(struct hinic3_wq *wq, u16 num_wqebbs)
{
	wq->cons_idx = wq->cons_idx + num_wqebbs;
}

/* Claim @num_wqebbs contiguous WQEBBs starting at *prod_idx.
 * NOTE(review): signature suggests the claimed range may wrap around the
 * end of the ring, in which case it is returned as two segments —
 * *first_part_wqebbs_num elements at *first_part_wqebbs, the remainder at
 * *second_part_wqebbs. Implemented in hinic3_wq.c — confirm there.
 */
void hinic3_wq_get_multi_wqebbs(struct hinic3_wq *wq,
				u16 num_wqebbs, u16 *prod_idx,
				struct hinic3_sq_bufdesc **first_part_wqebbs,
				struct hinic3_sq_bufdesc **second_part_wqebbs,
				u16 *first_part_wqebbs_num);

#endif