2019-09-27 11:41:45 +00:00
|
|
|
/* SPDX-License-Identifier: BSD-3-Clause
|
2020-04-08 08:28:52 +00:00
|
|
|
* Copyright (c) 2015-2020 Amazon.com, Inc. or its affiliates.
|
2019-09-27 11:41:45 +00:00
|
|
|
* All rights reserved.
|
|
|
|
*/
|
2016-03-17 14:31:18 +00:00
|
|
|
|
|
|
|
#ifndef _ENA_ETHDEV_H_
|
|
|
|
#define _ENA_ETHDEV_H_
|
|
|
|
|
2018-06-07 09:43:06 +00:00
|
|
|
#include <rte_cycles.h>
|
2016-03-17 14:31:18 +00:00
|
|
|
#include <rte_pci.h>
|
2017-10-26 10:06:08 +00:00
|
|
|
#include <rte_bus_pci.h>
|
2018-06-07 09:43:06 +00:00
|
|
|
#include <rte_timer.h>
|
2016-03-17 14:31:18 +00:00
|
|
|
|
|
|
|
#include "ena_com.h"
|
|
|
|
|
|
|
|
#define ENA_REGS_BAR 0
|
|
|
|
#define ENA_MEM_BAR 2
|
|
|
|
|
|
|
|
#define ENA_MAX_NUM_QUEUES 128
|
|
|
|
#define ENA_MIN_FRAME_LEN 64
|
2016-10-29 01:06:14 +00:00
|
|
|
#define ENA_NAME_MAX_LEN 20
|
|
|
|
#define ENA_PKT_MAX_BUFS 17
|
2020-04-08 08:28:52 +00:00
|
|
|
#define ENA_RX_BUF_MIN_SIZE 1400
|
2020-04-08 08:29:09 +00:00
|
|
|
#define ENA_DEFAULT_RING_SIZE 1024
|
2016-03-17 14:31:18 +00:00
|
|
|
|
2018-06-07 09:43:16 +00:00
|
|
|
#define ENA_MIN_MTU 128
|
|
|
|
|
2016-06-30 15:04:56 +00:00
|
|
|
#define ENA_MMIO_DISABLE_REG_READ BIT(0)
|
|
|
|
|
2018-06-07 09:43:06 +00:00
|
|
|
#define ENA_WD_TIMEOUT_SEC 3
|
|
|
|
#define ENA_DEVICE_KALIVE_TIMEOUT (ENA_WD_TIMEOUT_SEC * rte_get_timer_hz())
|
|
|
|
|
2020-04-08 08:29:16 +00:00
|
|
|
/* While processing submitted and completed descriptors (rx and tx path
|
|
|
|
* respectively) in a loop it is desired to:
|
|
|
|
 * - perform batch submissions while populating the submission queue
|
|
|
|
* - avoid blocking transmission of other packets during cleanup phase
|
|
|
|
* Hence the utilization ratio of 1/8 of a queue size or max value if the size
|
|
|
|
* of the ring is very big - like 8k Rx rings.
|
|
|
|
*/
|
|
|
|
#define ENA_REFILL_THRESH_DIVIDER 8
|
|
|
|
#define ENA_REFILL_THRESH_PACKET 256
|
|
|
|
|
2020-04-08 08:29:17 +00:00
|
|
|
#define ENA_IDX_NEXT_MASKED(idx, mask) (((idx) + 1) & (mask))
|
|
|
|
#define ENA_IDX_ADD_MASKED(idx, n, mask) (((idx) + (n)) & (mask))
|
|
|
|
|
2016-03-17 14:31:18 +00:00
|
|
|
struct ena_adapter;
|
|
|
|
|
|
|
|
/* Direction served by a descriptor ring (see struct ena_ring). */
enum ena_ring_type {
	ENA_RING_TYPE_RX = 1,
	ENA_RING_TYPE_TX = 2,
};
|
|
|
|
|
|
|
|
/* Per-packet Tx context kept for the lifetime of the transmission:
 * the mbuf being sent and the device buffers it was mapped into.
 */
struct ena_tx_buffer {
	struct rte_mbuf *mbuf;
	/* Number of Tx descriptors consumed by this packet -
	 * presumably used to account for completed descriptors on cleanup.
	 */
	unsigned int tx_descs;
	/* Number of valid entries in bufs[] below */
	unsigned int num_of_bufs;
	/* Scatter-gather list handed to the device; bounded by the
	 * maximal number of segments per packet (ENA_PKT_MAX_BUFS).
	 */
	struct ena_com_buf bufs[ENA_PKT_MAX_BUFS];
};
|
|
|
|
|
2020-04-08 08:29:14 +00:00
|
|
|
/* Rx buffer holds only pointer to the mbuf - may be expanded in the future */
struct ena_rx_buffer {
	struct rte_mbuf *mbuf;
	/* Device-side descriptor of the mbuf's data buffer */
	struct ena_com_buf ena_buf;
};
|
|
|
|
|
2018-12-14 13:18:30 +00:00
|
|
|
/* Input/output context for queue size calculation: device handles go in,
 * the computed ring and scatter-gather limits come out.
 */
struct ena_calc_queue_size_ctx {
	struct ena_com_dev_get_features_ctx *get_feat_ctx;
	struct ena_com_dev *ena_dev;
	/* Calculated upper bounds for ring sizes (entries) */
	u32 max_rx_queue_size;
	u32 max_tx_queue_size;
	/* Calculated upper bounds for segments per packet */
	u16 max_tx_sgl_size;
	u16 max_rx_sgl_size;
};
|
|
|
|
|
2018-12-14 13:18:41 +00:00
|
|
|
/* Per-queue Tx statistics (lives inside struct ena_ring). */
struct ena_stats_tx {
	u64 cnt;               /* packets sent */
	u64 bytes;             /* bytes sent */
	u64 prepare_ctx_err;   /* failures preparing the Tx context */
	u64 linearize;         /* mbufs that had to be linearized */
	u64 linearize_failed;  /* linearization attempts that failed */
	u64 tx_poll;           /* completion-polling iterations */
	u64 doorbells;         /* doorbell writes to the device */
	u64 bad_req_id;        /* completions with an invalid request id */
	u64 available_desc;    /* descriptors currently free in the ring */
};
|
|
|
|
|
|
|
|
/* Per-queue Rx statistics (lives inside struct ena_ring). */
struct ena_stats_rx {
	u64 cnt;             /* packets received */
	u64 bytes;           /* bytes received */
	u64 refill_partial;  /* refills that populated fewer bufs than asked */
	u64 bad_csum;        /* packets with a bad checksum */
	u64 mbuf_alloc_fail; /* mbuf allocation failures during refill */
	u64 bad_desc_num;    /* packets spanning too many descriptors */
	u64 bad_req_id;      /* completions with an invalid request id */
};
|
|
|
|
|
2016-03-17 14:31:18 +00:00
|
|
|
/* State of a single Tx or Rx queue; the `type` field tells which of the
 * unions' members is active.
 */
struct ena_ring {
	/* Producer/consumer indices into the ring (masked by size_mask) */
	u16 next_to_use;
	u16 next_to_clean;

	enum ena_ring_type type;
	/* Placement policy (host memory vs. LLQ) - meaningful for Tx rings */
	enum ena_admin_placement_policy_type tx_mem_queue_type;
	/* Holds the empty requests for TX/RX OOO completions */
	union {
		uint16_t *empty_tx_reqs;
		uint16_t *empty_rx_reqs;
	};

	union {
		struct ena_tx_buffer *tx_buffer_info; /* context of tx packet */
		struct ena_rx_buffer *rx_buffer_info; /* context of rx packet */
	};
	/* Scratch array of mbufs used when bulk-refilling the Rx ring */
	struct rte_mbuf **rx_refill_buffer;
	unsigned int ring_size; /* number of tx/rx_buffer_info's entries */
	/* ring_size - 1; rings are power-of-two sized so index arithmetic
	 * uses ENA_IDX_*_MASKED with this mask.
	 */
	unsigned int size_mask;

	/* Device completion/submission queue handles from ena_com */
	struct ena_com_io_cq *ena_com_io_cq;
	struct ena_com_io_sq *ena_com_io_sq;

	struct ena_com_rx_buf_info ena_bufs[ENA_PKT_MAX_BUFS]
		__rte_cache_aligned;

	/* Mempool the Rx buffers are allocated from */
	struct rte_mempool *mb_pool;
	unsigned int port_id;
	unsigned int id;
	/* Max length PMD can push to device for LLQ */
	uint8_t tx_max_header_size;
	int configured;

	/* Bounce buffer for LLQ pushes - NOTE(review): presumably used when
	 * the packet header cannot be written to the device memory directly;
	 * confirm against the Tx path.
	 */
	uint8_t *push_buf_intermediate_buf;

	struct ena_adapter *adapter;
	/* Per-queue offload flags selected at queue setup */
	uint64_t offloads;
	/* Max scatter-gather entries per packet for this queue */
	u16 sgl_size;

	bool disable_meta_caching;

	/* Active member is chosen by `type` (rx_stats for Rx rings,
	 * tx_stats for Tx rings).
	 */
	union {
		struct ena_stats_rx rx_stats;
		struct ena_stats_tx tx_stats;
	};

	unsigned int numa_socket_id;
} __rte_cache_aligned;
|
|
|
|
|
|
|
|
/* Lifecycle state of the adapter (struct ena_adapter::state). */
enum ena_adapter_state {
	ENA_ADAPTER_STATE_FREE    = 0,
	ENA_ADAPTER_STATE_INIT    = 1,
	ENA_ADAPTER_STATE_RUNNING = 2,
	ENA_ADAPTER_STATE_STOPPED = 3,
	ENA_ADAPTER_STATE_CONFIG  = 4,
	ENA_ADAPTER_STATE_CLOSED  = 5,
};
|
|
|
|
|
|
|
|
/* Driver-level counters aggregated across queues. The atomics are
 * presumably updated from per-queue datapath contexts - confirm against
 * the stats-get implementation.
 */
struct ena_driver_stats {
	rte_atomic64_t ierrors;   /* Rx errors */
	rte_atomic64_t oerrors;   /* Tx errors */
	rte_atomic64_t rx_nombuf; /* Rx drops due to mbuf exhaustion */
	u64 rx_drops;             /* Rx drops reported by the device */
};
|
|
|
|
|
2016-06-30 15:04:55 +00:00
|
|
|
/* Device-level statistics, exposed as extended (xstats) counters. */
struct ena_stats_dev {
	u64 wd_expired; /* watchdog (keep-alive) expirations */
	u64 dev_start;  /* number of dev_start calls */
	u64 dev_stop;   /* number of dev_stop calls */
	/*
	 * Tx drops cannot be reported as the driver statistic, because DPDK
	 * rte_eth_stats structure isn't providing appropriate field for that.
	 * As a workaround it is being published as an extended statistic.
	 */
	u64 tx_drops;
};
|
|
|
|
|
2019-02-15 08:36:39 +00:00
|
|
|
/* Offload capabilities advertised by the device. */
struct ena_offloads {
	bool tso4_supported;    /* TCP segmentation offload over IPv4 */
	bool tx_csum_supported; /* Tx checksum offload */
	bool rx_csum_supported; /* Rx checksum offload */
};
|
|
|
|
|
2016-03-17 14:31:18 +00:00
|
|
|
/* board specific private data structure */
struct ena_adapter {
	/* OS defined structs */
	struct rte_pci_device *pdev;
	struct rte_eth_dev_data *rte_eth_dev_data;
	struct rte_eth_dev *rte_dev;

	/* Communication layer device handle (shared with ena_com) */
	struct ena_com_dev ena_dev __rte_cache_aligned;

	/* TX */
	struct ena_ring tx_ring[ENA_MAX_NUM_QUEUES] __rte_cache_aligned;
	u32 max_tx_ring_size;
	u16 max_tx_sgl_size;

	/* RX */
	struct ena_ring rx_ring[ENA_MAX_NUM_QUEUES] __rte_cache_aligned;
	u32 max_rx_ring_size;
	u16 max_rx_sgl_size;

	u32 max_num_io_queues;
	u16 max_mtu;
	/* Offload capabilities reported by the device */
	struct ena_offloads offloads;

	int id_number;
	char name[ENA_NAME_MAX_LEN];
	u8 mac_addr[RTE_ETHER_ADDR_LEN];

	/* Mapped PCI BARs: registers (ENA_REGS_BAR) and device memory
	 * (ENA_MEM_BAR).
	 */
	void *regs;
	void *dev_mem_base;

	struct ena_driver_stats *drv_stats;
	enum ena_adapter_state state;

	/* Tx/Rx offload flag sets: what the device supports vs. what was
	 * selected during device configuration.
	 */
	uint64_t tx_supported_offloads;
	uint64_t tx_selected_offloads;
	uint64_t rx_supported_offloads;
	uint64_t rx_selected_offloads;

	bool link_status;

	/* Reason recorded for the next device reset */
	enum ena_regs_reset_reason_types reset_reason;

	/* Keep-alive watchdog: timer, last-heartbeat timestamp and the
	 * timeout after which the device is considered dead
	 * (ENA_DEVICE_KALIVE_TIMEOUT).
	 */
	struct rte_timer timer_wd;
	uint64_t timestamp_wd;
	uint64_t keep_alive_timeout;

	struct ena_stats_dev dev_stats;

	/* Set to request a device reset from the management path */
	bool trigger_reset;

	/* True while the keep-alive watchdog is armed */
	bool wd_state;

	bool use_large_llq_hdr;
};
|
|
|
|
|
|
|
|
#endif /* _ENA_ETHDEV_H_ */
|