// SPDX-License-Identifier: GPL-2.0 OR Linux-OpenIB
/*
 * Copyright (c) 2018, Mellanox Technologies inc. All rights reserved.
 */

#include <rdma/ib_user_verbs.h>
#include <rdma/ib_verbs.h>
#include <rdma/uverbs_types.h>
#include <rdma/uverbs_ioctl.h>
#include <rdma/mlx5_user_ioctl_cmds.h>
#include <rdma/mlx5_user_ioctl_verbs.h>
#include <rdma/ib_umem.h>
#include <rdma/uverbs_std_types.h>
#include <linux/mlx5/driver.h>
#include <linux/mlx5/fs.h>
#include "mlx5_ib.h"

#define UVERBS_MODULE_NAME mlx5_ib
#include <rdma/uverbs_named_ioctl.h>

enum devx_obj_flags {
	DEVX_OBJ_FLAGS_INDIRECT_MKEY = 1 << 0,
	DEVX_OBJ_FLAGS_DCT = 1 << 1,
};

struct devx_async_data {
	struct mlx5_ib_dev *mdev;
	struct list_head list;
	struct ib_uobject *fd_uobj;
	struct mlx5_async_work cb_work;
	u16 cmd_out_len;
	/* must be last field in this structure */
	struct mlx5_ib_uapi_devx_async_cmd_hdr hdr;
};

#define MLX5_MAX_DESTROY_INBOX_SIZE_DW MLX5_ST_SZ_DW(delete_fte_in)
struct devx_obj {
	struct mlx5_core_dev *mdev;
	u64 obj_id;
	u32 dinlen; /* destroy inbox length */
	u32 dinbox[MLX5_MAX_DESTROY_INBOX_SIZE_DW];
	u32 flags;
	union {
		struct mlx5_ib_devx_mr devx_mr;
		struct mlx5_core_dct core_dct;
	};
};

struct devx_umem {
	struct mlx5_core_dev *mdev;
	struct ib_umem *umem;
	u32 page_offset;
	int page_shift;
	int ncont;
	u32 dinlen;
	u32 dinbox[MLX5_ST_SZ_DW(general_obj_in_cmd_hdr)];
};

struct devx_umem_reg_cmd {
	void *in;
	u32 inlen;
	u32 out[MLX5_ST_SZ_DW(general_obj_out_cmd_hdr)];
};

static struct mlx5_ib_ucontext *
devx_ufile2uctx(const struct uverbs_attr_bundle *attrs)
{
	return to_mucontext(ib_uverbs_get_ucontext(attrs));
}

int mlx5_ib_devx_create(struct mlx5_ib_dev *dev, bool is_user)
{
	u32 in[MLX5_ST_SZ_DW(create_uctx_in)] = {0};
	u32 out[MLX5_ST_SZ_DW(general_obj_out_cmd_hdr)] = {0};
	void *uctx;
	int err;
	u16 uid;
	u32 cap = 0;

	/* 0 means not supported */
	if (!MLX5_CAP_GEN(dev->mdev, log_max_uctx))
		return -EINVAL;

	uctx = MLX5_ADDR_OF(create_uctx_in, in, uctx);
	if (is_user && capable(CAP_NET_RAW) &&
	    (MLX5_CAP_GEN(dev->mdev, uctx_cap) & MLX5_UCTX_CAP_RAW_TX))
		cap |= MLX5_UCTX_CAP_RAW_TX;

	MLX5_SET(create_uctx_in, in, opcode, MLX5_CMD_OP_CREATE_UCTX);
	MLX5_SET(uctx, uctx, cap, cap);

	err = mlx5_cmd_exec(dev->mdev, in, sizeof(in), out, sizeof(out));
	if (err)
		return err;

	uid = MLX5_GET(general_obj_out_cmd_hdr, out, obj_id);
	return uid;
}

void mlx5_ib_devx_destroy(struct mlx5_ib_dev *dev, u16 uid)
{
	u32 in[MLX5_ST_SZ_DW(destroy_uctx_in)] = {0};
	u32 out[MLX5_ST_SZ_DW(general_obj_out_cmd_hdr)] = {0};

	MLX5_SET(destroy_uctx_in, in, opcode, MLX5_CMD_OP_DESTROY_UCTX);
	MLX5_SET(destroy_uctx_in, in, uid, uid);

	mlx5_cmd_exec(dev->mdev, in, sizeof(in), out, sizeof(out));
}

bool mlx5_ib_devx_is_flow_dest(void *obj, int *dest_id, int *dest_type)
{
	struct devx_obj *devx_obj = obj;
	u16 opcode = MLX5_GET(general_obj_in_cmd_hdr, devx_obj->dinbox, opcode);

	switch (opcode) {
	case MLX5_CMD_OP_DESTROY_TIR:
		*dest_type = MLX5_FLOW_DESTINATION_TYPE_TIR;
		*dest_id = MLX5_GET(general_obj_in_cmd_hdr, devx_obj->dinbox,
				    obj_id);
		return true;

	case MLX5_CMD_OP_DESTROY_FLOW_TABLE:
		*dest_type = MLX5_FLOW_DESTINATION_TYPE_FLOW_TABLE;
		*dest_id = MLX5_GET(destroy_flow_table_in, devx_obj->dinbox,
				    table_id);
		return true;
	default:
		return false;
	}
}

bool mlx5_ib_devx_is_flow_counter(void *obj, u32 *counter_id)
{
	struct devx_obj *devx_obj = obj;
	u16 opcode = MLX5_GET(general_obj_in_cmd_hdr, devx_obj->dinbox, opcode);

	if (opcode == MLX5_CMD_OP_DEALLOC_FLOW_COUNTER) {
		*counter_id = MLX5_GET(dealloc_flow_counter_in,
				       devx_obj->dinbox,
				       flow_counter_id);
		return true;
	}

	return false;
}

/*
 * As the obj_id in the firmware is not globally unique, the object type
 * must also be considered when checking for a valid object id.
 * For that, the opcode of the creator command is encoded as part of the obj_id.
 */
static u64 get_enc_obj_id(u16 opcode, u32 obj_id)
{
	return ((u64)opcode << 32) | obj_id;
}

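/*
 * Derive the encoded object id that a modify/query/arm style command refers
 * to, keyed by the opcode of the command that would have created that object.
 * Returns 0 when the opcode is not recognized.
 */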
static u64 devx_get_obj_id(const void *in)
{
	u16 opcode = MLX5_GET(general_obj_in_cmd_hdr, in, opcode);
	u64 obj_id;

	switch (opcode) {
	case MLX5_CMD_OP_MODIFY_GENERAL_OBJECT:
	case MLX5_CMD_OP_QUERY_GENERAL_OBJECT:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_GENERAL_OBJECT,
					MLX5_GET(general_obj_in_cmd_hdr, in,
						 obj_id));
		break;
	case MLX5_CMD_OP_QUERY_MKEY:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_MKEY,
					MLX5_GET(query_mkey_in, in,
						 mkey_index));
		break;
	case MLX5_CMD_OP_QUERY_CQ:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_CQ,
					MLX5_GET(query_cq_in, in, cqn));
		break;
	case MLX5_CMD_OP_MODIFY_CQ:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_CQ,
					MLX5_GET(modify_cq_in, in, cqn));
		break;
	case MLX5_CMD_OP_QUERY_SQ:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_SQ,
					MLX5_GET(query_sq_in, in, sqn));
		break;
	case MLX5_CMD_OP_MODIFY_SQ:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_SQ,
					MLX5_GET(modify_sq_in, in, sqn));
		break;
	case MLX5_CMD_OP_QUERY_RQ:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_RQ,
					MLX5_GET(query_rq_in, in, rqn));
		break;
	case MLX5_CMD_OP_MODIFY_RQ:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_RQ,
					MLX5_GET(modify_rq_in, in, rqn));
		break;
	case MLX5_CMD_OP_QUERY_RMP:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_RMP,
					MLX5_GET(query_rmp_in, in, rmpn));
		break;
	case MLX5_CMD_OP_MODIFY_RMP:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_RMP,
					MLX5_GET(modify_rmp_in, in, rmpn));
		break;
	case MLX5_CMD_OP_QUERY_RQT:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_RQT,
					MLX5_GET(query_rqt_in, in, rqtn));
		break;
	case MLX5_CMD_OP_MODIFY_RQT:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_RQT,
					MLX5_GET(modify_rqt_in, in, rqtn));
		break;
	case MLX5_CMD_OP_QUERY_TIR:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_TIR,
					MLX5_GET(query_tir_in, in, tirn));
		break;
	case MLX5_CMD_OP_MODIFY_TIR:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_TIR,
					MLX5_GET(modify_tir_in, in, tirn));
		break;
	case MLX5_CMD_OP_QUERY_TIS:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_TIS,
					MLX5_GET(query_tis_in, in, tisn));
		break;
	case MLX5_CMD_OP_MODIFY_TIS:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_TIS,
					MLX5_GET(modify_tis_in, in, tisn));
		break;
	case MLX5_CMD_OP_QUERY_FLOW_TABLE:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_FLOW_TABLE,
					MLX5_GET(query_flow_table_in, in,
						 table_id));
		break;
	case MLX5_CMD_OP_MODIFY_FLOW_TABLE:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_FLOW_TABLE,
					MLX5_GET(modify_flow_table_in, in,
						 table_id));
		break;
	case MLX5_CMD_OP_QUERY_FLOW_GROUP:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_FLOW_GROUP,
					MLX5_GET(query_flow_group_in, in,
						 group_id));
		break;
	case MLX5_CMD_OP_QUERY_FLOW_TABLE_ENTRY:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_SET_FLOW_TABLE_ENTRY,
					MLX5_GET(query_fte_in, in,
						 flow_index));
		break;
	case MLX5_CMD_OP_SET_FLOW_TABLE_ENTRY:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_SET_FLOW_TABLE_ENTRY,
					MLX5_GET(set_fte_in, in, flow_index));
		break;
	case MLX5_CMD_OP_QUERY_Q_COUNTER:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_ALLOC_Q_COUNTER,
					MLX5_GET(query_q_counter_in, in,
						 counter_set_id));
		break;
	case MLX5_CMD_OP_QUERY_FLOW_COUNTER:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_ALLOC_FLOW_COUNTER,
					MLX5_GET(query_flow_counter_in, in,
						 flow_counter_id));
		break;
	case MLX5_CMD_OP_QUERY_MODIFY_HEADER_CONTEXT:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_ALLOC_MODIFY_HEADER_CONTEXT,
					MLX5_GET(general_obj_in_cmd_hdr, in,
						 obj_id));
		break;
	case MLX5_CMD_OP_QUERY_SCHEDULING_ELEMENT:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_SCHEDULING_ELEMENT,
					MLX5_GET(query_scheduling_element_in,
						 in, scheduling_element_id));
		break;
	case MLX5_CMD_OP_MODIFY_SCHEDULING_ELEMENT:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_SCHEDULING_ELEMENT,
					MLX5_GET(modify_scheduling_element_in,
						 in, scheduling_element_id));
		break;
	case MLX5_CMD_OP_ADD_VXLAN_UDP_DPORT:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_ADD_VXLAN_UDP_DPORT,
					MLX5_GET(add_vxlan_udp_dport_in, in,
						 vxlan_udp_port));
		break;
	case MLX5_CMD_OP_QUERY_L2_TABLE_ENTRY:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_SET_L2_TABLE_ENTRY,
					MLX5_GET(query_l2_table_entry_in, in,
						 table_index));
		break;
	case MLX5_CMD_OP_SET_L2_TABLE_ENTRY:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_SET_L2_TABLE_ENTRY,
					MLX5_GET(set_l2_table_entry_in, in,
						 table_index));
		break;
	case MLX5_CMD_OP_QUERY_QP:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_QP,
					MLX5_GET(query_qp_in, in, qpn));
		break;
	case MLX5_CMD_OP_RST2INIT_QP:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_QP,
					MLX5_GET(rst2init_qp_in, in, qpn));
		break;
	case MLX5_CMD_OP_INIT2RTR_QP:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_QP,
					MLX5_GET(init2rtr_qp_in, in, qpn));
		break;
	case MLX5_CMD_OP_RTR2RTS_QP:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_QP,
					MLX5_GET(rtr2rts_qp_in, in, qpn));
		break;
	case MLX5_CMD_OP_RTS2RTS_QP:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_QP,
					MLX5_GET(rts2rts_qp_in, in, qpn));
		break;
	case MLX5_CMD_OP_SQERR2RTS_QP:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_QP,
					MLX5_GET(sqerr2rts_qp_in, in, qpn));
		break;
	case MLX5_CMD_OP_2ERR_QP:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_QP,
					MLX5_GET(qp_2err_in, in, qpn));
		break;
	case MLX5_CMD_OP_2RST_QP:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_QP,
					MLX5_GET(qp_2rst_in, in, qpn));
		break;
	case MLX5_CMD_OP_QUERY_DCT:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_DCT,
					MLX5_GET(query_dct_in, in, dctn));
		break;
	case MLX5_CMD_OP_QUERY_XRQ:
	case MLX5_CMD_OP_QUERY_XRQ_DC_PARAMS_ENTRY:
	case MLX5_CMD_OP_QUERY_XRQ_ERROR_PARAMS:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_XRQ,
					MLX5_GET(query_xrq_in, in, xrqn));
		break;
	case MLX5_CMD_OP_QUERY_XRC_SRQ:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_XRC_SRQ,
					MLX5_GET(query_xrc_srq_in, in,
						 xrc_srqn));
		break;
	case MLX5_CMD_OP_ARM_XRC_SRQ:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_XRC_SRQ,
					MLX5_GET(arm_xrc_srq_in, in, xrc_srqn));
		break;
	case MLX5_CMD_OP_QUERY_SRQ:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_SRQ,
					MLX5_GET(query_srq_in, in, srqn));
		break;
	case MLX5_CMD_OP_ARM_RQ:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_RQ,
					MLX5_GET(arm_rq_in, in, srq_number));
		break;
	case MLX5_CMD_OP_ARM_DCT_FOR_KEY_VIOLATION:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_DCT,
					MLX5_GET(drain_dct_in, in, dctn));
		break;
	case MLX5_CMD_OP_ARM_XRQ:
	case MLX5_CMD_OP_SET_XRQ_DC_PARAMS_ENTRY:
		obj_id = get_enc_obj_id(MLX5_CMD_OP_CREATE_XRQ,
					MLX5_GET(arm_xrq_in, in, xrqn));
		break;
	case MLX5_CMD_OP_QUERY_PACKET_REFORMAT_CONTEXT:
		obj_id = get_enc_obj_id
			(MLX5_CMD_OP_ALLOC_PACKET_REFORMAT_CONTEXT,
			 MLX5_GET(query_packet_reformat_context_in,
				  in, packet_reformat_id));
		break;
	default:
		obj_id = 0;
	}

	return obj_id;
}

static bool devx_is_valid_obj_id(struct ib_uobject *uobj, const void *in)
{
	u64 obj_id = devx_get_obj_id(in);

	if (!obj_id)
		return false;

	switch (uobj_get_object_id(uobj)) {
	case UVERBS_OBJECT_CQ:
		return get_enc_obj_id(MLX5_CMD_OP_CREATE_CQ,
				      to_mcq(uobj->object)->mcq.cqn) ==
				      obj_id;

	case UVERBS_OBJECT_SRQ:
	{
		struct mlx5_core_srq *srq = &(to_msrq(uobj->object)->msrq);
		struct mlx5_ib_dev *dev = to_mdev(uobj->context->device);
		u16 opcode;

		switch (srq->common.res) {
		case MLX5_RES_XSRQ:
			opcode = MLX5_CMD_OP_CREATE_XRC_SRQ;
			break;
		case MLX5_RES_XRQ:
			opcode = MLX5_CMD_OP_CREATE_XRQ;
			break;
		default:
			if (!dev->mdev->issi)
				opcode = MLX5_CMD_OP_CREATE_SRQ;
			else
				opcode = MLX5_CMD_OP_CREATE_RMP;
		}

		return get_enc_obj_id(opcode,
				      to_msrq(uobj->object)->msrq.srqn) ==
				      obj_id;
	}

	case UVERBS_OBJECT_QP:
	{
		struct mlx5_ib_qp *qp = to_mqp(uobj->object);
		enum ib_qp_type qp_type = qp->ibqp.qp_type;

		if (qp_type == IB_QPT_RAW_PACKET ||
		    (qp->flags & MLX5_IB_QP_UNDERLAY)) {
			struct mlx5_ib_raw_packet_qp *raw_packet_qp =
							 &qp->raw_packet_qp;
			struct mlx5_ib_rq *rq = &raw_packet_qp->rq;
			struct mlx5_ib_sq *sq = &raw_packet_qp->sq;

			return (get_enc_obj_id(MLX5_CMD_OP_CREATE_RQ,
					       rq->base.mqp.qpn) == obj_id ||
				get_enc_obj_id(MLX5_CMD_OP_CREATE_SQ,
					       sq->base.mqp.qpn) == obj_id ||
				get_enc_obj_id(MLX5_CMD_OP_CREATE_TIR,
					       rq->tirn) == obj_id ||
				get_enc_obj_id(MLX5_CMD_OP_CREATE_TIS,
					       sq->tisn) == obj_id);
		}

		if (qp_type == MLX5_IB_QPT_DCT)
			return get_enc_obj_id(MLX5_CMD_OP_CREATE_DCT,
					      qp->dct.mdct.mqp.qpn) == obj_id;

		return get_enc_obj_id(MLX5_CMD_OP_CREATE_QP,
				      qp->ibqp.qp_num) == obj_id;
	}

	case UVERBS_OBJECT_WQ:
		return get_enc_obj_id(MLX5_CMD_OP_CREATE_RQ,
				      to_mrwq(uobj->object)->core_qp.qpn) ==
				      obj_id;

	case UVERBS_OBJECT_RWQ_IND_TBL:
		return get_enc_obj_id(MLX5_CMD_OP_CREATE_RQT,
				      to_mrwq_ind_table(uobj->object)->rqtn) ==
				      obj_id;

	case MLX5_IB_OBJECT_DEVX_OBJ:
		return ((struct devx_obj *)uobj->object)->obj_id == obj_id;

	default:
		return false;
	}
}

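/*
 * Set the *_umem_valid bits for create/modify commands whose queue buffers
 * and doorbell records can be supplied as DEVX umem objects, so firmware
 * validates them accordingly.
 */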
static void devx_set_umem_valid(const void *in)
{
	u16 opcode = MLX5_GET(general_obj_in_cmd_hdr, in, opcode);

	switch (opcode) {
	case MLX5_CMD_OP_CREATE_MKEY:
		MLX5_SET(create_mkey_in, in, mkey_umem_valid, 1);
		break;
	case MLX5_CMD_OP_CREATE_CQ:
	{
		void *cqc;

		MLX5_SET(create_cq_in, in, cq_umem_valid, 1);
		cqc = MLX5_ADDR_OF(create_cq_in, in, cq_context);
		MLX5_SET(cqc, cqc, dbr_umem_valid, 1);
		break;
	}
	case MLX5_CMD_OP_CREATE_QP:
	{
		void *qpc;

		qpc = MLX5_ADDR_OF(create_qp_in, in, qpc);
		MLX5_SET(qpc, qpc, dbr_umem_valid, 1);
		MLX5_SET(create_qp_in, in, wq_umem_valid, 1);
		break;
	}

	case MLX5_CMD_OP_CREATE_RQ:
	{
		void *rqc, *wq;

		rqc = MLX5_ADDR_OF(create_rq_in, in, ctx);
		wq = MLX5_ADDR_OF(rqc, rqc, wq);
		MLX5_SET(wq, wq, dbr_umem_valid, 1);
		MLX5_SET(wq, wq, wq_umem_valid, 1);
		break;
	}

	case MLX5_CMD_OP_CREATE_SQ:
	{
		void *sqc, *wq;

		sqc = MLX5_ADDR_OF(create_sq_in, in, ctx);
		wq = MLX5_ADDR_OF(sqc, sqc, wq);
		MLX5_SET(wq, wq, dbr_umem_valid, 1);
		MLX5_SET(wq, wq, wq_umem_valid, 1);
		break;
	}

	case MLX5_CMD_OP_MODIFY_CQ:
		MLX5_SET(modify_cq_in, in, cq_umem_valid, 1);
		break;

	case MLX5_CMD_OP_CREATE_RMP:
	{
		void *rmpc, *wq;

		rmpc = MLX5_ADDR_OF(create_rmp_in, in, ctx);
		wq = MLX5_ADDR_OF(rmpc, rmpc, wq);
		MLX5_SET(wq, wq, dbr_umem_valid, 1);
		MLX5_SET(wq, wq, wq_umem_valid, 1);
		break;
	}

	case MLX5_CMD_OP_CREATE_XRQ:
	{
		void *xrqc, *wq;

		xrqc = MLX5_ADDR_OF(create_xrq_in, in, xrq_context);
		wq = MLX5_ADDR_OF(xrqc, xrqc, wq);
		MLX5_SET(wq, wq, dbr_umem_valid, 1);
		MLX5_SET(wq, wq, wq_umem_valid, 1);
		break;
	}

	case MLX5_CMD_OP_CREATE_XRC_SRQ:
	{
		void *xrc_srqc;

		MLX5_SET(create_xrc_srq_in, in, xrc_srq_umem_valid, 1);
		xrc_srqc = MLX5_ADDR_OF(create_xrc_srq_in, in,
					xrc_srq_context_entry);
		MLX5_SET(xrc_srqc, xrc_srqc, dbr_umem_valid, 1);
		break;
	}

	default:
		return;
	}
}

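/*
 * Return true if the command mailbox creates an object, and report its
 * opcode back to the caller so the matching destroy command can be built
 * for later cleanup.
 */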
static bool devx_is_obj_create_cmd(const void *in, u16 *opcode)
{
	*opcode = MLX5_GET(general_obj_in_cmd_hdr, in, opcode);

	switch (*opcode) {
	case MLX5_CMD_OP_CREATE_GENERAL_OBJECT:
	case MLX5_CMD_OP_CREATE_MKEY:
	case MLX5_CMD_OP_CREATE_CQ:
	case MLX5_CMD_OP_ALLOC_PD:
	case MLX5_CMD_OP_ALLOC_TRANSPORT_DOMAIN:
	case MLX5_CMD_OP_CREATE_RMP:
	case MLX5_CMD_OP_CREATE_SQ:
	case MLX5_CMD_OP_CREATE_RQ:
	case MLX5_CMD_OP_CREATE_RQT:
	case MLX5_CMD_OP_CREATE_TIR:
	case MLX5_CMD_OP_CREATE_TIS:
	case MLX5_CMD_OP_ALLOC_Q_COUNTER:
	case MLX5_CMD_OP_CREATE_FLOW_TABLE:
	case MLX5_CMD_OP_CREATE_FLOW_GROUP:
	case MLX5_CMD_OP_ALLOC_FLOW_COUNTER:
	case MLX5_CMD_OP_ALLOC_PACKET_REFORMAT_CONTEXT:
	case MLX5_CMD_OP_ALLOC_MODIFY_HEADER_CONTEXT:
	case MLX5_CMD_OP_CREATE_SCHEDULING_ELEMENT:
	case MLX5_CMD_OP_ADD_VXLAN_UDP_DPORT:
	case MLX5_CMD_OP_SET_L2_TABLE_ENTRY:
	case MLX5_CMD_OP_CREATE_QP:
	case MLX5_CMD_OP_CREATE_SRQ:
	case MLX5_CMD_OP_CREATE_XRC_SRQ:
	case MLX5_CMD_OP_CREATE_DCT:
	case MLX5_CMD_OP_CREATE_XRQ:
	case MLX5_CMD_OP_ATTACH_TO_MCG:
	case MLX5_CMD_OP_ALLOC_XRCD:
		return true;
	case MLX5_CMD_OP_SET_FLOW_TABLE_ENTRY:
	{
		u16 op_mod = MLX5_GET(set_fte_in, in, op_mod);

		if (op_mod == 0)
			return true;
		return false;
	}
	default:
		return false;
	}
}

static bool devx_is_obj_modify_cmd(const void *in)
{
	u16 opcode = MLX5_GET(general_obj_in_cmd_hdr, in, opcode);

	switch (opcode) {
	case MLX5_CMD_OP_MODIFY_GENERAL_OBJECT:
	case MLX5_CMD_OP_MODIFY_CQ:
	case MLX5_CMD_OP_MODIFY_RMP:
	case MLX5_CMD_OP_MODIFY_SQ:
	case MLX5_CMD_OP_MODIFY_RQ:
	case MLX5_CMD_OP_MODIFY_RQT:
	case MLX5_CMD_OP_MODIFY_TIR:
	case MLX5_CMD_OP_MODIFY_TIS:
	case MLX5_CMD_OP_MODIFY_FLOW_TABLE:
	case MLX5_CMD_OP_MODIFY_SCHEDULING_ELEMENT:
	case MLX5_CMD_OP_ADD_VXLAN_UDP_DPORT:
	case MLX5_CMD_OP_SET_L2_TABLE_ENTRY:
	case MLX5_CMD_OP_RST2INIT_QP:
	case MLX5_CMD_OP_INIT2RTR_QP:
	case MLX5_CMD_OP_RTR2RTS_QP:
	case MLX5_CMD_OP_RTS2RTS_QP:
	case MLX5_CMD_OP_SQERR2RTS_QP:
	case MLX5_CMD_OP_2ERR_QP:
	case MLX5_CMD_OP_2RST_QP:
	case MLX5_CMD_OP_ARM_XRC_SRQ:
	case MLX5_CMD_OP_ARM_RQ:
	case MLX5_CMD_OP_ARM_DCT_FOR_KEY_VIOLATION:
	case MLX5_CMD_OP_ARM_XRQ:
	case MLX5_CMD_OP_SET_XRQ_DC_PARAMS_ENTRY:
		return true;
	case MLX5_CMD_OP_SET_FLOW_TABLE_ENTRY:
	{
		u16 op_mod = MLX5_GET(set_fte_in, in, op_mod);

		if (op_mod == 1)
			return true;
		return false;
	}
	default:
		return false;
	}
}

static bool devx_is_obj_query_cmd(const void *in)
{
	u16 opcode = MLX5_GET(general_obj_in_cmd_hdr, in, opcode);

	switch (opcode) {
	case MLX5_CMD_OP_QUERY_GENERAL_OBJECT:
	case MLX5_CMD_OP_QUERY_MKEY:
	case MLX5_CMD_OP_QUERY_CQ:
	case MLX5_CMD_OP_QUERY_RMP:
	case MLX5_CMD_OP_QUERY_SQ:
	case MLX5_CMD_OP_QUERY_RQ:
	case MLX5_CMD_OP_QUERY_RQT:
	case MLX5_CMD_OP_QUERY_TIR:
	case MLX5_CMD_OP_QUERY_TIS:
	case MLX5_CMD_OP_QUERY_Q_COUNTER:
	case MLX5_CMD_OP_QUERY_FLOW_TABLE:
	case MLX5_CMD_OP_QUERY_FLOW_GROUP:
	case MLX5_CMD_OP_QUERY_FLOW_TABLE_ENTRY:
	case MLX5_CMD_OP_QUERY_FLOW_COUNTER:
	case MLX5_CMD_OP_QUERY_MODIFY_HEADER_CONTEXT:
	case MLX5_CMD_OP_QUERY_SCHEDULING_ELEMENT:
	case MLX5_CMD_OP_QUERY_L2_TABLE_ENTRY:
	case MLX5_CMD_OP_QUERY_QP:
	case MLX5_CMD_OP_QUERY_SRQ:
	case MLX5_CMD_OP_QUERY_XRC_SRQ:
	case MLX5_CMD_OP_QUERY_DCT:
	case MLX5_CMD_OP_QUERY_XRQ:
	case MLX5_CMD_OP_QUERY_XRQ_DC_PARAMS_ENTRY:
	case MLX5_CMD_OP_QUERY_XRQ_ERROR_PARAMS:
	case MLX5_CMD_OP_QUERY_PACKET_REFORMAT_CONTEXT:
		return true;
	default:
		return false;
	}
}

static bool devx_is_whitelist_cmd(void *in)
{
	u16 opcode = MLX5_GET(general_obj_in_cmd_hdr, in, opcode);

	switch (opcode) {
	case MLX5_CMD_OP_QUERY_HCA_CAP:
	case MLX5_CMD_OP_QUERY_HCA_VPORT_CONTEXT:
		return true;
	default:
		return false;
	}
}

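/*
 * Resolve the uid to stamp into the command header: whitelisted commands may
 * fall back to the device-wide whitelist uid, while all other commands
 * require the context to own a devx uid.
 */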
static int devx_get_uid(struct mlx5_ib_ucontext *c, void *cmd_in)
{
	if (devx_is_whitelist_cmd(cmd_in)) {
		struct mlx5_ib_dev *dev;

		if (c->devx_uid)
			return c->devx_uid;

		dev = to_mdev(c->ibucontext.device);
		if (dev->devx_whitelist_uid)
			return dev->devx_whitelist_uid;

		return -EOPNOTSUPP;
	}

	if (!c->devx_uid)
		return -EINVAL;

	return c->devx_uid;
}
static bool devx_is_general_cmd(void *in)
{
	u16 opcode = MLX5_GET(general_obj_in_cmd_hdr, in, opcode);

	if (opcode >= MLX5_CMD_OP_GENERAL_START &&
	    opcode < MLX5_CMD_OP_GENERAL_END)
		return true;

	switch (opcode) {
	case MLX5_CMD_OP_QUERY_HCA_CAP:
	case MLX5_CMD_OP_QUERY_HCA_VPORT_CONTEXT:
	case MLX5_CMD_OP_QUERY_VPORT_STATE:
	case MLX5_CMD_OP_QUERY_ADAPTER:
	case MLX5_CMD_OP_QUERY_ISSI:
	case MLX5_CMD_OP_QUERY_NIC_VPORT_CONTEXT:
	case MLX5_CMD_OP_QUERY_ROCE_ADDRESS:
	case MLX5_CMD_OP_QUERY_VNIC_ENV:
	case MLX5_CMD_OP_QUERY_VPORT_COUNTER:
	case MLX5_CMD_OP_GET_DROPPED_PACKET_LOG:
	case MLX5_CMD_OP_NOP:
	case MLX5_CMD_OP_QUERY_CONG_STATUS:
	case MLX5_CMD_OP_QUERY_CONG_PARAMS:
	case MLX5_CMD_OP_QUERY_CONG_STATISTICS:
		return true;
	default:
		return false;
	}
}

static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_QUERY_EQN)(
	struct uverbs_attr_bundle *attrs)
{
	struct mlx5_ib_ucontext *c;
	struct mlx5_ib_dev *dev;
	int user_vector;
	int dev_eqn;
	unsigned int irqn;
	int err;

	if (uverbs_copy_from(&user_vector, attrs,
			     MLX5_IB_ATTR_DEVX_QUERY_EQN_USER_VEC))
		return -EFAULT;

	c = devx_ufile2uctx(attrs);
	if (IS_ERR(c))
		return PTR_ERR(c);
	dev = to_mdev(c->ibucontext.device);

	err = mlx5_vector2eqn(dev->mdev, user_vector, &dev_eqn, &irqn);
	if (err < 0)
		return err;

	if (uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_QUERY_EQN_DEV_EQN,
			   &dev_eqn, sizeof(dev_eqn)))
		return -EFAULT;

	return 0;
}

/*
 * Security note:
 * The hardware protection mechanism works like this: Each device object that
 * is subject to UAR doorbells (QP/SQ/CQ) gets a UAR ID (called uar_page in
 * the device specification manual) upon its creation. Then upon doorbell,
 * hardware fetches the object context for which the doorbell was rung, and
 * validates that the UAR through which the DB was rung matches the UAR ID
 * of the object.
 * If there is no match, the doorbell is silently ignored by the hardware. Of
 * course, the user cannot ring a doorbell on a UAR that was not mapped to it.
 * Now in devx, as the devx kernel does not manipulate the QP/SQ/CQ command
 * mailboxes (except tagging them with UID), we expose to the user its UAR
 * ID, so it can embed it in these objects in the expected specification
 * format. So the only thing the user can do is hurt itself by creating a
 * QP/SQ/CQ with a UAR ID other than his, and then in this case other users
 * may ring a doorbell on its objects.
 * The consequence of that will be that another user can schedule a QP/SQ
 * of the buggy user for execution (just insert it to the hardware schedule
 * queue or arm its CQ for event generation); no further harm is expected.
 */
static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_QUERY_UAR)(
	struct uverbs_attr_bundle *attrs)
{
	struct mlx5_ib_ucontext *c;
	struct mlx5_ib_dev *dev;
	u32 user_idx;
	s32 dev_idx;

	c = devx_ufile2uctx(attrs);
	if (IS_ERR(c))
		return PTR_ERR(c);
	dev = to_mdev(c->ibucontext.device);

	if (uverbs_copy_from(&user_idx, attrs,
			     MLX5_IB_ATTR_DEVX_QUERY_UAR_USER_IDX))
		return -EFAULT;

	dev_idx = bfregn_to_uar_index(dev, &c->bfregi, user_idx, true);
	if (dev_idx < 0)
		return dev_idx;

	if (uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_QUERY_UAR_DEV_IDX,
			   &dev_idx, sizeof(dev_idx)))
		return -EFAULT;

	return 0;
}

static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_OTHER)(
	struct uverbs_attr_bundle *attrs)
{
	struct mlx5_ib_ucontext *c;
	struct mlx5_ib_dev *dev;
	void *cmd_in = uverbs_attr_get_alloced_ptr(
		attrs, MLX5_IB_ATTR_DEVX_OTHER_CMD_IN);
	int cmd_out_len = uverbs_attr_get_len(attrs,
					      MLX5_IB_ATTR_DEVX_OTHER_CMD_OUT);
	void *cmd_out;
	int err;
	int uid;

	c = devx_ufile2uctx(attrs);
	if (IS_ERR(c))
		return PTR_ERR(c);
	dev = to_mdev(c->ibucontext.device);

	uid = devx_get_uid(c, cmd_in);
	if (uid < 0)
		return uid;

	/* Only a whitelist of general HCA commands is allowed for this method. */
	if (!devx_is_general_cmd(cmd_in))
		return -EINVAL;

	cmd_out = uverbs_zalloc(attrs, cmd_out_len);
	if (IS_ERR(cmd_out))
		return PTR_ERR(cmd_out);

	MLX5_SET(general_obj_in_cmd_hdr, cmd_in, uid, uid);
	err = mlx5_cmd_exec(dev->mdev, cmd_in,
			    uverbs_attr_get_len(attrs, MLX5_IB_ATTR_DEVX_OTHER_CMD_IN),
			    cmd_out, cmd_out_len);
	if (err)
		return err;

	return uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_OTHER_CMD_OUT, cmd_out,
			      cmd_out_len);
}

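/*
 * Build the destroy/dealloc command that matches a successful create and
 * store it in the object's destroy inbox (din), so cleanup can later be
 * issued without the original create mailbox.
 */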
static void devx_obj_build_destroy_cmd(void *in, void *out, void *din,
				       u32 *dinlen,
				       u32 *obj_id)
{
	u16 obj_type = MLX5_GET(general_obj_in_cmd_hdr, in, obj_type);
	u16 uid = MLX5_GET(general_obj_in_cmd_hdr, in, uid);

	*obj_id = MLX5_GET(general_obj_out_cmd_hdr, out, obj_id);
	*dinlen = MLX5_ST_SZ_BYTES(general_obj_in_cmd_hdr);

	MLX5_SET(general_obj_in_cmd_hdr, din, obj_id, *obj_id);
	MLX5_SET(general_obj_in_cmd_hdr, din, uid, uid);

	switch (MLX5_GET(general_obj_in_cmd_hdr, in, opcode)) {
	case MLX5_CMD_OP_CREATE_GENERAL_OBJECT:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_GENERAL_OBJECT);
		MLX5_SET(general_obj_in_cmd_hdr, din, obj_type, obj_type);
		break;

	case MLX5_CMD_OP_CREATE_UMEM:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DESTROY_UMEM);
		break;
	case MLX5_CMD_OP_CREATE_MKEY:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_MKEY);
		break;
	case MLX5_CMD_OP_CREATE_CQ:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_CQ);
		break;
	case MLX5_CMD_OP_ALLOC_PD:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DEALLOC_PD);
		break;
	case MLX5_CMD_OP_ALLOC_TRANSPORT_DOMAIN:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DEALLOC_TRANSPORT_DOMAIN);
		break;
	case MLX5_CMD_OP_CREATE_RMP:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_RMP);
		break;
	case MLX5_CMD_OP_CREATE_SQ:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_SQ);
		break;
	case MLX5_CMD_OP_CREATE_RQ:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_RQ);
		break;
	case MLX5_CMD_OP_CREATE_RQT:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_RQT);
		break;
	case MLX5_CMD_OP_CREATE_TIR:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_TIR);
		break;
	case MLX5_CMD_OP_CREATE_TIS:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_TIS);
		break;
	case MLX5_CMD_OP_ALLOC_Q_COUNTER:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DEALLOC_Q_COUNTER);
		break;
	case MLX5_CMD_OP_CREATE_FLOW_TABLE:
		*dinlen = MLX5_ST_SZ_BYTES(destroy_flow_table_in);
		*obj_id = MLX5_GET(create_flow_table_out, out, table_id);
		MLX5_SET(destroy_flow_table_in, din, other_vport,
			 MLX5_GET(create_flow_table_in, in, other_vport));
		MLX5_SET(destroy_flow_table_in, din, vport_number,
			 MLX5_GET(create_flow_table_in, in, vport_number));
		MLX5_SET(destroy_flow_table_in, din, table_type,
			 MLX5_GET(create_flow_table_in, in, table_type));
		MLX5_SET(destroy_flow_table_in, din, table_id, *obj_id);
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DESTROY_FLOW_TABLE);
		break;
	case MLX5_CMD_OP_CREATE_FLOW_GROUP:
		*dinlen = MLX5_ST_SZ_BYTES(destroy_flow_group_in);
		*obj_id = MLX5_GET(create_flow_group_out, out, group_id);
		MLX5_SET(destroy_flow_group_in, din, other_vport,
			 MLX5_GET(create_flow_group_in, in, other_vport));
		MLX5_SET(destroy_flow_group_in, din, vport_number,
			 MLX5_GET(create_flow_group_in, in, vport_number));
		MLX5_SET(destroy_flow_group_in, din, table_type,
			 MLX5_GET(create_flow_group_in, in, table_type));
		MLX5_SET(destroy_flow_group_in, din, table_id,
			 MLX5_GET(create_flow_group_in, in, table_id));
		MLX5_SET(destroy_flow_group_in, din, group_id, *obj_id);
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DESTROY_FLOW_GROUP);
		break;
	case MLX5_CMD_OP_SET_FLOW_TABLE_ENTRY:
		*dinlen = MLX5_ST_SZ_BYTES(delete_fte_in);
		*obj_id = MLX5_GET(set_fte_in, in, flow_index);
		MLX5_SET(delete_fte_in, din, other_vport,
			 MLX5_GET(set_fte_in, in, other_vport));
		MLX5_SET(delete_fte_in, din, vport_number,
			 MLX5_GET(set_fte_in, in, vport_number));
		MLX5_SET(delete_fte_in, din, table_type,
			 MLX5_GET(set_fte_in, in, table_type));
		MLX5_SET(delete_fte_in, din, table_id,
			 MLX5_GET(set_fte_in, in, table_id));
		MLX5_SET(delete_fte_in, din, flow_index, *obj_id);
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DELETE_FLOW_TABLE_ENTRY);
		break;
	case MLX5_CMD_OP_ALLOC_FLOW_COUNTER:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DEALLOC_FLOW_COUNTER);
		break;
	case MLX5_CMD_OP_ALLOC_PACKET_REFORMAT_CONTEXT:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DEALLOC_PACKET_REFORMAT_CONTEXT);
		break;
	case MLX5_CMD_OP_ALLOC_MODIFY_HEADER_CONTEXT:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DEALLOC_MODIFY_HEADER_CONTEXT);
		break;
	case MLX5_CMD_OP_CREATE_SCHEDULING_ELEMENT:
		*dinlen = MLX5_ST_SZ_BYTES(destroy_scheduling_element_in);
		*obj_id = MLX5_GET(create_scheduling_element_out, out,
				   scheduling_element_id);
		MLX5_SET(destroy_scheduling_element_in, din,
			 scheduling_hierarchy,
			 MLX5_GET(create_scheduling_element_in, in,
				  scheduling_hierarchy));
		MLX5_SET(destroy_scheduling_element_in, din,
			 scheduling_element_id, *obj_id);
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DESTROY_SCHEDULING_ELEMENT);
		break;
	case MLX5_CMD_OP_ADD_VXLAN_UDP_DPORT:
		*dinlen = MLX5_ST_SZ_BYTES(delete_vxlan_udp_dport_in);
		*obj_id = MLX5_GET(add_vxlan_udp_dport_in, in, vxlan_udp_port);
		MLX5_SET(delete_vxlan_udp_dport_in, din, vxlan_udp_port, *obj_id);
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DELETE_VXLAN_UDP_DPORT);
		break;
	case MLX5_CMD_OP_SET_L2_TABLE_ENTRY:
		*dinlen = MLX5_ST_SZ_BYTES(delete_l2_table_entry_in);
		*obj_id = MLX5_GET(set_l2_table_entry_in, in, table_index);
		MLX5_SET(delete_l2_table_entry_in, din, table_index, *obj_id);
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DELETE_L2_TABLE_ENTRY);
		break;
	case MLX5_CMD_OP_CREATE_QP:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_QP);
		break;
	case MLX5_CMD_OP_CREATE_SRQ:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_SRQ);
		break;
	case MLX5_CMD_OP_CREATE_XRC_SRQ:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode,
			 MLX5_CMD_OP_DESTROY_XRC_SRQ);
		break;
	case MLX5_CMD_OP_CREATE_DCT:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_DCT);
		break;
	case MLX5_CMD_OP_CREATE_XRQ:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DESTROY_XRQ);
		break;
	case MLX5_CMD_OP_ATTACH_TO_MCG:
		*dinlen = MLX5_ST_SZ_BYTES(detach_from_mcg_in);
		MLX5_SET(detach_from_mcg_in, din, qpn,
			 MLX5_GET(attach_to_mcg_in, in, qpn));
		memcpy(MLX5_ADDR_OF(detach_from_mcg_in, din, multicast_gid),
		       MLX5_ADDR_OF(attach_to_mcg_in, in, multicast_gid),
		       MLX5_FLD_SZ_BYTES(attach_to_mcg_in, multicast_gid));
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DETACH_FROM_MCG);
		break;
	case MLX5_CMD_OP_ALLOC_XRCD:
		MLX5_SET(general_obj_in_cmd_hdr, din, opcode, MLX5_CMD_OP_DEALLOC_XRCD);
		break;
	default:
		/* The entry must match to one of the devx_is_obj_create_cmd */
		WARN_ON(true);
		break;
	}
}

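/*
 * For an indirect (KLM/KSM) mkey created through DEVX, populate the core
 * mkey from the create mailboxes and insert it into the device's mkey radix
 * tree so other parts of the driver can look it up (the indirect flag is
 * only set when on-demand paging support is enabled).
 */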
static int devx_handle_mkey_indirect(struct devx_obj *obj,
				     struct mlx5_ib_dev *dev,
				     void *in, void *out)
{
	struct mlx5_mkey_table *table = &dev->mdev->priv.mkey_table;
	struct mlx5_ib_devx_mr *devx_mr = &obj->devx_mr;
	unsigned long flags;
	struct mlx5_core_mkey *mkey;
	void *mkc;
	u8 key;
	int err;

	mkey = &devx_mr->mmkey;
	mkc = MLX5_ADDR_OF(create_mkey_in, in, memory_key_mkey_entry);
	key = MLX5_GET(mkc, mkc, mkey_7_0);
	mkey->key = mlx5_idx_to_mkey(
			MLX5_GET(create_mkey_out, out, mkey_index)) | key;
	mkey->type = MLX5_MKEY_INDIRECT_DEVX;
	mkey->iova = MLX5_GET64(mkc, mkc, start_addr);
	mkey->size = MLX5_GET64(mkc, mkc, len);
	mkey->pd = MLX5_GET(mkc, mkc, pd);
	devx_mr->ndescs = MLX5_GET(mkc, mkc, translations_octword_size);

	write_lock_irqsave(&table->lock, flags);
	err = radix_tree_insert(&table->tree, mlx5_base_mkey(mkey->key),
				mkey);
	write_unlock_irqrestore(&table->lock, flags);
	return err;
}

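/*
 * Validate a DEVX CREATE_MKEY mailbox. Indirect access modes (KLMS/KSM) are
 * flagged for special handling at creation and cleanup time; direct mkeys
 * simply get mkey_umem_valid set.
 */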
static int devx_handle_mkey_create(struct mlx5_ib_dev *dev,
				   struct devx_obj *obj,
				   void *in, int in_len)
{
	int min_len = MLX5_BYTE_OFF(create_mkey_in, memory_key_mkey_entry) +
			MLX5_FLD_SZ_BYTES(create_mkey_in,
					  memory_key_mkey_entry);
	void *mkc;
	u8 access_mode;

	if (in_len < min_len)
		return -EINVAL;

	mkc = MLX5_ADDR_OF(create_mkey_in, in, memory_key_mkey_entry);

	access_mode = MLX5_GET(mkc, mkc, access_mode_1_0);
	access_mode |= MLX5_GET(mkc, mkc, access_mode_4_2) << 2;

	if (access_mode == MLX5_MKC_ACCESS_MODE_KLMS ||
	    access_mode == MLX5_MKC_ACCESS_MODE_KSM) {
		if (IS_ENABLED(CONFIG_INFINIBAND_ON_DEMAND_PAGING))
			obj->flags |= DEVX_OBJ_FLAGS_INDIRECT_MKEY;
		return 0;
	}

	MLX5_SET(create_mkey_in, in, mkey_umem_valid, 1);
	return 0;
}

static void devx_free_indirect_mkey(struct rcu_head *rcu)
{
	kfree(container_of(rcu, struct devx_obj, devx_mr.rcu));
}

/* Deleting the mkey from the radix tree must be done before destroying the
 * underlying mkey. Otherwise a race might occur if another thread obtains
 * the same mkey before this one is deleted; in that case it will fail when
 * inserting its own data into the tree.
 *
 * Note:
 * An error in the destroy is not expected unless some other indirect mkey
 * points to this one. In a kernel cleanup flow it will just be destroyed in
 * the iterative destruction call. In a user flow, if the application did not
 * close in the expected order, that is its own problem: the mkey won't be
 * part of the tree, and in both cases the kernel is safe.
 */
static void devx_cleanup_mkey(struct devx_obj *obj)
{
	struct mlx5_mkey_table *table = &obj->mdev->priv.mkey_table;
	unsigned long flags;

	write_lock_irqsave(&table->lock, flags);
	radix_tree_delete(&table->tree, mlx5_base_mkey(obj->devx_mr.mmkey.key));
	write_unlock_irqrestore(&table->lock, flags);
}

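/*
 * Destroy callback for a DEVX object: issue the pre-built destroy command
 * (or the DCT-specific teardown) and, for indirect mkeys, defer freeing the
 * object until an SRCU grace period has elapsed.
 */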
static int devx_obj_cleanup(struct ib_uobject *uobject,
			    enum rdma_remove_reason why)
{
	u32 out[MLX5_ST_SZ_DW(general_obj_out_cmd_hdr)];
	struct devx_obj *obj = uobject->object;
	int ret;

	if (obj->flags & DEVX_OBJ_FLAGS_INDIRECT_MKEY)
		devx_cleanup_mkey(obj);

	if (obj->flags & DEVX_OBJ_FLAGS_DCT)
		ret = mlx5_core_destroy_dct(obj->mdev, &obj->core_dct);
	else
		ret = mlx5_cmd_exec(obj->mdev, obj->dinbox, obj->dinlen, out,
				    sizeof(out));
	if (ib_is_destroy_retryable(ret, why, uobject))
		return ret;

	if (obj->flags & DEVX_OBJ_FLAGS_INDIRECT_MKEY) {
		struct mlx5_ib_dev *dev = to_mdev(uobject->context->device);

		call_srcu(&dev->mr_srcu, &obj->devx_mr.rcu,
			  devx_free_indirect_mkey);
		return ret;
	}

	kfree(obj);
	return ret;
}

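/*
 * MLX5_IB_METHOD_DEVX_OBJ_CREATE: execute a user-supplied create command on
 * behalf of the context's uid, record how to destroy the resulting object,
 * and return the firmware output to userspace.
 */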
static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_OBJ_CREATE)(
	struct uverbs_attr_bundle *attrs)
{
	void *cmd_in = uverbs_attr_get_alloced_ptr(attrs, MLX5_IB_ATTR_DEVX_OBJ_CREATE_CMD_IN);
	int cmd_out_len = uverbs_attr_get_len(attrs,
					      MLX5_IB_ATTR_DEVX_OBJ_CREATE_CMD_OUT);
	int cmd_in_len = uverbs_attr_get_len(attrs,
					     MLX5_IB_ATTR_DEVX_OBJ_CREATE_CMD_IN);
	void *cmd_out;
	struct ib_uobject *uobj = uverbs_attr_get_uobject(
		attrs, MLX5_IB_ATTR_DEVX_OBJ_CREATE_HANDLE);
	struct mlx5_ib_ucontext *c = rdma_udata_to_drv_context(
		&attrs->driver_udata, struct mlx5_ib_ucontext, ibucontext);
	struct mlx5_ib_dev *dev = to_mdev(c->ibucontext.device);
	u32 out[MLX5_ST_SZ_DW(general_obj_out_cmd_hdr)];
	struct devx_obj *obj;
	int err;
	int uid;
	u32 obj_id;
	u16 opcode;

	uid = devx_get_uid(c, cmd_in);
	if (uid < 0)
		return uid;

	if (!devx_is_obj_create_cmd(cmd_in, &opcode))
		return -EINVAL;

	cmd_out = uverbs_zalloc(attrs, cmd_out_len);
	if (IS_ERR(cmd_out))
		return PTR_ERR(cmd_out);

	obj = kzalloc(sizeof(struct devx_obj), GFP_KERNEL);
	if (!obj)
		return -ENOMEM;

	MLX5_SET(general_obj_in_cmd_hdr, cmd_in, uid, uid);
	if (opcode == MLX5_CMD_OP_CREATE_MKEY) {
		err = devx_handle_mkey_create(dev, obj, cmd_in, cmd_in_len);
		if (err)
			goto obj_free;
	} else {
		devx_set_umem_valid(cmd_in);
	}

	if (opcode == MLX5_CMD_OP_CREATE_DCT) {
		obj->flags |= DEVX_OBJ_FLAGS_DCT;
		err = mlx5_core_create_dct(dev->mdev, &obj->core_dct,
					   cmd_in, cmd_in_len,
					   cmd_out, cmd_out_len);
	} else {
		err = mlx5_cmd_exec(dev->mdev, cmd_in,
				    cmd_in_len,
				    cmd_out, cmd_out_len);
	}

	if (err)
		goto obj_free;

	uobj->object = obj;
	obj->mdev = dev->mdev;
	devx_obj_build_destroy_cmd(cmd_in, cmd_out, obj->dinbox, &obj->dinlen,
				   &obj_id);
	WARN_ON(obj->dinlen > MLX5_MAX_DESTROY_INBOX_SIZE_DW * sizeof(u32));

	if (obj->flags & DEVX_OBJ_FLAGS_INDIRECT_MKEY) {
		err = devx_handle_mkey_indirect(obj, dev, cmd_in, cmd_out);
		if (err)
			goto obj_destroy;
	}

	err = uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_OBJ_CREATE_CMD_OUT, cmd_out, cmd_out_len);
	if (err)
		goto err_copy;

	obj->obj_id = get_enc_obj_id(opcode, obj_id);
	return 0;

err_copy:
	if (obj->flags & DEVX_OBJ_FLAGS_INDIRECT_MKEY)
		devx_cleanup_mkey(obj);
obj_destroy:
	if (obj->flags & DEVX_OBJ_FLAGS_DCT)
		mlx5_core_destroy_dct(obj->mdev, &obj->core_dct);
	else
		mlx5_cmd_exec(obj->mdev, obj->dinbox, obj->dinlen, out,
			      sizeof(out));
obj_free:
	kfree(obj);
	return err;
}

static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_OBJ_MODIFY)(
	struct uverbs_attr_bundle *attrs)
{
	void *cmd_in = uverbs_attr_get_alloced_ptr(attrs, MLX5_IB_ATTR_DEVX_OBJ_MODIFY_CMD_IN);
	int cmd_out_len = uverbs_attr_get_len(attrs,
					      MLX5_IB_ATTR_DEVX_OBJ_MODIFY_CMD_OUT);
	struct ib_uobject *uobj = uverbs_attr_get_uobject(attrs,
							  MLX5_IB_ATTR_DEVX_OBJ_MODIFY_HANDLE);
	struct mlx5_ib_ucontext *c = rdma_udata_to_drv_context(
		&attrs->driver_udata, struct mlx5_ib_ucontext, ibucontext);
	struct mlx5_ib_dev *mdev = to_mdev(c->ibucontext.device);
	void *cmd_out;
	int err;
	int uid;

	uid = devx_get_uid(c, cmd_in);
	if (uid < 0)
		return uid;

	if (!devx_is_obj_modify_cmd(cmd_in))
		return -EINVAL;

	if (!devx_is_valid_obj_id(uobj, cmd_in))
		return -EINVAL;

	cmd_out = uverbs_zalloc(attrs, cmd_out_len);
	if (IS_ERR(cmd_out))
		return PTR_ERR(cmd_out);

	MLX5_SET(general_obj_in_cmd_hdr, cmd_in, uid, uid);
	devx_set_umem_valid(cmd_in);

	err = mlx5_cmd_exec(mdev->mdev, cmd_in,
			    uverbs_attr_get_len(attrs, MLX5_IB_ATTR_DEVX_OBJ_MODIFY_CMD_IN),
			    cmd_out, cmd_out_len);
	if (err)
		return err;

	return uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_OBJ_MODIFY_CMD_OUT,
			      cmd_out, cmd_out_len);
}

static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_OBJ_QUERY)(
	struct uverbs_attr_bundle *attrs)
{
	void *cmd_in = uverbs_attr_get_alloced_ptr(attrs, MLX5_IB_ATTR_DEVX_OBJ_QUERY_CMD_IN);
	int cmd_out_len = uverbs_attr_get_len(attrs,
					      MLX5_IB_ATTR_DEVX_OBJ_QUERY_CMD_OUT);
	struct ib_uobject *uobj = uverbs_attr_get_uobject(attrs,
							  MLX5_IB_ATTR_DEVX_OBJ_QUERY_HANDLE);
	struct mlx5_ib_ucontext *c = rdma_udata_to_drv_context(
		&attrs->driver_udata, struct mlx5_ib_ucontext, ibucontext);
	void *cmd_out;
	int err;
	int uid;
	struct mlx5_ib_dev *mdev = to_mdev(c->ibucontext.device);

	uid = devx_get_uid(c, cmd_in);
	if (uid < 0)
		return uid;

	if (!devx_is_obj_query_cmd(cmd_in))
		return -EINVAL;

	if (!devx_is_valid_obj_id(uobj, cmd_in))
		return -EINVAL;

	cmd_out = uverbs_zalloc(attrs, cmd_out_len);
	if (IS_ERR(cmd_out))
		return PTR_ERR(cmd_out);

	MLX5_SET(general_obj_in_cmd_hdr, cmd_in, uid, uid);
	err = mlx5_cmd_exec(mdev->mdev, cmd_in,
			    uverbs_attr_get_len(attrs, MLX5_IB_ATTR_DEVX_OBJ_QUERY_CMD_IN),
			    cmd_out, cmd_out_len);
	if (err)
		return err;

	return uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_OBJ_QUERY_CMD_OUT,
			      cmd_out, cmd_out_len);
}

Yishai Hadas6bf8f222019-01-22 08:29:56 +02001323struct devx_async_event_queue {
1324 spinlock_t lock;
1325 wait_queue_head_t poll_wait;
1326 struct list_head event_list;
Yishai Hadasa124edb2019-01-22 08:29:57 +02001327 atomic_t bytes_in_use;
Yishai Hadaseaebaf72019-01-22 08:29:59 +02001328 u8 is_destroyed:1;
Yishai Hadas6bf8f222019-01-22 08:29:56 +02001329};
1330
1331struct devx_async_cmd_event_file {
1332 struct ib_uobject uobj;
1333 struct devx_async_event_queue ev_queue;
Yishai Hadasa124edb2019-01-22 08:29:57 +02001334 struct mlx5_async_ctx async_ctx;
Yishai Hadas6bf8f222019-01-22 08:29:56 +02001335};
1336
1337static void devx_init_event_queue(struct devx_async_event_queue *ev_queue)
1338{
1339 spin_lock_init(&ev_queue->lock);
1340 INIT_LIST_HEAD(&ev_queue->event_list);
1341 init_waitqueue_head(&ev_queue->poll_wait);
Yishai Hadasa124edb2019-01-22 08:29:57 +02001342 atomic_set(&ev_queue->bytes_in_use, 0);
Yishai Hadaseaebaf72019-01-22 08:29:59 +02001343 ev_queue->is_destroyed = 0;
Yishai Hadas6bf8f222019-01-22 08:29:56 +02001344}
1345
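/*
 * Allocate the async command FD: initialize its completion queue and bind
 * an mlx5 async command context so queries can later be submitted on it.
 */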
1346static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_ASYNC_CMD_FD_ALLOC)(
1347 struct uverbs_attr_bundle *attrs)
1348{
1349 struct devx_async_cmd_event_file *ev_file;
1350
1351 struct ib_uobject *uobj = uverbs_attr_get_uobject(
1352 attrs, MLX5_IB_ATTR_DEVX_ASYNC_CMD_FD_ALLOC_HANDLE);
Yishai Hadasa124edb2019-01-22 08:29:57 +02001353 struct mlx5_ib_dev *mdev = to_mdev(uobj->context->device);
Yishai Hadas6bf8f222019-01-22 08:29:56 +02001354
1355 ev_file = container_of(uobj, struct devx_async_cmd_event_file,
1356 uobj);
1357 devx_init_event_queue(&ev_file->ev_queue);
Yishai Hadasa124edb2019-01-22 08:29:57 +02001358 mlx5_cmd_init_async_ctx(mdev->mdev, &ev_file->async_ctx);
Yishai Hadas6bf8f222019-01-22 08:29:56 +02001359 return 0;
1360}
1361
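/*
 * Runs from the mlx5 async command context when a queued query completes:
 * move the result onto the owning FD's event list, wake readers and drop
 * the file reference taken at submission time.
 */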
Yishai Hadasa124edb2019-01-22 08:29:57 +02001362static void devx_query_callback(int status, struct mlx5_async_work *context)
1363{
1364 struct devx_async_data *async_data =
1365 container_of(context, struct devx_async_data, cb_work);
1366 struct ib_uobject *fd_uobj = async_data->fd_uobj;
1367 struct devx_async_cmd_event_file *ev_file;
1368 struct devx_async_event_queue *ev_queue;
1369 unsigned long flags;
1370
1371 ev_file = container_of(fd_uobj, struct devx_async_cmd_event_file,
1372 uobj);
1373 ev_queue = &ev_file->ev_queue;
1374
1375 spin_lock_irqsave(&ev_queue->lock, flags);
1376 list_add_tail(&async_data->list, &ev_queue->event_list);
1377 spin_unlock_irqrestore(&ev_queue->lock, flags);
1378
1379 wake_up_interruptible(&ev_queue->poll_wait);
1380 fput(fd_uobj->object);
1381}
1382
1383#define MAX_ASYNC_BYTES_IN_USE (1024 * 1024) /* 1MB */
1384
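/*
 * Asynchronous DEVX object query (what rdma-core exposes, roughly, as
 * mlx5dv_devx_obj_query_async()): validate the command and object id,
 * charge the requested output size against the FD's
 * MAX_ASYNC_BYTES_IN_USE quota, allocate a devx_async_data carrying the
 * user's wr_id and fire the command via mlx5_cmd_exec_cb();
 * devx_query_callback() delivers the result to the event FD.
 */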
1385static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_OBJ_ASYNC_QUERY)(
1386 struct uverbs_attr_bundle *attrs)
1387{
1388 void *cmd_in = uverbs_attr_get_alloced_ptr(attrs,
1389 MLX5_IB_ATTR_DEVX_OBJ_QUERY_ASYNC_CMD_IN);
1390 struct ib_uobject *uobj = uverbs_attr_get_uobject(
1391 attrs,
1392 MLX5_IB_ATTR_DEVX_OBJ_QUERY_ASYNC_HANDLE);
1393 u16 cmd_out_len;
Shamir Rabinovitch89944452019-02-07 18:44:49 +02001394 struct mlx5_ib_ucontext *c = rdma_udata_to_drv_context(
1395 &attrs->driver_udata, struct mlx5_ib_ucontext, ibucontext);
Yishai Hadasa124edb2019-01-22 08:29:57 +02001396 struct ib_uobject *fd_uobj;
1397 int err;
1398 int uid;
Shamir Rabinovitch89944452019-02-07 18:44:49 +02001399 struct mlx5_ib_dev *mdev = to_mdev(c->ibucontext.device);
Yishai Hadasa124edb2019-01-22 08:29:57 +02001400 struct devx_async_cmd_event_file *ev_file;
1401 struct devx_async_data *async_data;
1402
1403 uid = devx_get_uid(c, cmd_in);
1404 if (uid < 0)
1405 return uid;
1406
1407 if (!devx_is_obj_query_cmd(cmd_in))
1408 return -EINVAL;
1409
1410 err = uverbs_get_const(&cmd_out_len, attrs,
1411 MLX5_IB_ATTR_DEVX_OBJ_QUERY_ASYNC_OUT_LEN);
1412 if (err)
1413 return err;
1414
1415 if (!devx_is_valid_obj_id(uobj, cmd_in))
1416 return -EINVAL;
1417
1418 fd_uobj = uverbs_attr_get_uobject(attrs,
1419 MLX5_IB_ATTR_DEVX_OBJ_QUERY_ASYNC_FD);
1420 if (IS_ERR(fd_uobj))
1421 return PTR_ERR(fd_uobj);
1422
1423 ev_file = container_of(fd_uobj, struct devx_async_cmd_event_file,
1424 uobj);
1425
1426 if (atomic_add_return(cmd_out_len, &ev_file->ev_queue.bytes_in_use) >
1427 MAX_ASYNC_BYTES_IN_USE) {
1428 atomic_sub(cmd_out_len, &ev_file->ev_queue.bytes_in_use);
1429 return -EAGAIN;
1430 }
1431
1432 async_data = kvzalloc(struct_size(async_data, hdr.out_data,
1433 cmd_out_len), GFP_KERNEL);
1434 if (!async_data) {
1435 err = -ENOMEM;
1436 goto sub_bytes;
1437 }
1438
1439 err = uverbs_copy_from(&async_data->hdr.wr_id, attrs,
1440 MLX5_IB_ATTR_DEVX_OBJ_QUERY_ASYNC_WR_ID);
1441 if (err)
1442 goto free_async;
1443
1444 async_data->cmd_out_len = cmd_out_len;
1445 async_data->mdev = mdev;
1446 async_data->fd_uobj = fd_uobj;
1447
1448 get_file(fd_uobj->object);
1449 MLX5_SET(general_obj_in_cmd_hdr, cmd_in, uid, uid);
1450 err = mlx5_cmd_exec_cb(&ev_file->async_ctx, cmd_in,
1451 uverbs_attr_get_len(attrs,
1452 MLX5_IB_ATTR_DEVX_OBJ_QUERY_ASYNC_CMD_IN),
1453 async_data->hdr.out_data,
1454 async_data->cmd_out_len,
1455 devx_query_callback, &async_data->cb_work);
1456
1457 if (err)
1458 goto cb_err;
1459
1460 return 0;
1461
1462cb_err:
1463 fput(fd_uobj->object);
1464free_async:
1465 kvfree(async_data);
1466sub_bytes:
1467 atomic_sub(cmd_out_len, &ev_file->ev_queue.bytes_in_use);
1468 return err;
1469}
1470
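/*
 * Pin the user buffer described by the REG attributes and work out the
 * page layout (page_shift, ncont, page_offset) needed to build the
 * CREATE_UMEM mailbox.
 */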
Yishai Hadasaeae9452018-06-17 13:00:04 +03001471static int devx_umem_get(struct mlx5_ib_dev *dev, struct ib_ucontext *ucontext,
1472 struct uverbs_attr_bundle *attrs,
1473 struct devx_umem *obj)
1474{
1475 u64 addr;
1476 size_t size;
Jason Gunthorpebccd0622018-07-26 16:37:14 -06001477 u32 access;
Yishai Hadasaeae9452018-06-17 13:00:04 +03001478 int npages;
1479 int err;
1480 u32 page_mask;
1481
1482 if (uverbs_copy_from(&addr, attrs, MLX5_IB_ATTR_DEVX_UMEM_REG_ADDR) ||
Jason Gunthorpebccd0622018-07-26 16:37:14 -06001483 uverbs_copy_from(&size, attrs, MLX5_IB_ATTR_DEVX_UMEM_REG_LEN))
Yishai Hadasaeae9452018-06-17 13:00:04 +03001484 return -EFAULT;
1485
Jason Gunthorpebccd0622018-07-26 16:37:14 -06001486 err = uverbs_get_flags32(&access, attrs,
1487 MLX5_IB_ATTR_DEVX_UMEM_REG_ACCESS,
Yishai Hadas47f07f02018-12-05 15:50:21 +02001488 IB_ACCESS_LOCAL_WRITE |
1489 IB_ACCESS_REMOTE_WRITE |
1490 IB_ACCESS_REMOTE_READ);
Jason Gunthorpebccd0622018-07-26 16:37:14 -06001491 if (err)
1492 return err;
1493
Yishai Hadasaeae9452018-06-17 13:00:04 +03001494 err = ib_check_mr_access(access);
1495 if (err)
1496 return err;
1497
Jason Gunthorpeb0ea0fa2019-01-09 11:15:16 +02001498 obj->umem = ib_umem_get(&attrs->driver_udata, addr, size, access, 0);
Yishai Hadasaeae9452018-06-17 13:00:04 +03001499 if (IS_ERR(obj->umem))
1500 return PTR_ERR(obj->umem);
1501
1502 mlx5_ib_cont_pages(obj->umem, obj->umem->address,
1503 MLX5_MKEY_PAGE_SHIFT_MASK, &npages,
1504 &obj->page_shift, &obj->ncont, NULL);
1505
1506 if (!npages) {
1507 ib_umem_release(obj->umem);
1508 return -EINVAL;
1509 }
1510
1511 page_mask = (1 << obj->page_shift) - 1;
1512 obj->page_offset = obj->umem->address & page_mask;
1513
1514 return 0;
1515}
1516
Jason Gunthorpeb61815e2018-08-09 20:14:41 -06001517static int devx_umem_reg_cmd_alloc(struct uverbs_attr_bundle *attrs,
1518 struct devx_umem *obj,
Yishai Hadasaeae9452018-06-17 13:00:04 +03001519 struct devx_umem_reg_cmd *cmd)
1520{
1521 cmd->inlen = MLX5_ST_SZ_BYTES(create_umem_in) +
1522 (MLX5_ST_SZ_BYTES(mtt) * obj->ncont);
Jason Gunthorpeb61815e2018-08-09 20:14:41 -06001523 cmd->in = uverbs_zalloc(attrs, cmd->inlen);
1524 return PTR_ERR_OR_ZERO(cmd->in);
Yishai Hadasaeae9452018-06-17 13:00:04 +03001525}
1526
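/*
 * Fill the CREATE_UMEM mailbox: umem geometry plus one MTT entry per
 * compound page, marked writable only when the umem itself is.
 */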
1527static void devx_umem_reg_cmd_build(struct mlx5_ib_dev *dev,
1528 struct devx_umem *obj,
1529 struct devx_umem_reg_cmd *cmd)
1530{
1531 void *umem;
1532 __be64 *mtt;
1533
1534 umem = MLX5_ADDR_OF(create_umem_in, cmd->in, umem);
1535 mtt = (__be64 *)MLX5_ADDR_OF(umem, umem, mtt);
1536
Yishai Hadas6e3722b2018-12-19 16:28:15 +02001537 MLX5_SET(create_umem_in, cmd->in, opcode, MLX5_CMD_OP_CREATE_UMEM);
Yishai Hadasaeae9452018-06-17 13:00:04 +03001538 MLX5_SET64(umem, umem, num_of_mtt, obj->ncont);
1539 MLX5_SET(umem, umem, log_page_size, obj->page_shift -
1540 MLX5_ADAPTER_PAGE_SHIFT);
1541 MLX5_SET(umem, umem, page_offset, obj->page_offset);
1542 mlx5_ib_populate_pas(dev, obj->umem, obj->page_shift, mtt,
1543 (obj->umem->writable ? MLX5_IB_MTT_WRITE : 0) |
1544 MLX5_IB_MTT_READ);
1545}
1546
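/*
 * Register a DEVX umem: requires a DEVX uid, pins the memory, issues
 * CREATE_UMEM on behalf of the caller's uid and returns the firmware
 * umem id; the matching destroy mailbox is pre-built for later cleanup.
 */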
Jason Gunthorpee83f0ec2018-07-25 21:40:18 -06001547static int UVERBS_HANDLER(MLX5_IB_METHOD_DEVX_UMEM_REG)(
Jason Gunthorpe15a1b4b2018-11-25 20:51:15 +02001548 struct uverbs_attr_bundle *attrs)
Yishai Hadasaeae9452018-06-17 13:00:04 +03001549{
Yishai Hadasaeae9452018-06-17 13:00:04 +03001550 struct devx_umem_reg_cmd cmd;
1551 struct devx_umem *obj;
Jason Gunthorpec36ee462018-07-10 20:55:22 -06001552 struct ib_uobject *uobj = uverbs_attr_get_uobject(
1553 attrs, MLX5_IB_ATTR_DEVX_UMEM_REG_HANDLE);
Yishai Hadasaeae9452018-06-17 13:00:04 +03001554 u32 obj_id;
Shamir Rabinovitch89944452019-02-07 18:44:49 +02001555 struct mlx5_ib_ucontext *c = rdma_udata_to_drv_context(
1556 &attrs->driver_udata, struct mlx5_ib_ucontext, ibucontext);
Jason Gunthorpec36ee462018-07-10 20:55:22 -06001557 struct mlx5_ib_dev *dev = to_mdev(c->ibucontext.device);
Yishai Hadasaeae9452018-06-17 13:00:04 +03001558 int err;
1559
1560 if (!c->devx_uid)
Yishai Hadas7e1335a2018-09-20 21:45:20 +03001561 return -EINVAL;
1562
Yishai Hadasaeae9452018-06-17 13:00:04 +03001563 obj = kzalloc(sizeof(struct devx_umem), GFP_KERNEL);
1564 if (!obj)
1565 return -ENOMEM;
1566
1567 err = devx_umem_get(dev, &c->ibucontext, attrs, obj);
1568 if (err)
1569 goto err_obj_free;
1570
Jason Gunthorpeb61815e2018-08-09 20:14:41 -06001571 err = devx_umem_reg_cmd_alloc(attrs, obj, &cmd);
Yishai Hadasaeae9452018-06-17 13:00:04 +03001572 if (err)
1573 goto err_umem_release;
1574
1575 devx_umem_reg_cmd_build(dev, obj, &cmd);
1576
Yishai Hadas6e3722b2018-12-19 16:28:15 +02001577 MLX5_SET(create_umem_in, cmd.in, uid, c->devx_uid);
Yishai Hadasaeae9452018-06-17 13:00:04 +03001578 err = mlx5_cmd_exec(dev->mdev, cmd.in, cmd.inlen, cmd.out,
1579 sizeof(cmd.out));
1580 if (err)
Jason Gunthorpeb61815e2018-08-09 20:14:41 -06001581 goto err_umem_release;
Yishai Hadasaeae9452018-06-17 13:00:04 +03001582
1583 obj->mdev = dev->mdev;
1584 uobj->object = obj;
1585 devx_obj_build_destroy_cmd(cmd.in, cmd.out, obj->dinbox, &obj->dinlen, &obj_id);
1586 err = uverbs_copy_to(attrs, MLX5_IB_ATTR_DEVX_UMEM_REG_OUT_ID, &obj_id, sizeof(obj_id));
1587 if (err)
1588 goto err_umem_destroy;
1589
Yishai Hadasaeae9452018-06-17 13:00:04 +03001590 return 0;
1591
1592err_umem_destroy:
1593 mlx5_cmd_exec(obj->mdev, obj->dinbox, obj->dinlen, cmd.out, sizeof(cmd.out));
Yishai Hadasaeae9452018-06-17 13:00:04 +03001594err_umem_release:
1595 ib_umem_release(obj->umem);
1596err_obj_free:
1597 kfree(obj);
1598 return err;
1599}
1600
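/*
 * Destroy path for a DEVX umem uobject: execute the pre-built destroy
 * mailbox and, unless the teardown is retryable, release the pinned
 * memory and the tracking structure.
 */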
Yishai Hadasaeae9452018-06-17 13:00:04 +03001601static int devx_umem_cleanup(struct ib_uobject *uobject,
1602 enum rdma_remove_reason why)
1603{
1604 struct devx_umem *obj = uobject->object;
1605 u32 out[MLX5_ST_SZ_DW(general_obj_out_cmd_hdr)];
1606 int err;
1607
1608 err = mlx5_cmd_exec(obj->mdev, obj->dinbox, obj->dinlen, out, sizeof(out));
Yishai Hadas1c774832018-06-20 17:11:39 +03001609 if (ib_is_destroy_retryable(err, why, uobject))
Yishai Hadasaeae9452018-06-17 13:00:04 +03001610 return err;
1611
1612 ib_umem_release(obj->umem);
1613 kfree(obj);
1614 return 0;
1615}
1616
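/*
 * read() on the async command FD: block (unless O_NONBLOCK) until a
 * completion is queued or the queue is destroyed, then copy the wr_id
 * header plus command output to user space and return the accounted
 * bytes to the FD's quota.
 */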
Yishai Hadas6bf8f222019-01-22 08:29:56 +02001617static ssize_t devx_async_cmd_event_read(struct file *filp, char __user *buf,
1618 size_t count, loff_t *pos)
1619{
Yishai Hadas4accbb32019-01-22 08:29:58 +02001620 struct devx_async_cmd_event_file *comp_ev_file = filp->private_data;
1621 struct devx_async_event_queue *ev_queue = &comp_ev_file->ev_queue;
1622 struct devx_async_data *event;
1623 int ret = 0;
1624 size_t eventsz;
1625
1626 spin_lock_irq(&ev_queue->lock);
1627
1628 while (list_empty(&ev_queue->event_list)) {
1629 spin_unlock_irq(&ev_queue->lock);
1630
1631 if (filp->f_flags & O_NONBLOCK)
1632 return -EAGAIN;
1633
1634 if (wait_event_interruptible(
1635 ev_queue->poll_wait,
Yishai Hadaseaebaf72019-01-22 08:29:59 +02001636 (!list_empty(&ev_queue->event_list) ||
1637 ev_queue->is_destroyed))) {
Yishai Hadas4accbb32019-01-22 08:29:58 +02001638 return -ERESTARTSYS;
1639 }
Yishai Hadaseaebaf72019-01-22 08:29:59 +02001640
1641 if (list_empty(&ev_queue->event_list) &&
1642 ev_queue->is_destroyed)
1643 return -EIO;
1644
Yishai Hadas4accbb32019-01-22 08:29:58 +02001645 spin_lock_irq(&ev_queue->lock);
1646 }
1647
1648 event = list_entry(ev_queue->event_list.next,
1649 struct devx_async_data, list);
1650 eventsz = event->cmd_out_len +
1651 sizeof(struct mlx5_ib_uapi_devx_async_cmd_hdr);
1652
1653 if (eventsz > count) {
1654 spin_unlock_irq(&ev_queue->lock);
1655 return -ENOSPC;
1656 }
1657
1658 list_del(ev_queue->event_list.next);
1659 spin_unlock_irq(&ev_queue->lock);
1660
1661 if (copy_to_user(buf, &event->hdr, eventsz))
1662 ret = -EFAULT;
1663 else
1664 ret = eventsz;
1665
1666 atomic_sub(event->cmd_out_len, &ev_queue->bytes_in_use);
1667 kvfree(event);
1668 return ret;
Yishai Hadas6bf8f222019-01-22 08:29:56 +02001669}
1670
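/*
 * release() for the async command FD: free any completions that were
 * never read and let the uverbs layer finish tearing down the FD.
 */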
1671static int devx_async_cmd_event_close(struct inode *inode, struct file *filp)
1672{
Yishai Hadasa124edb2019-01-22 08:29:57 +02001673 struct ib_uobject *uobj = filp->private_data;
1674 struct devx_async_cmd_event_file *comp_ev_file = container_of(
1675 uobj, struct devx_async_cmd_event_file, uobj);
1676 struct devx_async_data *entry, *tmp;
1677
1678 spin_lock_irq(&comp_ev_file->ev_queue.lock);
1679 list_for_each_entry_safe(entry, tmp,
1680 &comp_ev_file->ev_queue.event_list, list)
1681 kvfree(entry);
1682 spin_unlock_irq(&comp_ev_file->ev_queue.lock);
1683
Yishai Hadas6bf8f222019-01-22 08:29:56 +02001684 uverbs_close_fd(filp);
1685 return 0;
1686}
1687
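/*
 * poll() reports readable once a completion is pending; a destroyed
 * queue additionally reports EPOLLRDHUP.
 */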
1688static __poll_t devx_async_cmd_event_poll(struct file *filp,
1689 struct poll_table_struct *wait)
1690{
Yishai Hadas4accbb32019-01-22 08:29:58 +02001691 struct devx_async_cmd_event_file *comp_ev_file = filp->private_data;
1692 struct devx_async_event_queue *ev_queue = &comp_ev_file->ev_queue;
1693 __poll_t pollflags = 0;
1694
1695 poll_wait(filp, &ev_queue->poll_wait, wait);
1696
1697 spin_lock_irq(&ev_queue->lock);
Yishai Hadaseaebaf72019-01-22 08:29:59 +02001698 if (ev_queue->is_destroyed)
1699 pollflags = EPOLLIN | EPOLLRDNORM | EPOLLRDHUP;
1700 else if (!list_empty(&ev_queue->event_list))
Yishai Hadas4accbb32019-01-22 08:29:58 +02001701 pollflags = EPOLLIN | EPOLLRDNORM;
1702 spin_unlock_irq(&ev_queue->lock);
1703
1704 return pollflags;
Yishai Hadas6bf8f222019-01-22 08:29:56 +02001705}
1706
1707const struct file_operations devx_async_cmd_event_fops = {
1708 .owner = THIS_MODULE,
1709 .read = devx_async_cmd_event_read,
1710 .poll = devx_async_cmd_event_poll,
1711 .release = devx_async_cmd_event_close,
1712 .llseek = no_llseek,
1713};
1714
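/*
 * uobject removal (e.g. device hot-unplug): mark the queue destroyed so
 * readers see -EIO, wake any waiters when the removal comes from the
 * driver, and tear down the async command context, waiting out queries
 * still in flight.
 */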
1715static int devx_hot_unplug_async_cmd_event_file(struct ib_uobject *uobj,
1716 enum rdma_remove_reason why)
1717{
Yishai Hadasa124edb2019-01-22 08:29:57 +02001718 struct devx_async_cmd_event_file *comp_ev_file =
1719 container_of(uobj, struct devx_async_cmd_event_file,
1720 uobj);
Yishai Hadaseaebaf72019-01-22 08:29:59 +02001721 struct devx_async_event_queue *ev_queue = &comp_ev_file->ev_queue;
1722
1723 spin_lock_irq(&ev_queue->lock);
1724 ev_queue->is_destroyed = 1;
1725 spin_unlock_irq(&ev_queue->lock);
1726
1727 if (why == RDMA_REMOVE_DRIVER_REMOVE)
1728 wake_up_interruptible(&ev_queue->poll_wait);
Yishai Hadasa124edb2019-01-22 08:29:57 +02001729
1730 mlx5_cmd_cleanup_async_ctx(&comp_ev_file->async_ctx);
Yishai Hadas6bf8f222019-01-22 08:29:56 +02001731 return 0;
1732}
1733
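/*
 * uverbs method and attribute specifications for the DEVX objects; these
 * trees are chained into mlx5_ib_devx_defs[] at the bottom of the file.
 */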
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001734DECLARE_UVERBS_NAMED_METHOD(
1735 MLX5_IB_METHOD_DEVX_UMEM_REG,
1736 UVERBS_ATTR_IDR(MLX5_IB_ATTR_DEVX_UMEM_REG_HANDLE,
1737 MLX5_IB_OBJECT_DEVX_UMEM,
1738 UVERBS_ACCESS_NEW,
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001739 UA_MANDATORY),
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001740 UVERBS_ATTR_PTR_IN(MLX5_IB_ATTR_DEVX_UMEM_REG_ADDR,
1741 UVERBS_ATTR_TYPE(u64),
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001742 UA_MANDATORY),
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001743 UVERBS_ATTR_PTR_IN(MLX5_IB_ATTR_DEVX_UMEM_REG_LEN,
1744 UVERBS_ATTR_TYPE(u64),
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001745 UA_MANDATORY),
Jason Gunthorpebccd0622018-07-26 16:37:14 -06001746 UVERBS_ATTR_FLAGS_IN(MLX5_IB_ATTR_DEVX_UMEM_REG_ACCESS,
1747 enum ib_access_flags),
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001748 UVERBS_ATTR_PTR_OUT(MLX5_IB_ATTR_DEVX_UMEM_REG_OUT_ID,
1749 UVERBS_ATTR_TYPE(u32),
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001750 UA_MANDATORY));
Yishai Hadasaeae9452018-06-17 13:00:04 +03001751
Yishai Hadas528922a2018-07-08 13:24:39 +03001752DECLARE_UVERBS_NAMED_METHOD_DESTROY(
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001753 MLX5_IB_METHOD_DEVX_UMEM_DEREG,
1754 UVERBS_ATTR_IDR(MLX5_IB_ATTR_DEVX_UMEM_DEREG_HANDLE,
1755 MLX5_IB_OBJECT_DEVX_UMEM,
1756 UVERBS_ACCESS_DESTROY,
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001757 UA_MANDATORY));
Yishai Hadasaeae9452018-06-17 13:00:04 +03001758
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001759DECLARE_UVERBS_NAMED_METHOD(
1760 MLX5_IB_METHOD_DEVX_QUERY_EQN,
1761 UVERBS_ATTR_PTR_IN(MLX5_IB_ATTR_DEVX_QUERY_EQN_USER_VEC,
1762 UVERBS_ATTR_TYPE(u32),
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001763 UA_MANDATORY),
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001764 UVERBS_ATTR_PTR_OUT(MLX5_IB_ATTR_DEVX_QUERY_EQN_DEV_EQN,
1765 UVERBS_ATTR_TYPE(u32),
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001766 UA_MANDATORY));
Yishai Hadasf6fe01b2018-06-17 13:00:05 +03001767
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001768DECLARE_UVERBS_NAMED_METHOD(
1769 MLX5_IB_METHOD_DEVX_QUERY_UAR,
1770 UVERBS_ATTR_PTR_IN(MLX5_IB_ATTR_DEVX_QUERY_UAR_USER_IDX,
1771 UVERBS_ATTR_TYPE(u32),
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001772 UA_MANDATORY),
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001773 UVERBS_ATTR_PTR_OUT(MLX5_IB_ATTR_DEVX_QUERY_UAR_DEV_IDX,
1774 UVERBS_ATTR_TYPE(u32),
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001775 UA_MANDATORY));
Yishai Hadas7c043e92018-06-17 13:00:03 +03001776
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001777DECLARE_UVERBS_NAMED_METHOD(
1778 MLX5_IB_METHOD_DEVX_OTHER,
1779 UVERBS_ATTR_PTR_IN(
1780 MLX5_IB_ATTR_DEVX_OTHER_CMD_IN,
1781 UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_in_cmd_hdr)),
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001782 UA_MANDATORY,
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001783 UA_ALLOC_AND_COPY),
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001784 UVERBS_ATTR_PTR_OUT(
1785 MLX5_IB_ATTR_DEVX_OTHER_CMD_OUT,
1786 UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_out_cmd_hdr)),
Jason Gunthorpe540cd692018-07-04 08:50:30 +03001787 UA_MANDATORY));
Yishai Hadas8aa8c952018-06-17 13:00:00 +03001788
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001789DECLARE_UVERBS_NAMED_METHOD(
1790 MLX5_IB_METHOD_DEVX_OBJ_CREATE,
1791 UVERBS_ATTR_IDR(MLX5_IB_ATTR_DEVX_OBJ_CREATE_HANDLE,
1792 MLX5_IB_OBJECT_DEVX_OBJ,
1793 UVERBS_ACCESS_NEW,
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001794 UA_MANDATORY),
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001795 UVERBS_ATTR_PTR_IN(
1796 MLX5_IB_ATTR_DEVX_OBJ_CREATE_CMD_IN,
1797 UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_in_cmd_hdr)),
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001798 UA_MANDATORY,
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001799 UA_ALLOC_AND_COPY),
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001800 UVERBS_ATTR_PTR_OUT(
1801 MLX5_IB_ATTR_DEVX_OBJ_CREATE_CMD_OUT,
1802 UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_out_cmd_hdr)),
Jason Gunthorpe540cd692018-07-04 08:50:30 +03001803 UA_MANDATORY));
Yishai Hadas7efce362018-06-17 13:00:01 +03001804
Yishai Hadas528922a2018-07-08 13:24:39 +03001805DECLARE_UVERBS_NAMED_METHOD_DESTROY(
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001806 MLX5_IB_METHOD_DEVX_OBJ_DESTROY,
1807 UVERBS_ATTR_IDR(MLX5_IB_ATTR_DEVX_OBJ_DESTROY_HANDLE,
1808 MLX5_IB_OBJECT_DEVX_OBJ,
1809 UVERBS_ACCESS_DESTROY,
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001810 UA_MANDATORY));
Yishai Hadas7efce362018-06-17 13:00:01 +03001811
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001812DECLARE_UVERBS_NAMED_METHOD(
1813 MLX5_IB_METHOD_DEVX_OBJ_MODIFY,
1814 UVERBS_ATTR_IDR(MLX5_IB_ATTR_DEVX_OBJ_MODIFY_HANDLE,
Yishai Hadas34613eb2018-11-26 08:28:35 +02001815 UVERBS_IDR_ANY_OBJECT,
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001816 UVERBS_ACCESS_WRITE,
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001817 UA_MANDATORY),
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001818 UVERBS_ATTR_PTR_IN(
1819 MLX5_IB_ATTR_DEVX_OBJ_MODIFY_CMD_IN,
1820 UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_in_cmd_hdr)),
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001821 UA_MANDATORY,
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001822 UA_ALLOC_AND_COPY),
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001823 UVERBS_ATTR_PTR_OUT(
1824 MLX5_IB_ATTR_DEVX_OBJ_MODIFY_CMD_OUT,
1825 UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_out_cmd_hdr)),
Jason Gunthorpe540cd692018-07-04 08:50:30 +03001826 UA_MANDATORY));
Yishai Hadase662e142018-06-17 13:00:02 +03001827
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001828DECLARE_UVERBS_NAMED_METHOD(
1829 MLX5_IB_METHOD_DEVX_OBJ_QUERY,
1830 UVERBS_ATTR_IDR(MLX5_IB_ATTR_DEVX_OBJ_QUERY_HANDLE,
Yishai Hadas34613eb2018-11-26 08:28:35 +02001831 UVERBS_IDR_ANY_OBJECT,
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001832 UVERBS_ACCESS_READ,
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001833 UA_MANDATORY),
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001834 UVERBS_ATTR_PTR_IN(
1835 MLX5_IB_ATTR_DEVX_OBJ_QUERY_CMD_IN,
1836 UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_in_cmd_hdr)),
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001837 UA_MANDATORY,
Jason Gunthorpe83bb4442018-07-04 08:50:29 +03001838 UA_ALLOC_AND_COPY),
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001839 UVERBS_ATTR_PTR_OUT(
1840 MLX5_IB_ATTR_DEVX_OBJ_QUERY_CMD_OUT,
1841 UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_out_cmd_hdr)),
Jason Gunthorpe540cd692018-07-04 08:50:30 +03001842 UA_MANDATORY));
Yishai Hadase662e142018-06-17 13:00:02 +03001843
Yishai Hadasa124edb2019-01-22 08:29:57 +02001844DECLARE_UVERBS_NAMED_METHOD(
1845 MLX5_IB_METHOD_DEVX_OBJ_ASYNC_QUERY,
1846 UVERBS_ATTR_IDR(MLX5_IB_ATTR_DEVX_OBJ_QUERY_HANDLE,
1847 UVERBS_IDR_ANY_OBJECT,
1848 UVERBS_ACCESS_READ,
1849 UA_MANDATORY),
1850 UVERBS_ATTR_PTR_IN(
1851 MLX5_IB_ATTR_DEVX_OBJ_QUERY_CMD_IN,
1852 UVERBS_ATTR_MIN_SIZE(MLX5_ST_SZ_BYTES(general_obj_in_cmd_hdr)),
1853 UA_MANDATORY,
1854 UA_ALLOC_AND_COPY),
1855 UVERBS_ATTR_CONST_IN(MLX5_IB_ATTR_DEVX_OBJ_QUERY_ASYNC_OUT_LEN,
1856 u16, UA_MANDATORY),
1857 UVERBS_ATTR_FD(MLX5_IB_ATTR_DEVX_OBJ_QUERY_ASYNC_FD,
1858 MLX5_IB_OBJECT_DEVX_ASYNC_CMD_FD,
1859 UVERBS_ACCESS_READ,
1860 UA_MANDATORY),
1861 UVERBS_ATTR_PTR_IN(MLX5_IB_ATTR_DEVX_OBJ_QUERY_ASYNC_WR_ID,
1862 UVERBS_ATTR_TYPE(u64),
1863 UA_MANDATORY));
1864
Jason Gunthorpe6c61d2a2018-07-04 08:50:27 +03001865DECLARE_UVERBS_GLOBAL_METHODS(MLX5_IB_OBJECT_DEVX,
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001866 &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_OTHER),
1867 &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_QUERY_UAR),
1868 &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_QUERY_EQN));
Yishai Hadas8aa8c952018-06-17 13:00:00 +03001869
Jason Gunthorpe6c61d2a2018-07-04 08:50:27 +03001870DECLARE_UVERBS_NAMED_OBJECT(MLX5_IB_OBJECT_DEVX_OBJ,
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001871 UVERBS_TYPE_ALLOC_IDR(devx_obj_cleanup),
1872 &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_OBJ_CREATE),
1873 &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_OBJ_DESTROY),
1874 &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_OBJ_MODIFY),
Yishai Hadasa124edb2019-01-22 08:29:57 +02001875 &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_OBJ_QUERY),
1876 &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_OBJ_ASYNC_QUERY));
Yishai Hadas7efce362018-06-17 13:00:01 +03001877
Jason Gunthorpe6c61d2a2018-07-04 08:50:27 +03001878DECLARE_UVERBS_NAMED_OBJECT(MLX5_IB_OBJECT_DEVX_UMEM,
Jason Gunthorpe9a119cd2018-07-04 08:50:28 +03001879 UVERBS_TYPE_ALLOC_IDR(devx_umem_cleanup),
1880 &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_UMEM_REG),
1881 &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_UMEM_DEREG));
Yishai Hadasaeae9452018-06-17 13:00:04 +03001882
1884DECLARE_UVERBS_NAMED_METHOD(
1885 MLX5_IB_METHOD_DEVX_ASYNC_CMD_FD_ALLOC,
1886 UVERBS_ATTR_FD(MLX5_IB_ATTR_DEVX_ASYNC_CMD_FD_ALLOC_HANDLE,
1887 MLX5_IB_OBJECT_DEVX_ASYNC_CMD_FD,
1888 UVERBS_ACCESS_NEW,
1889 UA_MANDATORY));
1890
1891DECLARE_UVERBS_NAMED_OBJECT(
1892 MLX5_IB_OBJECT_DEVX_ASYNC_CMD_FD,
1893 UVERBS_TYPE_ALLOC_FD(sizeof(struct devx_async_cmd_event_file),
1894 devx_hot_unplug_async_cmd_event_file,
1895 &devx_async_cmd_event_fops, "[devx_async_cmd]",
1896 O_RDONLY),
1897 &UVERBS_METHOD(MLX5_IB_METHOD_DEVX_ASYNC_CMD_FD_ALLOC));
1898
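/*
 * DEVX is exposed only on native (non-representor) devices whose firmware
 * supports user contexts (log_max_uctx != 0).
 */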
Jason Gunthorpe36e235c2018-11-12 22:59:53 +02001899static bool devx_is_supported(struct ib_device *device)
Yishai Hadasc59450c2018-06-17 13:00:06 +03001900{
Jason Gunthorpe36e235c2018-11-12 22:59:53 +02001901 struct mlx5_ib_dev *dev = to_mdev(device);
1902
Yishai Hadas6e3722b2018-12-19 16:28:15 +02001903 return !dev->rep && MLX5_CAP_GEN(dev->mdev, log_max_uctx);
Yishai Hadasc59450c2018-06-17 13:00:06 +03001904}
Jason Gunthorpe36e235c2018-11-12 22:59:53 +02001905
Jason Gunthorpe0cbf4322018-11-12 22:59:50 +02001906const struct uapi_definition mlx5_ib_devx_defs[] = {
Jason Gunthorpe36e235c2018-11-12 22:59:53 +02001907 UAPI_DEF_CHAIN_OBJ_TREE_NAMED(
1908 MLX5_IB_OBJECT_DEVX,
1909 UAPI_DEF_IS_OBJ_SUPPORTED(devx_is_supported)),
1910 UAPI_DEF_CHAIN_OBJ_TREE_NAMED(
1911 MLX5_IB_OBJECT_DEVX_OBJ,
1912 UAPI_DEF_IS_OBJ_SUPPORTED(devx_is_supported)),
1913 UAPI_DEF_CHAIN_OBJ_TREE_NAMED(
1914 MLX5_IB_OBJECT_DEVX_UMEM,
1915 UAPI_DEF_IS_OBJ_SUPPORTED(devx_is_supported)),
Yishai Hadas6bf8f222019-01-22 08:29:56 +02001916 UAPI_DEF_CHAIN_OBJ_TREE_NAMED(
1917 MLX5_IB_OBJECT_DEVX_ASYNC_CMD_FD,
1918 UAPI_DEF_IS_OBJ_SUPPORTED(devx_is_supported)),
Jason Gunthorpe0cbf4322018-11-12 22:59:50 +02001919 {},
1920};