// SPDX-License-Identifier: ISC
/* Copyright (C) 2020 Felix Fietkau <nbd@nbd.name> */
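/*
 * Generic testmode support for mt76 drivers: a small state machine
 * (off/idle/tx_frames/rx_frames) driven through the mac80211/nl80211
 * testmode interface, e.g. by the mt76-test userspace utility.
 * mt76_testmode_cmd() applies parameters and state changes,
 * mt76_testmode_dump() reports the current configuration and statistics,
 * and the tx worker feeds generated test frames into the hardware queues.
 */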
#include "mt76.h"

static const struct nla_policy mt76_tm_policy[NUM_MT76_TM_ATTRS] = {
	[MT76_TM_ATTR_RESET] = { .type = NLA_FLAG },
	[MT76_TM_ATTR_STATE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_COUNT] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_LENGTH] = { .type = NLA_U32 },
	[MT76_TM_ATTR_TX_RATE_MODE] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_NSS] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_IDX] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_SGI] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_RATE_LDPC] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_ANTENNA] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER_CONTROL] = { .type = NLA_U8 },
	[MT76_TM_ATTR_TX_POWER] = { .type = NLA_NESTED },
	[MT76_TM_ATTR_FREQ_OFFSET] = { .type = NLA_U32 },
};
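
/*
 * Called from the tx worker: keep queueing clones of the prepared test
 * frame while transmissions are pending, limiting the number of frames
 * in flight to 1000 and the queue fill level to half of its descriptors.
 */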
void mt76_testmode_tx_pending(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;
	struct mt76_wcid *wcid = &dev->global_wcid;
	struct sk_buff *skb = td->tx_skb;
	struct mt76_queue *q;
	int qid;

	if (!skb || !td->tx_pending)
		return;

	qid = skb_get_queue_mapping(skb);
	q = dev->q_tx[qid];

	spin_lock_bh(&q->lock);

	while (td->tx_pending > 0 && td->tx_queued - td->tx_done < 1000 &&
	       q->queued < q->ndesc / 2) {
		int ret;

		ret = dev->queue_ops->tx_queue_skb(dev, qid, skb_get(skb),
						   wcid, NULL);
		if (ret < 0)
			break;

		td->tx_pending--;
		td->tx_queued++;
	}

	dev->queue_ops->kick(dev, q);

	spin_unlock_bh(&q->lock);
}
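
/*
 * Allocate and prepare the template test frame: a zero-filled data frame
 * addressed to/from the device MAC address, with the configured testmode
 * rate settings translated into mac80211 tx_info rate flags. The rate
 * index is validated against the selected mode, bandwidth and NSS limits.
 */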
static int
mt76_testmode_tx_init(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;
	struct ieee80211_tx_info *info;
	struct ieee80211_hdr *hdr;
	struct sk_buff *skb;
	u16 fc = IEEE80211_FTYPE_DATA | IEEE80211_STYPE_DATA |
		 IEEE80211_FCTL_FROMDS;
	struct ieee80211_tx_rate *rate;
	u8 max_nss = hweight8(dev->phy.antenna_mask);

	if (td->tx_antenna_mask)
		max_nss = min_t(u8, max_nss, hweight8(td->tx_antenna_mask));

	skb = alloc_skb(td->tx_msdu_len, GFP_KERNEL);
	if (!skb)
		return -ENOMEM;

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = skb;
	hdr = __skb_put_zero(skb, td->tx_msdu_len);
	hdr->frame_control = cpu_to_le16(fc);
	memcpy(hdr->addr1, dev->macaddr, sizeof(dev->macaddr));
	memcpy(hdr->addr2, dev->macaddr, sizeof(dev->macaddr));
	memcpy(hdr->addr3, dev->macaddr, sizeof(dev->macaddr));

	info = IEEE80211_SKB_CB(skb);
	info->flags = IEEE80211_TX_CTL_INJECTED |
		      IEEE80211_TX_CTL_NO_ACK |
		      IEEE80211_TX_CTL_NO_PS_BUFFER;
	rate = &info->control.rates[0];
	rate->count = 1;
	rate->idx = td->tx_rate_idx;

	switch (td->tx_rate_mode) {
	case MT76_TM_TX_MODE_CCK:
		if (dev->phy.chandef.chan->band != NL80211_BAND_2GHZ)
			return -EINVAL;

		if (rate->idx > 4)
			return -EINVAL;
		break;
	case MT76_TM_TX_MODE_OFDM:
		if (dev->phy.chandef.chan->band != NL80211_BAND_2GHZ)
			break;

		if (rate->idx > 8)
			return -EINVAL;

		rate->idx += 4;
		break;
	case MT76_TM_TX_MODE_HT:
		if (rate->idx > 8 * max_nss &&
		    !(rate->idx == 32 &&
		      dev->phy.chandef.width >= NL80211_CHAN_WIDTH_40))
			return -EINVAL;

		rate->flags |= IEEE80211_TX_RC_MCS;
		break;
	case MT76_TM_TX_MODE_VHT:
		if (rate->idx > 9)
			return -EINVAL;

		if (td->tx_rate_nss > max_nss)
			return -EINVAL;

		ieee80211_rate_set_vht(rate, td->tx_rate_idx, td->tx_rate_nss);
		rate->flags |= IEEE80211_TX_RC_VHT_MCS;
		break;
	default:
		break;
	}

	if (td->tx_rate_sgi)
		rate->flags |= IEEE80211_TX_RC_SHORT_GI;

	if (td->tx_rate_ldpc)
		info->flags |= IEEE80211_TX_CTL_LDPC;

	if (td->tx_rate_mode >= MT76_TM_TX_MODE_HT) {
		switch (dev->phy.chandef.width) {
		case NL80211_CHAN_WIDTH_40:
			rate->flags |= IEEE80211_TX_RC_40_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80:
			rate->flags |= IEEE80211_TX_RC_80_MHZ_WIDTH;
			break;
		case NL80211_CHAN_WIDTH_80P80:
		case NL80211_CHAN_WIDTH_160:
			rate->flags |= IEEE80211_TX_RC_160_MHZ_WIDTH;
			break;
		default:
			break;
		}
	}

	skb_set_queue_mapping(skb, IEEE80211_AC_BE);

	return 0;
}
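
/* Reset the tx counters and kick the tx worker to start sending frames */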
static void
mt76_testmode_tx_start(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;

	td->tx_queued = 0;
	td->tx_done = 0;
	td->tx_pending = td->tx_count;
	mt76_worker_schedule(&dev->tx_worker);
}
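
/*
 * Stop frame generation: clear tx_pending while the tx worker is
 * disabled, then wait up to 10 seconds for already queued frames to
 * complete before freeing the template skb.
 */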
static void
mt76_testmode_tx_stop(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;

	mt76_worker_disable(&dev->tx_worker);

	td->tx_pending = 0;

	mt76_worker_enable(&dev->tx_worker);

	wait_event_timeout(dev->tx_wait, td->tx_done == td->tx_queued, 10 * HZ);

	dev_kfree_skb(td->tx_skb);
	td->tx_skb = NULL;
}
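
/*
 * Track which testmode attributes have been explicitly set by the user;
 * the dump code only reports optional fields that are marked here.
 */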
static inline void
mt76_testmode_param_set(struct mt76_testmode_data *td, u16 idx)
{
	td->param_set[idx / 32] |= BIT(idx % 32);
}

static inline bool
mt76_testmode_param_present(struct mt76_testmode_data *td, u16 idx)
{
	return td->param_set[idx / 32] & BIT(idx % 32);
}
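
/* Apply default tx parameters, but only on first use (tx_msdu_len == 0) */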
static void
mt76_testmode_init_defaults(struct mt76_dev *dev)
{
	struct mt76_testmode_data *td = &dev->test;

	if (td->tx_msdu_len > 0)
		return;

	td->tx_msdu_len = 1024;
	td->tx_count = 1;
	td->tx_rate_mode = MT76_TM_TX_MODE_OFDM;
	td->tx_rate_nss = 1;
}
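
/*
 * Perform a single state transition: stop any running tx first, prepare
 * the tx template when entering TX_FRAMES, let the driver apply the new
 * state, then start tx or clear the rx statistics as needed.
 */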
static int
__mt76_testmode_set_state(struct mt76_dev *dev, enum mt76_testmode_state state)
{
	enum mt76_testmode_state prev_state = dev->test.state;
	int err;

	if (prev_state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_stop(dev);

	if (state == MT76_TM_STATE_TX_FRAMES) {
		err = mt76_testmode_tx_init(dev);
		if (err)
			return err;
	}

	err = dev->test_ops->set_state(dev, state);
	if (err) {
		if (state == MT76_TM_STATE_TX_FRAMES)
			mt76_testmode_tx_stop(dev);

		return err;
	}

	if (state == MT76_TM_STATE_TX_FRAMES)
		mt76_testmode_tx_start(dev);
	else if (state == MT76_TM_STATE_RX_FRAMES)
		memset(&dev->test.rx_stats, 0, sizeof(dev->test.rx_stats));

	dev->test.state = state;

	return 0;
}
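
/*
 * Public entry point for state changes. Any state other than OFF requires
 * the interface to be running in monitor mode; switching directly between
 * two active states goes through IDLE first.
 */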
int mt76_testmode_set_state(struct mt76_dev *dev, enum mt76_testmode_state state)
{
	struct mt76_testmode_data *td = &dev->test;
	struct ieee80211_hw *hw = dev->phy.hw;

	if (state == td->state && state == MT76_TM_STATE_OFF)
		return 0;

	if (state > MT76_TM_STATE_OFF &&
	    (!test_bit(MT76_STATE_RUNNING, &dev->phy.state) ||
	     !(hw->conf.flags & IEEE80211_CONF_MONITOR)))
		return -ENOTCONN;

	if (state != MT76_TM_STATE_IDLE &&
	    td->state != MT76_TM_STATE_IDLE) {
		int ret;

		ret = __mt76_testmode_set_state(dev, MT76_TM_STATE_IDLE);
		if (ret)
			return ret;
	}

	return __mt76_testmode_set_state(dev, state);
}
EXPORT_SYMBOL(mt76_testmode_set_state);
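
/*
 * Read an optional u8 attribute and range-check it against [min, max];
 * a missing attribute leaves *dest untouched and is not an error.
 */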
static int
mt76_tm_get_u8(struct nlattr *attr, u8 *dest, u8 min, u8 max)
{
	u8 val;

	if (!attr)
		return 0;

	val = nla_get_u8(attr);
	if (val < min || val > max)
		return -EINVAL;

	*dest = val;
	return 0;
}
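
/*
 * NL80211_CMD_TESTMODE handler: parse the MT76_TM_ATTR_* attributes,
 * update the testmode configuration under dev->mutex, hand the parsed
 * attributes to the driver's set_params hook and finally apply a state
 * change if one was requested.
 */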
int mt76_testmode_cmd(struct ieee80211_hw *hw, struct ieee80211_vif *vif,
		      void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &dev->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS];
	u32 state;
	int err;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
				   mt76_tm_policy, NULL);
	if (err)
		return err;

	err = -EINVAL;

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_RESET]) {
		mt76_testmode_set_state(dev, MT76_TM_STATE_OFF);
		memset(td, 0, sizeof(*td));
	}

	mt76_testmode_init_defaults(dev);

	if (tb[MT76_TM_ATTR_TX_COUNT])
		td->tx_count = nla_get_u32(tb[MT76_TM_ATTR_TX_COUNT]);

	if (tb[MT76_TM_ATTR_TX_LENGTH]) {
		u32 val = nla_get_u32(tb[MT76_TM_ATTR_TX_LENGTH]);

		if (val > IEEE80211_MAX_FRAME_LEN ||
		    val < sizeof(struct ieee80211_hdr))
			goto out;

		td->tx_msdu_len = val;
	}

	if (tb[MT76_TM_ATTR_TX_RATE_IDX])
		td->tx_rate_idx = nla_get_u8(tb[MT76_TM_ATTR_TX_RATE_IDX]);

	if (mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_MODE], &td->tx_rate_mode,
			   0, MT76_TM_TX_MODE_MAX) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_NSS], &td->tx_rate_nss,
			   1, hweight8(phy->antenna_mask)) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_SGI], &td->tx_rate_sgi, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_RATE_LDPC], &td->tx_rate_ldpc, 0, 1) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_ANTENNA], &td->tx_antenna_mask, 1,
			   phy->antenna_mask) ||
	    mt76_tm_get_u8(tb[MT76_TM_ATTR_TX_POWER_CONTROL],
			   &td->tx_power_control, 0, 1))
		goto out;

	if (tb[MT76_TM_ATTR_FREQ_OFFSET])
		td->freq_offset = nla_get_u32(tb[MT76_TM_ATTR_FREQ_OFFSET]);

	if (tb[MT76_TM_ATTR_STATE]) {
		/* MT76_TM_ATTR_STATE is declared NLA_U8 in mt76_tm_policy */
		state = nla_get_u8(tb[MT76_TM_ATTR_STATE]);
		if (state > MT76_TM_STATE_MAX)
			goto out;
	} else {
		state = td->state;
	}

	if (tb[MT76_TM_ATTR_TX_POWER]) {
		struct nlattr *cur;
		int idx = 0;
		int rem;

		nla_for_each_nested(cur, tb[MT76_TM_ATTR_TX_POWER], rem) {
			if (nla_len(cur) != 1 ||
			    idx >= ARRAY_SIZE(td->tx_power))
				goto out;

			td->tx_power[idx++] = nla_get_u8(cur);
		}
	}

	if (dev->test_ops->set_params) {
		err = dev->test_ops->set_params(dev, tb, state);
		if (err)
			goto out;
	}

	for (i = MT76_TM_ATTR_STATE; i < ARRAY_SIZE(tb); i++)
		if (tb[i])
			mt76_testmode_param_set(td, i);

	err = 0;
	if (tb[MT76_TM_ATTR_STATE])
		err = mt76_testmode_set_state(dev, state);

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_cmd);
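
/*
 * Fill the MT76_TM_STATS_ATTR_* nest with the accumulated tx/rx counters;
 * per-driver statistics are appended through the dump_stats hook.
 */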
static int
mt76_testmode_dump_stats(struct mt76_dev *dev, struct sk_buff *msg)
{
	struct mt76_testmode_data *td = &dev->test;
	u64 rx_packets = 0;
	u64 rx_fcs_error = 0;
	int i;

	for (i = 0; i < ARRAY_SIZE(td->rx_stats.packets); i++) {
		rx_packets += td->rx_stats.packets[i];
		rx_fcs_error += td->rx_stats.fcs_error[i];
	}

	if (nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_PENDING, td->tx_pending) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_QUEUED, td->tx_queued) ||
	    nla_put_u32(msg, MT76_TM_STATS_ATTR_TX_DONE, td->tx_done) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_PACKETS, rx_packets,
			      MT76_TM_STATS_ATTR_PAD) ||
	    nla_put_u64_64bit(msg, MT76_TM_STATS_ATTR_RX_FCS_ERROR, rx_fcs_error,
			      MT76_TM_STATS_ATTR_PAD))
		return -EMSGSIZE;

	if (dev->test_ops->dump_stats)
		return dev->test_ops->dump_stats(dev, msg);

	return 0;
}
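
/*
 * NL80211_CMD_TESTMODE_DUMP handler: report either the statistics nest
 * (when MT76_TM_ATTR_STATS is requested) or the current configuration in
 * a single dump message.
 */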
int mt76_testmode_dump(struct ieee80211_hw *hw, struct sk_buff *msg,
		       struct netlink_callback *cb, void *data, int len)
{
	struct mt76_phy *phy = hw->priv;
	struct mt76_dev *dev = phy->dev;
	struct mt76_testmode_data *td = &dev->test;
	struct nlattr *tb[NUM_MT76_TM_ATTRS] = {};
	int err = 0;
	void *a;
	int i;

	if (!dev->test_ops)
		return -EOPNOTSUPP;

	if (cb->args[2]++ > 0)
		return -ENOENT;

	if (data) {
		err = nla_parse_deprecated(tb, MT76_TM_ATTR_MAX, data, len,
					   mt76_tm_policy, NULL);
		if (err)
			return err;
	}

	mutex_lock(&dev->mutex);

	if (tb[MT76_TM_ATTR_STATS]) {
		err = -EINVAL;

		a = nla_nest_start(msg, MT76_TM_ATTR_STATS);
		if (a) {
			err = mt76_testmode_dump_stats(dev, msg);
			nla_nest_end(msg, a);
		}

		goto out;
	}

	mt76_testmode_init_defaults(dev);

	err = -EMSGSIZE;
	if (nla_put_u32(msg, MT76_TM_ATTR_STATE, td->state))
		goto out;

	if (td->mtd_name &&
	    (nla_put_string(msg, MT76_TM_ATTR_MTD_PART, td->mtd_name) ||
	     nla_put_u32(msg, MT76_TM_ATTR_MTD_OFFSET, td->mtd_offset)))
		goto out;

	if (nla_put_u32(msg, MT76_TM_ATTR_TX_COUNT, td->tx_count) ||
	    nla_put_u32(msg, MT76_TM_ATTR_TX_LENGTH, td->tx_msdu_len) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_MODE, td->tx_rate_mode) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_NSS, td->tx_rate_nss) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_IDX, td->tx_rate_idx) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_SGI, td->tx_rate_sgi) ||
	    nla_put_u8(msg, MT76_TM_ATTR_TX_RATE_LDPC, td->tx_rate_ldpc) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_ANTENNA) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_ANTENNA, td->tx_antenna_mask)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER_CONTROL) &&
	     nla_put_u8(msg, MT76_TM_ATTR_TX_POWER_CONTROL, td->tx_power_control)) ||
	    (mt76_testmode_param_present(td, MT76_TM_ATTR_FREQ_OFFSET) &&
	     nla_put_u32(msg, MT76_TM_ATTR_FREQ_OFFSET, td->freq_offset)))
		goto out;

	if (mt76_testmode_param_present(td, MT76_TM_ATTR_TX_POWER)) {
		a = nla_nest_start(msg, MT76_TM_ATTR_TX_POWER);
		if (!a)
			goto out;

		for (i = 0; i < ARRAY_SIZE(td->tx_power); i++)
			if (nla_put_u8(msg, i, td->tx_power[i]))
				goto out;

		nla_nest_end(msg, a);
	}

	err = 0;

out:
	mutex_unlock(&dev->mutex);

	return err;
}
EXPORT_SYMBOL(mt76_testmode_dump);