blob: db42d97a200644835a79ab8bae8fb6a0d7907985 [file] [log] [blame]
/*
 * net/sched/cls_matchall.c		Match-all classifier
 *
 * Copyright (c) 2016 Jiri Pirko <jiri@mellanox.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 */
11
12#include <linux/kernel.h>
13#include <linux/init.h>
14#include <linux/module.h>
Cong Wangf88c19a2019-01-17 12:44:25 -080015#include <linux/percpu.h>
Jiri Pirkobf3994d2016-07-21 12:03:11 +020016
17#include <net/sch_generic.h>
18#include <net/pkt_cls.h>
19
/* Per-tcf_proto state.  matchall keeps at most one filter instance, reached
 * via tp->root (RCU-protected).
 */
struct cls_mall_head {
	struct tcf_exts exts;		/* actions attached to the filter */
	struct tcf_result res;		/* classification result (classid) */
	u32 handle;			/* filter handle */
	u32 flags;			/* TCA_CLS_FLAGS_* (skip_hw/skip_sw/...) */
	unsigned int in_hw_count;	/* number of drivers holding the hw filter */
	struct tc_matchall_pcnt __percpu *pf;	/* per-cpu software hit counter */
	struct rcu_work rwork;		/* deferred destruction after RCU grace */
};
29
/* Fast path: matchall matches every packet, so just hand back the stored
 * result (unless the filter is hardware-only) and run the attached actions.
 */
static int mall_classify(struct sk_buff *skb, const struct tcf_proto *tp,
			 struct tcf_result *res)
{
	struct cls_mall_head *head = rcu_dereference_bh(tp->root);

	/* No filter installed (or it is being torn down). */
	if (unlikely(!head))
		return -1;

	/* skip_sw filters are only matched in hardware. */
	if (tc_skip_sw(head->flags))
		return -1;

	*res = head->res;
	__this_cpu_inc(head->pf->rhit);	/* count the software hit */
	return tcf_exts_exec(skb, &head->exts, res);
}
45
/* Nothing to set up front; tp->root stays NULL until a filter is added. */
static int mall_init(struct tcf_proto *tp)
{
	return 0;
}
50
/* Final teardown of a filter instance: release actions, drop the netns
 * reference taken via tcf_exts_get_net() and free all memory.
 */
static void __mall_destroy(struct cls_mall_head *head)
{
	tcf_exts_destroy(&head->exts);
	tcf_exts_put_net(&head->exts);
	free_percpu(head->pf);
	kfree(head);
}
58
/* Deferred destruction: executed from the tc filter workqueue after an RCU
 * grace period; takes RTNL for the actual teardown.
 */
static void mall_destroy_work(struct work_struct *work)
{
	struct cls_mall_head *head = container_of(to_rcu_work(work),
						  struct cls_mall_head,
						  rwork);
	rtnl_lock();
	__mall_destroy(head);
	rtnl_unlock();
}
68
/* Ask the offload-capable drivers bound to the block to remove the filter
 * and decrement the block's offload counter accordingly.
 */
static void mall_destroy_hw_filter(struct tcf_proto *tp,
				   struct cls_mall_head *head,
				   unsigned long cookie,
				   struct netlink_ext_ack *extack)
{
	struct tc_cls_matchall_offload cls_mall = {};
	struct tcf_block *block = tp->chain->block;

	tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, extack);
	cls_mall.command = TC_CLSMATCHALL_DESTROY;
	cls_mall.cookie = cookie;

	tc_setup_cb_call(block, TC_SETUP_CLSMATCHALL, &cls_mall, false);
	tcf_block_offload_dec(block, &head->flags);
}
84
/* Offer the filter to all offload-capable drivers bound to the block.
 * Returns 0 on success or when a software fallback is acceptable, a negative
 * errno when skip_sw is set and the filter could not be installed in hw.
 */
static int mall_replace_hw_filter(struct tcf_proto *tp,
				  struct cls_mall_head *head,
				  unsigned long cookie,
				  struct netlink_ext_ack *extack)
{
	struct tc_cls_matchall_offload cls_mall = {};
	struct tcf_block *block = tp->chain->block;
	bool skip_sw = tc_skip_sw(head->flags);
	int err;

	cls_mall.rule = flow_rule_alloc(tcf_exts_num_actions(&head->exts));
	if (!cls_mall.rule)
		return -ENOMEM;

	tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, extack);
	cls_mall.command = TC_CLSMATCHALL_REPLACE;
	cls_mall.cookie = cookie;

	err = tc_setup_flow_action(&cls_mall.rule->action, &head->exts);
	if (err) {
		kfree(cls_mall.rule);
		mall_destroy_hw_filter(tp, head, cookie, NULL);
		/* Only fatal when the filter must live in hardware. */
		if (skip_sw)
			NL_SET_ERR_MSG_MOD(extack, "Failed to setup flow action");
		else
			err = 0;

		return err;
	}

	err = tc_setup_cb_call(block, TC_SETUP_CLSMATCHALL, &cls_mall, skip_sw);
	kfree(cls_mall.rule);

	if (err < 0) {
		mall_destroy_hw_filter(tp, head, cookie, NULL);
		return err;
	} else if (err > 0) {
		/* err > 0 is the number of drivers that took the filter. */
		head->in_hw_count = err;
		tcf_block_offload_inc(block, &head->flags);
	}

	if (skip_sw && !(head->flags & TCA_CLS_FLAGS_IN_HW))
		return -EINVAL;

	return 0;
}
131
/* Remove the (single) filter instance.  Hardware state is torn down
 * synchronously; software state is freed only after an RCU grace period,
 * since mall_classify() may still be traversing it.
 */
static void mall_destroy(struct tcf_proto *tp, bool rtnl_held,
			 struct netlink_ext_ack *extack)
{
	struct cls_mall_head *head = rtnl_dereference(tp->root);

	if (!head)
		return;

	tcf_unbind_filter(tp, &head->res);

	if (!tc_skip_hw(head->flags))
		mall_destroy_hw_filter(tp, head, (unsigned long) head, extack);

	/* No netns reference available (netns dismantle in progress):
	 * free immediately instead of deferring to the workqueue.
	 */
	if (tcf_exts_get_net(&head->exts))
		tcf_queue_work(&head->rwork, mall_destroy_work);
	else
		__mall_destroy(head);
}
150
WANG Cong8113c092017-08-04 21:31:43 -0700151static void *mall_get(struct tcf_proto *tp, u32 handle)
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200152{
Nicolas Dichtel0db6f8b2019-03-28 10:35:06 +0100153 struct cls_mall_head *head = rtnl_dereference(tp->root);
154
155 if (head && head->handle == handle)
156 return head;
157
WANG Cong8113c092017-08-04 21:31:43 -0700158 return NULL;
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200159}
160
/* Netlink attribute policy for TCA_MATCHALL_* options. */
static const struct nla_policy mall_policy[TCA_MATCHALL_MAX + 1] = {
	[TCA_MATCHALL_UNSPEC] = { .type = NLA_UNSPEC },
	[TCA_MATCHALL_CLASSID] = { .type = NLA_U32 },
};
165
/* Validate/attach actions and, if TCA_MATCHALL_CLASSID is present, bind the
 * filter result to the given class.
 */
static int mall_set_parms(struct net *net, struct tcf_proto *tp,
			  struct cls_mall_head *head,
			  unsigned long base, struct nlattr **tb,
			  struct nlattr *est, bool ovr,
			  struct netlink_ext_ack *extack)
{
	int err;

	err = tcf_exts_validate(net, tp, tb, est, &head->exts, ovr, true,
				extack);
	if (err < 0)
		return err;

	if (tb[TCA_MATCHALL_CLASSID]) {
		head->res.classid = nla_get_u32(tb[TCA_MATCHALL_CLASSID]);
		tcf_bind_filter(tp, &head->res, base);
	}
	return 0;
}
185
186static int mall_change(struct net *net, struct sk_buff *in_skb,
187 struct tcf_proto *tp, unsigned long base,
188 u32 handle, struct nlattr **tca,
Vlad Buslov12db03b2019-02-11 10:55:45 +0200189 void **arg, bool ovr, bool rtnl_held,
190 struct netlink_ext_ack *extack)
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200191{
192 struct cls_mall_head *head = rtnl_dereference(tp->root);
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200193 struct nlattr *tb[TCA_MATCHALL_MAX + 1];
Yotam Gigifd62d9f2017-01-31 15:14:29 +0200194 struct cls_mall_head *new;
Yotam Gigib87f7932016-07-21 12:03:12 +0200195 u32 flags = 0;
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200196 int err;
197
198 if (!tca[TCA_OPTIONS])
199 return -EINVAL;
200
Yotam Gigifd62d9f2017-01-31 15:14:29 +0200201 if (head)
202 return -EEXIST;
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200203
Johannes Berg8cb08172019-04-26 14:07:28 +0200204 err = nla_parse_nested_deprecated(tb, TCA_MATCHALL_MAX,
205 tca[TCA_OPTIONS], mall_policy, NULL);
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200206 if (err < 0)
207 return err;
208
Yotam Gigib87f7932016-07-21 12:03:12 +0200209 if (tb[TCA_MATCHALL_FLAGS]) {
210 flags = nla_get_u32(tb[TCA_MATCHALL_FLAGS]);
211 if (!tc_flags_valid(flags))
212 return -EINVAL;
213 }
214
Yotam Gigifd62d9f2017-01-31 15:14:29 +0200215 new = kzalloc(sizeof(*new), GFP_KERNEL);
216 if (!new)
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200217 return -ENOBUFS;
218
Cong Wang14215102019-02-20 21:37:42 -0800219 err = tcf_exts_init(&new->exts, net, TCA_MATCHALL_ACT, 0);
Yotam Gigiec2507d2017-01-03 19:20:24 +0200220 if (err)
221 goto err_exts_init;
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200222
223 if (!handle)
224 handle = 1;
Yotam Gigifd62d9f2017-01-31 15:14:29 +0200225 new->handle = handle;
226 new->flags = flags;
Cong Wangf88c19a2019-01-17 12:44:25 -0800227 new->pf = alloc_percpu(struct tc_matchall_pcnt);
228 if (!new->pf) {
229 err = -ENOMEM;
230 goto err_alloc_percpu;
231 }
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200232
Alexander Aring50a56192018-01-18 11:20:52 -0500233 err = mall_set_parms(net, tp, new, base, tb, tca[TCA_RATE], ovr,
234 extack);
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200235 if (err)
Yotam Gigiec2507d2017-01-03 19:20:24 +0200236 goto err_set_parms;
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200237
Jiri Pirko2447a962017-10-19 15:50:33 +0200238 if (!tc_skip_hw(new->flags)) {
Quentin Monnet02798142018-01-19 17:44:44 -0800239 err = mall_replace_hw_filter(tp, new, (unsigned long)new,
240 extack);
Jiri Pirko2447a962017-10-19 15:50:33 +0200241 if (err)
242 goto err_replace_hw_filter;
Yotam Gigib87f7932016-07-21 12:03:12 +0200243 }
244
Or Gerlitzc7d2b2f2017-02-16 10:31:14 +0200245 if (!tc_in_hw(new->flags))
246 new->flags |= TCA_CLS_FLAGS_NOT_IN_HW;
247
WANG Cong8113c092017-08-04 21:31:43 -0700248 *arg = head;
Yotam Gigifd62d9f2017-01-31 15:14:29 +0200249 rcu_assign_pointer(tp->root, new);
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200250 return 0;
251
Yotam Gigiec2507d2017-01-03 19:20:24 +0200252err_replace_hw_filter:
253err_set_parms:
Cong Wangf88c19a2019-01-17 12:44:25 -0800254 free_percpu(new->pf);
255err_alloc_percpu:
David S. Millere2160152017-02-02 16:54:00 -0500256 tcf_exts_destroy(&new->exts);
Yotam Gigiec2507d2017-01-03 19:20:24 +0200257err_exts_init:
Yotam Gigifd62d9f2017-01-31 15:14:29 +0200258 kfree(new);
Jiri Pirkobf3994d2016-07-21 12:03:11 +0200259 return err;
260}
261
/* The single matchall instance cannot be deleted on its own; it is removed
 * together with the tcf_proto via mall_destroy().
 */
static int mall_delete(struct tcf_proto *tp, void *arg, bool *last,
		       bool rtnl_held, struct netlink_ext_ack *extack)
{
	return -EOPNOTSUPP;
}
267
/* Walk the (at most one) filter instance for dump/iteration.  The skip/count
 * bookkeeping follows the common tcf_walker protocol.
 */
static void mall_walk(struct tcf_proto *tp, struct tcf_walker *arg,
		      bool rtnl_held)
{
	struct cls_mall_head *head = rtnl_dereference(tp->root);

	if (arg->count < arg->skip)
		goto skip;

	if (!head)
		return;
	if (arg->fn(tp, head, arg) < 0)
		arg->stop = 1;
skip:
	arg->count++;
}
283
/* Replay (add) or remove the hardware filter on a single driver callback,
 * used when a block binding changes.  Errors are fatal only when adding a
 * filter that must reside in hardware (skip_sw).
 */
static int mall_reoffload(struct tcf_proto *tp, bool add, tc_setup_cb_t *cb,
			  void *cb_priv, struct netlink_ext_ack *extack)
{
	struct cls_mall_head *head = rtnl_dereference(tp->root);
	struct tc_cls_matchall_offload cls_mall = {};
	struct tcf_block *block = tp->chain->block;
	int err;

	if (tc_skip_hw(head->flags))
		return 0;

	cls_mall.rule = flow_rule_alloc(tcf_exts_num_actions(&head->exts));
	if (!cls_mall.rule)
		return -ENOMEM;

	tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, extack);
	cls_mall.command = add ?
		TC_CLSMATCHALL_REPLACE : TC_CLSMATCHALL_DESTROY;
	cls_mall.cookie = (unsigned long)head;

	err = tc_setup_flow_action(&cls_mall.rule->action, &head->exts);
	if (err) {
		kfree(cls_mall.rule);
		if (add && tc_skip_sw(head->flags)) {
			NL_SET_ERR_MSG_MOD(extack, "Failed to setup flow action");
			return err;
		}
		/* Best effort for sw-capable filters. */
		return 0;
	}

	err = cb(TC_SETUP_CLSMATCHALL, &cls_mall, cb_priv);
	kfree(cls_mall.rule);

	if (err) {
		if (add && tc_skip_sw(head->flags))
			return err;
		return 0;
	}

	tc_cls_offload_cnt_update(block, &head->in_hw_count, &head->flags, add);

	return 0;
}
327
/* Query hardware counters from the drivers and fold them into the filter's
 * action stats.
 */
static void mall_stats_hw_filter(struct tcf_proto *tp,
				 struct cls_mall_head *head,
				 unsigned long cookie)
{
	struct tc_cls_matchall_offload cls_mall = {};
	struct tcf_block *block = tp->chain->block;

	tc_cls_common_offload_init(&cls_mall.common, tp, head->flags, NULL);
	cls_mall.command = TC_CLSMATCHALL_STATS;
	cls_mall.cookie = cookie;

	tc_setup_cb_call(block, TC_SETUP_CLSMATCHALL, &cls_mall, false);

	tcf_exts_stats_update(&head->exts, cls_mall.stats.bytes,
			      cls_mall.stats.pkts, cls_mall.stats.lastused);
}
344
/* Dump the filter's configuration and stats into a netlink message.
 * Returns skb->len on success, -1 on netlink put failure.
 */
static int mall_dump(struct net *net, struct tcf_proto *tp, void *fh,
		     struct sk_buff *skb, struct tcmsg *t, bool rtnl_held)
{
	struct tc_matchall_pcnt gpf = {};
	struct cls_mall_head *head = fh;
	struct nlattr *nest;
	int cpu;

	if (!head)
		return skb->len;

	/* Refresh action stats from hardware first, when offloaded. */
	if (!tc_skip_hw(head->flags))
		mall_stats_hw_filter(tp, head, (unsigned long)head);

	t->tcm_handle = head->handle;

	nest = nla_nest_start_noflag(skb, TCA_OPTIONS);
	if (!nest)
		goto nla_put_failure;

	if (head->res.classid &&
	    nla_put_u32(skb, TCA_MATCHALL_CLASSID, head->res.classid))
		goto nla_put_failure;

	if (head->flags && nla_put_u32(skb, TCA_MATCHALL_FLAGS, head->flags))
		goto nla_put_failure;

	/* Sum the per-cpu software hit counters. */
	for_each_possible_cpu(cpu) {
		struct tc_matchall_pcnt *pf = per_cpu_ptr(head->pf, cpu);

		gpf.rhit += pf->rhit;
	}

	if (nla_put_64bit(skb, TCA_MATCHALL_PCNT,
			  sizeof(struct tc_matchall_pcnt),
			  &gpf, TCA_MATCHALL_PAD))
		goto nla_put_failure;

	if (tcf_exts_dump(skb, &head->exts))
		goto nla_put_failure;

	nla_nest_end(skb, nest);

	if (tcf_exts_dump_stats(skb, &head->exts) < 0)
		goto nla_put_failure;

	return skb->len;

nla_put_failure:
	nla_nest_cancel(skb, nest);
	return -1;
}
397
Cong Wang07d79fc2017-08-30 14:30:36 -0700398static void mall_bind_class(void *fh, u32 classid, unsigned long cl)
399{
400 struct cls_mall_head *head = fh;
401
402 if (head && head->res.classid == classid)
403 head->res.class = cl;
404}
405
/* tcf_proto_ops vtable registering matchall with the tc classifier core. */
static struct tcf_proto_ops cls_mall_ops __read_mostly = {
	.kind		= "matchall",
	.classify	= mall_classify,
	.init		= mall_init,
	.destroy	= mall_destroy,
	.get		= mall_get,
	.change		= mall_change,
	.delete		= mall_delete,
	.walk		= mall_walk,
	.reoffload	= mall_reoffload,
	.dump		= mall_dump,
	.bind_class	= mall_bind_class,
	.owner		= THIS_MODULE,
};
420
/* Module init: register the classifier with the tc core. */
static int __init cls_mall_init(void)
{
	return register_tcf_proto_ops(&cls_mall_ops);
}
425
/* Module exit: unregister the classifier. */
static void __exit cls_mall_exit(void)
{
	unregister_tcf_proto_ops(&cls_mall_ops);
}
430
module_init(cls_mall_init);
module_exit(cls_mall_exit);

MODULE_AUTHOR("Jiri Pirko <jiri@mellanox.com>");
MODULE_DESCRIPTION("Match-all classifier");
MODULE_LICENSE("GPL v2");