/* drivers/iio/buffer_cb.c — in-kernel IIO consumer: delivers buffered scan data to a callback */
1 #include <linux/kernel.h>
2 #include <linux/slab.h>
3 #include <linux/err.h>
4 #include <linux/export.h>
5 #include <linux/iio/buffer.h>
6 #include <linux/iio/consumer.h>
7
/*
 * State for a callback-based IIO consumer buffer.
 *
 * @buffer:   embedded IIO buffer; iio_buffer_cb_store_to() recovers this
 *            struct from it via container_of(), so it must stay a direct
 *            member (its position within the struct is what container_of
 *            subtracts).
 * @cb:       consumer callback invoked for each scan pushed into @buffer.
 * @private:  opaque consumer context passed through to @cb.
 * @channels: NULL-terminated-style array from iio_channel_get_all();
 *            owned by this struct and released in
 *            iio_channel_release_all_cb().
 */
struct iio_cb_buffer {
	struct iio_buffer buffer;
	int (*cb)(u8 *data, void *private);
	void *private;
	struct iio_channel *channels;
};
14
15 static int iio_buffer_cb_store_to(struct iio_buffer *buffer, u8 *data)
16 {
17 struct iio_cb_buffer *cb_buff = container_of(buffer,
18 struct iio_cb_buffer,
19 buffer);
20
21 return cb_buff->cb(data, cb_buff->private);
22 }
23
/*
 * Only ->store_to is implemented: data is pushed directly to the consumer
 * callback, so no read-side or storage accessors are needed.
 */
static struct iio_buffer_access_funcs iio_cb_access = {
	.store_to = &iio_buffer_cb_store_to,
};
27
28 struct iio_cb_buffer *iio_channel_get_all_cb(struct device *dev,
29 int (*cb)(u8 *data,
30 void *private),
31 void *private)
32 {
33 int ret;
34 struct iio_cb_buffer *cb_buff;
35 struct iio_dev *indio_dev;
36 struct iio_channel *chan;
37
38 cb_buff = kzalloc(sizeof(*cb_buff), GFP_KERNEL);
39 if (cb_buff == NULL) {
40 ret = -ENOMEM;
41 goto error_ret;
42 }
43
44 cb_buff->private = private;
45 cb_buff->cb = cb;
46 cb_buff->buffer.access = &iio_cb_access;
47 INIT_LIST_HEAD(&cb_buff->buffer.demux_list);
48
49 cb_buff->channels = iio_channel_get_all(dev);
50 if (IS_ERR(cb_buff->channels)) {
51 ret = PTR_ERR(cb_buff->channels);
52 goto error_free_cb_buff;
53 }
54
55 indio_dev = cb_buff->channels[0].indio_dev;
56 cb_buff->buffer.scan_mask
57 = kcalloc(BITS_TO_LONGS(indio_dev->masklength), sizeof(long),
58 GFP_KERNEL);
59 if (cb_buff->buffer.scan_mask == NULL) {
60 ret = -ENOMEM;
61 goto error_release_channels;
62 }
63 chan = &cb_buff->channels[0];
64 while (chan->indio_dev) {
65 if (chan->indio_dev != indio_dev) {
66 ret = -EINVAL;
67 goto error_free_scan_mask;
68 }
69 set_bit(chan->channel->scan_index,
70 cb_buff->buffer.scan_mask);
71 chan++;
72 }
73
74 return cb_buff;
75
76 error_free_scan_mask:
77 kfree(cb_buff->buffer.scan_mask);
78 error_release_channels:
79 iio_channel_release_all(cb_buff->channels);
80 error_free_cb_buff:
81 kfree(cb_buff);
82 error_ret:
83 return ERR_PTR(ret);
84 }
85 EXPORT_SYMBOL_GPL(iio_channel_get_all_cb);
86
87 int iio_channel_start_all_cb(struct iio_cb_buffer *cb_buff)
88 {
89 return iio_update_buffers(cb_buff->channels[0].indio_dev,
90 &cb_buff->buffer,
91 NULL);
92 }
93 EXPORT_SYMBOL_GPL(iio_channel_start_all_cb);
94
95 void iio_channel_stop_all_cb(struct iio_cb_buffer *cb_buff)
96 {
97 iio_update_buffers(cb_buff->channels[0].indio_dev,
98 NULL,
99 &cb_buff->buffer);
100 }
101 EXPORT_SYMBOL_GPL(iio_channel_stop_all_cb);
102
/*
 * Free everything allocated by iio_channel_get_all_cb(): the scan mask,
 * the channel map, then the wrapper itself.  The caller must have stopped
 * capture first (iio_channel_stop_all_cb()); cb_buff is invalid afterwards.
 */
void iio_channel_release_all_cb(struct iio_cb_buffer *cb_buff)
{
	kfree(cb_buff->buffer.scan_mask);
	iio_channel_release_all(cb_buff->channels);
	kfree(cb_buff);
}
EXPORT_SYMBOL_GPL(iio_channel_release_all_cb);
110
111 struct iio_channel
112 *iio_channel_cb_get_channels(const struct iio_cb_buffer *cb_buffer)
113 {
114 return cb_buffer->channels;
115 }
116 EXPORT_SYMBOL_GPL(iio_channel_cb_get_channels);