~ubuntu-branches/ubuntu/precise/linux-ti-omap4/precise-security

« back to all changes in this revision

Viewing changes to drivers/dma/pch_dma.c

  • Committer: Package Import Robot
  • Author(s): Paolo Pisati, Paolo Pisati
  • Date: 2011-12-06 15:56:07 UTC
  • Revision ID: package-import@ubuntu.com-20111206155607-pcf44kv5fmhk564f
Tags: 3.2.0-1401.1
[ Paolo Pisati ]

* Rebased on top of Ubuntu-3.2.0-3.8
* Tilt-tracking @ ef2487af4bb15bdd0689631774b5a5e3a59f74e2
* Delete debian.ti-omap4/control, it shouldn't be tracked
* Fix architecture spelling (s/armel/armhf/)
* [Config] Update configs following 3.2 import
* [Config] Fix compilation: disable CODA and ARCH_OMAP3
* [Config] Fix compilation: disable Ethernet Faraday
* Update series to precise

Show diffs side-by-side

added added

removed removed

Lines of Context:
45
45
#define DMA_STATUS_MASK_BITS            0x3
46
46
#define DMA_STATUS_SHIFT_BITS           16
47
47
#define DMA_STATUS_IRQ(x)               (0x1 << (x))
48
 
#define DMA_STATUS_ERR(x)               (0x1 << ((x) + 8))
 
48
#define DMA_STATUS0_ERR(x)              (0x1 << ((x) + 8))
 
49
#define DMA_STATUS2_ERR(x)              (0x1 << (x))
49
50
 
50
51
#define DMA_DESC_WIDTH_SHIFT_BITS       12
51
52
#define DMA_DESC_WIDTH_1_BYTE           (0x3 << DMA_DESC_WIDTH_SHIFT_BITS)
59
60
#define DMA_DESC_FOLLOW_WITHOUT_IRQ     0x2
60
61
#define DMA_DESC_FOLLOW_WITH_IRQ        0x3
61
62
 
62
 
#define MAX_CHAN_NR                     8
 
63
#define MAX_CHAN_NR                     12
 
64
 
 
65
#define DMA_MASK_CTL0_MODE      0x33333333
 
66
#define DMA_MASK_CTL2_MODE      0x00003333
63
67
 
64
68
static unsigned int init_nr_desc_per_channel = 64;
65
69
module_param(init_nr_desc_per_channel, uint, 0644);
133
137
#define PCH_DMA_CTL3    0x0C
134
138
#define PCH_DMA_STS0    0x10
135
139
#define PCH_DMA_STS1    0x14
 
140
#define PCH_DMA_STS2    0x18
136
141
 
137
142
#define dma_readl(pd, name) \
138
143
        readl((pd)->membase + PCH_DMA_##name)
183
188
{
184
189
        struct pch_dma *pd = to_pd(chan->device);
185
190
        u32 val;
 
191
        int pos;
 
192
 
 
193
        if (chan->chan_id < 8)
 
194
                pos = chan->chan_id;
 
195
        else
 
196
                pos = chan->chan_id + 8;
186
197
 
187
198
        val = dma_readl(pd, CTL2);
188
199
 
189
200
        if (enable)
190
 
                val |= 0x1 << chan->chan_id;
 
201
                val |= 0x1 << pos;
191
202
        else
192
 
                val &= ~(0x1 << chan->chan_id);
 
203
                val &= ~(0x1 << pos);
193
204
 
194
205
        dma_writel(pd, CTL2, val);
195
206
 
202
213
        struct pch_dma_chan *pd_chan = to_pd_chan(chan);
203
214
        struct pch_dma *pd = to_pd(chan->device);
204
215
        u32 val;
 
216
        u32 mask_mode;
 
217
        u32 mask_ctl;
205
218
 
206
219
        if (chan->chan_id < 8) {
207
220
                val = dma_readl(pd, CTL0);
208
221
 
 
222
                mask_mode = DMA_CTL0_MODE_MASK_BITS <<
 
223
                                        (DMA_CTL0_BITS_PER_CH * chan->chan_id);
 
224
                mask_ctl = DMA_MASK_CTL0_MODE & ~(DMA_CTL0_MODE_MASK_BITS <<
 
225
                                       (DMA_CTL0_BITS_PER_CH * chan->chan_id));
 
226
                val &= mask_mode;
209
227
                if (pd_chan->dir == DMA_TO_DEVICE)
210
228
                        val |= 0x1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id +
211
229
                                       DMA_CTL0_DIR_SHIFT_BITS);
213
231
                        val &= ~(0x1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id +
214
232
                                         DMA_CTL0_DIR_SHIFT_BITS));
215
233
 
 
234
                val |= mask_ctl;
216
235
                dma_writel(pd, CTL0, val);
217
236
        } else {
218
237
                int ch = chan->chan_id - 8; /* ch8-->0 ch9-->1 ... ch11->3 */
219
238
                val = dma_readl(pd, CTL3);
220
239
 
 
240
                mask_mode = DMA_CTL0_MODE_MASK_BITS <<
 
241
                                                (DMA_CTL0_BITS_PER_CH * ch);
 
242
                mask_ctl = DMA_MASK_CTL2_MODE & ~(DMA_CTL0_MODE_MASK_BITS <<
 
243
                                                 (DMA_CTL0_BITS_PER_CH * ch));
 
244
                val &= mask_mode;
221
245
                if (pd_chan->dir == DMA_TO_DEVICE)
222
246
                        val |= 0x1 << (DMA_CTL0_BITS_PER_CH * ch +
223
247
                                       DMA_CTL0_DIR_SHIFT_BITS);
224
248
                else
225
249
                        val &= ~(0x1 << (DMA_CTL0_BITS_PER_CH * ch +
226
250
                                         DMA_CTL0_DIR_SHIFT_BITS));
227
 
 
 
251
                val |= mask_ctl;
228
252
                dma_writel(pd, CTL3, val);
229
253
        }
230
254
 
236
260
{
237
261
        struct pch_dma *pd = to_pd(chan->device);
238
262
        u32 val;
 
263
        u32 mask_ctl;
 
264
        u32 mask_dir;
239
265
 
240
266
        if (chan->chan_id < 8) {
 
267
                mask_ctl = DMA_MASK_CTL0_MODE & ~(DMA_CTL0_MODE_MASK_BITS <<
 
268
                           (DMA_CTL0_BITS_PER_CH * chan->chan_id));
 
269
                mask_dir = 1 << (DMA_CTL0_BITS_PER_CH * chan->chan_id +\
 
270
                                 DMA_CTL0_DIR_SHIFT_BITS);
241
271
                val = dma_readl(pd, CTL0);
242
 
 
243
 
                val &= ~(DMA_CTL0_MODE_MASK_BITS <<
244
 
                        (DMA_CTL0_BITS_PER_CH * chan->chan_id));
 
272
                val &= mask_dir;
245
273
                val |= mode << (DMA_CTL0_BITS_PER_CH * chan->chan_id);
246
 
 
 
274
                val |= mask_ctl;
247
275
                dma_writel(pd, CTL0, val);
248
276
        } else {
249
277
                int ch = chan->chan_id - 8; /* ch8-->0 ch9-->1 ... ch11->3 */
250
 
 
 
278
                mask_ctl = DMA_MASK_CTL2_MODE & ~(DMA_CTL0_MODE_MASK_BITS <<
 
279
                                                 (DMA_CTL0_BITS_PER_CH * ch));
 
280
                mask_dir = 1 << (DMA_CTL0_BITS_PER_CH * ch +\
 
281
                                 DMA_CTL0_DIR_SHIFT_BITS);
251
282
                val = dma_readl(pd, CTL3);
252
 
 
253
 
                val &= ~(DMA_CTL0_MODE_MASK_BITS <<
254
 
                        (DMA_CTL0_BITS_PER_CH * ch));
 
283
                val &= mask_dir;
255
284
                val |= mode << (DMA_CTL0_BITS_PER_CH * ch);
256
 
 
 
285
                val |= mask_ctl;
257
286
                dma_writel(pd, CTL3, val);
258
 
 
259
287
        }
260
288
 
261
289
        dev_dbg(chan2dev(chan), "pdc_set_mode: chan %d -> %x\n",
262
290
                chan->chan_id, val);
263
291
}
264
292
 
265
 
static u32 pdc_get_status(struct pch_dma_chan *pd_chan)
 
293
static u32 pdc_get_status0(struct pch_dma_chan *pd_chan)
266
294
{
267
295
        struct pch_dma *pd = to_pd(pd_chan->chan.device);
268
296
        u32 val;
272
300
                        DMA_STATUS_BITS_PER_CH * pd_chan->chan.chan_id));
273
301
}
274
302
 
 
303
static u32 pdc_get_status2(struct pch_dma_chan *pd_chan)
 
304
{
 
305
        struct pch_dma *pd = to_pd(pd_chan->chan.device);
 
306
        u32 val;
 
307
 
 
308
        val = dma_readl(pd, STS2);
 
309
        return DMA_STATUS_MASK_BITS & (val >> (DMA_STATUS_SHIFT_BITS +
 
310
                        DMA_STATUS_BITS_PER_CH * (pd_chan->chan.chan_id - 8)));
 
311
}
 
312
 
275
313
static bool pdc_is_idle(struct pch_dma_chan *pd_chan)
276
314
{
277
 
        if (pdc_get_status(pd_chan) == DMA_STATUS_IDLE)
 
315
        u32 sts;
 
316
 
 
317
        if (pd_chan->chan.chan_id < 8)
 
318
                sts = pdc_get_status0(pd_chan);
 
319
        else
 
320
                sts = pdc_get_status2(pd_chan);
 
321
 
 
322
 
 
323
        if (sts == DMA_STATUS_IDLE)
278
324
                return true;
279
325
        else
280
326
                return false;
495
541
                list_add_tail(&desc->desc_node, &tmp_list);
496
542
        }
497
543
 
498
 
        spin_lock_bh(&pd_chan->lock);
 
544
        spin_lock_irq(&pd_chan->lock);
499
545
        list_splice(&tmp_list, &pd_chan->free_list);
500
546
        pd_chan->descs_allocated = i;
501
547
        pd_chan->completed_cookie = chan->cookie = 1;
502
 
        spin_unlock_bh(&pd_chan->lock);
 
548
        spin_unlock_irq(&pd_chan->lock);
503
549
 
504
550
        pdc_enable_irq(chan, 1);
505
551
 
517
563
        BUG_ON(!list_empty(&pd_chan->active_list));
518
564
        BUG_ON(!list_empty(&pd_chan->queue));
519
565
 
520
 
        spin_lock_bh(&pd_chan->lock);
 
566
        spin_lock_irq(&pd_chan->lock);
521
567
        list_splice_init(&pd_chan->free_list, &tmp_list);
522
568
        pd_chan->descs_allocated = 0;
523
 
        spin_unlock_bh(&pd_chan->lock);
 
569
        spin_unlock_irq(&pd_chan->lock);
524
570
 
525
571
        list_for_each_entry_safe(desc, _d, &tmp_list, desc_node)
526
572
                pci_pool_free(pd->pool, desc, desc->txd.phys);
536
582
        dma_cookie_t last_completed;
537
583
        int ret;
538
584
 
539
 
        spin_lock_bh(&pd_chan->lock);
 
585
        spin_lock_irq(&pd_chan->lock);
540
586
        last_completed = pd_chan->completed_cookie;
541
587
        last_used = chan->cookie;
542
 
        spin_unlock_bh(&pd_chan->lock);
 
588
        spin_unlock_irq(&pd_chan->lock);
543
589
 
544
590
        ret = dma_async_is_complete(cookie, last_completed, last_used);
545
591
 
654
700
        if (cmd != DMA_TERMINATE_ALL)
655
701
                return -ENXIO;
656
702
 
657
 
        spin_lock_bh(&pd_chan->lock);
 
703
        spin_lock_irq(&pd_chan->lock);
658
704
 
659
705
        pdc_set_mode(&pd_chan->chan, DMA_CTL0_DISABLE);
660
706
 
664
710
        list_for_each_entry_safe(desc, _d, &list, desc_node)
665
711
                pdc_chain_complete(pd_chan, desc);
666
712
 
667
 
        spin_unlock_bh(&pd_chan->lock);
 
713
        spin_unlock_irq(&pd_chan->lock);
668
714
 
669
715
        return 0;
670
716
}
693
739
        struct pch_dma *pd = (struct pch_dma *)devid;
694
740
        struct pch_dma_chan *pd_chan;
695
741
        u32 sts0;
 
742
        u32 sts2;
696
743
        int i;
697
 
        int ret = IRQ_NONE;
 
744
        int ret0 = IRQ_NONE;
 
745
        int ret2 = IRQ_NONE;
698
746
 
699
747
        sts0 = dma_readl(pd, STS0);
 
748
        sts2 = dma_readl(pd, STS2);
700
749
 
701
750
        dev_dbg(pd->dma.dev, "pd_irq sts0: %x\n", sts0);
702
751
 
703
752
        for (i = 0; i < pd->dma.chancnt; i++) {
704
753
                pd_chan = &pd->channels[i];
705
754
 
706
 
                if (sts0 & DMA_STATUS_IRQ(i)) {
707
 
                        if (sts0 & DMA_STATUS_ERR(i))
708
 
                                set_bit(0, &pd_chan->err_status);
709
 
 
710
 
                        tasklet_schedule(&pd_chan->tasklet);
711
 
                        ret = IRQ_HANDLED;
 
755
                if (i < 8) {
 
756
                        if (sts0 & DMA_STATUS_IRQ(i)) {
 
757
                                if (sts0 & DMA_STATUS0_ERR(i))
 
758
                                        set_bit(0, &pd_chan->err_status);
 
759
 
 
760
                                tasklet_schedule(&pd_chan->tasklet);
 
761
                                ret0 = IRQ_HANDLED;
 
762
                        }
 
763
                } else {
 
764
                        if (sts2 & DMA_STATUS_IRQ(i - 8)) {
 
765
                                if (sts2 & DMA_STATUS2_ERR(i))
 
766
                                        set_bit(0, &pd_chan->err_status);
 
767
 
 
768
                                tasklet_schedule(&pd_chan->tasklet);
 
769
                                ret2 = IRQ_HANDLED;
 
770
                        }
712
771
                }
713
 
 
714
772
        }
715
773
 
716
774
        /* clear interrupt bits in status register */
717
 
        dma_writel(pd, STS0, sts0);
 
775
        if (ret0)
 
776
                dma_writel(pd, STS0, sts0);
 
777
        if (ret2)
 
778
                dma_writel(pd, STS2, sts2);
718
779
 
719
 
        return ret;
 
780
        return ret0 | ret2;
720
781
}
721
782
 
722
783
#ifdef  CONFIG_PM
811
872
        int i;
812
873
 
813
874
        nr_channels = id->driver_data;
814
 
        pd = kzalloc(sizeof(struct pch_dma)+
815
 
                sizeof(struct pch_dma_chan) * nr_channels, GFP_KERNEL);
 
875
        pd = kzalloc(sizeof(*pd), GFP_KERNEL);
816
876
        if (!pd)
817
877
                return -ENOMEM;
818
878
 
865
925
        }
866
926
 
867
927
        pd->dma.dev = &pdev->dev;
868
 
        pd->dma.chancnt = nr_channels;
869
928
 
870
929
        INIT_LIST_HEAD(&pd->dma.channels);
871
930
 
874
933
 
875
934
                pd_chan->chan.device = &pd->dma;
876
935
                pd_chan->chan.cookie = 1;
877
 
                pd_chan->chan.chan_id = i;
878
936
 
879
937
                pd_chan->membase = &regs->desc[i];
880
938