summaryrefslogtreecommitdiffstats
path: root/firmware/target/arm/system-arm.h
blob: e5d3f3500195cd0f66da2920d24b7e465afbb851 (plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
/***************************************************************************
 *             __________               __   ___.
 *   Open      \______   \ ____   ____ |  | _\_ |__   _______  ___
 *   Source     |       _//  _ \_/ ___\|  |/ /| __ \ /  _ \  \/  /
 *   Jukebox    |    |   (  <_> )  \___|    < | \_\ (  <_> > <  <
 *   Firmware   |____|_  /\____/ \___  >__|_ \|___  /\____/__/\_ \
 *                     \/            \/     \/    \/            \/
 * $Id$
 *
 * Copyright (C) 2002 by Alan Korr
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
 * KIND, either express or implied.
 *
 ****************************************************************************/
#ifndef SYSTEM_ARM_H
#define SYSTEM_ARM_H

/* Common to all ARM_ARCH */
/* Single no-op instruction, usable as a tiny delay or spacer */
#define nop \
  asm volatile ("nop")

/* Division-by-zero handler; NOTE(review): presumably the runtime support
 * hook invoked on integer division by zero -- confirm at its definition */
void __div0(void);

/* Bit masks for the IRQ (I, bit 7 = 0x80) and FIQ (F, bit 6 = 0x40)
 * disable bits of the ARM CPSR.  A set bit DISABLES the exception, so
 * the *_ENABLED values are 0.  The *_STATUS macros name the same bits
 * when used as a mask argument to the helpers below. */
#define IRQ_ENABLED      0x00
#define IRQ_DISABLED     0x80
#define IRQ_STATUS       0x80
#define FIQ_ENABLED      0x00
#define FIQ_DISABLED     0x40
#define FIQ_STATUS       0x40
#define IRQ_FIQ_ENABLED  0x00
#define IRQ_FIQ_DISABLED 0xc0
#define IRQ_FIQ_STATUS   0xc0
/* Highest interrupt level used with set_irq_level(): IRQs masked */
#define HIGHEST_IRQ_LEVEL IRQ_DISABLED

/* Set the IRQ/FIQ disable bit from 'status', returning the previous CPSR */
#define set_irq_level(status) \
    set_interrupt_status((status), IRQ_STATUS)
#define set_fiq_status(status) \
    set_interrupt_status((status), FIQ_STATUS)

/* Disable IRQ/FIQ and return the previous CPSR for a later restore_*() */
#define disable_irq_save() \
    disable_interrupt_save(IRQ_STATUS)
#define disable_fiq_save() \
    disable_interrupt_save(FIQ_STATUS)

/* Write back a CPSR value obtained from one of the *_save() calls above */
#define restore_irq(cpsr) \
    restore_interrupt(cpsr)
#define restore_fiq(cpsr) \
    restore_interrupt(cpsr)

/* Unconditionally mask/unmask IRQs or FIQs (no saved state) */
#define disable_irq() \
    disable_interrupt(IRQ_STATUS)
#define enable_irq() \
    enable_interrupt(IRQ_STATUS)
#define disable_fiq() \
    disable_interrupt(FIQ_STATUS)
#define enable_fiq() \
    enable_interrupt(FIQ_STATUS)

/* Query the current CPSR: true when the given exception(s) are enabled */
#define irq_enabled() \
    interrupt_enabled(IRQ_STATUS)
#define fiq_enabled() \
    interrupt_enabled(FIQ_STATUS)
#define ints_enabled() \
    interrupt_enabled(IRQ_FIQ_STATUS)

/* Same queries, but on an already-saved CPSR value instead of reading it */
#define irq_enabled_checkval(val) \
    (((val) & IRQ_STATUS) == 0)
#define fiq_enabled_checkval(val) \
    (((val) & FIQ_STATUS) == 0)
#define ints_enabled_checkval(val) \
    (((val) & IRQ_FIQ_STATUS) == 0)

/* ARM processor mode numbers as they appear in CPSR bits [4:0]
 * (compare get_processor_mode() below, which masks with 0x1f) */
#define CPU_MODE_USER   0x10
#define CPU_MODE_FIQ    0x11
#define CPU_MODE_IRQ    0x12
#define CPU_MODE_SVC    0x13
#define CPU_MODE_ABT    0x17
#define CPU_MODE_UNDEF  0x1b
#define CPU_MODE_SYS    0x1f

/* We run in SYS mode */
#define CPU_MODE_THREAD_CONTEXT CPU_MODE_SYS

/* True when executing in normal thread context (SYS mode), i.e. not
 * inside an exception/interrupt handler */
#define is_thread_context() \
    (get_processor_mode() == CPU_MODE_THREAD_CONTEXT)

/* Assert that the processor is in the desired execution mode
 * mode:       Processor mode value to test for
 * rstatus...: Provide if you already have the value saved, otherwise leave
 *             blank to get it automatically.
 *
 * Implementation note: '*#rstatus' stringizes the variadic argument; with
 * no argument the result is the empty string "" whose first character is
 * '\0' (false), so the current mode is read via get_processor_mode().
 * With an argument the string is non-empty (true) and '(rstatus +0)' is
 * the saved CPSR value, masked down to the mode field.  On mismatch this
 * calls panicf() (declared elsewhere).  GNU C statement expression. */
#define ASSERT_CPU_MODE(mode, rstatus...) \
    ({ unsigned long __massert = (mode);                         \
       unsigned long __mproc = *#rstatus ?                       \
            ((rstatus +0) & 0x1f) : get_processor_mode();        \
       if (__mproc != __massert)                                 \
           panicf("Incorrect CPU mode in %s (0x%02lx!=0x%02lx)", \
                  __func__, __mproc, __massert); })

/* Core-level interrupt masking */

/* Replace the CPSR bits selected by 'mask' (IRQ_STATUS, FIQ_STATUS or
 * IRQ_FIQ_STATUS) with the corresponding bits of 'status'.
 * Returns the full previous CPSR value, suitable for restore_interrupt().
 * Sequence: read CPSR, clear the masked bits, OR in the requested bits,
 * write back the control field (cpsr_c). */
static inline int set_interrupt_status(int status, int mask)
{
    unsigned long cpsr;
    int oldstatus;
    /* Read the old levels and set the new ones */
#if defined(CREATIVE_ZVM) && defined(BOOTLOADER)
// FIXME:  This workaround is for a problem with inlining;
// for some reason 'mask' gets treated as a variable/non-immediate constant
// but only on this build.  All others (including the nearly-identical mrobe500boot) are fine
    asm volatile (
        "mrs    %1, cpsr        \n"
        "bic    %0, %1, %[mask] \n"
        "orr    %0, %0, %2      \n"
        "msr    cpsr_c, %0      \n"
        : "=&r,r"(cpsr), "=&r,r"(oldstatus)
        : "r,i"(status & mask), [mask]"r,i"(mask));
#else
    /* Same sequence; here 'mask' may also be an immediate ("i" alternative) */
    asm volatile (
        "mrs    %1, cpsr        \n"
        "bic    %0, %1, %[mask] \n"
        "orr    %0, %0, %2      \n"
        "msr    cpsr_c, %0      \n"
        : "=&r,r"(cpsr), "=&r,r"(oldstatus)
        : "r,i"(status & mask), [mask]"i,i"(mask));
#endif
    return oldstatus;
}

/* Restore the CPSR control field (mode and I/F bits) from a value
 * previously returned by disable_interrupt_save() or
 * set_interrupt_status(); re-enables whatever was enabled before. */
static inline void restore_interrupt(int cpsr)
{
    /* Set cpsr_c from value returned by disable_interrupt_save
     * or set_interrupt_status */
    asm volatile ("msr cpsr_c, %0" : : "r"(cpsr));
}

/* Return true when ALL exception-disable bits named by 'status'
 * (IRQ_STATUS, FIQ_STATUS or IRQ_FIQ_STATUS) are clear in the current
 * CPSR, i.e. the corresponding exceptions are enabled. */
static inline bool interrupt_enabled(int status)
{
    unsigned long cpsr;
    asm ("mrs %0, cpsr" : "=r"(cpsr));
    return (cpsr & status) == 0;
}

/* Return the current processor mode: CPSR bits [4:0], one of the
 * CPU_MODE_* values defined above. */
static inline unsigned long get_processor_mode(void)
{
    unsigned long cpsr;
    asm ("mrs %0, cpsr" : "=r"(cpsr));
    return cpsr & 0x1f;
}

/* ARM_ARCH version section for architecture*/

#if ARM_ARCH >= 6
/* Byte-swap a 16-bit value using the ARMv6 REVSH instruction.
 * REVSH also sign-extends the swapped halfword into bits [31:16], but
 * the truncation to uint16_t on return discards that. */
static inline uint16_t swap16_hw(uint16_t value)
    /*
      result[15..8] = value[ 7..0];
      result[ 7..0] = value[15..8];
    */
{
    uint32_t retval;
    asm ("revsh %0, %1"                         /* xxAB */
        : "=r"(retval) : "r"((uint32_t)value)); /* xxBA */
    return retval;
}

/* Reverse the byte order of a 32-bit value (endian swap) using the
 * ARMv6 REV instruction. */
static inline uint32_t swap32_hw(uint32_t value)
    /*
      result[31..24] = value[ 7.. 0];
      result[23..16] = value[15.. 8];
      result[15.. 8] = value[23..16];
      result[ 7.. 0] = value[31..24];
    */
{
    uint32_t retval;
    asm ("rev %0, %1"                 /* ABCD */
        : "=r"(retval) : "r"(value)); /* DCBA */
    return retval;
}

/* Byte-swap each 16-bit halfword of a 32-bit value independently using
 * the ARMv6 REV16 instruction. */
static inline uint32_t swap_odd_even32_hw(uint32_t value)
{
    /*
      result[31..24],[15.. 8] = value[23..16],[ 7.. 0]
      result[23..16],[ 7.. 0] = value[31..24],[15.. 8]
    */
    uint32_t retval;
    asm ("rev16 %0, %1"               /* ABCD */
        : "=r"(retval) : "r"(value)); /* BADC */
    return retval;
}

/* Enable IRQ and/or FIQ (ARMv6 CPSIE instruction) according to 'mask'. */
static inline void enable_interrupt(int mask)
{
    /* Clear I and/or F disable bit */
    /* mask is expected to be a compile-time constant, so the compiler
     * keeps only the relevant branch of the switch */
    switch (mask & IRQ_FIQ_STATUS)
    {
    case IRQ_STATUS:
        asm volatile ("cpsie i");
        break;
    case FIQ_STATUS:
        asm volatile ("cpsie f");
        break;
    case IRQ_FIQ_STATUS:
        asm volatile ("cpsie if");
        break;
    }
}

/* Disable IRQ and/or FIQ (ARMv6 CPSID instruction) according to 'mask'. */
static inline void disable_interrupt(int mask)
{
    /* Set I and/or F disable bit */
    /* mask is expected to be a compile-time constant, so the compiler
     * keeps only the relevant branch of the switch */
    switch (mask & IRQ_FIQ_STATUS)
    {
    case IRQ_STATUS:
        asm volatile ("cpsid i");
        break;
    case FIQ_STATUS:
        asm volatile ("cpsid f");
        break;
    case IRQ_FIQ_STATUS:
        asm volatile ("cpsid if");
        break;
    }
}

/* Disable IRQ and/or FIQ per 'mask' and return the CPSR value read just
 * before disabling, for a later restore_interrupt(). */
static inline int disable_interrupt_save(int mask)
{
    /* Set I and/or F disable bit and return old cpsr value */
    int cpsr;
    /* mask is expected to be a compile-time constant, so the compiler
     * keeps only the relevant branch of the switch */
    asm volatile("mrs %0, cpsr" : "=r"(cpsr));
    switch (mask & IRQ_FIQ_STATUS)
    {
    case IRQ_STATUS:
        asm volatile ("cpsid i");
        break;
    case FIQ_STATUS:
        asm volatile ("cpsid f");
        break;
    case IRQ_FIQ_STATUS:
        asm volatile ("cpsid if");
        break;
    }
    return cpsr;
}

#else /* ARM_ARCH < 6 */

/* Swap the two bytes of a 16-bit value (pre-ARMv6 plain-C fallback).
 *   result[15..8] = value[ 7..0];
 *   result[ 7..0] = value[15..8];
 */
static inline uint16_t swap16_hw(uint16_t value)
{
    uint16_t lower_to_upper = value << 8; /* low byte moves up */
    uint16_t upper_to_lower = value >> 8; /* high byte moves down */
    return lower_to_upper | upper_to_lower;
}

/* Reverse the byte order of a 32-bit value (endian swap) on pre-ARMv6
 * cores, using hand-scheduled Thumb or ARM instruction sequences. */
static inline uint32_t swap32_hw(uint32_t value)
    /*
      result[31..24] = value[ 7.. 0];
      result[23..16] = value[15.. 8];
      result[15.. 8] = value[23..16];
      result[ 7.. 0] = value[31..24];
    */
{
#ifdef __thumb__
    /* Thumb-1: mask out odd/even bytes, shift them past each other,
     * recombine, then rotate halfwords into place */
    uint32_t mask = 0x00FF00FF;
    asm (                            /* val  = ABCD */
        "and %1, %0              \n" /* mask = .B.D */
        "eor %0, %1              \n" /* val  = A.C. */
        "lsl %1, #8              \n" /* mask = B.D. */
        "lsr %0, #8              \n" /* val  = .A.C */
        "orr %0, %1              \n" /* val  = BADC */
        "mov %1, #16             \n" /* mask = 16   */
        "ror %0, %1              \n" /* val  = DCBA */
        : "+l"(value), "+l"(mask));
#else
    /* Classic 4-instruction ARM byte swap using a rotate and an
     * EOR/BIC scratch value */
    uint32_t tmp;
    asm (
        "eor %1, %0, %0, ror #16 \n"
        "bic %1, %1, #0xff0000   \n"
        "mov %0, %0, ror #8      \n"
        "eor %0, %0, %1, lsr #8  \n"
        : "+r" (value), "=r" (tmp));
#endif
    return value;
}

/* Byte-swap each 16-bit halfword of a 32-bit value independently
 * (pre-ARMv6 fallback; same masking trick as swap32_hw but without the
 * final halfword rotate). */
static inline uint32_t swap_odd_even32_hw(uint32_t value)
{
    /*
      result[31..24],[15.. 8] = value[23..16],[ 7.. 0]
      result[23..16],[ 7.. 0] = value[31..24],[15.. 8]
    */
#ifdef __thumb__
    uint32_t mask = 0x00FF00FF;
    asm (                            /* val  = ABCD */
        "and %1, %0             \n"  /* mask = .B.D */
        "eor %0, %1             \n"  /* val  = A.C. */
        "lsl %1, #8             \n"  /* mask = B.D. */
        "lsr %0, #8             \n"  /* val  = .A.C */
        "orr %0, %1             \n"  /* val  = BADC */
        : "+l"(value), "+l"(mask));
#else
    uint32_t tmp;
    asm (                            /* ABCD      */
        "bic %1, %0, #0x00ff00  \n"  /* AB.D      */
        "bic %0, %0, #0xff0000  \n"  /* A.CD      */
        "mov %0, %0, lsr #8     \n"  /* .A.C      */
        "orr %0, %0, %1, lsl #8 \n"  /* B.D.|.A.C */
        : "+r" (value), "=r" (tmp)); /* BADC      */
#endif
    return value;
}

/* Enable IRQ and/or FIQ on pre-ARMv6 cores by clearing the masked bits
 * in the CPSR.  The "i" constraint requires 'mask' to be a compile-time
 * constant (it becomes an immediate operand). */
static inline void enable_interrupt(int mask)
{
    /* Clear I and/or F disable bit */
    int tmp;
    asm volatile (
        "mrs     %0, cpsr   \n"
        "bic     %0, %0, %1 \n"
        "msr     cpsr_c, %0 \n"
        : "=&r"(tmp) : "i"(mask));
}

/* Disable IRQ and/or FIQ on pre-ARMv6 cores by setting the masked bits
 * in the CPSR.  The "i" constraint requires 'mask' to be a compile-time
 * constant (it becomes an immediate operand). */
static inline void disable_interrupt(int mask)
{
    /* Set I and/or F disable bit */
    int tmp;
    asm volatile (
        "mrs     %0, cpsr   \n"
        "orr     %0, %0, %1 \n"
        "msr     cpsr_c, %0 \n"
        : "=&r"(tmp) : "i"(mask));
}

/* Disable IRQ and/or FIQ per 'mask' (compile-time constant, see the "i"
 * constraint) and return the previous CPSR value for restore_interrupt(). */
static inline int disable_interrupt_save(int mask)
{
    /* Set I and/or F disable bit and return old cpsr value */
    int cpsr, tmp;
    asm volatile (
        "mrs     %1, cpsr   \n"
        "orr     %0, %1, %2 \n"
        "msr     cpsr_c, %0 \n"
        : "=&r"(tmp), "=&r"(cpsr)
        : "i"(mask));
    return cpsr;
}

#endif /* ARM_ARCH */

/* Swap the two 16-bit halfwords of a 32-bit value ("swap word") via a
 * 16-bit rotate.  Present on all ARM_ARCH levels. */
static inline uint32_t swaw32_hw(uint32_t value)
{
    /*
      result[31..16] = value[15.. 0];
      result[15.. 0] = value[31..16];
    */
#ifdef __thumb__
    /* Thumb-1 ROR only takes a register shift amount */
    asm (
        "ror %0, %1"
        : "+l"(value) : "l"(16));
    return value;
#else
    uint32_t retval;
    asm (
        "mov %0, %1, ror #16"
        : "=r"(retval) : "r"(value));
    return retval;
#endif

}

#if defined(CPU_TCC780X) || defined(CPU_TCC77X) /* Single core only for now */ \
|| CONFIG_CPU == IMX31L || CONFIG_CPU == DM320 || CONFIG_CPU == AS3525 \
|| CONFIG_CPU == S3C2440 || CONFIG_CPU == S5L8701 || CONFIG_CPU == AS3525v2 \
|| CONFIG_CPU == S5L8702
/* Use the generic ARMv4/v5/v6 wait for IRQ */
/* Put the core to sleep until an interrupt arrives (CP15 c7,c0,4 is the
 * pre-ARMv7 "wait for interrupt" operation), then re-enable IRQs so the
 * pending interrupt can be serviced. */
static inline void core_sleep(void)
{
    asm volatile (
        "mcr p15, 0, %0, c7, c0, 4 \n" /* Wait for interrupt */
#if CONFIG_CPU == IMX31L
        "nop\n nop\n nop\n nop\n nop\n" /* Clean out the pipes */
#endif
        : : "r"(0)
    );
    enable_irq();
}
#else
/* Skip this if special code is required and implemented */
#if !(defined(CPU_PP)) && CONFIG_CPU != RK27XX && CONFIG_CPU != IMX233
/* Fallback: no low-power wait available, just re-enable IRQs */
static inline void core_sleep(void)
{
    /* TODO: core_sleep not implemented, battery life will be decreased */
    enable_irq();
}
#endif /* CPU_PP */
#endif

#endif /* SYSTEM_ARM_H */