/***************************************************************************
 *             __________               __   ___.
 *   Open      \______   \ ____   ____ |  | _\_ |__   _______  ___
 *   Source     |       _//  _ \_/ ___\|  |/ /| __ \ /  _ \  \/  /
 *   Jukebox    |    |   (  <_> )  \___|    < | \_\ (  <_> > <  <
 *   Firmware   |____|_  /\____/ \___  >__|_ \|___  /\____/__/\_ \
 *                     \/            \/     \/    \/            \/
 * $Id$
 *
 * Copyright (C) 2006 by Thom Johansen
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
 * KIND, either express or implied.
 *
 ****************************************************************************/
#include "config.h"

    .syntax unified

    .section    .icode,"ax",%progbits

    .align      2

/*  The following code is based on code from the Linux kernel version 2.6.15.3,
 *  linux/arch/arm/lib/memset.S
 *
 *  Copyright (C) 1995-2000 Russell King
 */
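
/*  Assumed C-level contract for this routine (the actual prototype is
 *  declared elsewhere in the tree; the exact types here are an assumption):
 *
 *      void memset16(void *dst, int val, size_t len);
 *
 *  where len counts 16-bit halfwords, not bytes. On entry:
 *      r0 = dst (assumed at least halfword aligned)
 *      r1 = fill value (assumed to fit in 16 bits)
 *      r2 = halfword count
 *
 *  Rough C equivalent of the behaviour implemented below:
 *
 *      unsigned short *p = dst;
 *      while (len--)
 *          *p++ = (unsigned short)val;
 */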

        .global     memset16
        .type       memset16,%function
memset16:
        tst     r0, #2                  @ unaligned?
        cmpne   r2, #0
        strhne  r1, [r0], #2            @ store one halfword to align
        subne   r2, r2, #1

/*
 * we know that the pointer in r0 is aligned to a word boundary.
 */
        orr     r1, r1, r1, lsl #16     @ replicate the halfword into a full word
        mov     r3, r1                  @ second copy for multi-register stores
        cmp     r2, #8                  @ fewer than 8 halfwords left?
        blt     4f                      @ yes: handle them in the short tail
/*
 * We need an extra register for this loop - save the return address and
 * use the LR
 */
        str     lr, [sp, #-4]!
        mov     ip, r1
        mov     lr, r1
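
/*
 * r1, r3, ip and lr now all hold the replicated fill word, so each stmia
 * below stores 16 bytes (8 halfwords) of the pattern.
 */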

2:      subs    r2, r2, #32
        stmiage r0!, {r1, r3, ip, lr}   @ 64 bytes at a time.
        stmiage r0!, {r1, r3, ip, lr}
        stmiage r0!, {r1, r3, ip, lr}
        stmiage r0!, {r1, r3, ip, lr}
        bgt     2b
        ldreq   pc, [sp], #4            @ Now <64 bytes to go.
/*
 * No need to correct the count; we're only testing bits from now on.
 * Subtracting 32 in the loop leaves bits 0-4 of the count unchanged,
 * so the tests below still see the true remainder.
 */
        tst     r2, #16
        stmiane r0!, {r1, r3, ip, lr}   @ 16 halfwords (32 bytes)
        stmiane r0!, {r1, r3, ip, lr}
        tst     r2, #8
        stmiane r0!, {r1, r3, ip, lr}   @ 8 halfwords (16 bytes)
        ldr     lr, [sp], #4            @ restore the saved return address

4:      tst     r2, #4
        stmiane r0!, {r1, r3}           @ 4 halfwords (8 bytes)
        tst     r2, #2
        strne   r1, [r0], #4            @ 2 halfwords (one word)

        tst     r2, #1
        strhne  r1, [r0], #2            @ the final halfword
        bx      lr
.end:
        .size   memset16,.end-memset16