path: root/sysdeps/mips/mips64/memset.S
/* Copyright (C) 2002, 2003 Free Software Foundation, Inc.
   This file is part of the GNU C Library.
   Contributed by Hartvig Ekner <hartvige@mips.com>, 2002.
   Ported to mips3 n32/n64 by Alexandre Oliva <aoliva@redhat.com>

   The GNU C Library is free software; you can redistribute it and/or
   modify it under the terms of the GNU Lesser General Public
   License as published by the Free Software Foundation; either
   version 2.1 of the License, or (at your option) any later version.

   The GNU C Library is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
   Lesser General Public License for more details.

   You should have received a copy of the GNU Lesser General Public
   License along with the GNU C Library; if not, write to the Free
   Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
   02111-1307 USA.  */

#include <sysdep.h>
#include <endian.h>
#include <sys/asm.h>


/* void *memset(void *s, int c, size_t n);

   This could probably be optimized further.  */
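
/* For reference, the code below follows roughly this C-level sketch.
   The function and variable names here are illustrative only, and two
   details are simplified away: the zero-pattern shortcut (no byte
   replication when c == 0) and the single SDHI store used for the
   unaligned prefix instead of a byte loop.

	void *memset_sketch (void *s, int c, size_t n)
	{
	  unsigned char *p = s;
	  unsigned long long pat = (unsigned char) c;

	  if (n >= 8)
	    {
	      pat |= pat << 8;
	      pat |= pat << 16;
	      pat |= pat << 32;

	      while ((unsigned long) p & 7)
		{
		  *p++ = c;
		  n--;
		}

	      for (; n >= 16; n -= 16, p += 16)
		{
		  *(unsigned long long *) p = pat;
		  *(unsigned long long *) (p + 8) = pat;
		}

	      if (n >= 8)
		{
		  *(unsigned long long *) p = pat;
		  p += 8;
		  n -= 8;
		}
	    }

	  while (n > 0)
	    {
	      *p++ = c;
	      n--;
	    }

	  return s;
	}  */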

#if __BYTE_ORDER == __BIG_ENDIAN
# define SDHI	sdl		/* high part is left in big-endian	*/
#else
# define SDHI	sdr		/* high part is right in little-endian	*/
#endif
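
/* Whichever instruction SDHI expands to, the effect used below is the
   same: starting at a (possibly unaligned) address, it stores the
   pattern into every byte up to the next doubleword boundary.  Since
   all eight bytes of the register hold the same value, it does not
   matter which end of the register the bytes are taken from.  */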

ENTRY (memset)
	.set	noreorder

	slti	t5, a2, 8		# Less than 8?
	bne	t5, zero, L(last8)
	move	v0, a0			# Set up return value before it is too late

	beq	a1, zero, L(ueven)	# If zero pattern, no need to extend
	andi	a1, 0xff		# Avoid problems with bogus arguments
	dsll	t4, a1, 8
	or	a1, t4
	dsll	t4, a1, 16
	or	a1, t4			# a1 is now pattern in full word
	dsll	t4, a1, 32
	or	a1, t4			# a1 is now pattern in double word
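	/* Example: c = 0xAB leaves a1 = 0xABABABABABABABAB after the
	   shift/or steps above.  */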

L(ueven):
	PTR_SUBU t4, zero, a0		# Unaligned address?
	andi	t4, 0x7
	beq	t4, zero, L(chkw)
	PTR_SUBU a2, t4
	SDHI	a1, 0(a0)		# Yes, handle first unaligned part
	PTR_ADDU a0, t4			# Now both a0 and a2 are updated
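	/* Example: if a0 & 7 == 5, then t4 = 3; SDHI fills the three
	   bytes up to the next 8-byte boundary, a2 has already been
	   reduced by 3, and a0 is advanced to the aligned address.  */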

L(chkw):
	andi	t4, a2, 0xf		# Enough left for one loop iteration?
	beq	t4, a2, L(chkl)
	PTR_SUBU a3, a2, t4
	PTR_ADDU a3, a0			# a3 is last loop address +1
	move	a2, t4			# a2 is now # of bytes left after loop
L(loopw):
	PTR_ADDIU a0, 16		# Handle 2 doublewords per iteration
	sd	a1, -16(a0)
	bne	a0, a3, L(loopw)
	sd	a1,  -8(a0)
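	/* The second sd sits in the bne delay slot, so every pass through
	   the loop stores two doublewords (16 bytes).  */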

L(chkl):
	andi	t4, a2, 0x8		# Check if there is at least a double
	beq	t4, zero, L(last8)	#  word remaining after the loop
	PTR_SUBU a2, t4
	sd	a1, 0(a0)		# Yes...
	PTR_ADDIU a0, 8

L(last8):
	blez	a2, L(exit)		# Handle the last (< 8) bytes, if any
	PTR_ADDU a3, a2, a0		# a3 is last address +1
L(lst8l):
	PTR_ADDIU a0, 1
	bne	a0, a3, L(lst8l)
	sb	a1, -1(a0)
L(exit):
	j	ra			# Bye, bye
	nop

	.set	reorder
END (memset)