author    Rich Felker <dalias@aerifal.cx>  2019-05-24 10:46:08 -0400
committer Rich Felker <dalias@aerifal.cx>  2019-06-14 17:13:05 -0400
commit    0a48860c27a8eb291bcc7616ea9eb073dc660cab (patch)
tree      6021d6d18943d7b883e38e2f3e20a3b81d916fc5 /src/thread
parent    5fc43798250255455e4b5f9b08000bd3102274d9 (diff)
download  musl-0a48860c27a8eb291bcc7616ea9eb073dc660cab.tar.gz
          musl-0a48860c27a8eb291bcc7616ea9eb073dc660cab.tar.xz
          musl-0a48860c27a8eb291bcc7616ea9eb073dc660cab.zip
add riscv64 architecture support
Author: Alex Suykov <alex.suykov@gmail.com>
Author: Aric Belsito <lluixhi@gmail.com>
Author: Drew DeVault <sir@cmpwn.com>
Author: Michael Clark <mjc@sifive.com>
Author: Michael Forney <mforney@mforney.org>
Author: Stefan O'Rear <sorear2@gmail.com>

This port has involved the work of many people over several years. I
have tried to ensure that everyone with substantial contributions has
been credited above; if any omissions are found they will be noted
later in an update to the authors/contributors list in the COPYRIGHT
file.

The version committed here comes from the riscv/riscv-musl repo's
commit 3fe7e2c75df78eef42dcdc352a55757729f451e2, with minor changes by
me for issues found during final review:

- a_ll/a_sc atomics are removed (according to the ISA spec, lr/sc
  are not safe to use in separate inline asm fragments)

- a_cas[_p] is fixed to be a full memory barrier (a sketch of the
  resulting lr/sc sequence appears after this list)

- the call from the _start assembly into the C part of crt1/ldso is
  changed to allow for the possibility that the linker does not place
  them near each other.

- DTP_OFFSET is defined correctly so that local-dynamic TLS works

- reloc.h LDSO_ARCH logic is simplified and made explicit.

- unused, non-functional crti/n asm files are removed.

- an empty .sdata section is added to crt1 so that the
  __global_pointer reference is resolvable.

- indentation style errors in some asm files are fixed.
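For context on the first two points: on riscv64 a compare-and-swap has to be
built from an lr/sc loop kept inside a single asm block, and giving both
instructions acquire-release ordering is what lets a_cas act as a full
barrier. A minimal illustrative sketch of such a sequence (not the exact code
added by this commit) might look like:

static inline int a_cas(volatile int *p, int t, int s)
{
	int old, tmp;
	__asm__ __volatile__ (
		/* the whole lr/sc loop stays in one asm block; splitting
		 * lr and sc across separate fragments is what the removed
		 * a_ll/a_sc primitives would have required */
		"1:	lr.w.aqrl %0, (%2)\n"
		"	bne %0, %3, 2f\n"
		"	sc.w.aqrl %1, %4, (%2)\n"
		"	bnez %1, 1b\n"
		"2:\n"
		: "=&r"(old), "=&r"(tmp)
		: "r"(p), "r"(t), "r"(s)
		: "memory");
	return old;	/* equals t if the swap succeeded */
}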
Diffstat (limited to 'src/thread')
-rw-r--r--  src/thread/riscv64/__set_thread_area.s |  6
-rw-r--r--  src/thread/riscv64/__unmapself.s       |  7
-rw-r--r--  src/thread/riscv64/clone.s             | 34
-rw-r--r--  src/thread/riscv64/syscall_cp.s        | 29
4 files changed, 76 insertions, 0 deletions
diff --git a/src/thread/riscv64/__set_thread_area.s b/src/thread/riscv64/__set_thread_area.s
new file mode 100644
index 00000000..828154d2
--- /dev/null
+++ b/src/thread/riscv64/__set_thread_area.s
@@ -0,0 +1,6 @@
+.global __set_thread_area
+.type   __set_thread_area, %function
+__set_thread_area:
+	mv tp, a0
+	li a0, 0
+	ret
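The file above is the entire thread-pointer setup on riscv64: the thread
descriptor address is moved into the tp register and the function returns
success. A rough C-level equivalent, purely for illustration (the function
name is hypothetical, not musl's):

int set_thread_area_sketch(void *p)
{
	/* riscv64 keeps the thread pointer in the tp register, so
	 * installing the thread descriptor is a single register move */
	__asm__ __volatile__ ("mv tp, %0" : : "r"(p));
	return 0;	/* matches the li a0, 0 / ret above */
}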
diff --git a/src/thread/riscv64/__unmapself.s b/src/thread/riscv64/__unmapself.s
new file mode 100644
index 00000000..2849119c
--- /dev/null
+++ b/src/thread/riscv64/__unmapself.s
@@ -0,0 +1,7 @@
+.global __unmapself
+.type __unmapself, %function
+__unmapself:
+	li a7, 215 # SYS_munmap
+	ecall
+	li a7, 93  # SYS_exit
+	ecall
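__unmapself is the last code an exiting detached thread runs: it unmaps the
thread's own stack and then exits via raw syscalls, so nothing touches the
just-unmapped stack afterwards; 215 and 93 are the riscv64 numbers for
SYS_munmap and SYS_exit. A hedged C sketch of the same sequence (illustrative
only; the real routine must stay in asm because the compiler could spill to
the stack between the two calls):

#include <stddef.h>

static long raw_syscall2(long n, long a, long b)
{
	register long a7 __asm__("a7") = n;
	register long a0 __asm__("a0") = a;
	register long a1 __asm__("a1") = b;
	__asm__ __volatile__ ("ecall"
		: "+r"(a0)
		: "r"(a7), "r"(a1)
		: "memory");
	return a0;
}

_Noreturn static void unmapself_sketch(void *base, size_t size)
{
	raw_syscall2(215, (long)base, (long)size);	/* SYS_munmap */
	raw_syscall2(93, 0, 0);				/* SYS_exit   */
	for (;;);					/* not reached */
}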
diff --git a/src/thread/riscv64/clone.s b/src/thread/riscv64/clone.s
new file mode 100644
index 00000000..db908248
--- /dev/null
+++ b/src/thread/riscv64/clone.s
@@ -0,0 +1,34 @@
+# __clone(func, stack, flags, arg, ptid, tls, ctid)
+#           a0,    a1,    a2,  a3,   a4,  a5,   a6
+
+# syscall(SYS_clone, flags, stack, ptid, tls, ctid)
+#                a7     a0,    a1,   a2,  a3,   a4
+
+.global __clone
+.type  __clone, %function
+__clone:
+	# Save func and arg to stack
+	addi a1, a1, -16
+	sd a0, 0(a1)
+	sd a3, 8(a1)
+
+	# Call SYS_clone
+	mv a0, a2
+	mv a2, a4
+	mv a3, a5
+	mv a4, a6
+	li a7, 220 # SYS_clone
+	ecall
+
+	beqz a0, 1f
+	# Parent
+	ret
+
+	# Child
+1:      ld a1, 0(sp)
+	ld a0, 8(sp)
+	jalr a1
+
+	# Exit
+	li a7, 93 # SYS_exit
+	ecall
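The wrapper above stashes func and arg on the child's stack, shifts the
remaining arguments into the kernel's SYS_clone register order, and after the
ecall either returns the new tid to the parent or, in the child, reloads
func/arg from the stack, calls func(arg), and passes the return value to
SYS_exit. A hypothetical call site (the prototype follows the register
comment at the top of the file; the helper names and flag choice are
illustrative, not musl's pthread_create):

#define _GNU_SOURCE
#include <sched.h>	/* CLONE_* flags */

int __clone(int (*func)(void *), void *stack, int flags, void *arg,
            int *ptid, void *tls, int *ctid);

static int child_main(void *arg)
{
	return 0;	/* handed to SYS_exit by the asm */
}

static int spawn_sketch(void *stack_top, void *tls, int *tidptr)
{
	int flags = CLONE_VM | CLONE_FS | CLONE_FILES | CLONE_SIGHAND |
	            CLONE_THREAD | CLONE_SYSVSEM | CLONE_SETTLS |
	            CLONE_PARENT_SETTID | CLONE_CHILD_CLEARTID;
	/* stack_top should be 16-byte aligned; the asm makes room for
	 * func and arg below it before issuing the syscall */
	return __clone(child_main, stack_top, flags, 0, tidptr, tls, tidptr);
}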
diff --git a/src/thread/riscv64/syscall_cp.s b/src/thread/riscv64/syscall_cp.s
new file mode 100644
index 00000000..eeef6391
--- /dev/null
+++ b/src/thread/riscv64/syscall_cp.s
@@ -0,0 +1,29 @@
+.global __cp_begin
+.hidden __cp_begin
+.global __cp_end
+.hidden __cp_end
+.global __cp_cancel
+.hidden __cp_cancel
+.hidden __cancel
+.global __syscall_cp_asm
+.hidden __syscall_cp_asm
+.type __syscall_cp_asm, %function
+__syscall_cp_asm:
+__cp_begin:
+	lw t0, 0(a0)
+	bnez t0, __cp_cancel
+
+	mv t0, a1
+	mv a0, a2
+	mv a1, a3
+	mv a2, a4
+	mv a3, a5
+	mv a4, a6
+	mv a5, a7
+	ld a6, 0(sp)
+	mv a7, t0
+	ecall
+__cp_end:
+	ret
+__cp_cancel:
+	tail __cancel
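This is the cancellable-syscall wrapper: a0 points at the thread's
cancellation flag, a1 carries the syscall number, and the six syscall
arguments arrive in a2-a7 and are shifted down into a0-a5 for the kernel. If
the flag is already set, control jumps to __cp_cancel, which tail-calls
__cancel instead of issuing the syscall. The hidden __cp_begin/__cp_end
labels let the cancellation signal handler decide whether a thread was
interrupted inside this window; a sketch of that check (the real logic lives
in src/thread/pthread_cancel.c, and the helper name below is illustrative):

#include <stdint.h>

extern const char __cp_begin[], __cp_end[], __cp_cancel[];

/* If a cancellation signal lands while the program counter is inside
 * [__cp_begin, __cp_end), the handler can restart the thread at
 * __cp_cancel so that __cancel runs instead of the syscall. */
static int pc_in_cancel_window(uintptr_t pc)
{
	return pc >= (uintptr_t)__cp_begin && pc < (uintptr_t)__cp_end;
}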