summaryrefslogtreecommitdiff
path: root/include/arch/x86_64/asm/syscall.h
diff options
context:
space:
mode:
Diffstat (limited to 'include/arch/x86_64/asm/syscall.h')
-rw-r--r--  include/arch/x86_64/asm/syscall.h | 56
1 file changed, 27 insertions, 29 deletions
diff --git a/include/arch/x86_64/asm/syscall.h b/include/arch/x86_64/asm/syscall.h
index b3d1ae48..181120ec 100644
--- a/include/arch/x86_64/asm/syscall.h
+++ b/include/arch/x86_64/asm/syscall.h
@@ -1,81 +1,79 @@
#ifndef __ASM_SYSCALL_H
#define __ASM_SYSCALL_H
/*
 * Invoke a system call with no arguments.
 *
 * n: syscall number, loaded into rax.  The raw kernel return value is
 * returned unmodified; a negative errno comes back encoded as a large
 * unsigned value (presumably decoded by the caller -- confirm).
 * The `syscall` instruction clobbers rcx and r11; the "memory" clobber
 * stops the compiler from caching memory values across the call.
 *
 * Fix: added `static`.  A plain `inline` definition in a header emits
 * no external definition under C99 inline semantics (and a duplicate
 * external one per TU under GNU89 inline), so a non-inlined call or
 * multi-TU inclusion breaks the link.  `static` makes each copy
 * TU-local -- the standard static-inline header idiom.
 */
static inline __attribute__((__always_inline__)) unsigned long __syscall0(long n)
{
	unsigned long ret;
	__asm__ volatile("syscall" : "=a"(ret) : "a"(n) : "rcx", "r11", "memory");
	return ret;
}
/*
 * Invoke a system call with one argument.
 *
 * n: syscall number in rax; a1: first argument in rdi ("D" constraint).
 * Returns the raw rax value (negative errno encoded as large unsigned).
 * rcx and r11 are clobbered by the `syscall` instruction.
 *
 * Fix: added `static` -- a bare `inline` in a header has no (C99) or a
 * duplicated (GNU89) external definition, breaking the link when the
 * header is included from several translation units.
 */
static inline __attribute__((__always_inline__)) unsigned long __syscall1(long n, long a1)
{
	unsigned long ret;
	__asm__ __volatile__("syscall" : "=a"(ret) : "a"(n), "D"(a1) : "rcx", "r11", "memory");
	return ret;
}
/*
 * Invoke a system call with two arguments.
 *
 * n: syscall number in rax; a1 in rdi ("D"), a2 in rsi ("S").
 * Returns the raw rax value (negative errno encoded as large unsigned).
 * rcx and r11 are clobbered by the `syscall` instruction.
 *
 * Fix: added `static` -- a bare `inline` in a header has no (C99) or a
 * duplicated (GNU89) external definition, breaking the link when the
 * header is included from several translation units.
 */
static inline __attribute__((__always_inline__)) unsigned long __syscall2(long n, long a1, long a2)
{
	unsigned long ret;
	__asm__ volatile("syscall" : "=a"(ret) : "a"(n), "D"(a1), "S"(a2) : "rcx", "r11", "memory");
	return ret;
}
/*
 * Invoke a system call with three arguments.
 *
 * n: syscall number in rax; a1 in rdi ("D"), a2 in rsi ("S"), a3 in
 * rdx ("d").  Returns the raw rax value (negative errno encoded as
 * large unsigned).  rcx and r11 are clobbered by `syscall`.
 *
 * Fix: added `static` -- a bare `inline` in a header has no (C99) or a
 * duplicated (GNU89) external definition, breaking the link when the
 * header is included from several translation units.
 */
static inline __attribute__((__always_inline__)) unsigned long __syscall3(long n, long a1, long a2, long a3)
{
	unsigned long ret;
	__asm__ volatile("syscall" : "=a"(ret) : "a"(n), "D"(a1), "S"(a2), "d"(a3) : "rcx", "r11", "memory");
	return ret;
}
/*
 * Invoke a system call with four arguments.
 *
 * n: syscall number in rax; a1 in rdi, a2 in rsi, a3 in rdx, a4 pinned
 * to r10 via a register-asm variable (r10, not rcx, because `syscall`
 * itself clobbers rcx).  Returns the raw rax value.
 *
 * Fix: added `static` -- a bare `inline` in a header has no (C99) or a
 * duplicated (GNU89) external definition, breaking the link when the
 * header is included from several translation units.
 */
static inline __attribute__((__always_inline__)) unsigned long __syscall4(long n, long a1, long a2, long a3, long a4)
{
	unsigned long ret;
	register long r10 __asm__("r10") = a4;
	__asm__ volatile("syscall" : "=a"(ret) : "a"(n), "D"(a1), "S"(a2), "d"(a3), "r"(r10) : "rcx", "r11", "memory");
	return ret;
}
+
/*
 * Invoke a system call with five arguments.
 *
 * n: syscall number in rax; a1 in rdi, a2 in rsi, a3 in rdx, a4 in r10
 * and a5 in r8, both pinned with register-asm variables.  Returns the
 * raw rax value; rcx and r11 are clobbered by `syscall`.
 *
 * Fix: added `static` -- a bare `inline` in a header has no (C99) or a
 * duplicated (GNU89) external definition, breaking the link when the
 * header is included from several translation units.
 */
static inline __attribute__((__always_inline__)) unsigned long __syscall5(long n, long a1, long a2, long a3, long a4, long a5)
{
	unsigned long ret;
	register long r10 __asm__("r10") = a4;
	register long r8 __asm__("r8") = a5;
	__asm__ volatile("syscall"
			 : "=a"(ret)
			 : "a"(n), "D"(a1), "S"(a2), "d"(a3), "r"(r10), "r"(r8)
			 : "rcx", "r11", "memory");
	return ret;
}
/*
 * Invoke a system call with six arguments.
 *
 * n: syscall number in rax; a1 in rdi, a2 in rsi, a3 in rdx, and a4/a5/a6
 * pinned to r10/r8/r9 with register-asm variables.  Returns the raw rax
 * value; rcx and r11 are clobbered by `syscall`.
 *
 * Fix: added `static` -- a bare `inline` in a header has no (C99) or a
 * duplicated (GNU89) external definition, breaking the link when the
 * header is included from several translation units.
 */
static inline __attribute__((__always_inline__)) unsigned long __syscall6(long n, long a1, long a2, long a3, long a4, long a5,
									  long a6)
{
	unsigned long ret;
	register long r10 __asm__("r10") = a4;
	register long r8 __asm__("r8") = a5;
	register long r9 __asm__("r9") = a6;
	__asm__ volatile("syscall"
			 : "=a"(ret)
			 : "a"(n), "D"(a1), "S"(a2), "d"(a3), "r"(r10), "r"(r8), "r"(r9)
			 : "rcx", "r11", "memory");
	return ret;
}
/*
 * Invoke a system call with seven arguments.
 *
 * n: syscall number in rax; a1 in rdi, a2 in rsi, a3 in rdx, and
 * a4/a5/a6/a7 pinned to r10/r8/r9/r12 with register-asm variables.
 * Returns the raw rax value; rcx and r11 are clobbered by `syscall`.
 *
 * NOTE(review): placing a 7th argument in r12 is nonstandard -- Linux
 * reads at most six syscall arguments (rdi, rsi, rdx, r10, r8, r9), so
 * this only works if the target kernel explicitly reads r12; confirm
 * against the kernel-side syscall entry code.
 *
 * Fix: added `static` -- a bare `inline` in a header has no (C99) or a
 * duplicated (GNU89) external definition, breaking the link when the
 * header is included from several translation units.
 */
static inline __attribute__((__always_inline__)) unsigned long __syscall7(long n, long a1, long a2, long a3, long a4, long a5,
									  long a6, long a7)
{
	unsigned long ret;
	register long r10 __asm__("r10") = a4;
	register long r8 __asm__("r8") = a5;
	register long r9 __asm__("r9") = a6;
	register long r12 __asm__("r12") = a7;
	__asm__ volatile("syscall"
			 : "=a"(ret)
			 : "a"(n), "D"(a1), "S"(a2), "d"(a3), "r"(r10), "r"(r8), "r"(r9), "r"(r12)
			 : "rcx", "r11", "memory");
	return ret;
}