1/*
2 * Copyright 2018, Jaroslaw Pelczar <jarek@jpelczar.com>
3 * Distributed under the terms of the MIT License.
4 */
5#ifndef _KERNEL_ARCH_ARM64_ARCH_INT_H_
6#define _KERNEL_ARCH_ARM64_ARCH_INT_H_
7
8
9#include <SupportDefs.h>
10#include <kernel/arch/arm64/arm_registers.h>
11
12
// Number of interrupt vectors the generic int code sizes its tables for.
#define NUM_IO_VECTORS			1024
14
/*
 * Unmask IRQ delivery on the current CPU.
 *
 * "msr daifclr, #2" clears only DAIF.I (bit 1 of the 4-bit immediate
 * selects the IRQ mask); the D, A and F mask bits are left untouched,
 * so debug exceptions, SErrors and FIQs keep their current state.
 */
static inline void
arch_int_enable_interrupts_inline(void)
{
	__asm__ __volatile__("msr daifclr, #2");
}
20
21
22static inline int
23arch_int_disable_interrupts_inline(void)
24{
25	uint32 flags;
26
27	__asm__ __volatile__(
28			"mrs %0, daif\n\t"
29			"msr daifset, #2\n\t"
30			: "=&r"(flags));
31
32	return flags;
33}
34
35
/*
 * Restore the interrupt state previously returned by
 * arch_int_disable_interrupts_inline().
 *
 * Writes the saved value back into DAIF wholesale, so all four mask
 * bits (Debug, SError, IRQ, FIQ) revert to their saved values, not
 * just the IRQ bit.
 * NOTE(review): assumes WRITE_SPECIALREG widens oldState to a 64-bit
 * operand before the "msr" — verify against arm_registers.h.
 */
static inline void
arch_int_restore_interrupts_inline(int oldState)
{
	WRITE_SPECIALREG(daif, oldState);
}
41
42
43static inline bool
44arch_int_are_interrupts_enabled_inline(void)
45{
46	uint32 flags;
47
48	__asm__ __volatile__(
49			"mrs %0, daif\n\t"
50			: "=&r"(flags));
51
52	return (flags & PSR_I) == 0;
53}
54
// The generic kernel interrupt API maps straight onto the inline
// implementations above; ARM64 provides no out-of-line variants.
#define arch_int_enable_interrupts()	arch_int_enable_interrupts_inline()
#define arch_int_disable_interrupts()	arch_int_disable_interrupts_inline()
#define arch_int_restore_interrupts(status)	\
	arch_int_restore_interrupts_inline(status)
#define arch_int_are_interrupts_enabled()	\
	arch_int_are_interrupts_enabled_inline()
62
63#endif /* _KERNEL_ARCH_ARM64_ARCH_INT_H_ */
64