kern: implement KUserPointer (and test with QueryMemory) in advance of svc dev

parent 9f9593e05f
commit efae01c165

17 changed files with 460 additions and 72 deletions
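Per the commit title, the substantive addition is a KUserPointer type for the upcoming SVC work, exercised through QueryMemory; the hunks excerpted below only show the supporting rename to UserspaceAccess and the new aligned copy-to-user routine. As orientation, a KUserPointer-style wrapper marks an SVC argument as living in user memory and forces every access to go through checked copy routines. The following is a minimal sketch under that assumption; the names CopyFromUser/CopyToUser and the member functions are illustrative, not the commit's actual interface.

    /* Illustrative sketch only; not the commit's actual KUserPointer implementation. */
    #include <cstddef>

    namespace sketch {

        /* Stand-ins for checked userspace copies (cf. the UserspaceAccess routines below);
           they return false instead of faulting fatally when the user address is bad. */
        bool CopyFromUser(void *dst, const void *user_src, size_t size);
        bool CopyToUser(void *user_dst, const void *src, size_t size);

        template<typename T>
        class KUserPointer {
            private:
                T *m_ptr; /* Raw user-space address; never dereferenced directly. */
            public:
                constexpr explicit KUserPointer(T *ptr) : m_ptr(ptr) { /* ... */ }

                /* Read the pointee into kernel memory through the checked copy. */
                bool CopyTo(T *out) const { return CopyFromUser(out, m_ptr, sizeof(T)); }

                /* Write a kernel-side value back out to user memory. */
                bool CopyFrom(const T &value) const { return CopyToUser(m_ptr, &value, sizeof(T)); }
        };

    }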
@@ -294,7 +294,7 @@ namespace ams::kern::arch::arm64::cpu {
     ALWAYS_INLINE Result InvalidateDataCacheRange(uintptr_t start, uintptr_t end) {
         MESOSPHERE_ASSERT(util::IsAligned(start, DataCacheLineSize));
         MESOSPHERE_ASSERT(util::IsAligned(end, DataCacheLineSize));
-        R_UNLESS(arm64::InvalidateDataCache(start, end), svc::ResultInvalidCurrentMemory());
+        R_UNLESS(UserspaceAccess::InvalidateDataCache(start, end), svc::ResultInvalidCurrentMemory());
         DataSynchronizationBarrier();
         return ResultSuccess();
     }

@@ -302,7 +302,7 @@ namespace ams::kern::arch::arm64::cpu {
     ALWAYS_INLINE Result StoreDataCacheRange(uintptr_t start, uintptr_t end) {
         MESOSPHERE_ASSERT(util::IsAligned(start, DataCacheLineSize));
         MESOSPHERE_ASSERT(util::IsAligned(end, DataCacheLineSize));
-        R_UNLESS(arm64::StoreDataCache(start, end), svc::ResultInvalidCurrentMemory());
+        R_UNLESS(UserspaceAccess::StoreDataCache(start, end), svc::ResultInvalidCurrentMemory());
         DataSynchronizationBarrier();
         return ResultSuccess();
     }

@@ -310,7 +310,7 @@ namespace ams::kern::arch::arm64::cpu {
     ALWAYS_INLINE Result FlushDataCacheRange(uintptr_t start, uintptr_t end) {
         MESOSPHERE_ASSERT(util::IsAligned(start, DataCacheLineSize));
         MESOSPHERE_ASSERT(util::IsAligned(end, DataCacheLineSize));
-        R_UNLESS(arm64::FlushDataCache(start, end), svc::ResultInvalidCurrentMemory());
+        R_UNLESS(UserspaceAccess::FlushDataCache(start, end), svc::ResultInvalidCurrentMemory());
         DataSynchronizationBarrier();
         return ResultSuccess();
     }

@@ -318,7 +318,7 @@ namespace ams::kern::arch::arm64::cpu {
     ALWAYS_INLINE Result InvalidateInstructionCacheRange(uintptr_t start, uintptr_t end) {
         MESOSPHERE_ASSERT(util::IsAligned(start, InstructionCacheLineSize));
         MESOSPHERE_ASSERT(util::IsAligned(end, InstructionCacheLineSize));
-        R_UNLESS(arm64::InvalidateInstructionCache(start, end), svc::ResultInvalidCurrentMemory());
+        R_UNLESS(UserspaceAccess::InvalidateInstructionCache(start, end), svc::ResultInvalidCurrentMemory());
         EnsureInstructionConsistency();
         return ResultSuccess();
     }

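In each of the four wrappers above, the cache-maintenance work is now routed through the UserspaceAccess variant, which reports failure by returning false when the user-supplied range faults; R_UNLESS then converts that into svc::ResultInvalidCurrentMemory for the caller. A sketch of the guard semantics assumed here (the real R_UNLESS macro is more elaborate than this):

    /* Sketch of the guard idiom's semantics; not Atmosphere's actual macro definition. */
    #define R_UNLESS_SKETCH(expr, result)   \
        do {                                \
            if (!(expr)) {                  \
                return (result);            \
            }                               \
        } while (false)
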
@@ -14,20 +14,58 @@
  * along with this program. If not, see <http://www.gnu.org/licenses/>.
  */

-/* ams::kern::arch::arm64::UserspaceMemoryAccessFunctionAreaBegin() */
-.section    .text._ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv, "ax", %progbits
-.global     _ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv
-.type       _ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv, %function
-_ZN3ams4kern4arch5arm6438UserspaceMemoryAccessFunctionAreaBeginEv:
+/* ams::kern::arch::arm64::UserspaceAccessFunctionAreaBegin() */
+.section    .text._ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv, "ax", %progbits
+.global     _ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv
+.type       _ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv, %function
+_ZN3ams4kern4arch5arm6432UserspaceAccessFunctionAreaBeginEv:
     /* NOTE: This is not a real function, and only exists as a label for safety. */

-/* ================ All Userspace Memory Functions after this line. ================ */
+/* ================ All Userspace Access Functions after this line. ================ */

-/* ams::kern::arch::arm64::StoreDataCache(uintptr_t start, uintptr_t end) */
-.section    .text._ZN3ams4kern4arch5arm6414StoreDataCacheEmm, "ax", %progbits
-.global     _ZN3ams4kern4arch5arm6414StoreDataCacheEmm
-.type       _ZN3ams4kern4arch5arm6414StoreDataCacheEmm, %function
-_ZN3ams4kern4arch5arm6414StoreDataCacheEmm:
+/* ams::kern::arch::arm64::UserspaceAccess::CopyMemoryToUserAligned64Bit(void *dst, const void *src, size_t size) */
+.section    .text._ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm, "ax", %progbits
+.global     _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm
+.type       _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm, %function
+_ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm:
+    /* Check if there are 0x40 bytes to copy */
+    cmp     x2, #0x3F
+    b.ls    1f
+    ldp     x4,  x5,  [x1, #0x00]
+    ldp     x6,  x7,  [x1, #0x10]
+    ldp     x8,  x9,  [x1, #0x20]
+    ldp     x10, x11, [x1, #0x30]
+    sttr    x4,  [x0, #0x00]
+    sttr    x5,  [x0, #0x08]
+    sttr    x6,  [x0, #0x10]
+    sttr    x7,  [x0, #0x18]
+    sttr    x8,  [x0, #0x20]
+    sttr    x9,  [x0, #0x28]
+    sttr    x10, [x0, #0x30]
+    sttr    x11, [x0, #0x38]
+    add     x0, x0, #0x40
+    add     x1, x1, #0x40
+    sub     x2, x2, #0x40
+    b       _ZN3ams4kern4arch5arm6415UserspaceAccess28CopyMemoryToUserAligned64BitEPvPKvm
+
+1:  /* We have less than 0x40 bytes to copy. */
+    cmp     x2, #0x0
+    b.eq    2f
+    ldr     x4, [x1], #0x8
+    sttr    x4, [x0]
+    add     x0, x0, #0x8
+    sub     x2, x2, #0x8
+    b       1b
+
+2:  /* We're done. */
+    mov     x0, #1
+    ret
+
+/* ams::kern::arch::arm64::UserspaceAccess::StoreDataCache(uintptr_t start, uintptr_t end) */
+.section    .text._ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm, "ax", %progbits
+.global     _ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm
+.type       _ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm, %function
+_ZN3ams4kern4arch5arm6415UserspaceAccess14StoreDataCacheEmm:
     /* Check if we have any work to do. */
     cmp     x1, x0
     b.eq    2f

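The new CopyMemoryToUserAligned64Bit routine reads from the kernel source with ordinary ldp loads and writes to the user destination with sttr, the unprivileged store, so the store is permission-checked as if it came from EL0 and a bad user pointer faults into a recoverable path inside the access-function area; on completion it returns 1 in x0. A caller-side sketch, assuming the bool-returning static-member declaration implied by the mangled symbol; the surrounding caller is illustrative, not the commit's code:

    /* Declaration inferred from the mangled name in the diff; the caller is hypothetical. */
    #include <cstddef>
    #include <cstdint>

    namespace ams::kern::arch::arm64 {

        class UserspaceAccess {
            public:
                /* Returns true (x0 = 1) if the whole copy completed without faulting. */
                static bool CopyMemoryToUserAligned64Bit(void *dst, const void *src, size_t size);
        };

    }

    /* Hypothetical use: copy an 8-byte-aligned kernel buffer out to a user pointer,
       failing cleanly if the destination is unmapped or not writable from EL0. */
    bool WriteToUserExample(void *user_dst, const uint64_t *kernel_src, size_t size) {
        return ams::kern::arch::arm64::UserspaceAccess::CopyMemoryToUserAligned64Bit(user_dst, kernel_src, size);
    }
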
@@ -42,11 +80,11 @@ _ZN3ams4kern4arch5arm6414StoreDataCacheEmm:
     mov     x0, #1
     ret

-/* ams::kern::arch::arm64::FlushDataCache(uintptr_t start, uintptr_t end) */
-.section    .text._ZN3ams4kern4arch5arm6414FlushDataCacheEmm, "ax", %progbits
-.global     _ZN3ams4kern4arch5arm6414FlushDataCacheEmm
-.type       _ZN3ams4kern4arch5arm6414FlushDataCacheEmm, %function
-_ZN3ams4kern4arch5arm6414FlushDataCacheEmm:
+/* ams::kern::arch::arm64::UserspaceAccess::FlushDataCache(uintptr_t start, uintptr_t end) */
+.section    .text._ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm, "ax", %progbits
+.global     _ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm
+.type       _ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm, %function
+_ZN3ams4kern4arch5arm6415UserspaceAccess14FlushDataCacheEmm:
     /* Check if we have any work to do. */
     cmp     x1, x0
     b.eq    2f

@@ -61,11 +99,11 @@ _ZN3ams4kern4arch5arm6414FlushDataCacheEmm:
     mov     x0, #1
     ret

-/* ams::kern::arch::arm64::InvalidateDataCache(uintptr_t start, uintptr_t end) */
-.section    .text._ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm, "ax", %progbits
-.global     _ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm
-.type       _ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm, %function
-_ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm:
+/* ams::kern::arch::arm64::UserspaceAccess::InvalidateDataCache(uintptr_t start, uintptr_t end) */
+.section    .text._ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm, "ax", %progbits
+.global     _ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm
+.type       _ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm, %function
+_ZN3ams4kern4arch5arm6415UserspaceAccess19InvalidateDataCacheEmm:
     /* Check if we have any work to do. */
     cmp     x1, x0
     b.eq    2f

@@ -80,11 +118,11 @@ _ZN3ams4kern4arch5arm6419InvalidateDataCacheEmm:
     mov     x0, #1
     ret

-/* ams::kern::arch::arm64::InvalidateInstructionCache(uintptr_t start, uintptr_t end) */
-.section    .text._ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm, "ax", %progbits
-.global     _ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm
-.type       _ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm, %function
-_ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm:
+/* ams::kern::arch::arm64::UserspaceAccess::InvalidateInstructionCache(uintptr_t start, uintptr_t end) */
+.section    .text._ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm, "ax", %progbits
+.global     _ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm
+.type       _ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm, %function
+_ZN3ams4kern4arch5arm6415UserspaceAccess26InvalidateInstructionCacheEmm:
     /* Check if we have any work to do. */
     cmp     x1, x0
     b.eq    2f

@@ -99,11 +137,11 @@ _ZN3ams4kern4arch5arm6426InvalidateInstructionCacheEmm:
     mov     x0, #1
     ret

-/* ================ All Userspace Memory Functions before this line. ================ */
+/* ================ All Userspace Access Functions before this line. ================ */

-/* ams::kern::arch::arm64::UserspaceMemoryAccessFunctionAreaEnd() */
-.section    .text._ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv, "ax", %progbits
-.global     _ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv
-.type       _ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv, %function
-_ZN3ams4kern4arch5arm6436UserspaceMemoryAccessFunctionAreaEndEv:
+/* ams::kern::arch::arm64::UserspaceAccessFunctionAreaEnd() */
+.section    .text._ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv, "ax", %progbits
+.global     _ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv
+.type       _ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv, %function
+_ZN3ams4kern4arch5arm6430UserspaceAccessFunctionAreaEndEv:
     /* NOTE: This is not a real function, and only exists as a label for safety. */
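The Begin/End labels renamed above bracket every routine that touches user memory in one contiguous text region. Keeping them together means a data-abort handler can test whether the faulting PC lies inside the bracketed range and, if so, treat the fault as a recoverable failed userspace access rather than a kernel panic. A sketch of that check follows; the label functions exist per the diff, but the handler-side use shown here is an assumption for illustration, not part of this commit's hunks:

    /* Sketch: label declarations mirror the renamed symbols above; the predicate is illustrative. */
    #include <cstdint>

    namespace ams::kern::arch::arm64 {

        /* Not real functions; they only mark the bounds of the userspace-access text area. */
        void UserspaceAccessFunctionAreaBegin();
        void UserspaceAccessFunctionAreaEnd();

    }

    /* Returns true if a data abort occurred inside the userspace-access area, i.e. the
       fault came from a checked access to user memory and can be failed cleanly. */
    bool IsFaultInUserspaceAccessArea(uintptr_t fault_pc) {
        const auto begin = reinterpret_cast<uintptr_t>(&ams::kern::arch::arm64::UserspaceAccessFunctionAreaBegin);
        const auto end   = reinterpret_cast<uintptr_t>(&ams::kern::arch::arm64::UserspaceAccessFunctionAreaEnd);
        return begin <= fault_pc && fault_pc < end;
    }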