author     Yang Liu <[email protected]>    2024-02-17 20:06:10 +0800
committer  Merry <[email protected]>       2024-03-02 19:38:46 +0000
commit     02d8a7ff10126e86810a95a210124f8220f39615 (patch)
tree       94129c3f28ac86210a21377ae04ef569eb36ebb3
parent     e7bafeae24e1b4936cbf009309157f70bc32db03 (diff)
download   dynarmic-02d8a7ff10126e86810a95a210124f8220f39615.tar.gz
           dynarmic-02d8a7ff10126e86810a95a210124f8220f39615.zip
backend/rv64: Stub all IR instruction implementations
-rw-r--r--  src/dynarmic/CMakeLists.txt                                              17
-rw-r--r--  src/dynarmic/backend/riscv64/a32_interface.cpp                            4
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64.cpp                            51
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64_a32.cpp                       150
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64_a32_coprocessor.cpp            55
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64_a32_memory.cpp                105
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64_a64.cpp                       200
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64_a64_memory.cpp                125
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64_cryptography.cpp              100
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64_data_processing.cpp           396
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64_floating_point.cpp            445
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64_packed.cpp                    190
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64_saturation.cpp                130
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64_vector.cpp                   1395
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64_vector_floating_point.cpp     355
-rw-r--r--  src/dynarmic/backend/riscv64/emit_riscv64_vector_saturation.cpp         100
-rw-r--r--  src/dynarmic/backend/riscv64/reg_alloc.cpp                                2
17 files changed, 3796 insertions, 24 deletions
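
Every new emitter file in this commit follows the same shape: one explicit specialization of EmitIR per IR opcode whose body is a single UNIMPLEMENTED() call, to be replaced later by a real RV64 lowering. A condensed sketch of that pattern (the opcode shown is just one example taken from the diff below; UNIMPLEMENTED() is assumed to be the project's existing assert macro):

```cpp
// Condensed sketch of the stub pattern repeated across the new emit_riscv64_*.cpp files.
#include <biscuit/assembler.hpp>

#include "dynarmic/backend/riscv64/emit_context.h"
#include "dynarmic/backend/riscv64/emit_riscv64.h"
#include "dynarmic/ir/microinstruction.h"
#include "dynarmic/ir/opcodes.h"

namespace Dynarmic::Backend::RV64 {

template<>
void EmitIR<IR::Opcode::Add32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
    UNIMPLEMENTED();  // placeholder until a real RISC-V lowering is written
}

}  // namespace Dynarmic::Backend::RV64
```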
diff --git a/src/dynarmic/CMakeLists.txt b/src/dynarmic/CMakeLists.txt
index f6340ff9..6f0813ca 100644
--- a/src/dynarmic/CMakeLists.txt
+++ b/src/dynarmic/CMakeLists.txt
@@ -404,8 +404,22 @@ if ("riscv" IN_LIST ARCHITECTURE)
target_sources(dynarmic PRIVATE
backend/riscv64/abi.h
+ backend/riscv64/a32_jitstate.cpp
+ backend/riscv64/a32_jitstate.h
backend/riscv64/emit_context.h
+ backend/riscv64/emit_riscv64_a32.cpp
+ backend/riscv64/emit_riscv64_a32_coprocessor.cpp
+ backend/riscv64/emit_riscv64_a32_memory.cpp
+ backend/riscv64/emit_riscv64_a64.cpp
+ backend/riscv64/emit_riscv64_a64_memory.cpp
+ backend/riscv64/emit_riscv64_cryptography.cpp
backend/riscv64/emit_riscv64_data_processing.cpp
+ backend/riscv64/emit_riscv64_floating_point.cpp
+ backend/riscv64/emit_riscv64_packed.cpp
+ backend/riscv64/emit_riscv64_saturation.cpp
+ backend/riscv64/emit_riscv64_vector_floating_point.cpp
+ backend/riscv64/emit_riscv64_vector_saturation.cpp
+ backend/riscv64/emit_riscv64_vector.cpp
backend/riscv64/emit_riscv64.cpp
backend/riscv64/emit_riscv64.h
backend/riscv64/reg_alloc.cpp
@@ -419,10 +433,7 @@ if ("riscv" IN_LIST ARCHITECTURE)
backend/riscv64/a32_address_space.h
backend/riscv64/a32_core.h
backend/riscv64/a32_interface.cpp
- backend/riscv64/a32_jitstate.cpp
- backend/riscv64/a32_jitstate.h
backend/riscv64/code_block.h
- backend/riscv64/emit_riscv64_a32.cpp
)
endif()
diff --git a/src/dynarmic/backend/riscv64/a32_interface.cpp b/src/dynarmic/backend/riscv64/a32_interface.cpp
index 02aabb33..dac4d2d2 100644
--- a/src/dynarmic/backend/riscv64/a32_interface.cpp
+++ b/src/dynarmic/backend/riscv64/a32_interface.cpp
@@ -49,7 +49,7 @@ struct Jit::Impl final {
jit_interface->is_executing = false;
};
- ASSERT_FALSE("Unimplemented");
+ UNIMPLEMENTED();
RequestCacheInvalidation();
@@ -117,7 +117,7 @@ struct Jit::Impl final {
}
void DumpDisassembly() const {
- ASSERT_FALSE("Unimplemented");
+ UNIMPLEMENTED();
}
private:
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64.cpp b/src/dynarmic/backend/riscv64/emit_riscv64.cpp
index 0aae01f0..32a5506b 100644
--- a/src/dynarmic/backend/riscv64/emit_riscv64.cpp
+++ b/src/dynarmic/backend/riscv64/emit_riscv64.cpp
@@ -21,11 +21,6 @@
namespace Dynarmic::Backend::RV64 {
-template<IR::Opcode op>
-void EmitIR(biscuit::Assembler&, EmitContext&, IR::Inst*) {
- ASSERT_FALSE("Unimplemented opcode {} ", op);
-}
-
template<>
void EmitIR<IR::Opcode::Void>(biscuit::Assembler&, EmitContext&, IR::Inst*) {}
@@ -36,13 +31,19 @@ void EmitIR<IR::Opcode::Identity>(biscuit::Assembler&, EmitContext& ctx, IR::Ins
}
template<>
-void EmitIR<IR::Opcode::A32GetRegister>(biscuit::Assembler& as, EmitContext& ctx, IR::Inst* inst);
-template<>
-void EmitIR<IR::Opcode::A32SetRegister>(biscuit::Assembler& as, EmitContext& ctx, IR::Inst* inst);
+void EmitIR<IR::Opcode::Breakpoint>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
template<>
-void EmitIR<IR::Opcode::A32SetCpsrNZC>(biscuit::Assembler& as, EmitContext& ctx, IR::Inst* inst);
+void EmitIR<IR::Opcode::CallHostFunction>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
template<>
-void EmitIR<IR::Opcode::LogicalShiftLeft32>(biscuit::Assembler& as, EmitContext& ctx, IR::Inst* inst);
+void EmitIR<IR::Opcode::PushRSB>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
template<>
void EmitIR<IR::Opcode::GetCarryFromOp>(biscuit::Assembler&, EmitContext& ctx, IR::Inst* inst) {
@@ -51,6 +52,16 @@ void EmitIR<IR::Opcode::GetCarryFromOp>(biscuit::Assembler&, EmitContext& ctx, I
}
template<>
+void EmitIR<IR::Opcode::GetOverflowFromOp>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::GetGEFromOp>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
void EmitIR<IR::Opcode::GetNZCVFromOp>(biscuit::Assembler&, EmitContext& ctx, IR::Inst* inst) {
[[maybe_unused]] auto args = ctx.reg_alloc.GetArgumentInfo(inst);
ASSERT(ctx.reg_alloc.IsValueLive(inst));
@@ -71,6 +82,26 @@ void EmitIR<IR::Opcode::GetNZFromOp>(biscuit::Assembler& as, EmitContext& ctx, I
as.OR(Xnz, Xnz, Xscratch0);
}
+template<>
+void EmitIR<IR::Opcode::GetUpperFromOp>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::GetLowerFromOp>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::GetCFlagFromNZCV>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::NZCVFromPackedFlags>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
EmittedBlockInfo EmitRV64(biscuit::Assembler& as, IR::Block block, const EmitConfig& emit_conf) {
using namespace biscuit;
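
With the generic fallback above deleted, every opcode the dispatcher instantiates must now have an explicit specialization somewhere in the backend, which is what the stub files in this commit supply. A standalone sketch (not dynarmic code; simplified names, and it assumes the primary template remains only declared in emit_riscv64.h) of why the removal turns a missing opcode into a link-time error rather than a runtime assert:

```cpp
// Standalone illustration of the explicit-specialization pattern; compiles on its own.
#include <cstdio>

enum class Opcode { Add32, Sub32 };

// Primary template is only declared, never defined generically
// (mirrors the assumed declaration in emit_riscv64.h).
template<Opcode op>
void EmitIR();

// Explicit stub, analogous to the new emit_riscv64_*.cpp specializations.
template<>
void EmitIR<Opcode::Add32>() {
    std::puts("Add32: unimplemented stub");
}

int main() {
    EmitIR<Opcode::Add32>();     // links, because a specialization exists
    // EmitIR<Opcode::Sub32>();  // would fail at link time: no specialization defined
}
```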
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64_a32.cpp b/src/dynarmic/backend/riscv64/emit_riscv64_a32.cpp
index ff9bc462..95a11bec 100644
--- a/src/dynarmic/backend/riscv64/emit_riscv64_a32.cpp
+++ b/src/dynarmic/backend/riscv64/emit_riscv64_a32.cpp
@@ -206,6 +206,11 @@ void EmitA32Terminal(biscuit::Assembler& as, EmitContext& ctx) {
}
template<>
+void EmitIR<IR::Opcode::A32SetCheckBit>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
void EmitIR<IR::Opcode::A32GetRegister>(biscuit::Assembler& as, EmitContext& ctx, IR::Inst* inst) {
const A32::Reg reg = inst->GetArg(0).GetA32RegRef();
@@ -216,6 +221,21 @@ void EmitIR<IR::Opcode::A32GetRegister>(biscuit::Assembler& as, EmitContext& ctx
}
template<>
+void EmitIR<IR::Opcode::A32GetExtendedRegister32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32GetExtendedRegister64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32GetVector>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
void EmitIR<IR::Opcode::A32SetRegister>(biscuit::Assembler& as, EmitContext& ctx, IR::Inst* inst) {
const A32::Reg reg = inst->GetArg(0).GetA32RegRef();
@@ -230,6 +250,56 @@ void EmitIR<IR::Opcode::A32SetRegister>(biscuit::Assembler& as, EmitContext& ctx
}
template<>
+void EmitIR<IR::Opcode::A32SetExtendedRegister32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32SetExtendedRegister64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32SetVector>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32GetCpsr>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32SetCpsr>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32SetCpsrNZCV>(biscuit::Assembler& as, EmitContext& ctx, IR::Inst* inst) {
+ auto args = ctx.reg_alloc.GetArgumentInfo(inst);
+
+ auto Xnzcv = ctx.reg_alloc.ReadX(args[0]);
+ RegAlloc::Realize(Xnzcv);
+
+ as.SW(Xnzcv, offsetof(A32JitState, cpsr_nzcv), Xstate);
+}
+
+template<>
+void EmitIR<IR::Opcode::A32SetCpsrNZCVRaw>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32SetCpsrNZCVQ>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32SetCpsrNZ>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
void EmitIR<IR::Opcode::A32SetCpsrNZC>(biscuit::Assembler& as, EmitContext& ctx, IR::Inst* inst) {
auto args = ctx.reg_alloc.GetArgumentInfo(inst);
@@ -249,13 +319,83 @@ void EmitIR<IR::Opcode::A32SetCpsrNZC>(biscuit::Assembler& as, EmitContext& ctx,
}
template<>
-void EmitIR<IR::Opcode::A32SetCpsrNZCV>(biscuit::Assembler& as, EmitContext& ctx, IR::Inst* inst) {
- auto args = ctx.reg_alloc.GetArgumentInfo(inst);
+void EmitIR<IR::Opcode::A32GetCFlag>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
- auto Xnzcv = ctx.reg_alloc.ReadX(args[0]);
- RegAlloc::Realize(Xnzcv);
+template<>
+void EmitIR<IR::Opcode::A32OrQFlag>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
- as.SW(Xnzcv, offsetof(A32JitState, cpsr_nzcv), Xstate);
+template<>
+void EmitIR<IR::Opcode::A32GetGEFlags>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32SetGEFlags>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32SetGEFlagsCompressed>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32BXWritePC>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32UpdateUpperLocationDescriptor>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32CallSupervisor>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32ExceptionRaised>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32DataSynchronizationBarrier>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32DataMemoryBarrier>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32InstructionSynchronizationBarrier>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32GetFpscr>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32SetFpscr>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32GetFpscrNZCV>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32SetFpscrNZCV>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
}
} // namespace Dynarmic::Backend::RV64
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64_a32_coprocessor.cpp b/src/dynarmic/backend/riscv64/emit_riscv64_a32_coprocessor.cpp
new file mode 100644
index 00000000..a014d57f
--- /dev/null
+++ b/src/dynarmic/backend/riscv64/emit_riscv64_a32_coprocessor.cpp
@@ -0,0 +1,55 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2024 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+#include <biscuit/assembler.hpp>
+#include <fmt/ostream.h>
+
+#include "dynarmic/backend/riscv64/a32_jitstate.h"
+#include "dynarmic/backend/riscv64/abi.h"
+#include "dynarmic/backend/riscv64/emit_context.h"
+#include "dynarmic/backend/riscv64/emit_riscv64.h"
+#include "dynarmic/backend/riscv64/reg_alloc.h"
+#include "dynarmic/ir/basic_block.h"
+#include "dynarmic/ir/microinstruction.h"
+#include "dynarmic/ir/opcodes.h"
+
+namespace Dynarmic::Backend::RV64 {
+
+template<>
+void EmitIR<IR::Opcode::A32CoprocInternalOperation>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32CoprocSendOneWord>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32CoprocSendTwoWords>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32CoprocGetOneWord>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32CoprocGetTwoWords>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32CoprocLoadWords>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32CoprocStoreWords>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+} // namespace Dynarmic::Backend::RV64
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64_a32_memory.cpp b/src/dynarmic/backend/riscv64/emit_riscv64_a32_memory.cpp
new file mode 100644
index 00000000..f9a3aabf
--- /dev/null
+++ b/src/dynarmic/backend/riscv64/emit_riscv64_a32_memory.cpp
@@ -0,0 +1,105 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2024 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+#include <biscuit/assembler.hpp>
+#include <fmt/ostream.h>
+
+#include "dynarmic/backend/riscv64/a32_jitstate.h"
+#include "dynarmic/backend/riscv64/abi.h"
+#include "dynarmic/backend/riscv64/emit_context.h"
+#include "dynarmic/backend/riscv64/emit_riscv64.h"
+#include "dynarmic/backend/riscv64/reg_alloc.h"
+#include "dynarmic/ir/basic_block.h"
+#include "dynarmic/ir/microinstruction.h"
+#include "dynarmic/ir/opcodes.h"
+
+namespace Dynarmic::Backend::RV64 {
+
+template<>
+void EmitIR<IR::Opcode::A32ClearExclusive>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32ReadMemory8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32ReadMemory16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32ReadMemory32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32ReadMemory64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32ExclusiveReadMemory8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32ExclusiveReadMemory16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32ExclusiveReadMemory32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32ExclusiveReadMemory64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32WriteMemory8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32WriteMemory16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32WriteMemory32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32WriteMemory64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32ExclusiveWriteMemory8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32ExclusiveWriteMemory16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32ExclusiveWriteMemory32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A32ExclusiveWriteMemory64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+} // namespace Dynarmic::Backend::RV64
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64_a64.cpp b/src/dynarmic/backend/riscv64/emit_riscv64_a64.cpp
new file mode 100644
index 00000000..38ea167f
--- /dev/null
+++ b/src/dynarmic/backend/riscv64/emit_riscv64_a64.cpp
@@ -0,0 +1,200 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2024 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+#include <biscuit/assembler.hpp>
+#include <fmt/ostream.h>
+
+#include "dynarmic/backend/riscv64/a32_jitstate.h"
+#include "dynarmic/backend/riscv64/abi.h"
+#include "dynarmic/backend/riscv64/emit_context.h"
+#include "dynarmic/backend/riscv64/emit_riscv64.h"
+#include "dynarmic/backend/riscv64/reg_alloc.h"
+#include "dynarmic/ir/basic_block.h"
+#include "dynarmic/ir/microinstruction.h"
+#include "dynarmic/ir/opcodes.h"
+
+namespace Dynarmic::Backend::RV64 {
+
+template<>
+void EmitIR<IR::Opcode::A64SetCheckBit>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetCFlag>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetNZCVRaw>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64SetNZCVRaw>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64SetNZCV>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetW>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetX>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetS>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetD>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetQ>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetSP>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetFPCR>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetFPSR>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64SetW>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64SetX>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64SetS>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64SetD>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64SetQ>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64SetSP>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64SetFPCR>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64SetFPSR>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64SetPC>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64CallSupervisor>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ExceptionRaised>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64DataCacheOperationRaised>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64InstructionCacheOperationRaised>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64DataSynchronizationBarrier>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64DataMemoryBarrier>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64InstructionSynchronizationBarrier>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetCNTFRQ>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetCNTPCT>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetCTR>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetDCZID>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetTPIDR>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64GetTPIDRRO>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64SetTPIDR>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+} // namespace Dynarmic::Backend::RV64
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64_a64_memory.cpp b/src/dynarmic/backend/riscv64/emit_riscv64_a64_memory.cpp
new file mode 100644
index 00000000..a5c0c1b8
--- /dev/null
+++ b/src/dynarmic/backend/riscv64/emit_riscv64_a64_memory.cpp
@@ -0,0 +1,125 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2024 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+#include <biscuit/assembler.hpp>
+#include <fmt/ostream.h>
+
+#include "dynarmic/backend/riscv64/a32_jitstate.h"
+#include "dynarmic/backend/riscv64/abi.h"
+#include "dynarmic/backend/riscv64/emit_context.h"
+#include "dynarmic/backend/riscv64/emit_riscv64.h"
+#include "dynarmic/backend/riscv64/reg_alloc.h"
+#include "dynarmic/ir/basic_block.h"
+#include "dynarmic/ir/microinstruction.h"
+#include "dynarmic/ir/opcodes.h"
+
+namespace Dynarmic::Backend::RV64 {
+
+template<>
+void EmitIR<IR::Opcode::A64ClearExclusive>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ReadMemory8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ReadMemory16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ReadMemory32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ReadMemory64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ReadMemory128>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ExclusiveReadMemory8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ExclusiveReadMemory16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ExclusiveReadMemory32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ExclusiveReadMemory64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ExclusiveReadMemory128>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64WriteMemory8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64WriteMemory16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64WriteMemory32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64WriteMemory64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64WriteMemory128>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ExclusiveWriteMemory8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ExclusiveWriteMemory16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ExclusiveWriteMemory32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ExclusiveWriteMemory64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::A64ExclusiveWriteMemory128>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+} // namespace Dynarmic::Backend::RV64
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64_cryptography.cpp b/src/dynarmic/backend/riscv64/emit_riscv64_cryptography.cpp
new file mode 100644
index 00000000..c1d3fa0e
--- /dev/null
+++ b/src/dynarmic/backend/riscv64/emit_riscv64_cryptography.cpp
@@ -0,0 +1,100 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2024 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+#include <biscuit/assembler.hpp>
+#include <fmt/ostream.h>
+
+#include "dynarmic/backend/riscv64/a32_jitstate.h"
+#include "dynarmic/backend/riscv64/abi.h"
+#include "dynarmic/backend/riscv64/emit_context.h"
+#include "dynarmic/backend/riscv64/emit_riscv64.h"
+#include "dynarmic/backend/riscv64/reg_alloc.h"
+#include "dynarmic/ir/basic_block.h"
+#include "dynarmic/ir/microinstruction.h"
+#include "dynarmic/ir/opcodes.h"
+
+namespace Dynarmic::Backend::RV64 {
+
+template<>
+void EmitIR<IR::Opcode::CRC32Castagnoli8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::CRC32Castagnoli16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::CRC32Castagnoli32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::CRC32Castagnoli64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::CRC32ISO8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::CRC32ISO16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::CRC32ISO32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::CRC32ISO64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::AESDecryptSingleRound>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::AESEncryptSingleRound>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::AESInverseMixColumns>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::AESMixColumns>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SM4AccessSubstitutionBox>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SHA256Hash>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SHA256MessageSchedule0>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SHA256MessageSchedule1>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+} // namespace Dynarmic::Backend::RV64
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64_data_processing.cpp b/src/dynarmic/backend/riscv64/emit_riscv64_data_processing.cpp
index 90565af5..ffb82d74 100644
--- a/src/dynarmic/backend/riscv64/emit_riscv64_data_processing.cpp
+++ b/src/dynarmic/backend/riscv64/emit_riscv64_data_processing.cpp
@@ -18,6 +18,71 @@
namespace Dynarmic::Backend::RV64 {
template<>
+void EmitIR<IR::Opcode::Pack2x32To1x64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::Pack2x64To1x128>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::LeastSignificantWord>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::LeastSignificantHalf>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::LeastSignificantByte>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::MostSignificantWord>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::MostSignificantBit>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::IsZero32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::IsZero64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::TestBit>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ConditionalSelect32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ConditionalSelect64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ConditionalSelectNZCV>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
void EmitIR<IR::Opcode::LogicalShiftLeft32>(biscuit::Assembler& as, EmitContext& ctx, IR::Inst* inst) {
const auto carry_inst = inst->GetAssociatedPseudoOperation(IR::Opcode::GetCarryFromOp);
@@ -54,6 +119,86 @@ void EmitIR<IR::Opcode::LogicalShiftLeft32>(biscuit::Assembler& as, EmitContext&
}
}
+template<>
+void EmitIR<IR::Opcode::LogicalShiftLeft64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::LogicalShiftRight32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::LogicalShiftRight64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ArithmeticShiftRight32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ArithmeticShiftRight64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::RotateRight32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::RotateRight64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::RotateRightExtended>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::LogicalShiftLeftMasked32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::LogicalShiftLeftMasked64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::LogicalShiftRightMasked32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::LogicalShiftRightMasked64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ArithmeticShiftRightMasked32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ArithmeticShiftRightMasked64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::RotateRightMasked32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::RotateRightMasked64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
template<size_t bitsize>
static void AddImmWithFlags(biscuit::Assembler& as, biscuit::GPR rd, biscuit::GPR rs, u64 imm, biscuit::GPR flags) {
static_assert(bitsize == 32 || bitsize == 64);
@@ -122,19 +267,264 @@ static void EmitSub(biscuit::Assembler& as, EmitContext& ctx, IR::Inst* inst) {
AddImmWithFlags<bitsize>(as, *Xresult, *Xa, -imm, *Xflags);
}
} else {
- ASSERT_FALSE("Unimplemented");
+ UNIMPLEMENTED();
}
} else {
- ASSERT_FALSE("Unimplemented");
+ UNIMPLEMENTED();
}
} else {
- ASSERT_FALSE("Unimplemented");
+ UNIMPLEMENTED();
}
}
template<>
+void EmitIR<IR::Opcode::Add32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::Add64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
void EmitIR<IR::Opcode::Sub32>(biscuit::Assembler& as, EmitContext& ctx, IR::Inst* inst) {
EmitSub<32>(as, ctx, inst);
}
+template<>
+void EmitIR<IR::Opcode::Sub64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::Mul32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::Mul64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedMultiplyHigh64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::UnsignedMultiplyHigh64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::UnsignedDiv32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::UnsignedDiv64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedDiv32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedDiv64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::And32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::And64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::AndNot32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::AndNot64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::Eor32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::Eor64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::Or32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::Or64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::Not32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::Not64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignExtendByteToWord>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignExtendHalfToWord>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignExtendByteToLong>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignExtendHalfToLong>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignExtendWordToLong>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ZeroExtendByteToWord>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ZeroExtendHalfToWord>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ZeroExtendByteToLong>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ZeroExtendHalfToLong>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ZeroExtendWordToLong>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ZeroExtendLongToQuad>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ByteReverseWord>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ByteReverseHalf>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ByteReverseDual>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::CountLeadingZeros32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::CountLeadingZeros64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ExtractRegister32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ExtractRegister64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ReplicateBit32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ReplicateBit64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::MaxSigned32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::MaxSigned64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::MaxUnsigned32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::MaxUnsigned64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::MinSigned32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::MinSigned64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::MinUnsigned32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::MinUnsigned64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
} // namespace Dynarmic::Backend::RV64
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64_floating_point.cpp b/src/dynarmic/backend/riscv64/emit_riscv64_floating_point.cpp
new file mode 100644
index 00000000..f6b6ecf7
--- /dev/null
+++ b/src/dynarmic/backend/riscv64/emit_riscv64_floating_point.cpp
@@ -0,0 +1,445 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2024 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+#include <biscuit/assembler.hpp>
+#include <fmt/ostream.h>
+
+#include "dynarmic/backend/riscv64/a32_jitstate.h"
+#include "dynarmic/backend/riscv64/abi.h"
+#include "dynarmic/backend/riscv64/emit_context.h"
+#include "dynarmic/backend/riscv64/emit_riscv64.h"
+#include "dynarmic/backend/riscv64/reg_alloc.h"
+#include "dynarmic/ir/basic_block.h"
+#include "dynarmic/ir/microinstruction.h"
+#include "dynarmic/ir/opcodes.h"
+
+namespace Dynarmic::Backend::RV64 {
+
+template<>
+void EmitIR<IR::Opcode::FPAbs16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPAbs32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPAbs64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPAdd32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPAdd64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPCompare32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPCompare64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPDiv32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPDiv64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMax32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMax64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMaxNumeric32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMaxNumeric64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMin32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMin64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMinNumeric32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMinNumeric64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMul32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMul64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMulAdd16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMulAdd32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMulAdd64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMulX32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPMulX64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPNeg16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPNeg32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPNeg64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRecipEstimate16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRecipEstimate32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRecipEstimate64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRecipExponent16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRecipExponent32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRecipExponent64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRecipStepFused16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRecipStepFused32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRecipStepFused64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRoundInt16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRoundInt32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRoundInt64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRSqrtEstimate16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRSqrtEstimate32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRSqrtEstimate64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRSqrtStepFused16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRSqrtStepFused32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPRSqrtStepFused64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPSqrt32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPSqrt64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPSub32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPSub64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPHalfToDouble>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPHalfToSingle>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPSingleToDouble>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPSingleToHalf>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPDoubleToHalf>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPDoubleToSingle>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPDoubleToFixedS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPDoubleToFixedS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPDoubleToFixedS64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPDoubleToFixedU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPDoubleToFixedU32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPDoubleToFixedU64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPHalfToFixedS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPHalfToFixedS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPHalfToFixedS64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPHalfToFixedU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPHalfToFixedU32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPHalfToFixedU64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPSingleToFixedS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPSingleToFixedS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPSingleToFixedS64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPSingleToFixedU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPSingleToFixedU32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPSingleToFixedU64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPFixedU16ToSingle>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPFixedS16ToSingle>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPFixedU16ToDouble>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPFixedS16ToDouble>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPFixedU32ToSingle>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPFixedS32ToSingle>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPFixedU32ToDouble>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPFixedS32ToDouble>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPFixedU64ToDouble>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPFixedU64ToSingle>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPFixedS64ToDouble>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPFixedS64ToSingle>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+} // namespace Dynarmic::Backend::RV64
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64_packed.cpp b/src/dynarmic/backend/riscv64/emit_riscv64_packed.cpp
new file mode 100644
index 00000000..5272dbb1
--- /dev/null
+++ b/src/dynarmic/backend/riscv64/emit_riscv64_packed.cpp
@@ -0,0 +1,190 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2024 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+#include <biscuit/assembler.hpp>
+#include <fmt/ostream.h>
+
+#include "dynarmic/backend/riscv64/a32_jitstate.h"
+#include "dynarmic/backend/riscv64/abi.h"
+#include "dynarmic/backend/riscv64/emit_context.h"
+#include "dynarmic/backend/riscv64/emit_riscv64.h"
+#include "dynarmic/backend/riscv64/reg_alloc.h"
+#include "dynarmic/ir/basic_block.h"
+#include "dynarmic/ir/microinstruction.h"
+#include "dynarmic/ir/opcodes.h"
+
+namespace Dynarmic::Backend::RV64 {
+
+template<>
+void EmitIR<IR::Opcode::PackedAddU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedAddS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSubU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSubS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedAddU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedAddS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSubU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSubS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedAddSubU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedAddSubS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSubAddU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSubAddS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedHalvingAddU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedHalvingAddS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedHalvingSubU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedHalvingSubS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedHalvingAddU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedHalvingAddS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedHalvingSubU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedHalvingSubS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedHalvingAddSubU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedHalvingAddSubS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedHalvingSubAddU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedHalvingSubAddS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSaturatedAddU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSaturatedAddS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSaturatedSubU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSaturatedSubS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSaturatedAddU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSaturatedAddS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSaturatedSubU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSaturatedSubS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedAbsDiffSumU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::PackedSelect>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+} // namespace Dynarmic::Backend::RV64
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64_saturation.cpp b/src/dynarmic/backend/riscv64/emit_riscv64_saturation.cpp
new file mode 100644
index 00000000..3bceeb08
--- /dev/null
+++ b/src/dynarmic/backend/riscv64/emit_riscv64_saturation.cpp
@@ -0,0 +1,130 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2024 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+#include <biscuit/assembler.hpp>
+#include <fmt/ostream.h>
+
+#include "dynarmic/backend/riscv64/a32_jitstate.h"
+#include "dynarmic/backend/riscv64/abi.h"
+#include "dynarmic/backend/riscv64/emit_context.h"
+#include "dynarmic/backend/riscv64/emit_riscv64.h"
+#include "dynarmic/backend/riscv64/reg_alloc.h"
+#include "dynarmic/ir/basic_block.h"
+#include "dynarmic/ir/microinstruction.h"
+#include "dynarmic/ir/opcodes.h"
+
+namespace Dynarmic::Backend::RV64 {
+
+template<>
+void EmitIR<IR::Opcode::SignedSaturatedAddWithFlag32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedSaturatedSubWithFlag32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedSaturation>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::UnsignedSaturation>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedSaturatedAdd8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedSaturatedAdd16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedSaturatedAdd32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedSaturatedAdd64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedSaturatedDoublingMultiplyReturnHigh16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedSaturatedDoublingMultiplyReturnHigh32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedSaturatedSub8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedSaturatedSub16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedSaturatedSub32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::SignedSaturatedSub64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::UnsignedSaturatedAdd8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::UnsignedSaturatedAdd16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::UnsignedSaturatedAdd32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::UnsignedSaturatedAdd64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::UnsignedSaturatedSub8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::UnsignedSaturatedSub16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::UnsignedSaturatedSub32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::UnsignedSaturatedSub64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+} // namespace Dynarmic::Backend::RV64
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64_vector.cpp b/src/dynarmic/backend/riscv64/emit_riscv64_vector.cpp
new file mode 100644
index 00000000..31cfd65c
--- /dev/null
+++ b/src/dynarmic/backend/riscv64/emit_riscv64_vector.cpp
@@ -0,0 +1,1395 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2024 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+#include <biscuit/assembler.hpp>
+#include <fmt/ostream.h>
+
+#include "dynarmic/backend/riscv64/a32_jitstate.h"
+#include "dynarmic/backend/riscv64/abi.h"
+#include "dynarmic/backend/riscv64/emit_context.h"
+#include "dynarmic/backend/riscv64/emit_riscv64.h"
+#include "dynarmic/backend/riscv64/reg_alloc.h"
+#include "dynarmic/ir/basic_block.h"
+#include "dynarmic/ir/microinstruction.h"
+#include "dynarmic/ir/opcodes.h"
+
+namespace Dynarmic::Backend::RV64 {
+
+template<>
+void EmitIR<IR::Opcode::VectorGetElement8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorGetElement16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorGetElement32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorGetElement64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSetElement8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSetElement16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSetElement32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSetElement64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorAbs8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorAbs16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorAbs32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorAbs64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorAdd8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorAdd16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorAdd32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorAdd64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorAnd>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorAndNot>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorArithmeticShiftRight8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorArithmeticShiftRight16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorArithmeticShiftRight32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorArithmeticShiftRight64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorArithmeticVShift8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorArithmeticVShift16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorArithmeticVShift32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorArithmeticVShift64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcastLower8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcastLower16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcastLower32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcast8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcast16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcast32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcast64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcastElementLower8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcastElementLower16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcastElementLower32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcastElement8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcastElement16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcastElement32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorBroadcastElement64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorCountLeadingZeros8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorCountLeadingZeros16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorCountLeadingZeros32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveEven8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveEven16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveEven32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveEven64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveEvenLower8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveEvenLower16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveEvenLower32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveOdd8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveOdd16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveOdd32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveOdd64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveOddLower8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveOddLower16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorDeinterleaveOddLower32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorEor>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorEqual8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorEqual16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorEqual32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorEqual64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorEqual128>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorExtract>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorExtractLower>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorGreaterS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorGreaterS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorGreaterS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorGreaterS64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorHalvingAddS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorHalvingAddS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorHalvingAddS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorHalvingAddU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorHalvingAddU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorHalvingAddU32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorHalvingSubS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorHalvingSubS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorHalvingSubS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorHalvingSubU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorHalvingSubU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorHalvingSubU32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorInterleaveLower8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorInterleaveLower16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorInterleaveLower32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorInterleaveLower64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorInterleaveUpper8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorInterleaveUpper16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorInterleaveUpper32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorInterleaveUpper64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorLogicalShiftLeft8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorLogicalShiftLeft16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorLogicalShiftLeft32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorLogicalShiftLeft64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorLogicalShiftRight8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorLogicalShiftRight16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorLogicalShiftRight32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorLogicalShiftRight64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorLogicalVShift8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorLogicalVShift16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorLogicalVShift32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorLogicalVShift64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMaxS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMaxS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMaxS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMaxS64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMaxU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMaxU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMaxU32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMaxU64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMinS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMinS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMinS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMinS64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMinU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMinU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMinU32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMinU64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMultiply8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMultiply16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMultiply32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMultiply64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMultiplySignedWiden8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMultiplySignedWiden16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMultiplySignedWiden32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMultiplyUnsignedWiden8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMultiplyUnsignedWiden16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorMultiplyUnsignedWiden32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorNarrow16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorNarrow32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorNarrow64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorNot>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorOr>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedAddLower8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedAddLower16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedAddLower32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedAddSignedWiden8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedAddSignedWiden16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedAddSignedWiden32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedAddUnsignedWiden8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedAddUnsignedWiden16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedAddUnsignedWiden32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedAdd8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedAdd16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedAdd32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedAdd64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMaxS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMaxS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMaxS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMaxU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMaxU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMaxU32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMinS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMinS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMinS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMinU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMinU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMinU32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMaxLowerS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMaxLowerS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMaxLowerS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMaxLowerU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMaxLowerU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMaxLowerU32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMinLowerS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMinLowerS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMinLowerS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMinLowerU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMinLowerU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPairedMinLowerU32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPolynomialMultiply8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPolynomialMultiplyLong8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPolynomialMultiplyLong64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorPopulationCount>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorReverseBits>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorReverseElementsInHalfGroups8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorReverseElementsInWordGroups8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorReverseElementsInWordGroups16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorReverseElementsInLongGroups8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorReverseElementsInLongGroups16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorReverseElementsInLongGroups32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorReduceAdd8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorReduceAdd16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorReduceAdd32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorReduceAdd64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRotateWholeVectorRight>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingHalvingAddS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingHalvingAddS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingHalvingAddS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingHalvingAddU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingHalvingAddU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingHalvingAddU32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingShiftLeftS8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingShiftLeftS16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingShiftLeftS32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingShiftLeftS64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingShiftLeftU8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingShiftLeftU16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingShiftLeftU32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorRoundingShiftLeftU64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignExtend8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignExtend16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignExtend32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignExtend64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedAbsoluteDifference8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedAbsoluteDifference16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedAbsoluteDifference32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedMultiply16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedMultiply32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedAbs8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedAbs16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedAbs32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedAbs64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedAccumulateUnsigned8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedAccumulateUnsigned16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedAccumulateUnsigned32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedAccumulateUnsigned64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedDoublingMultiplyHigh16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedDoublingMultiplyHigh32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedDoublingMultiplyHighRounding16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedDoublingMultiplyHighRounding32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedDoublingMultiplyLong16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedDoublingMultiplyLong32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedNarrowToSigned16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedNarrowToSigned32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedNarrowToSigned64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedNarrowToUnsigned16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedNarrowToUnsigned32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedNarrowToUnsigned64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedNeg8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedNeg16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedNeg32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedNeg64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedShiftLeft8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedShiftLeft16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedShiftLeft32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedShiftLeft64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedShiftLeftUnsigned8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedShiftLeftUnsigned16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedShiftLeftUnsigned32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedShiftLeftUnsigned64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSub8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSub16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSub32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSub64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorTable>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorTableLookup64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorTableLookup128>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorTranspose8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorTranspose16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorTranspose32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorTranspose64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedAbsoluteDifference8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedAbsoluteDifference16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedAbsoluteDifference32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedMultiply16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedMultiply32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedRecipEstimate>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedRecipSqrtEstimate>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedAccumulateSigned8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedAccumulateSigned16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedAccumulateSigned32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedAccumulateSigned64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedNarrow16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedNarrow32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedNarrow64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedShiftLeft8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedShiftLeft16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedShiftLeft32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedShiftLeft64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorZeroExtend8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorZeroExtend16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorZeroExtend32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorZeroExtend64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorZeroUpper>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::ZeroVector>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+} // namespace Dynarmic::Backend::RV64
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64_vector_floating_point.cpp b/src/dynarmic/backend/riscv64/emit_riscv64_vector_floating_point.cpp
new file mode 100644
index 00000000..c3bf9708
--- /dev/null
+++ b/src/dynarmic/backend/riscv64/emit_riscv64_vector_floating_point.cpp
@@ -0,0 +1,355 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2024 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+#include <biscuit/assembler.hpp>
+#include <fmt/ostream.h>
+
+#include "dynarmic/backend/riscv64/a32_jitstate.h"
+#include "dynarmic/backend/riscv64/abi.h"
+#include "dynarmic/backend/riscv64/emit_context.h"
+#include "dynarmic/backend/riscv64/emit_riscv64.h"
+#include "dynarmic/backend/riscv64/reg_alloc.h"
+#include "dynarmic/ir/basic_block.h"
+#include "dynarmic/ir/microinstruction.h"
+#include "dynarmic/ir/opcodes.h"
+
+namespace Dynarmic::Backend::RV64 {
+
+template<>
+void EmitIR<IR::Opcode::FPVectorAbs16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorAbs32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorAbs64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorAdd32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorAdd64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorDiv32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorDiv64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorEqual16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorEqual32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorEqual64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorFromHalf32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorFromSignedFixed32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorFromSignedFixed64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorFromUnsignedFixed32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorFromUnsignedFixed64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorGreater32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorGreater64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorGreaterEqual32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorGreaterEqual64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMax32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMax64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMaxNumeric32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMaxNumeric64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMin32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMin64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMinNumeric32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMinNumeric64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMul32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMul64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMulAdd16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMulAdd32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMulAdd64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMulX32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorMulX64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorNeg16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorNeg32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorNeg64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorPairedAdd32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorPairedAdd64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorPairedAddLower32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorPairedAddLower64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRecipEstimate16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRecipEstimate32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRecipEstimate64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRecipStepFused16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRecipStepFused32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRecipStepFused64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRoundInt16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRoundInt32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRoundInt64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRSqrtEstimate16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRSqrtEstimate32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRSqrtEstimate64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRSqrtStepFused16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRSqrtStepFused32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorRSqrtStepFused64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorSqrt32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorSqrt64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorSub32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorSub64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorToHalf32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorToSignedFixed16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorToSignedFixed32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorToSignedFixed64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorToUnsignedFixed16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorToUnsignedFixed32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::FPVectorToUnsignedFixed64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+} // namespace Dynarmic::Backend::RV64
diff --git a/src/dynarmic/backend/riscv64/emit_riscv64_vector_saturation.cpp b/src/dynarmic/backend/riscv64/emit_riscv64_vector_saturation.cpp
new file mode 100644
index 00000000..0d3868ec
--- /dev/null
+++ b/src/dynarmic/backend/riscv64/emit_riscv64_vector_saturation.cpp
@@ -0,0 +1,100 @@
+/* This file is part of the dynarmic project.
+ * Copyright (c) 2024 MerryMage
+ * SPDX-License-Identifier: 0BSD
+ */
+
+#include <biscuit/assembler.hpp>
+#include <fmt/ostream.h>
+
+#include "dynarmic/backend/riscv64/a32_jitstate.h"
+#include "dynarmic/backend/riscv64/abi.h"
+#include "dynarmic/backend/riscv64/emit_context.h"
+#include "dynarmic/backend/riscv64/emit_riscv64.h"
+#include "dynarmic/backend/riscv64/reg_alloc.h"
+#include "dynarmic/ir/basic_block.h"
+#include "dynarmic/ir/microinstruction.h"
+#include "dynarmic/ir/opcodes.h"
+
+namespace Dynarmic::Backend::RV64 {
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedAdd8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedAdd16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedAdd32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedAdd64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedSub8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedSub16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedSub32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorSignedSaturatedSub64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedAdd8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedAdd16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedAdd32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedAdd64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedSub8>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedSub16>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedSub32>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+template<>
+void EmitIR<IR::Opcode::VectorUnsignedSaturatedSub64>(biscuit::Assembler&, EmitContext&, IR::Inst*) {
+ UNIMPLEMENTED();
+}
+
+} // namespace Dynarmic::Backend::RV64
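
Every stub body above, like the call site adjusted in reg_alloc.cpp below, uses an UNIMPLEMENTED() helper in place of the older ASSERT_FALSE("Unimplemented") spelling. The macro is presumably provided by the project's existing assertion utilities; an approximate equivalent, given only as an assumption and not shown in this diff, would be:

    // Assumed definition, for reading the stubs only; not part of this patch.
    #define UNIMPLEMENTED() ASSERT_FALSE("Unimplemented")
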
diff --git a/src/dynarmic/backend/riscv64/reg_alloc.cpp b/src/dynarmic/backend/riscv64/reg_alloc.cpp
index 1451b430..e8a0a64f 100644
--- a/src/dynarmic/backend/riscv64/reg_alloc.cpp
+++ b/src/dynarmic/backend/riscv64/reg_alloc.cpp
@@ -159,7 +159,7 @@ u32 RegAlloc::GenerateImmediate(const IR::Value& value) {
return new_location_index;
} else if constexpr (kind == HostLoc::Kind::Fpr) {
- ASSERT_FALSE("Unimplemented");
+ UNIMPLEMENTED();
} else {
static_assert(Common::always_false_v<mcl::mp::lift_value<kind>>);
}