Browse Files

efDataPreprocessing memoryCall.c 沈瑞清 commit at 2020-12-10

沈瑞清 4 years ago
parent
commit
2b9e49819a
1 changed file with 198 additions and 0 deletions
  1. 198 0   efDataPreprocessing/dataSharedMemory/memoryCall.c

+ 198 - 0
efDataPreprocessing/dataSharedMemory/memoryCall.c
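
The patch fills in the body of the Alpha unaligned-access trap handler do_entUna. Each case emulates one load or store opcode in software: it reads the two aligned quadwords that straddle the target address with ldq_u (which ignores the low three address bits), merges the wanted bytes with the ext*/ins*/msk* byte-manipulation instructions, and registers every instruction that may fault in the __ex_table section so that a fault during emulation can be forwarded to an existing fixup handler. As a rough illustration (not part of this commit), the 16-bit load path (ldwu, case 0x0c) can be sketched in plain C as below; the helper name emulate_ldwu and the demo in main are hypothetical, and the sketch assumes a little-endian host, as Alpha is.

#include <stdint.h>
#include <stdio.h>

/* Illustrative sketch only: what the ldwu emulation in do_entUna
 * accomplishes, expressed in C.  The handler loads the two aligned
 * quadwords covering the halfword and merges the relevant bytes with
 * extwl/extwh, so the CPU never performs an unaligned memory access. */
static uint16_t emulate_ldwu(const void *va)
{
	uintptr_t addr = (uintptr_t)va;
	unsigned off = addr & 7;

	/* Aligned quadwords containing the first and last byte of the
	 * halfword; when the halfword does not cross an 8-byte boundary
	 * these are the same quadword (this mirrors ldq_u 0(va) and
	 * ldq_u 1(va)). */
	uint64_t lo = *(const uint64_t *)(addr & ~7UL);
	uint64_t hi = *(const uint64_t *)((addr + 1) & ~7UL);

	uint64_t low_part  = lo >> (off * 8);          /* extwl */
	uint64_t high_part = (off == 7) ? hi << 8 : 0;  /* extwh */

	return (uint16_t)(low_part | high_part);
}

int main(void)
{
	/* Pick an odd offset so the halfword spans two aligned quadwords. */
	uint64_t buf[2] = { 0x1122334455667788ULL, 0x99aabbccddeeff00ULL };
	const uint8_t *p = (const uint8_t *)buf;

	printf("0x%04x\n", emulate_ldwu(p + 7));  /* expected 0x0011 */
	return 0;
}

The store cases in the patch follow the same pattern in reverse (ins*/msk* to splice the new bytes into the two aligned quadwords before writing them back with stq_u), which is why they need two temporaries per quadword.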

@@ -459,3 +459,201 @@ do_entUna(void * va, unsigned long opcode, unsigned long reg,
 	unsigned long pc = regs->pc - 4;
 	unsigned long *_regs = regs->regs;
 	const struct exception_table_entry *fixup;
+
+	unaligned[0].count++;
+	unaligned[0].va = (unsigned long) va;
+	unaligned[0].pc = pc;
+
+	/* We don't want to use the generic get/put unaligned macros as
+	   we want to trap exceptions.  Only if we actually get an
+	   exception will we decide whether we should have caught it.  */
+
+	switch (opcode) {
+	case 0x0c: /* ldwu */
+		__asm__ __volatile__(
+		"1:	ldq_u %1,0(%3)\n"
+		"2:	ldq_u %2,1(%3)\n"
+		"	extwl %1,%3,%1\n"
+		"	extwh %2,%3,%2\n"
+		"3:\n"
+		".section __ex_table,\"a\"\n"
+		"	.long 1b - .\n"
+		"	lda %1,3b-1b(%0)\n"
+		"	.long 2b - .\n"
+		"	lda %2,3b-2b(%0)\n"
+		".previous"
+			: "=r"(error), "=&r"(tmp1), "=&r"(tmp2)
+			: "r"(va), "0"(0));
+		if (error)
+			goto got_exception;
+		una_reg(reg) = tmp1|tmp2;
+		return;
+
+	case 0x28: /* ldl */
+		__asm__ __volatile__(
+		"1:	ldq_u %1,0(%3)\n"
+		"2:	ldq_u %2,3(%3)\n"
+		"	extll %1,%3,%1\n"
+		"	extlh %2,%3,%2\n"
+		"3:\n"
+		".section __ex_table,\"a\"\n"
+		"	.long 1b - .\n"
+		"	lda %1,3b-1b(%0)\n"
+		"	.long 2b - .\n"
+		"	lda %2,3b-2b(%0)\n"
+		".previous"
+			: "=r"(error), "=&r"(tmp1), "=&r"(tmp2)
+			: "r"(va), "0"(0));
+		if (error)
+			goto got_exception;
+		una_reg(reg) = (int)(tmp1|tmp2);
+		return;
+
+	case 0x29: /* ldq */
+		__asm__ __volatile__(
+		"1:	ldq_u %1,0(%3)\n"
+		"2:	ldq_u %2,7(%3)\n"
+		"	extql %1,%3,%1\n"
+		"	extqh %2,%3,%2\n"
+		"3:\n"
+		".section __ex_table,\"a\"\n"
+		"	.long 1b - .\n"
+		"	lda %1,3b-1b(%0)\n"
+		"	.long 2b - .\n"
+		"	lda %2,3b-2b(%0)\n"
+		".previous"
+			: "=r"(error), "=&r"(tmp1), "=&r"(tmp2)
+			: "r"(va), "0"(0));
+		if (error)
+			goto got_exception;
+		una_reg(reg) = tmp1|tmp2;
+		return;
+
+	/* Note that the store sequences do not indicate that they change
+	   memory because it _should_ be affecting nothing in this context.
+	   (Otherwise we have other, much larger, problems.)  */
+	case 0x0d: /* stw */
+		__asm__ __volatile__(
+		"1:	ldq_u %2,1(%5)\n"
+		"2:	ldq_u %1,0(%5)\n"
+		"	inswh %6,%5,%4\n"
+		"	inswl %6,%5,%3\n"
+		"	mskwh %2,%5,%2\n"
+		"	mskwl %1,%5,%1\n"
+		"	or %2,%4,%2\n"
+		"	or %1,%3,%1\n"
+		"3:	stq_u %2,1(%5)\n"
+		"4:	stq_u %1,0(%5)\n"
+		"5:\n"
+		".section __ex_table,\"a\"\n"
+		"	.long 1b - .\n"
+		"	lda %2,5b-1b(%0)\n"
+		"	.long 2b - .\n"
+		"	lda %1,5b-2b(%0)\n"
+		"	.long 3b - .\n"
+		"	lda $31,5b-3b(%0)\n"
+		"	.long 4b - .\n"
+		"	lda $31,5b-4b(%0)\n"
+		".previous"
+			: "=r"(error), "=&r"(tmp1), "=&r"(tmp2),
+			  "=&r"(tmp3), "=&r"(tmp4)
+			: "r"(va), "r"(una_reg(reg)), "0"(0));
+		if (error)
+			goto got_exception;
+		return;
+
+	case 0x2c: /* stl */
+		__asm__ __volatile__(
+		"1:	ldq_u %2,3(%5)\n"
+		"2:	ldq_u %1,0(%5)\n"
+		"	inslh %6,%5,%4\n"
+		"	insll %6,%5,%3\n"
+		"	msklh %2,%5,%2\n"
+		"	mskll %1,%5,%1\n"
+		"	or %2,%4,%2\n"
+		"	or %1,%3,%1\n"
+		"3:	stq_u %2,3(%5)\n"
+		"4:	stq_u %1,0(%5)\n"
+		"5:\n"
+		".section __ex_table,\"a\"\n"
+		"	.long 1b - .\n"
+		"	lda %2,5b-1b(%0)\n"
+		"	.long 2b - .\n"
+		"	lda %1,5b-2b(%0)\n"
+		"	.long 3b - .\n"
+		"	lda $31,5b-3b(%0)\n"
+		"	.long 4b - .\n"
+		"	lda $31,5b-4b(%0)\n"
+		".previous"
+			: "=r"(error), "=&r"(tmp1), "=&r"(tmp2),
+			  "=&r"(tmp3), "=&r"(tmp4)
+			: "r"(va), "r"(una_reg(reg)), "0"(0));
+		if (error)
+			goto got_exception;
+		return;
+
+	case 0x2d: /* stq */
+		__asm__ __volatile__(
+		"1:	ldq_u %2,7(%5)\n"
+		"2:	ldq_u %1,0(%5)\n"
+		"	insqh %6,%5,%4\n"
+		"	insql %6,%5,%3\n"
+		"	mskqh %2,%5,%2\n"
+		"	mskql %1,%5,%1\n"
+		"	or %2,%4,%2\n"
+		"	or %1,%3,%1\n"
+		"3:	stq_u %2,7(%5)\n"
+		"4:	stq_u %1,0(%5)\n"
+		"5:\n"
+		".section __ex_table,\"a\"\n\t"
+		"	.long 1b - .\n"
+		"	lda %2,5b-1b(%0)\n"
+		"	.long 2b - .\n"
+		"	lda %1,5b-2b(%0)\n"
+		"	.long 3b - .\n"
+		"	lda $31,5b-3b(%0)\n"
+		"	.long 4b - .\n"
+		"	lda $31,5b-4b(%0)\n"
+		".previous"
+			: "=r"(error), "=&r"(tmp1), "=&r"(tmp2),
+			  "=&r"(tmp3), "=&r"(tmp4)
+			: "r"(va), "r"(una_reg(reg)), "0"(0));
+		if (error)
+			goto got_exception;
+		return;
+	}
+
+	printk("Bad unaligned kernel access at %016lx: %p %lx %lu\n",
+		pc, va, opcode, reg);
+	do_exit(SIGSEGV);
+
+got_exception:
+	/* Ok, we caught the exception, but we don't want it.  Is there
+	   someone to pass it along to?  */
+	if ((fixup = search_exception_tables(pc)) != 0) {
+		unsigned long newpc;
+		newpc = fixup_exception(una_reg, fixup, pc);
+
+		printk("Forwarding unaligned exception at %lx (%lx)\n",
+		       pc, newpc);
+
+		regs->pc = newpc;
+		return;
+	}
+
+	/*
+	 * Yikes!  No one to forward the exception to.
+	 * Since the registers are in a weird format, dump them ourselves.
+ 	 */
+
+	printk("%s(%d): unhandled unaligned exception\n",
+	       current->comm, task_pid_nr(current));
+
+	printk("pc = [<%016lx>]  ra = [<%016lx>]  ps = %04lx\n",
+	       pc, una_reg(26), regs->ps);
+	printk("r0 = %016lx  r1 = %016lx  r2 = %016lx\n",
+	       una_reg(0), una_reg(1), una_reg(2));
+	printk("r3 = %016lx  r4 = %016lx  r5 = %016lx\n",
+ 	       una_reg(3), una_reg(4), una_reg(5));
+	printk("r6 = %016lx  r7 = %016lx  r8 = %016lx\n",
+	       una_reg(6), una_reg(7), una_reg(8));