Module Name:    src
Committed By:   maxv
Date:           Mon Oct 30 17:06:42 UTC 2017

Modified Files:
        src/sys/arch/amd64/amd64: copy.S cpufunc.S linux32_sigcode.S
            linux_sigcode.S vector.S

Log Message:
Always use END() markers when declaring functions in assembly, so that ld
can compute the size of each function. A few unconverted functions remain.
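
For reference, END() expands - approximately; see the real macros in
<machine/asm.h> - to a .size directive, without which the function
symbol has size zero in the object file. A minimal sketch, using a
hypothetical function name:

	.text
	.globl	dummy_func
	.type	dummy_func, @function	/* roughly what ENTRY() emits */
dummy_func:
	movq	%rdi, %rax
	ret
	.size	dummy_func, . - dummy_func	/* roughly what END() emits */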

While here, fix a bug in the INTRSTUB macro: the recurse_ stub falls
through into resume_, but resume_ is aligned, so we end up executing the
inter-function padding - it probably happens to contain NOPs, but that's
still bad. Jump over the padding explicitly instead.
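
Concretely, a simplified sketch of the macro shape (generic stub name,
not the literal INTRSTUB expansion). Before, recurse_ ended without a
branch and fell through the alignment padding into resume_:

IDTVEC(recurse_foo)
	...
	INTRENTRY
	/* falls into the .align padding before resume_foo */
IDTVEC(resume_foo)
	movq	$IREENT_MAGIC,TF_ERR(%rsp)
	...

After, recurse_ jumps explicitly to a local label at the start of
resume_, so the padding is never executed:

IDTVEC(recurse_foo)
	...
	INTRENTRY
	jmp	1f
IDTVEC_END(recurse_foo)
IDTVEC(resume_foo)
1:	movq	$IREENT_MAGIC,TF_ERR(%rsp)
	...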


To generate a diff of this commit:
cvs rdiff -u -r1.26 -r1.27 src/sys/arch/amd64/amd64/copy.S
cvs rdiff -u -r1.29 -r1.30 src/sys/arch/amd64/amd64/cpufunc.S
cvs rdiff -u -r1.3 -r1.4 src/sys/arch/amd64/amd64/linux32_sigcode.S
cvs rdiff -u -r1.2 -r1.3 src/sys/arch/amd64/amd64/linux_sigcode.S
cvs rdiff -u -r1.51 -r1.52 src/sys/arch/amd64/amd64/vector.S

Please note that diffs are not public domain; they are subject to the
copyright notices on the relevant files.

Modified files:

Index: src/sys/arch/amd64/amd64/copy.S
diff -u src/sys/arch/amd64/amd64/copy.S:1.26 src/sys/arch/amd64/amd64/copy.S:1.27
--- src/sys/arch/amd64/amd64/copy.S:1.26	Tue Oct 17 07:02:50 2017
+++ src/sys/arch/amd64/amd64/copy.S	Mon Oct 30 17:06:42 2017
@@ -1,4 +1,4 @@
-/*	$NetBSD: copy.S,v 1.26 2017/10/17 07:02:50 maxv Exp $	*/
+/*	$NetBSD: copy.S,v 1.27 2017/10/30 17:06:42 maxv Exp $	*/
 
 /*
  * Copyright (c) 2001 Wasabi Systems, Inc.
@@ -105,6 +105,7 @@ ENTRY(do_pmap_load)
 	popq	%rdi
 	leaveq
 	ret
+END(do_pmap_load)
 
 /*
  * SMAP functions. ret+int3+int3 is patched dynamically to STAC/CLAC.
@@ -116,6 +117,7 @@ ENTRY(smap_enable)
 	int3
 	int3
 	ret
+END(smap_enable)
 
 ENTRY(smap_disable)
 .Lstacpatch:
@@ -123,6 +125,7 @@ ENTRY(smap_disable)
 	int3
 	int3
 	ret
+END(smap_disable)
 
 /*
  * Copy routines from and to userland, plus a few more. See the
@@ -190,6 +193,7 @@ ENTRY(kcopy)
 .Lkcopy_end:
 	xorq	%rax,%rax
 	ret
+END(kcopy)
 
 ENTRY(copyout)
 	DEFERRED_SWITCH_CHECK
@@ -219,6 +223,7 @@ ENTRY(copyout)
 	xorl	%eax,%eax
 	ret
 	DEFERRED_SWITCH_CALL
+END(copyout)
 
 ENTRY(copyin)
 	DEFERRED_SWITCH_CHECK
@@ -249,6 +254,7 @@ ENTRY(copyin)
 	xorl	%eax,%eax
 	ret
 	DEFERRED_SWITCH_CALL
+END(copyin)
 
 NENTRY(copy_efault)
 	movq	$EFAULT,%rax
@@ -308,6 +314,7 @@ ENTRY(copyoutstr)
 	movq	$ENAMETOOLONG,%rax
 	jmp	copystr_return
 	DEFERRED_SWITCH_CALL
+END(copyoutstr)
 
 ENTRY(copyinstr)
 	DEFERRED_SWITCH_CHECK
@@ -351,6 +358,7 @@ ENTRY(copyinstr)
 	movq	$ENAMETOOLONG,%rax
 	jmp	copystr_return
 	DEFERRED_SWITCH_CALL
+END(copyinstr)
 
 ENTRY(copystr_efault)
 	movl	$EFAULT,%eax
@@ -394,7 +402,7 @@ ENTRY(copystr)
 	movq	%r8,(%rcx)
 
 7:	ret
-
+END(copystr)
 
 ENTRY(fuswintr)
 	cmpl	$TLBSTATE_VALID,CPUVAR(TLBSTATE)
@@ -412,6 +420,7 @@ ENTRY(fuswintr)
 
 	movq	$0,PCB_ONFAULT(%rcx)
 	ret
+END(fuswintr)
 
 ENTRY(fubyte)
 	DEFERRED_SWITCH_CHECK
@@ -429,6 +438,7 @@ ENTRY(fubyte)
 	movq	$0,PCB_ONFAULT(%rcx)
 	ret
 	DEFERRED_SWITCH_CALL
+END(fubyte)
 
 ENTRY(suswintr)
 	cmpl	$TLBSTATE_VALID,CPUVAR(TLBSTATE)
@@ -447,6 +457,7 @@ ENTRY(suswintr)
 	xorq	%rax,%rax
 	movq	%rax,PCB_ONFAULT(%rcx)
 	ret
+END(suswintr)
 
 ENTRY(subyte)
 	DEFERRED_SWITCH_CHECK
@@ -466,6 +477,7 @@ ENTRY(subyte)
 	movq	%rax,PCB_ONFAULT(%rcx)
 	ret
 	DEFERRED_SWITCH_CALL
+END(subyte)
 
 /*
  * These are the same, but must reside at different addresses,
@@ -476,16 +488,19 @@ ENTRY(fusuintrfailure)
 	movq	$0,PCB_ONFAULT(%rcx)
 	movl	$-1,%eax
 	ret
+END(fusuintrfailure)
 
 ENTRY(fusufailure)
 	callq	smap_enable
 	movq	$0,PCB_ONFAULT(%rcx)
 	movl	$-1,%eax
 	ret
+END(fusufailure)
 
 ENTRY(fusuaddrfault)
 	movl	$-1,%eax
 	ret
+END(fusuaddrfault)
 
 /*
  * Compare-and-swap the 64-bit integer in the user-space.
@@ -516,6 +531,7 @@ ENTRY(ucas_64)
 	xorq	%rax,%rax
 	ret
 	DEFERRED_SWITCH_CALL
+END(ucas_64)
 
 /*
  * int	ucas_32(volatile int32_t *uptr, int32_t old, int32_t new, int32_t *ret);
@@ -544,6 +560,7 @@ ENTRY(ucas_32)
 	xorq	%rax,%rax
 	ret
 	DEFERRED_SWITCH_CALL
+END(ucas_32)
 
 ENTRY(ucas_efault)
 	movq	$EFAULT,%rax

Index: src/sys/arch/amd64/amd64/cpufunc.S
diff -u src/sys/arch/amd64/amd64/cpufunc.S:1.29 src/sys/arch/amd64/amd64/cpufunc.S:1.30
--- src/sys/arch/amd64/amd64/cpufunc.S:1.29	Sun Oct 15 11:31:00 2017
+++ src/sys/arch/amd64/amd64/cpufunc.S	Mon Oct 30 17:06:42 2017
@@ -1,6 +1,6 @@
-/*	$NetBSD: cpufunc.S,v 1.29 2017/10/15 11:31:00 maxv Exp $	*/
+/*	$NetBSD: cpufunc.S,v 1.30 2017/10/30 17:06:42 maxv Exp $	*/
 
-/*-
+/*
  * Copyright (c) 1998, 2007, 2008 The NetBSD Foundation, Inc.
  * All rights reserved.
  *
@@ -51,23 +51,28 @@
 ENTRY(x86_lfence)
 	lfence
 	ret
+END(x86_lfence)
 
 ENTRY(x86_sfence)
 	sfence
 	ret
+END(x86_sfence)
 
 ENTRY(x86_mfence)
 	mfence
 	ret
+END(x86_mfence)
 
 #ifndef XEN
 ENTRY(invlpg)
 	invlpg	(%rdi)
 	ret
+END(invlpg)
 
 ENTRY(lidt)
 	lidt	(%rdi)
 	ret
+END(lidt)
 
 ENTRY(lldt)
 	cmpl	%edi, CPUVAR(CURLDT)
@@ -77,51 +82,63 @@ ENTRY(lldt)
 	movl	%edi, CPUVAR(CURLDT)
 	lldt	%di
 	ret
+END(lldt)
 
 ENTRY(ltr)
 	ltr	%di
 	ret
+END(ltr)
 
 ENTRY(lcr0)
 	movq	%rdi, %cr0
 	ret
+END(lcr0)
 
 ENTRY(rcr0)
 	movq	%cr0, %rax
 	ret
+END(rcr0)
 
 ENTRY(lcr2)
 	movq	%rdi, %cr2
 	ret
+END(lcr2)
 
 ENTRY(rcr2)
 	movq	%cr2, %rax
 	ret
+END(rcr2)
 
 ENTRY(lcr3)
 	movq	%rdi, %cr3
 	ret
+END(lcr3)
 
 ENTRY(rcr3)
 	movq	%cr3, %rax
 	ret
+END(rcr3)
 #endif
 
 ENTRY(lcr4)
 	movq	%rdi, %cr4
 	ret
+END(lcr4)
 
 ENTRY(rcr4)
 	movq	%cr4, %rax
 	ret
+END(rcr4)
 
 ENTRY(lcr8)
 	movq	%rdi, %cr8
 	ret
+END(lcr8)
 
 ENTRY(rcr8)
 	movq	%cr8, %rax
 	ret
+END(rcr8)
 
 /*
  * Big hammer: flush all TLB entries, including ones from PTE's
@@ -151,73 +168,90 @@ ENTRY(tlbflushg)
 	movq	%rdx, %cr4
 	movq	%rax, %cr4
 	ret
+END(tlbflushg)
 
 ENTRY(tlbflush)
 1:
 	movq	%cr3, %rax
 	movq	%rax, %cr3
 	ret
+END(tlbflush)
 
 ENTRY(ldr0)
 	movq	%rdi, %dr0
 	ret
+END(ldr0)
 
 ENTRY(rdr0)
 	movq	%dr0, %rax
 	ret
+END(rdr0)
 
 ENTRY(ldr1)
 	movq	%rdi, %dr1
 	ret
+END(ldr1)
 
 ENTRY(rdr1)
 	movq	%dr1, %rax
 	ret
+END(rdr1)
 
 ENTRY(ldr2)
 	movq	%rdi, %dr2
 	ret
+END(ldr2)
 
 ENTRY(rdr2)
 	movq	%dr2, %rax
 	ret
+END(rdr2)
 
 ENTRY(ldr3)
 	movq	%rdi, %dr3
 	ret
+END(ldr3)
 
 ENTRY(rdr3)
 	movq	%dr3, %rax
 	ret
+END(rdr3)
 
 ENTRY(ldr6)
 	movq	%rdi, %dr6
 	ret
+END(ldr6)
 
 ENTRY(rdr6)
 	movq	%dr6, %rax
 	ret
+END(rdr6)
 
 ENTRY(ldr7)
 	movq	%rdi, %dr7
 	ret
+END(ldr7)
 
 ENTRY(rdr7)
 	movq	%dr7, %rax
 	ret
+END(rdr7)
 
 ENTRY(x86_disable_intr)
 	cli
 	ret
+END(x86_disable_intr)
 
 ENTRY(x86_enable_intr)
 	sti
 	ret
+END(x86_enable_intr)
 
 ENTRY(x86_read_flags)
 	pushfq
 	popq	%rax
 	ret
+END(x86_read_flags)
 
 STRONG_ALIAS(x86_read_psl,x86_read_flags)
 
@@ -225,6 +259,7 @@ ENTRY(x86_write_flags)
 	pushq	%rdi
 	popfq
 	ret
+END(x86_write_flags)
 
 STRONG_ALIAS(x86_write_psl,x86_write_flags)
 #endif /* XEN */
@@ -236,6 +271,7 @@ ENTRY(rdmsr)
 	shlq	$32, %rdx
 	orq	%rdx, %rax
 	ret
+END(rdmsr)
 
 ENTRY(wrmsr)
 	movq	%rdi, %rcx
@@ -244,6 +280,7 @@ ENTRY(wrmsr)
 	shrq	$32, %rdx
 	wrmsr
 	ret
+END(wrmsr)
 
 ENTRY(rdmsr_locked)
 	movq	%rdi, %rcx
@@ -253,6 +290,7 @@ ENTRY(rdmsr_locked)
 	shlq	$32, %rdx
 	orq	%rdx, %rax
 	ret
+END(rdmsr_locked)
 
 ENTRY(wrmsr_locked)
 	movq	%rdi, %rcx
@@ -262,6 +300,7 @@ ENTRY(wrmsr_locked)
 	movl	$OPTERON_MSR_PASSCODE, %edi
 	wrmsr
 	ret
+END(wrmsr_locked)
 
 /*
  * Support for reading MSRs in the safe manner (returns EFAULT on fault)
@@ -283,6 +322,7 @@ ENTRY(rdmsr_safe)
 
 	movq	%rax, PCB_ONFAULT(%r8)
 	ret
+END(rdmsr_safe)
 
 ENTRY(rdxcr)
 	movq	%rdi, %rcx
@@ -290,6 +330,7 @@ ENTRY(rdxcr)
 	shlq	$32, %rdx
 	orq	%rdx, %rax
 	ret
+END(rdxcr)
 
 ENTRY(wrxcr)
 	movq	%rdi, %rcx
@@ -298,6 +339,7 @@ ENTRY(wrxcr)
 	shrq	$32, %rdx
 	xsetbv
 	ret
+END(wrxcr)
 
 /*
  * MSR operations fault handler
@@ -308,11 +350,13 @@ NENTRY(msr_onfault)
 	movq	$0, PCB_ONFAULT(%r8)
 	movl	$EFAULT, %eax
 	ret
+END(msr_onfault)
 
 #ifndef XEN
 ENTRY(wbinvd)
 	wbinvd
 	ret
+END(wbinvd)
 #endif
 
 ENTRY(cpu_counter)
@@ -322,11 +366,13 @@ ENTRY(cpu_counter)
 	orq	%rdx, %rax
 	addq	CPUVAR(CC_SKEW), %rax
 	ret
+END(cpu_counter)
 
 ENTRY(cpu_counter32)
 	rdtsc
 	addl	CPUVAR(CC_SKEW), %eax
 	ret
+END(cpu_counter32)
 
 ENTRY(rdpmc)
 	movq	%rdi, %rcx
@@ -335,6 +381,7 @@ ENTRY(rdpmc)
 	shlq	$32, %rdx
 	orq	%rdx, %rax
 	ret
+END(rdpmc)
 
 ENTRY(breakpoint)
 	pushq	%rbp
@@ -342,28 +389,34 @@ ENTRY(breakpoint)
 	int	$0x03		/* paranoid, not 'int3' */
 	leave
 	ret
+END(breakpoint)
 
 ENTRY(x86_curcpu)
 	movq	%gs:(CPU_INFO_SELF), %rax
 	ret
+END(x86_curcpu)
 
 ENTRY(x86_curlwp)
 	movq	%gs:(CPU_INFO_CURLWP), %rax
 	ret
+END(x86_curlwp)
 
 ENTRY(cpu_set_curpri)
 	movl	%edi, %gs:(CPU_INFO_CURPRIORITY)
 	ret
+END(cpu_set_curpri)
 
 ENTRY(__byte_swap_u32_variable)
 	movl	%edi, %eax
 	bswapl	%eax
 	ret
+END(__byte_swap_u32_variable)
 
 ENTRY(__byte_swap_u16_variable)
 	movl	%edi, %eax
 	xchgb	%al, %ah
 	ret
+END(__byte_swap_u16_variable)
 
 /*
  * void lgdt(struct region_descriptor *rdp);
@@ -406,6 +459,7 @@ ENTRY(x86_flush)
 	pushq	$GSEL(GCODE_SEL, SEL_KPL)
 	pushq	%rax
 	lretq
+END(x86_flush)
 
 /* Waits - set up stack frame. */
 ENTRY(x86_hlt)
@@ -414,6 +468,7 @@ ENTRY(x86_hlt)
 	hlt
 	leave
 	ret
+END(x86_hlt)
 
 /* Waits - set up stack frame. */
 ENTRY(x86_stihlt)
@@ -423,15 +478,17 @@ ENTRY(x86_stihlt)
 	hlt
 	leave
 	ret
+END(x86_stihlt)
 
 ENTRY(x86_monitor)
 	movq	%rdi, %rax
 	movq	%rsi, %rcx
 	monitor	%rax, %rcx, %rdx
 	ret
+END(x86_monitor)
 
 /* Waits - set up stack frame. */
-ENTRY(x86_mwait)  
+ENTRY(x86_mwait)
 	pushq	%rbp
 	movq	%rsp, %rbp
 	movq	%rdi, %rax
@@ -439,10 +496,12 @@ ENTRY(x86_mwait)  
 	mwait	%rax, %rcx
 	leave
 	ret
+END(x86_mwait)
 
 NENTRY(x86_pause)
 	pause
 	ret
+END(x86_pause)
 
 ENTRY(x86_cpuid2)
 	movq	%rbx, %r8
@@ -456,38 +515,47 @@ ENTRY(x86_cpuid2)
 	movl	%edx, 12(%rsi)
 	movq	%r8, %rbx
 	ret
+END(x86_cpuid2)
 
 ENTRY(x86_getss)
 	movl	%ss, %eax
 	ret
+END(x86_getss)
 
 ENTRY(fldcw)
 	fldcw	(%rdi)
 	ret
+END(fldcw)
 
-ENTRY(fnclex)	
+ENTRY(fnclex)
 	fnclex
 	ret
+END(fnclex)
 
 ENTRY(fninit)
 	fninit
 	ret
+END(fninit)
 
 ENTRY(fnsave)
 	fnsave	(%rdi)
 	ret
+END(fnsave)
 
 ENTRY(fnstcw)
 	fnstcw	(%rdi)
 	ret
+END(fnstcw)
 
 ENTRY(fngetsw)
 	fnstsw	%ax
 	ret
+END(fngetsw)
 
 ENTRY(fnstsw)
 	fnstsw	(%rdi)
 	ret
+END(fnstsw)
 
 ENTRY(fp_divide_by_0)
 	fldz
@@ -495,37 +563,45 @@ ENTRY(fp_divide_by_0)
 	fdiv	%st, %st(1)
 	fwait
 	ret
+END(fp_divide_by_0)
 
 ENTRY(frstor)
 	frstor	(%rdi)
 	ret
+END(frstor)
 
 ENTRY(fwait)
 	fwait
 	ret
+END(fwait)
 
 ENTRY(clts)
 	clts
 	ret
+END(clts)
 
 ENTRY(stts)
 	movq	%cr0, %rax
 	orq	$CR0_TS, %rax
 	movq	%rax, %cr0
 	ret
+END(stts)
 
 ENTRY(fxsave)
 	fxsave	(%rdi)
 	ret
+END(fxsave)
 
 ENTRY(fxrstor)
 	fxrstor	(%rdi)
 	ret
+END(fxrstor)
 
 ENTRY(fldummy)
 	ffree	%st(7)
 	fldz
 	ret
+END(fldummy)
 
 ENTRY(xsave)
 	movq	%rsi, %rax
@@ -533,6 +609,7 @@ ENTRY(xsave)
 	shrq	$32, %rdx
 	xsave	(%rdi)
 	ret
+END(xsave)
 
 ENTRY(xsaveopt)
 	movq	%rsi, %rax
@@ -540,6 +617,7 @@ ENTRY(xsaveopt)
 	shrq	$32, %rdx
 	xsaveopt	(%rdi)
 	ret
+END(xsaveopt)
 
 ENTRY(xrstor)
 	movq	%rsi, %rax
@@ -547,20 +625,24 @@ ENTRY(xrstor)
 	shrq	$32, %rdx
 	xrstor	(%rdi)
 	ret
+END(xrstor)
 
 ENTRY(x86_stmxcsr)
 	stmxcsr	(%rdi)
 	ret
+END(x86_stmxcsr)
 
 ENTRY(x86_ldmxcsr)
 	ldmxcsr	(%rdi)
 	ret
+END(x86_ldmxcsr)
 
 ENTRY(inb)
 	movq	%rdi, %rdx
 	xorq	%rax, %rax
 	inb	%dx, %al
 	ret
+END(inb)
 
 ENTRY(insb)
 	movl	%edx, %ecx
@@ -569,12 +651,14 @@ ENTRY(insb)
 	rep
 	insb
 	ret
+END(insb)
 
 ENTRY(inw)
 	movq	%rdi, %rdx
 	xorq	%rax, %rax
 	inw	%dx, %ax
 	ret
+END(inw)
 
 ENTRY(insw)
 	movl	%edx, %ecx
@@ -583,12 +667,14 @@ ENTRY(insw)
 	rep
 	insw
 	ret
+END(insw)
 
 ENTRY(inl)
 	movq	%rdi, %rdx
 	xorq	%rax, %rax
 	inl	%dx, %eax
 	ret
+END(inl)
 
 ENTRY(insl)
 	movl	%edx, %ecx
@@ -597,12 +683,14 @@ ENTRY(insl)
 	rep
 	insl
 	ret
+END(insl)
 
 ENTRY(outb)
 	movq	%rdi, %rdx
 	movq	%rsi, %rax
 	outb	%al, %dx
 	ret
+END(outb)
 
 ENTRY(outsb)
 	movl	%edx, %ecx
@@ -610,12 +698,14 @@ ENTRY(outsb)
 	rep
 	outsb
 	ret
+END(outsb)
 
 ENTRY(outw)
 	movq	%rdi, %rdx
 	movq	%rsi, %rax
 	outw	%ax, %dx
 	ret
+END(outw)
 
 ENTRY(outsw)
 	movl	%edx, %ecx
@@ -623,12 +713,14 @@ ENTRY(outsw)
 	rep
 	outsw
 	ret
+END(outsw)
 
 ENTRY(outl)
 	movq	%rdi, %rdx
 	movq	%rsi, %rax
 	outl	%eax, %dx
 	ret
+END(outl)
 
 ENTRY(outsl)
 	movl	%edx, %ecx
@@ -636,18 +728,22 @@ ENTRY(outsl)
 	rep
 	outsl
 	ret
+END(outsl)
 
 ENTRY(setds)
 	movw	%di, %ds
 	ret
+END(setds)
 
 ENTRY(setes)
 	movw	%di, %es
 	ret
+END(setes)
 
 ENTRY(setfs)
 	movw	%di, %fs
 	ret
+END(setfs)
 
 #ifndef XEN
 ENTRY(setusergs)
@@ -657,4 +753,5 @@ ENTRY(setusergs)
 	swapgs
 	STI(ax)
 	ret
+END(setusergs)
 #endif

Index: src/sys/arch/amd64/amd64/linux32_sigcode.S
diff -u src/sys/arch/amd64/amd64/linux32_sigcode.S:1.3 src/sys/arch/amd64/amd64/linux32_sigcode.S:1.4
--- src/sys/arch/amd64/amd64/linux32_sigcode.S:1.3	Sun Nov 22 13:41:24 2015
+++ src/sys/arch/amd64/amd64/linux32_sigcode.S	Mon Oct 30 17:06:42 2017
@@ -1,4 +1,4 @@
-/*	$NetBSD: linux32_sigcode.S,v 1.3 2015/11/22 13:41:24 maxv Exp $ */
+/*	$NetBSD: linux32_sigcode.S,v 1.4 2017/10/30 17:06:42 maxv Exp $ */
 
 #include "assym.h"
 #include <machine/asm.h>
@@ -12,6 +12,7 @@ NENTRY(linux32_sigcode)
 	int	$0x80
 	movl	$LINUX32_SYS_exit,%eax
 	int	$0x80
+END(linux32_sigcode)
 
 	.balign	16
 NENTRY(linux32_rt_sigcode)

Index: src/sys/arch/amd64/amd64/linux_sigcode.S
diff -u src/sys/arch/amd64/amd64/linux_sigcode.S:1.2 src/sys/arch/amd64/amd64/linux_sigcode.S:1.3
--- src/sys/arch/amd64/amd64/linux_sigcode.S:1.2	Sun Dec 11 12:16:21 2005
+++ src/sys/arch/amd64/amd64/linux_sigcode.S	Mon Oct 30 17:06:42 2017
@@ -1,4 +1,4 @@
-/*	$NetBSD: linux_sigcode.S,v 1.2 2005/12/11 12:16:21 christos Exp $ */
+/*	$NetBSD: linux_sigcode.S,v 1.3 2017/10/30 17:06:42 maxv Exp $ */
 
 /*-
  * Copyright (c) 2005 Emmanuel Dreyfus, all rights reserved.
@@ -41,6 +41,7 @@
 
 /* LINTSTUB: Var: char linux_sigcode[1], linux_esigcode[1]; */
 NENTRY(linux_sigcode)
+END(linux_sigcode)
 
 /* LINTSTUB: Var: char linux_rt_sigcode[1]; */
 NENTRY(linux_rt_sigcode)

Index: src/sys/arch/amd64/amd64/vector.S
diff -u src/sys/arch/amd64/amd64/vector.S:1.51 src/sys/arch/amd64/amd64/vector.S:1.52
--- src/sys/arch/amd64/amd64/vector.S:1.51	Tue Aug 15 09:08:39 2017
+++ src/sys/arch/amd64/amd64/vector.S	Mon Oct 30 17:06:42 2017
@@ -1,4 +1,4 @@
-/*	$NetBSD: vector.S,v 1.51 2017/08/15 09:08:39 maxv Exp $	*/
+/*	$NetBSD: vector.S,v 1.52 2017/10/30 17:06:42 maxv Exp $	*/
 
 /*-
  * Copyright (c) 1998, 2007, 2008 The NetBSD Foundation, Inc.
@@ -283,12 +283,15 @@ IDTVEC(recurse_ ## name ## num)						;\
 	subq	$8,%rsp							;\
 	pushq	$T_ASTFLT		/* trap # for doing ASTs */	;\
 	INTRENTRY							;\
+	jmp	1f							;\
+IDTVEC_END(recurse_ ## name ## num)					;\
 IDTVEC(resume_ ## name ## num)						\
-	movq	$IREENT_MAGIC,TF_ERR(%rsp)				;\
+1:	movq	$IREENT_MAGIC,TF_ERR(%rsp)				;\
 	movl	%ebx,%r13d						;\
 	movq	CPUVAR(ISOURCES) + (num) * 8,%r14			;\
 	movl	IS_MAXLEVEL(%r14),%ebx					;\
 	jmp	1f							;\
+IDTVEC_END(resume_ ## name ## num)					;\
 IDTVEC(intr_ ## name ## num)						;\
 	pushq	$0			/* dummy error code */		;\
 	pushq	$T_ASTFLT		/* trap # for doing ASTs */	;\
@@ -343,7 +346,8 @@ IDTVEC(intr_ ## name ## num)						;\
 9:									\
 	unmask(num)							;\
 	late_ack(num)							;\
-	INTRFASTEXIT
+	INTRFASTEXIT							;\
+IDTVEC_END(intr_ ## name ## num)
 
 #define ICUADDR IO_ICU1
 
