This patch to libgo adds the runtime/internal/atomic package.  This
copies the API that is in the Go 1.7 library, but for gccgo the actual
implementation is in C, written using the __atomic intrinsics.
Bootstrapped and ran Go testsuite on x86_64-pc-linux-gnu.  Committed
to mainline.

Ian
Index: gcc/go/gofrontend/MERGE
===================================================================
--- gcc/go/gofrontend/MERGE     (revision 240067)
+++ gcc/go/gofrontend/MERGE     (working copy)
@@ -1,4 +1,4 @@
-6e791d2f3c183d55fdcabe3df20f01c029fd6d54
+04fe765560107e5d4c5f98c1022765930a1069f9
 
 The first line of this file holds the git revision number of the last
 merge done from the gofrontend repository.
Index: libgo/Makefile.am
===================================================================
--- libgo/Makefile.am   (revision 240053)
+++ libgo/Makefile.am   (working copy)
@@ -843,6 +843,8 @@ libgo_go_objs = \
        net/rpc/jsonrpc.lo \
        runtime/debug.lo \
        runtime/pprof.lo \
+       runtime/internal/atomic.lo \
+       runtime/internal/atomic_c.lo \
        sync/atomic.lo \
        sync/atomic_c.lo \
        text/scanner.lo \
@@ -2365,6 +2367,18 @@ runtime/pprof/check: $(CHECK_DEPS)
 # Also use -fno-inline to get better results from the memory profiler.
 runtime_pprof_check_GOCFLAGS = -static-libgo -fno-inline
 
+@go_include@ runtime/internal/atomic.lo.dep
+runtime/internal/atomic.lo.dep: $(srcdir)/go/runtime/internal/atomic/*.go
+       $(BUILDDEPS)
+runtime_internal_atomic_lo_GOCFLAGS = -fgo-compiling-runtime
+runtime/internal/atomic.lo:
+       $(BUILDPACKAGE)
+runtime/internal/atomic_c.lo: go/runtime/internal/atomic/atomic.c runtime.inc runtime/internal/atomic.lo
+       $(LTCOMPILE) -c -o $@ $(srcdir)/go/runtime/internal/atomic/atomic.c
+runtime/internal/atomic/check: $(CHECK_DEPS)
+       @$(CHECK)
+.PHONY: runtime/internal/atomic/check
+
 @go_include@ sync/atomic.lo.dep
 sync/atomic.lo.dep: $(srcdir)/go/sync/atomic/*.go
        $(BUILDDEPS)
@@ -2800,6 +2814,8 @@ runtime/debug.gox: runtime/debug.lo
        $(BUILDGOX)
 runtime/pprof.gox: runtime/pprof.lo
        $(BUILDGOX)
+runtime/internal/atomic.gox: runtime/internal/atomic.lo
+       $(BUILDGOX)
 
 sync/atomic.gox: sync/atomic.lo
        $(BUILDGOX)
@@ -2954,6 +2970,7 @@ TEST_PACKAGES = \
        path/filepath/check \
        regexp/syntax/check \
        runtime/pprof/check \
+       runtime/internal/atomic/check \
        sync/atomic/check \
        text/scanner/check \
        text/tabwriter/check \
Index: libgo/go/runtime/internal/atomic/atomic.c
===================================================================
--- libgo/go/runtime/internal/atomic/atomic.c   (revision 0)
+++ libgo/go/runtime/internal/atomic/atomic.c   (working copy)
@@ -0,0 +1,237 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+#include <stdint.h>
+
+#include "runtime.h"
+
+uint32_t Load (uint32_t *ptr)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Load")
+  __attribute__ ((no_split_stack));
+
+uint32_t
+Load (uint32_t *ptr)
+{
+  return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
+}
+
+void *Loadp (void *ptr)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Loadp")
+  __attribute__ ((no_split_stack));
+
+void *
+Loadp (void *ptr)
+{
+  return __atomic_load_n ((void **) ptr, __ATOMIC_ACQUIRE);
+}
+
+uint64_t Load64 (uint64_t *ptr)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Load64")
+  __attribute__ ((no_split_stack));
+
+uint64_t
+Load64 (uint64_t *ptr)
+{
+  return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
+}
+
+uintptr_t Loaduintptr (uintptr_t *ptr)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Loaduintptr")
+  __attribute__ ((no_split_stack));
+
+uintptr_t
+Loaduintptr (uintptr_t *ptr)
+{
+  return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
+}
+
+uintgo Loaduint (uintgo *ptr)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Loaduint")
+  __attribute__ ((no_split_stack));
+
+uintgo
+Loaduint (uintgo *ptr)
+{
+  return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
+}
+
+int64_t Loadint64 (int64_t *ptr)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Loadint64")
+  __attribute__ ((no_split_stack));
+
+int64_t
+Loadint64 (int64_t *ptr)
+{
+  return __atomic_load_n (ptr, __ATOMIC_ACQUIRE);
+}
+
+uint32_t Xadd (uint32_t *ptr, int32_t delta)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xadd")
+  __attribute__ ((no_split_stack));
+
+uint32_t
+Xadd (uint32_t *ptr, int32_t delta)
+{
+  return __atomic_add_fetch (ptr, (uint32_t) delta, __ATOMIC_SEQ_CST);
+}
+
+uint64_t Xadd64 (uint64_t *ptr, int64_t delta)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xadd64")
+  __attribute__ ((no_split_stack));
+
+uint64_t
+Xadd64 (uint64_t *ptr, int64_t delta)
+{
+  return __atomic_add_fetch (ptr, (uint64_t) delta, __ATOMIC_SEQ_CST);
+}
+
+uintptr_t Xadduintptr (uintptr_t *ptr, uintptr_t delta)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xadduintptr")
+  __attribute__ ((no_split_stack));
+
+uintptr_t
+Xadduintptr (uintptr_t *ptr, uintptr_t delta)
+{
+  return __atomic_add_fetch (ptr, delta, __ATOMIC_SEQ_CST);
+}
+
+int64_t Xaddint64 (int64_t *ptr, int64_t delta)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xaddint64")
+  __attribute__ ((no_split_stack));
+
+int64_t
+Xaddint64 (int64_t *ptr, int64_t delta)
+{
+  return __atomic_add_fetch (ptr, delta, __ATOMIC_SEQ_CST);
+}
+
+uint32_t Xchg (uint32_t *ptr, uint32_t new)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xchg")
+  __attribute__ ((no_split_stack));
+
+uint32_t
+Xchg (uint32_t *ptr, uint32_t new)
+{
+  return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
+}
+
+uint64_t Xchg64 (uint64_t *ptr, uint64_t new)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xchg64")
+  __attribute__ ((no_split_stack));
+
+uint64_t
+Xchg64 (uint64_t *ptr, uint64_t new)
+{
+  return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
+}
+
+uintptr_t Xchguintptr (uintptr_t *ptr, uintptr_t new)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Xchguintptr")
+  __attribute__ ((no_split_stack));
+
+uintptr_t
+Xchguintptr (uintptr_t *ptr, uintptr_t new)
+{
+  return __atomic_exchange_n (ptr, new, __ATOMIC_SEQ_CST);
+}
+
+void And8 (uint8_t *ptr, uint8_t val)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.And8")
+  __attribute__ ((no_split_stack));
+
+void
+And8 (uint8_t *ptr, uint8_t val)
+{
+  __atomic_and_fetch (ptr, val, __ATOMIC_SEQ_CST);
+}
+
+void Or8 (uint8_t *ptr, uint8_t val)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Or8")
+  __attribute__ ((no_split_stack));
+
+void
+Or8 (uint8_t *ptr, uint8_t val)
+{
+  __atomic_or_fetch (ptr, val, __ATOMIC_SEQ_CST);
+}
+
+_Bool Cas (uint32_t *ptr, uint32_t old, uint32_t new)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Cas")
+  __attribute__ ((no_split_stack));
+
+_Bool
+Cas (uint32_t *ptr, uint32_t old, uint32_t new)
+{
+  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
+}
+
+_Bool Cas64 (uint64_t *ptr, uint64_t old, uint64_t new)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Cas64")
+  __attribute__ ((no_split_stack));
+
+_Bool
+Cas64 (uint64_t *ptr, uint64_t old, uint64_t new)
+{
+  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
+}
+
+_Bool Casp1 (void **ptr, void *old, void *new)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Casp1")
+  __attribute__ ((no_split_stack));
+
+_Bool
+Casp1 (void **ptr, void *old, void *new)
+{
+  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
+}
+
+_Bool Casuintptr (uintptr_t *ptr, uintptr_t old, uintptr_t new)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Casuintptr")
+  __attribute__ ((no_split_stack));
+
+_Bool
+Casuintptr (uintptr_t *ptr, uintptr_t old, uintptr_t new)
+{
+  return __atomic_compare_exchange_n (ptr, &old, new, false, __ATOMIC_SEQ_CST, __ATOMIC_RELAXED);
+}
+
+void Store (uint32_t *ptr, uint32_t val)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Store")
+  __attribute__ ((no_split_stack));
+
+void
+Store (uint32_t *ptr, uint32_t val)
+{
+  __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
+}
+
+void Store64 (uint64_t *ptr, uint64_t val)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Store64")
+  __attribute__ ((no_split_stack));
+
+void
+Store64 (uint64_t *ptr, uint64_t val)
+{
+  __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
+}
+
+void Storeuintptr (uintptr_t *ptr, uintptr_t val)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.Storeuintptr")
+  __attribute__ ((no_split_stack));
+
+void
+Storeuintptr (uintptr_t *ptr, uintptr_t val)
+{
+  __atomic_store_n (ptr, val, __ATOMIC_SEQ_CST);
+}
+
+void StorepNoWB (void *ptr, void *val)
+  __asm__ (GOSYM_PREFIX "runtime_internal_atomic.StorepNoWB")
+  __attribute__ ((no_split_stack));
+
+void
+StorepNoWB (void *ptr, void *val)
+{
+  __atomic_store_n ((void**) ptr, val, __ATOMIC_SEQ_CST);
+}
Index: libgo/go/runtime/internal/atomic/atomic_test.go
===================================================================
--- libgo/go/runtime/internal/atomic/atomic_test.go     (revision 0)
+++ libgo/go/runtime/internal/atomic/atomic_test.go     (working copy)
@@ -0,0 +1,67 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package atomic_test
+
+import (
+       "runtime"
+       "runtime/internal/atomic"
+       "testing"
+       "unsafe"
+)
+
+func runParallel(N, iter int, f func()) {
+       defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(int(N)))
+       done := make(chan bool)
+       for i := 0; i < N; i++ {
+               go func() {
+                       for j := 0; j < iter; j++ {
+                               f()
+                       }
+                       done <- true
+               }()
+       }
+       for i := 0; i < N; i++ {
+               <-done
+       }
+}
+
+func TestXadduintptr(t *testing.T) {
+       const N = 20
+       const iter = 100000
+       inc := uintptr(100)
+       total := uintptr(0)
+       runParallel(N, iter, func() {
+               atomic.Xadduintptr(&total, inc)
+       })
+       if want := uintptr(N * iter * inc); want != total {
+               t.Fatalf("xadduintpr error, want %d, got %d", want, total)
+       }
+       total = 0
+       runParallel(N, iter, func() {
+               atomic.Xadduintptr(&total, inc)
+               atomic.Xadduintptr(&total, uintptr(-int64(inc)))
+       })
+       if total != 0 {
+               t.Fatalf("xadduintpr total error, want %d, got %d", 0, total)
+       }
+}
+
+// Tests that xadduintptr correctly updates 64-bit values. The place where
+// we actually do so is mstats.go, functions mSysStat{Inc,Dec}.
+func TestXadduintptrOnUint64(t *testing.T) {
+       /*      if runtime.BigEndian != 0 {
+               // On big endian architectures, we never use xadduintptr to update
+               // 64-bit values and hence we skip the test.  (Note that functions
+               // mSysStat{Inc,Dec} in mstats.go have explicit checks for
+               // big-endianness.)
+               return
+       }*/
+       const inc = 100
+       val := uint64(0)
+       atomic.Xadduintptr((*uintptr)(unsafe.Pointer(&val)), inc)
+       if inc != val {
+               t.Fatalf("xadduintptr should increase lower-order bits, want %d, got %d", inc, val)
+       }
+}
Index: libgo/go/runtime/internal/atomic/gccgo.go
===================================================================
--- libgo/go/runtime/internal/atomic/gccgo.go   (revision 0)
+++ libgo/go/runtime/internal/atomic/gccgo.go   (working copy)
@@ -0,0 +1,59 @@
+// Copyright 2016 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package atomic
+
+// Stubs for atomic functions that in gccgo are implemented in C.
+
+import "unsafe"
+
+//go:noescape
+func Load(ptr *uint32) uint32
+
+//go:noescape
+func Loadp(ptr unsafe.Pointer) unsafe.Pointer
+
+//go:noescape
+func Load64(ptr *uint64) uint64
+
+//go:noescape
+func Xadd(ptr *uint32, delta int32) uint32
+
+//go:noescape
+func Xadd64(ptr *uint64, delta int64) uint64
+
+//go:noescape
+func Xadduintptr(ptr *uintptr, delta uintptr) uintptr
+
+//go:noescape
+func Xchg(ptr *uint32, new uint32) uint32
+
+//go:noescape
+func Xchg64(ptr *uint64, new uint64) uint64
+
+//go:noescape
+func Xchguintptr(ptr *uintptr, new uintptr) uintptr
+
+//go:noescape
+func And8(ptr *uint8, val uint8)
+
+//go:noescape
+func Or8(ptr *uint8, val uint8)
+
+// NOTE: Do not add atomicxor8 (XOR is not idempotent).
+
+//go:noescape
+func Cas64(ptr *uint64, old, new uint64) bool
+
+//go:noescape
+func Store(ptr *uint32, val uint32)
+
+//go:noescape
+func Store64(ptr *uint64, val uint64)
+
+// StorepNoWB performs *ptr = val atomically and without a write
+// barrier.
+//
+// NO go:noescape annotation; see atomic_pointer.go.
+func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer)
Index: libgo/go/runtime/internal/atomic/stubs.go
===================================================================
--- libgo/go/runtime/internal/atomic/stubs.go   (revision 0)
+++ libgo/go/runtime/internal/atomic/stubs.go   (working copy)
@@ -0,0 +1,33 @@
+// Copyright 2015 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package atomic
+
+import "unsafe"
+
+//go:noescape
+func Cas(ptr *uint32, old, new uint32) bool
+
+// NO go:noescape annotation; see atomic_pointer.go.
+func Casp1(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool
+
+//go:noescape
+func Casuintptr(ptr *uintptr, old, new uintptr) bool
+
+//go:noescape
+func Storeuintptr(ptr *uintptr, new uintptr)
+
+//go:noescape
+func Loaduintptr(ptr *uintptr) uintptr
+
+//go:noescape
+func Loaduint(ptr *uint) uint
+
+// TODO(matloob): Should these functions have the go:noescape annotation?
+
+//go:noescape
+func Loadint64(ptr *int64) int64
+
+//go:noescape
+func Xaddint64(ptr *int64, delta int64) int64

Reply via email to