Commit e783efa6 authored by Rich Felker

fix arm thread-pointer/atomic asm when compiling to thumb code

armv7/thumb2 provides a way to do atomics in thumb mode, but for armv6
we need a call to arm mode.

this commit is based on a patch by Stephen Thomas which fixed the
armv7 cases but not the armv6 ones.

all of this should be revisited if/when runtime selection of thread
pointer access and atomics are added.
Parent 468bc11e
...@@ -22,9 +22,8 @@ static inline int a_ctz_64(uint64_t x) ...@@ -22,9 +22,8 @@ static inline int a_ctz_64(uint64_t x)
return a_ctz_l(y); return a_ctz_l(y);
} }
#if __ARM_ARCH_6__ || __ARM_ARCH_6K__ || __ARM_ARCH_6ZK__ \ #if ((__ARM_ARCH_6__ || __ARM_ARCH_6K__ || __ARM_ARCH_6ZK__) && !__thumb__) \
|| __ARM_ARCH_7A__ || __ARM_ARCH_7R__ \ || __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7
|| __ARM_ARCH >= 7
#if __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7 #if __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7
#define MEM_BARRIER "dmb ish" #define MEM_BARRIER "dmb ish"
...@@ -39,6 +38,9 @@ static inline int __k_cas(int t, int s, volatile int *p) ...@@ -39,6 +38,9 @@ static inline int __k_cas(int t, int s, volatile int *p)
" " MEM_BARRIER "\n" " " MEM_BARRIER "\n"
"1: ldrex %0,%3\n" "1: ldrex %0,%3\n"
" subs %0,%0,%1\n" " subs %0,%0,%1\n"
#ifdef __thumb__
" itt eq\n"
#endif
" strexeq %0,%2,%3\n" " strexeq %0,%2,%3\n"
" teqeq %0,#1\n" " teqeq %0,#1\n"
" beq 1b\n" " beq 1b\n"
......
#if __ARM_ARCH_6K__ || __ARM_ARCH_6ZK__ \ #if ((__ARM_ARCH_6K__ || __ARM_ARCH_6ZK__) && !__thumb__) \
|| __ARM_ARCH_7A__ || __ARM_ARCH_7R__ \ || __ARM_ARCH_7A__ || __ARM_ARCH_7R__ || __ARM_ARCH >= 7
|| __ARM_ARCH >= 7
static inline __attribute__((const)) pthread_t __pthread_self() static inline __attribute__((const)) pthread_t __pthread_self()
{ {
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Register to comment