Mirror of https://github.com/unicode-org/icu.git
ICU-10182 Build Time mutexes, merge from development branch.
X-SVN-Rev: 34139
Parent: 7e42977505
Commit: 45561757cc
6 changed files with 69 additions and 79 deletions

@@ -181,7 +181,7 @@ public:
     UTrie          fTrie;

 private:
-    atomic_int32_t fRefCount;
+    u_atomic_int32_t fRefCount;
     UDataMemory   *fUDataMem;
     UnicodeString  fRuleString;
     UBool          fDontFreeData;

@@ -33,7 +33,11 @@ static UMutex globalMutex = U_MUTEX_INITIALIZER;
  * platform independent set of mutex operations.  For internal ICU use only.
  */

-#if U_PLATFORM_HAS_WIN32_API
+#if defined(U_USER_MUTEX_CPP)
+// Build time user mutex hook: #include "U_USER_MUTEX_CPP"
+#include U_MUTEX_XSTR(U_USER_MUTEX_CPP)
+
+#elif U_PLATFORM_HAS_WIN32_API

 //-------------------------------------------------------------------------------------------
 //

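For readers following the new hook: the #include U_MUTEX_XSTR(U_USER_MUTEX_CPP) line relies on the two-level stringification macros that this same commit adds to umutex.h (see the header hunk further down). A minimal standalone sketch of how that expansion works; the file name my_mutexes.cpp is purely illustrative and not part of this commit:

    // Two-level stringification: U_MUTEX_XSTR expands its argument first,
    // then U_MUTEX_STR turns the expansion into a string literal.
    #define U_MUTEX_STR(s) #s
    #define U_MUTEX_XSTR(s) U_MUTEX_STR(s)

    // If the build defines the hook, e.g. with -DU_USER_MUTEX_CPP=my_mutexes.cpp,
    // the include below expands to:  #include "my_mutexes.cpp"
    #if defined(U_USER_MUTEX_CPP)
    #include U_MUTEX_XSTR(U_USER_MUTEX_CPP)
    #endif
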
@@ -96,9 +100,8 @@ U_CAPI UBool U_EXPORT2 umtx_initImplPreInit(UInitOnce &uio) {
 //            False: the initializtion failed. The next call to umtx_initOnce()
 //                   will retry the initialization.

-U_CAPI void U_EXPORT2 umtx_initImplPostInit(UInitOnce &uio, UBool success) {
-    int32_t nextState = success? 2: 0;
-    umtx_storeRelease(uio.fState, nextState);
+U_CAPI void U_EXPORT2 umtx_initImplPostInit(UInitOnce &uio) {
+    umtx_storeRelease(uio.fState, 2);
 }


@@ -180,57 +183,34 @@ UBool umtx_initImplPreInit(UInitOnce &uio) {
     if (state == 0) {
         umtx_storeRelease(uio.fState, 1);
         pthread_mutex_unlock(&initMutex);
-        return true;       // Caller will next call the init function.
-    } else if (state == 2) {
-        // Another thread already completed the initialization, in
-        //   a race with this thread. We can simply return FALSE, indicating no
-        //   further action is needed by the caller.
-        pthread_mutex_unlock(&initMutex);
-        return FALSE;
+        return TRUE;       // Caller will next call the init function.
     } else {
-        // Another thread is currently running the initialization.
-        // Wait until it completes.
-        U_ASSERT(state == 1);
         while (uio.fState == 1) {
+            // Another thread is currently running the initialization.
+            // Wait until it completes.
             pthread_cond_wait(&initCondition, &initMutex);
         }
-        UBool returnVal = uio.fState == 0;
-        if (returnVal) {
-            // Initialization that was running in another thread failed.
-            // We will retry it in this thread.
-            //   (This is only used by SimpleSingleton)
-            umtx_storeRelease(uio.fState, 1);
-        }
         pthread_mutex_unlock(&initMutex);
-        return returnVal;
+        U_ASSERT(uio.fState == 2);
+        return FALSE;
     }
 }


 // This function is called by the thread that ran an initialization function,
 // just after completing the function.
 //   Some threads may be waiting on the condition, requiring the broadcast wakeup.
 //   Some threads may be racing to test the fState variable outside of the mutex,
 //   requiring the use of store/release when changing its value.
-//
-//   success: True:  the inialization succeeded. No further calls to the init
-//                   function will be made.
-//            False: the initializtion failed. The next call to umtx_initOnce()
-//                   will retry the initialization.

-void umtx_initImplPostInit(UInitOnce &uio, UBool success) {
-    int32_t nextState = success? 2: 0;
+void umtx_initImplPostInit(UInitOnce &uio) {
     pthread_mutex_lock(&initMutex);
-    umtx_storeRelease(uio.fState, nextState);
+    umtx_storeRelease(uio.fState, 2);
     pthread_cond_broadcast(&initCondition);
     pthread_mutex_unlock(&initMutex);
 }


 void umtx_initOnceReset(UInitOnce &uio) {
     // Not a thread safe function, we can use an ordinary assignment.
     uio.fState = 0;
 }

 // End of POSIX specific umutex implementation.

 #else  // Platform #define chain.

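The POSIX hunk above implements a three-state init-once protocol: fState is 0 (not started), 1 (initialization in progress), or 2 (done). After this change a started initialization always runs to completion, so waiting threads simply block until the state reaches 2. A self-contained sketch of the same protocol in portable C++11, for illustration only; the names below are invented for the example and ICU's real implementation is the code shown in this diff:

    #include <atomic>
    #include <condition_variable>
    #include <cstdint>
    #include <mutex>

    namespace sketch {

    std::atomic<int32_t>    state(0);       // 0 = not started, 1 = running, 2 = done
    std::mutex              initMutex;
    std::condition_variable initCondition;

    // Returns true if the caller should run the init function now.
    bool preInit() {
        std::unique_lock<std::mutex> lock(initMutex);
        if (state.load(std::memory_order_acquire) == 0) {
            state.store(1, std::memory_order_release);
            return true;                    // caller runs the init, then calls postInit()
        }
        while (state.load(std::memory_order_acquire) == 1) {
            initCondition.wait(lock);       // someone else is initializing; wait for it
        }
        return false;                       // state == 2: already initialized
    }

    void postInit() {
        std::lock_guard<std::mutex> lock(initMutex);
        state.store(2, std::memory_order_release);
        initCondition.notify_all();         // wake any threads blocked in preInit()
    }

    }  // namespace sketch
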
@@ -254,7 +234,7 @@ void umtx_initOnceReset(UInitOnce &uio) {
 static UMutex   gIncDecMutex = U_MUTEX_INITIALIZER;

 U_INTERNAL int32_t U_EXPORT2
-umtx_atomic_inc(int32_t *p) {
+umtx_atomic_inc(u_atomic_int32_t *p) {
     int32_t retVal;
     umtx_lock(&gIncDecMutex);
     retVal = ++(*p);

@@ -264,7 +244,7 @@ umtx_atomic_inc(int32_t *p) {


 U_INTERNAL int32_t U_EXPORT2
-umtx_atomic_dec(int32_t *p) {
+umtx_atomic_dec(u_atomic_int32_t *p) {
     int32_t retVal;
     umtx_lock(&gIncDecMutex);
     retVal = --(*p);

@@ -273,7 +253,7 @@ umtx_atomic_dec(int32_t *p) {
 }

 U_INTERNAL int32_t U_EXPORT2
-umtx_loadAcquire(atomic_int32_t &var) {
+umtx_loadAcquire(u_atomic_int32_t &var) {
     int32_t val = var;
     umtx_lock(&gIncDecMutex);
     umtx_unlock(&gIncDecMutex);

@@ -281,7 +261,7 @@ umtx_loadAcquire(atomic_int32_t &var) {
 }

 U_INTERNAL void U_EXPORT2
-umtx_storeRelease(atomic_int32_t &var, int32_t val) {
+umtx_storeRelease(u_atomic_int32_t &var, int32_t val) {
     umtx_lock(&gIncDecMutex);
     umtx_unlock(&gIncDecMutex);
     var = val;

@@ -28,6 +28,9 @@
 struct UMutex;
 struct UInitOnce;

+// Stringify macros, to allow #include of user supplied atomic & mutex files.
+#define U_MUTEX_STR(s) #s
+#define U_MUTEX_XSTR(s) U_MUTEX_STR(s)

 /****************************************************************************
  *

@@ -35,28 +38,31 @@ struct UInitOnce;
  *   Compiler dependent. Not operating system dependent.
  *
  ****************************************************************************/
-#if U_HAVE_STD_ATOMICS
+#if defined (U_USER_ATOMICS_H)
+#include U_MUTEX_XSTR(U_USER_ATOMICS_H)
+
+#elif U_HAVE_STD_ATOMICS

 // C++11 atomics are available.

 #include <atomic>

-typedef std::atomic<int32_t> atomic_int32_t;
+typedef std::atomic<int32_t> u_atomic_int32_t;
 #define ATOMIC_INT32_T_INITIALIZER(val) ATOMIC_VAR_INIT(val)

-inline int32_t umtx_loadAcquire(atomic_int32_t &var) {
+inline int32_t umtx_loadAcquire(u_atomic_int32_t &var) {
     return var.load(std::memory_order_acquire);
 }

-inline void umtx_storeRelease(atomic_int32_t &var, int32_t val) {
+inline void umtx_storeRelease(u_atomic_int32_t &var, int32_t val) {
     var.store(val, std::memory_order_release);
 }

-inline int32_t umtx_atomic_inc(atomic_int32_t *var) {
+inline int32_t umtx_atomic_inc(u_atomic_int32_t *var) {
     return var->fetch_add(1) + 1;
 }

-inline int32_t umtx_atomic_dec(atomic_int32_t *var) {
+inline int32_t umtx_atomic_dec(u_atomic_int32_t *var) {
     return var->fetch_sub(1) - 1;
 }

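Since every branch of this platform chain ends up defining the same four wrappers, the call sites elsewhere in the commit stay platform neutral. A small illustrative sketch of the reference-counting pattern those call sites use; the class below is invented for the example and is not ICU code:

    struct RefCountedThing {
        u_atomic_int32_t fRefCount;

        RefCountedThing() { umtx_storeRelease(fRefCount, 1); }   // creator holds one reference

        void addRef()    { umtx_atomic_inc(&fRefCount); }
        void removeRef() {
            if (umtx_atomic_dec(&fRefCount) == 0) {              // last reference just went away
                delete this;
            }
        }
    };
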
@@ -82,23 +88,23 @@ inline int32_t umtx_atomic_dec(atomic_int32_t *var) {
 # endif
 # include <windows.h>

-typedef volatile LONG atomic_int32_t;
+typedef volatile LONG u_atomic_int32_t;
 #define ATOMIC_INT32_T_INITIALIZER(val) val

-inline int32_t umtx_loadAcquire(atomic_int32_t &var) {
+inline int32_t umtx_loadAcquire(u_atomic_int32_t &var) {
     return InterlockedCompareExchange(&var, 0, 0);
 }

-inline void umtx_storeRelease(atomic_int32_t &var, int32_t val) {
+inline void umtx_storeRelease(u_atomic_int32_t &var, int32_t val) {
     InterlockedExchange(&var, val);
 }


-inline int32_t umtx_atomic_inc(atomic_int32_t *var) {
+inline int32_t umtx_atomic_inc(u_atomic_int32_t *var) {
     return InterlockedIncrement(var);
 }

-inline int32_t umtx_atomic_dec(atomic_int32_t *var) {
+inline int32_t umtx_atomic_dec(u_atomic_int32_t *var) {
     return InterlockedDecrement(var);
 }

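One subtlety in the Windows branch: InterlockedCompareExchange(&var, 0, 0) is used as a full-barrier read. It compares var with 0 and, only when they are equal, writes 0 back (leaving the value unchanged); in every case it returns the value var held before the call. A rough C++11 equivalent of the idiom, for illustration only:

    #include <atomic>
    #include <cstdint>

    // Reading a value with a "no-op" compare-and-swap: the write never changes
    // anything, but the operation still returns the current value and acts as
    // a memory barrier.
    int32_t read_via_noop_cas(std::atomic<int32_t> &var) {
        int32_t expected = 0;
        var.compare_exchange_strong(expected, 0);
        return expected;   // on success the old value was 0; on failure the old value was loaded here
    }
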
@@ -107,25 +113,25 @@ inline int32_t umtx_atomic_dec(atomic_int32_t *var) {
 /*
  * gcc atomic ops. These are available on several other compilers as well.
  */
-typedef int32_t atomic_int32_t;
+typedef int32_t u_atomic_int32_t;
 #define ATOMIC_INT32_T_INITIALIZER(val) val

-inline int32_t umtx_loadAcquire(atomic_int32_t &var) {
+inline int32_t umtx_loadAcquire(u_atomic_int32_t &var) {
     int32_t val = var;
     __sync_synchronize();
     return val;
 }

-inline void umtx_storeRelease(atomic_int32_t &var, int32_t val) {
+inline void umtx_storeRelease(u_atomic_int32_t &var, int32_t val) {
     __sync_synchronize();
     var = val;
 }

-inline int32_t umtx_atomic_inc(atomic_int32_t *p) {
+inline int32_t umtx_atomic_inc(u_atomic_int32_t *p) {
     return __sync_add_and_fetch(p, 1);
 }

-inline int32_t umtx_atomic_dec(atomic_int32_t *p) {
+inline int32_t umtx_atomic_dec(u_atomic_int32_t *p) {
     return __sync_sub_and_fetch(p, 1);
 }

@@ -138,16 +144,16 @@ inline int32_t umtx_atomic_dec(atomic_int32_t *p) {

 #define U_NO_PLATFORM_ATOMICS

-typedef int32_t atomic_int32_t;
+typedef int32_t u_atomic_int32_t;
 #define ATOMIC_INT32_T_INITIALIZER(val) val

-U_INTERNAL int32_t U_EXPORT2 umtx_loadAcquire(atomic_int32_t &var);
+U_INTERNAL int32_t U_EXPORT2 umtx_loadAcquire(u_atomic_int32_t &var);

-U_INTERNAL void U_EXPORT2 umtx_storeRelease(atomic_int32_t &var, int32_t val);
+U_INTERNAL void U_EXPORT2 umtx_storeRelease(u_atomic_int32_t &var, int32_t val);

-U_INTERNAL int32_t U_EXPORT2 umtx_atomic_inc(atomic_int32_t *p);
+U_INTERNAL int32_t U_EXPORT2 umtx_atomic_inc(u_atomic_int32_t *p);

-U_INTERNAL int32_t U_EXPORT2 umtx_atomic_dec(atomic_int32_t *p);
+U_INTERNAL int32_t U_EXPORT2 umtx_atomic_dec(u_atomic_int32_t *p);

 #endif  /* Low Level Atomic Ops Platfrom Chain */

@@ -161,9 +167,9 @@ U_INTERNAL int32_t U_EXPORT2 umtx_atomic_dec(atomic_int32_t *p);
 *************************************************************************************************/

 struct UInitOnce {
-    atomic_int32_t   fState;
+    u_atomic_int32_t fState;
     UErrorCode       fErrCode;
-    void reset() {fState = 0; fState=0;};
+    void reset() {fState = 0;};
     UBool isReset() {return umtx_loadAcquire(fState) == 0;};
 // Note: isReset() is used by service registration code.
 //       Thread safety of this usage needs review.

@@ -173,7 +179,7 @@ struct UInitOnce {


 U_CAPI UBool U_EXPORT2 umtx_initImplPreInit(UInitOnce &);
-U_CAPI void  U_EXPORT2 umtx_initImplPostInit(UInitOnce &, UBool success);
+U_CAPI void  U_EXPORT2 umtx_initImplPostInit(UInitOnce &);

 template<class T> void umtx_initOnce(UInitOnce &uio, T *obj, void (T::*fp)()) {
     if (umtx_loadAcquire(uio.fState) == 2) {

@@ -181,7 +187,7 @@ template<class T> void umtx_initOnce(UInitOnce &uio, T *obj, void (T::*fp)()) {
     }
     if (umtx_initImplPreInit(uio)) {
         (obj->*fp)();
-        umtx_initImplPostInit(uio, TRUE);
+        umtx_initImplPostInit(uio);
     }
 }

@@ -194,7 +200,7 @@ inline void umtx_initOnce(UInitOnce &uio, void (*fp)()) {
     }
     if (umtx_initImplPreInit(uio)) {
         (*fp)();
-        umtx_initImplPostInit(uio, TRUE);
+        umtx_initImplPostInit(uio);
     }
 }

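For context, a typical caller of the overload above looks roughly like the sketch below. The caller, the table it builds, and the U_INITONCE_INITIALIZER macro name are illustrative assumptions rather than part of this diff; the UInitOnce and umtx_initOnce() pieces are the ones declared here.

    // Hypothetical one-time lazy initialization of a shared table.
    static UInitOnce gTableInitOnce = U_INITONCE_INITIALIZER;   // assumed initializer macro name
    static int32_t  *gTable = NULL;

    static void initTable() {
        // Runs at most once, in whichever thread gets there first.
        gTable = (int32_t *)uprv_malloc(256 * sizeof(int32_t));
    }

    int32_t lookup(int32_t i) {
        umtx_initOnce(gTableInitOnce, &initTable);   // all callers funnel through here
        return gTable[i];                            // safely published via store/release + load/acquire
    }
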
@@ -208,7 +214,7 @@ inline void umtx_initOnce(UInitOnce &uio, void (*fp)(UErrorCode &), UErrorCode &
         // We run the initialization.
         (*fp)(errCode);
         uio.fErrCode = errCode;
-        umtx_initImplPostInit(uio, TRUE);
+        umtx_initImplPostInit(uio);
     } else {
         // Someone else already ran the initialization.
         if (U_FAILURE(uio.fErrCode)) {

@@ -225,7 +231,7 @@ template<class T> void umtx_initOnce(UInitOnce &uio, void (*fp)(T), T context) {
     }
     if (umtx_initImplPreInit(uio)) {
         (*fp)(context);
-        umtx_initImplPostInit(uio, TRUE);
+        umtx_initImplPostInit(uio);
     }
 }

@@ -239,7 +245,7 @@ template<class T> void umtx_initOnce(UInitOnce &uio, void (*fp)(T, UErrorCode &)
         // We run the initialization.
         (*fp)(context, errCode);
         uio.fErrCode = errCode;
-        umtx_initImplPostInit(uio, TRUE);
+        umtx_initImplPostInit(uio);
     } else {
         // Someone else already ran the initialization.
         if (U_FAILURE(uio.fErrCode)) {

@@ -259,7 +265,11 @@ template<class T> void umtx_initOnce(UInitOnce &uio, void (*fp)(T, UErrorCode &)
  *
  *************************************************************************************************/

-#if U_PLATFORM_HAS_WIN32_API
+#if defined(U_USER_MUTEX_H)
+// #inlcude "U_USER_MUTEX_H"
+#include U_MUTEX_XSTR(U_USER_MUTEX_H)
+
+#elif U_PLATFORM_HAS_WIN32_API

 /* Windows Definitions.
  *    Windows comes first in the platform chain.

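Parallel to the .cpp hook earlier, defining U_USER_MUTEX_H lets a build replace the platform UMutex definitions that follow this branch. A hypothetical user_mutex.h is sketched below; the file name and the pthread-based contents are assumptions made purely for illustration, the only requirement implied by the surrounding code being that the file define the UMutex type and its U_MUTEX_INITIALIZER.

    // Hypothetical user_mutex.h, selected with -DU_USER_MUTEX_H=user_mutex.h.
    #include <pthread.h>

    struct UMutex {
        pthread_mutex_t fMutex;    // the only state this sketch needs
    };
    #define U_MUTEX_INITIALIZER {PTHREAD_MUTEX_INITIALIZER}
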
@@ -118,11 +118,11 @@ operator+ (const UnicodeString &s1, const UnicodeString &s2) {

 void
 UnicodeString::addRef()
-{ umtx_atomic_inc((atomic_int32_t *)fUnion.fFields.fArray - 1);}
+{ umtx_atomic_inc((u_atomic_int32_t *)fUnion.fFields.fArray - 1);}

 int32_t
 UnicodeString::removeRef()
-{ return umtx_atomic_dec((atomic_int32_t *)fUnion.fFields.fArray - 1);}
+{ return umtx_atomic_dec((u_atomic_int32_t *)fUnion.fFields.fArray - 1);}

 int32_t
 UnicodeString::refCount() const

@@ -1679,10 +1679,10 @@ UnicodeString::cloneArrayIfNeeded(int32_t newCapacity,
       // release the old array
       if(flags & kRefCounted) {
         // the array is refCounted; decrement and release if 0
-        atomic_int32_t *pRefCount = ((atomic_int32_t *)oldArray - 1);
+        u_atomic_int32_t *pRefCount = ((u_atomic_int32_t *)oldArray - 1);
         if(umtx_atomic_dec(pRefCount) == 0) {
           if(pBufferToDelete == 0) {
-            // Note: cast to (void *) is needed with MSVC, where atomic_int32_t
+            // Note: cast to (void *) is needed with MSVC, where u_atomic_int32_t
             //       is defined as volatile. (Volatile has useful non-standard behavior
             //       with this compiler.)
             uprv_free((void *)pRefCount);

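The casts above rely on UnicodeString's refCounted buffer layout: the heap allocation holds one extra int32_t immediately before the UChar array, and that slot is the shared reference count, so (u_atomic_int32_t *)fArray - 1 addresses it. A hedged sketch of allocating such a buffer, with variable names invented for the illustration:

    // One heap block: [ refCount : int32_t ][ UChar[0] ... UChar[capacity-1] ]
    //                                        ^-- the string's fArray points here
    int32_t  capacity = 40;                                    // illustrative size
    int32_t *mem      = (int32_t *)uprv_malloc(sizeof(int32_t) + capacity * sizeof(UChar));
    u_atomic_int32_t *refCount = (u_atomic_int32_t *)mem;      // count lives in the first slot
    UChar   *array    = (UChar *)(mem + 1);                    // characters start right after it
    umtx_storeRelease(*refCount, 1);                           // creator holds the first reference
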
@@ -35,7 +35,7 @@ public:
     ~RegularExpression();
     int32_t           fMagic;
     RegexPattern     *fPat;
-    atomic_int32_t   *fPatRefCount;
+    u_atomic_int32_t *fPatRefCount;
     UChar            *fPatString;
     int32_t           fPatStringLen;
     RegexMatcher     *fMatcher;

@@ -123,7 +123,7 @@ uregex_open( const UChar *pattern,
     }

     RegularExpression *re     = new RegularExpression;
-    atomic_int32_t    *refC   = (atomic_int32_t *)uprv_malloc(sizeof(int32_t));
+    u_atomic_int32_t  *refC   = (u_atomic_int32_t *)uprv_malloc(sizeof(int32_t));
     UChar             *patBuf = (UChar *)uprv_malloc(sizeof(UChar)*(actualPatLen+1));
     if (re == NULL || refC == NULL || patBuf == NULL) {
         *status = U_MEMORY_ALLOCATION_ERROR;

@@ -207,7 +207,7 @@ uregex_openUText(UText *pattern,
     UErrorCode lengthStatus = U_ZERO_ERROR;
     int32_t pattern16Length = utext_extract(pattern, 0, patternNativeLength, NULL, 0, &lengthStatus);

-    atomic_int32_t   *refC   = (atomic_int32_t *)uprv_malloc(sizeof(int32_t));
+    u_atomic_int32_t *refC   = (u_atomic_int32_t *)uprv_malloc(sizeof(int32_t));
     UChar            *patBuf = (UChar *)uprv_malloc(sizeof(UChar)*(pattern16Length+1));
     if (re == NULL || refC == NULL || patBuf == NULL) {
         *status = U_MEMORY_ALLOCATION_ERROR;

|
@ -242,7 +242,7 @@ class SpoofData: public UMemory {
|
|||
// we are done.
|
||||
|
||||
uint32_t fMemLimit; // Limit of available raw data space
|
||||
atomic_int32_t fRefCount;
|
||||
u_atomic_int32_t fRefCount;
|
||||
|
||||
// Confusable data
|
||||
int32_t *fCFUKeys;
|
||||
|
|