Ticket #6692: spinlock_gcc_arm.patch

File spinlock_gcc_arm.patch, 3.3 KB (added by Ben Craig <ben.craig@…>, 11 years ago)

Patch from Boost 1.49 to Boost 1.50 for the GCC ARM spinlock issue

  • boost/smart_ptr/detail/spinlock_gcc_arm.hpp

@@ -41,16 +41,48 @@
 
     bool try_lock()
     {
+#if defined(__ARM_ARCH_7__) || defined(__ARM_ARCH_7A__) || defined(__ARM_ARCH_7R__) || defined(__ARM_ARCH_7M__) || defined(__ARM_ARCH_7EM__)
+        int lockValue;
+        int lockCurrent;
+        int lockFailed;
+
+        __asm__ __volatile__(
+            "mov %0, #1\n\t"           // pre-load our lock token value (the number 1)
+            "mov %2, #1\n\t"           // pre-load lockFailed with 1 (assumes failure)
+            "ldrex %1, [%4]\n\t"       // exclusive load of v_ into lockCurrent
+            "cmp %1, #0\n\t"           // if lockCurrent is zero...
+            "strexeq %2, %0, [%4]\n\t" // attempt to claim the lock; if successful, lockFailed will be 0
+            "dmb\n\t"                  // hardware memory barrier to ensure that prior loads and stores
+                                       // get processed now, instead of at the processor's leisure
+                                       // ===  outputs  ===
+            :"=&r"( lockValue ),       // %0: "=&r" means a write-only variable that may be modified
+                                       //     before all inputs have been consumed, and that it
+                                       //     must be kept in a register
+             "=&r"( lockCurrent ),     // %1
+             "=&r"( lockFailed ),      // %2
+             "+Qo"( v_ )               // %3: "+Qo" means an offsettable memory reference that is both read and written
+                                       // ===  inputs   ===
+            :"r"( &v_ )                // %4
+            :"cc", "memory");          // clobber list: condition codes change, and "memory" invalidates all
+                                       // compiler-cached loads and stores (a compiler memory barrier)
+
+        return lockFailed == 0;
+#else
         int r;
 
         __asm__ __volatile__(
-            "swp %0, %1, [%2]\n\t"
-                        BOOST_SP_ARM_BARRIER :
-            "=&r"( r ): // outputs
-            "r"( 1 ), "r"( &v_ ): // inputs
-            "memory", "cc" );
+            "swp %0, %1, [%2]\n\t" // copy this->v_ to r as Int32 AND copy 1 into this->v_ as Int32
+            BOOST_SP_ARM_BARRIER :
+                                   // ===  outputs  ===
+            "=&r"( r ):            // %0: old value of v_; "=&r" means a write-only register variable
+                                   // ===  inputs   ===
+            "r"( 1 ),              // %1: 1 as Int32; "r" means a read-only register operand
+            "r"( &v_ ):            // %2: address of member this->v_; "r" means a read-only register operand
+            "memory", "cc" );      // clobber list: condition codes change, and "memory" invalidates all
+                                   // compiler-cached loads and stores (a compiler memory barrier)
 
         return r == 0;
+#endif
     }
 
     void lock()
@@ -63,7 +95,7 @@
 
     void unlock()
    {
-        __asm__ __volatile__( BOOST_SP_ARM_BARRIER ::: "memory" );
+        __asm__ __volatile__( BOOST_SP_ARM_BARRIER ::: "memory" ); // complete prior loads/stores before the lock word is cleared
         *const_cast< int volatile* >( &v_ ) = 0;
     }
 
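For comparison only (not part of the attached patch): the ldrex/strex and swp sequences above hand-code a test-and-set that GCC can also express through its __sync builtins, which is roughly what Boost's generic spinlock_sync.hpp fallback does. The sketch below shows the same try_lock/unlock contract built that way; the class name, the 0/1 lock-word convention, and the sched_yield back-off are illustrative assumptions, not code taken from the patch.

    // sync_spinlock: illustrative sketch, not the patched Boost class.
    // Requires a GCC/Clang toolchain that provides the __sync builtins.
    #include <sched.h>    // sched_yield(), used here as a simple back-off
    #include <cstdio>

    class sync_spinlock
    {
    public:
        int v_;           // lock word: 0 = free, 1 = held; public so the lock can be statically initialized

        bool try_lock()
        {
            // Atomically store 1 and return the previous value; acts as an acquire barrier.
            return __sync_lock_test_and_set( &v_, 1 ) == 0;
        }

        void lock()
        {
            while( !try_lock() )
            {
                sched_yield();   // give up the time slice instead of spinning hot
            }
        }

        void unlock()
        {
            // Release barrier, then the lock word is cleared to 0.
            __sync_lock_release( &v_ );
        }
    };

    int main()
    {
        sync_spinlock sp = { 0 };   // aggregate initialization of the lock word

        sp.lock();
        std::printf( "try_lock while held -> %d\n", sp.try_lock() ? 1 : 0 );   // prints 0
        sp.unlock();
        std::printf( "try_lock after unlock -> %d\n", sp.try_lock() ? 1 : 0 ); // prints 1
        sp.unlock();
        return 0;
    }

The builtin route gives acquire semantics on the test-and-set and release semantics on the clear, which is all a lock needs; the patch instead keeps explicit barriers (dmb / BOOST_SP_ARM_BARRIER) in hand-written asm, swapping the swp instruction, which is deprecated on ARMv7-class cores, for ldrex/strex.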