/* mbed_critical.c */
  1. /*
  2. * Copyright (c) 2015-2016, ARM Limited, All Rights Reserved
  3. * SPDX-License-Identifier: Apache-2.0
  4. *
  5. * Licensed under the Apache License, Version 2.0 (the "License"); you may
  6. * not use this file except in compliance with the License.
  7. * You may obtain a copy of the License at
  8. *
  9. * http://www.apache.org/licenses/LICENSE-2.0
  10. *
  11. * Unless required by applicable law or agreed to in writing, software
  12. * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
  13. * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. * See the License for the specific language governing permissions and
  15. * limitations under the License.
  16. */
  17. /* Declare __STDC_LIMIT_MACROS so stdint.h defines UINT32_MAX when using C++ */
  18. #define __STDC_LIMIT_MACROS
  19. #include "hal/critical_section_api.h"
  20. #include "cmsis.h"
  21. #include "platform/mbed_assert.h"
  22. #include "platform/mbed_critical.h"
  23. #include "platform/mbed_toolchain.h"
  24. // if __EXCLUSIVE_ACCESS rtx macro not defined, we need to get this via own-set architecture macros
  25. #ifndef MBED_EXCLUSIVE_ACCESS
  26. #ifndef __EXCLUSIVE_ACCESS
  27. #if ((__ARM_ARCH_7M__ == 1U) || \
  28. (__ARM_ARCH_7EM__ == 1U) || \
  29. (__ARM_ARCH_8M_BASE__ == 1U) || \
  30. (__ARM_ARCH_8M_MAIN__ == 1U)) || \
  31. (__ARM_ARCH_7A__ == 1U)
  32. #define MBED_EXCLUSIVE_ACCESS 1U
  33. #elif (__ARM_ARCH_6M__ == 1U)
  34. #define MBED_EXCLUSIVE_ACCESS 0U
  35. #else
  36. #error "Unknown architecture for exclusive access"
  37. #endif
  38. #else
  39. #define MBED_EXCLUSIVE_ACCESS __EXCLUSIVE_ACCESS
  40. #endif
  41. #endif
  42. static volatile uint32_t critical_section_reentrancy_counter = 0;
  43. bool core_util_are_interrupts_enabled(void)
  44. {
  45. #if defined(__CORTEX_A9)
  46. return ((__get_CPSR() & 0x80) == 0);
  47. #else
  48. return ((__get_PRIMASK() & 0x1) == 0);
  49. #endif
  50. }
  51. bool core_util_is_isr_active(void)
  52. {
  53. #if defined(__CORTEX_A9)
  54. switch (__get_CPSR() & 0x1FU) {
  55. case CPSR_M_USR:
  56. case CPSR_M_SYS:
  57. return false;
  58. case CPSR_M_SVC:
  59. default:
  60. return true;
  61. }
  62. #else
  63. return (__get_IPSR() != 0U);
  64. #endif
  65. }
  66. bool core_util_in_critical_section(void)
  67. {
  68. return hal_in_critical_section();
  69. }
/** Enter a nestable critical section.
 *
 * Disables interrupts via the HAL and counts nesting depth so that only the
 * matching outermost exit re-enables interrupts.
 */
void core_util_critical_section_enter(void)
{
    // FIXME
#ifdef FEATURE_UVISOR
#warning "core_util_critical_section_enter needs fixing to work from unprivileged code"
#else
    // If the reentrancy counter overflows something has gone badly wrong.
    MBED_ASSERT(critical_section_reentrancy_counter < UINT32_MAX);
#endif /* FEATURE_UVISOR */
    hal_critical_section_enter();
    // Increment only after entering: with interrupts masked, the update
    // cannot race against an ISR that also uses the critical section API.
    ++critical_section_reentrancy_counter;
}
/** Exit a nestable critical section.
 *
 * Decrements the nesting counter and restores interrupts via the HAL only
 * when the outermost section is exited. Calling without a matching enter
 * is a deliberate no-op.
 */
void core_util_critical_section_exit(void)
{
    // FIXME
#ifdef FEATURE_UVISOR
#warning "core_util_critical_section_exit needs fixing to work from unprivileged code"
#endif /* FEATURE_UVISOR */
    // If critical_section_enter has not previously been called, do nothing
    if (critical_section_reentrancy_counter == 0) {
        return;
    }
    --critical_section_reentrancy_counter;
    if (critical_section_reentrancy_counter == 0) {
        hal_critical_section_exit();
    }
}
  97. #if MBED_EXCLUSIVE_ACCESS
  98. /* Supress __ldrex and __strex deprecated warnings - "#3731-D: intrinsic is deprecated" */
  99. #if defined (__CC_ARM)
  100. #pragma diag_suppress 3731
  101. #endif
  102. bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
  103. {
  104. do {
  105. uint8_t currentValue = __LDREXB(ptr);
  106. if (currentValue != *expectedCurrentValue) {
  107. *expectedCurrentValue = currentValue;
  108. __CLREX();
  109. return false;
  110. }
  111. } while (__STREXB(desiredValue, ptr));
  112. return true;
  113. }
  114. bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
  115. {
  116. do {
  117. uint16_t currentValue = __LDREXH(ptr);
  118. if (currentValue != *expectedCurrentValue) {
  119. *expectedCurrentValue = currentValue;
  120. __CLREX();
  121. return false;
  122. }
  123. } while (__STREXH(desiredValue, ptr));
  124. return true;
  125. }
  126. bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
  127. {
  128. do {
  129. uint32_t currentValue = __LDREXW(ptr);
  130. if (currentValue != *expectedCurrentValue) {
  131. *expectedCurrentValue = currentValue;
  132. __CLREX();
  133. return false;
  134. }
  135. } while (__STREXW(desiredValue, ptr));
  136. return true;
  137. }
  138. uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
  139. {
  140. uint8_t newValue;
  141. do {
  142. newValue = __LDREXB(valuePtr) + delta;
  143. } while (__STREXB(newValue, valuePtr));
  144. return newValue;
  145. }
  146. uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta)
  147. {
  148. uint16_t newValue;
  149. do {
  150. newValue = __LDREXH(valuePtr) + delta;
  151. } while (__STREXH(newValue, valuePtr));
  152. return newValue;
  153. }
  154. uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)
  155. {
  156. uint32_t newValue;
  157. do {
  158. newValue = __LDREXW(valuePtr) + delta;
  159. } while (__STREXW(newValue, valuePtr));
  160. return newValue;
  161. }
  162. uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)
  163. {
  164. uint8_t newValue;
  165. do {
  166. newValue = __LDREXB(valuePtr) - delta;
  167. } while (__STREXB(newValue, valuePtr));
  168. return newValue;
  169. }
  170. uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)
  171. {
  172. uint16_t newValue;
  173. do {
  174. newValue = __LDREXH(valuePtr) - delta;
  175. } while (__STREXH(newValue, valuePtr));
  176. return newValue;
  177. }
  178. uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
  179. {
  180. uint32_t newValue;
  181. do {
  182. newValue = __LDREXW(valuePtr) - delta;
  183. } while (__STREXW(newValue, valuePtr));
  184. return newValue;
  185. }
  186. #else
  187. bool core_util_atomic_cas_u8(volatile uint8_t *ptr, uint8_t *expectedCurrentValue, uint8_t desiredValue)
  188. {
  189. bool success;
  190. uint8_t currentValue;
  191. core_util_critical_section_enter();
  192. currentValue = *ptr;
  193. if (currentValue == *expectedCurrentValue) {
  194. *ptr = desiredValue;
  195. success = true;
  196. } else {
  197. *expectedCurrentValue = currentValue;
  198. success = false;
  199. }
  200. core_util_critical_section_exit();
  201. return success;
  202. }
  203. bool core_util_atomic_cas_u16(volatile uint16_t *ptr, uint16_t *expectedCurrentValue, uint16_t desiredValue)
  204. {
  205. bool success;
  206. uint16_t currentValue;
  207. core_util_critical_section_enter();
  208. currentValue = *ptr;
  209. if (currentValue == *expectedCurrentValue) {
  210. *ptr = desiredValue;
  211. success = true;
  212. } else {
  213. *expectedCurrentValue = currentValue;
  214. success = false;
  215. }
  216. core_util_critical_section_exit();
  217. return success;
  218. }
  219. bool core_util_atomic_cas_u32(volatile uint32_t *ptr, uint32_t *expectedCurrentValue, uint32_t desiredValue)
  220. {
  221. bool success;
  222. uint32_t currentValue;
  223. core_util_critical_section_enter();
  224. currentValue = *ptr;
  225. if (currentValue == *expectedCurrentValue) {
  226. *ptr = desiredValue;
  227. success = true;
  228. } else {
  229. *expectedCurrentValue = currentValue;
  230. success = false;
  231. }
  232. core_util_critical_section_exit();
  233. return success;
  234. }
  235. uint8_t core_util_atomic_incr_u8(volatile uint8_t *valuePtr, uint8_t delta)
  236. {
  237. uint8_t newValue;
  238. core_util_critical_section_enter();
  239. newValue = *valuePtr + delta;
  240. *valuePtr = newValue;
  241. core_util_critical_section_exit();
  242. return newValue;
  243. }
  244. uint16_t core_util_atomic_incr_u16(volatile uint16_t *valuePtr, uint16_t delta)
  245. {
  246. uint16_t newValue;
  247. core_util_critical_section_enter();
  248. newValue = *valuePtr + delta;
  249. *valuePtr = newValue;
  250. core_util_critical_section_exit();
  251. return newValue;
  252. }
  253. uint32_t core_util_atomic_incr_u32(volatile uint32_t *valuePtr, uint32_t delta)
  254. {
  255. uint32_t newValue;
  256. core_util_critical_section_enter();
  257. newValue = *valuePtr + delta;
  258. *valuePtr = newValue;
  259. core_util_critical_section_exit();
  260. return newValue;
  261. }
  262. uint8_t core_util_atomic_decr_u8(volatile uint8_t *valuePtr, uint8_t delta)
  263. {
  264. uint8_t newValue;
  265. core_util_critical_section_enter();
  266. newValue = *valuePtr - delta;
  267. *valuePtr = newValue;
  268. core_util_critical_section_exit();
  269. return newValue;
  270. }
  271. uint16_t core_util_atomic_decr_u16(volatile uint16_t *valuePtr, uint16_t delta)
  272. {
  273. uint16_t newValue;
  274. core_util_critical_section_enter();
  275. newValue = *valuePtr - delta;
  276. *valuePtr = newValue;
  277. core_util_critical_section_exit();
  278. return newValue;
  279. }
  280. uint32_t core_util_atomic_decr_u32(volatile uint32_t *valuePtr, uint32_t delta)
  281. {
  282. uint32_t newValue;
  283. core_util_critical_section_enter();
  284. newValue = *valuePtr - delta;
  285. *valuePtr = newValue;
  286. core_util_critical_section_exit();
  287. return newValue;
  288. }
  289. #endif
  290. bool core_util_atomic_cas_ptr(void *volatile *ptr, void **expectedCurrentValue, void *desiredValue)
  291. {
  292. return core_util_atomic_cas_u32(
  293. (volatile uint32_t *)ptr,
  294. (uint32_t *)expectedCurrentValue,
  295. (uint32_t)desiredValue);
  296. }
  297. void *core_util_atomic_incr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
  298. {
  299. return (void *)core_util_atomic_incr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
  300. }
  301. void *core_util_atomic_decr_ptr(void *volatile *valuePtr, ptrdiff_t delta)
  302. {
  303. return (void *)core_util_atomic_decr_u32((volatile uint32_t *)valuePtr, (uint32_t)delta);
  304. }