I did some testing of Timer and DispatchSourceTimer (i.e., a GCD timer) on the iPhone 7, with 1,000 data points at 0.05-second intervals. I expected the GCD timer to be appreciably more accurate (given that it had a dedicated queue), but I found that they were comparable, with the standard deviation of my various trials ranging from 0.2 to 0.8 milliseconds and the maximum deviation from the mean of about 2 to 8 milliseconds.
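For reference, here is a minimal sketch of the kind of measurement I mean (not my exact harness; the function name and parameters are just for illustration): a GCD timer on a dedicated queue, recording how far each firing lands from its scheduled time.

import Foundation

/// Fires a repeating GCD timer and collects the error of each firing, in seconds.
func measureTimerJitter(interval: TimeInterval = 0.05,
                        count: Int = 1000,
                        completion: @escaping ([Double]) -> Void) {
    let queue = DispatchQueue(label: "timer.jitter")
    let timer = DispatchSource.makeTimerSource(queue: queue)
    var deviations: [Double] = []
    var expected = CFAbsoluteTimeGetCurrent() + interval

    timer.schedule(deadline: .now() + interval, repeating: interval)
    timer.setEventHandler {
        deviations.append(CFAbsoluteTimeGetCurrent() - expected)
        expected += interval
        if deviations.count == count {
            timer.cancel()
            completion(deviations)   // compute standard deviation / max deviation from these
        }
    }
    timer.resume()
}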
When I tried mach_wait_until, as outlined in Technical Note TN2169: High Precision Timers in iOS/OS X, I got timing accurate to within roughly 4 microseconds of the target, dramatically better than either Timer or the GCD timer.
To get that precision out of mach_wait_until, though, I had to run it on its own thread and promote that thread to a real-time (time-constraint) scheduling policy with thread_policy_set. The question was how to do that from Swift. Translating the C sample from TN2169:
var timebaseInfo = mach_timebase_info_data_t()

func configureThread() {
    mach_timebase_info(&timebaseInfo)
    let clock2abs = Double(timebaseInfo.denom) / Double(timebaseInfo.numer) * Double(NSEC_PER_SEC)

    // Time-constraint parameters, converted from seconds to mach absolute-time units
    let period      = UInt32(0.00 * clock2abs)
    let computation = UInt32(0.03 * clock2abs)
    let constraint  = UInt32(0.05 * clock2abs)

    let THREAD_TIME_CONSTRAINT_POLICY_COUNT = mach_msg_type_number_t(MemoryLayout<thread_time_constraint_policy>.size / MemoryLayout<integer_t>.size)

    var policy = thread_time_constraint_policy()
    var ret: Int32
    let thread: thread_port_t = pthread_mach_thread_np(pthread_self())

    policy.period = period
    policy.computation = computation
    policy.constraint = constraint
    policy.preemptible = 0

    // Promote the current thread to the real-time (time-constraint) scheduling class
    ret = withUnsafeMutablePointer(to: &policy) {
        $0.withMemoryRebound(to: integer_t.self, capacity: Int(THREAD_TIME_CONSTRAINT_POLICY_COUNT)) {
            thread_policy_set(thread, UInt32(THREAD_TIME_CONSTRAINT_POLICY), $0, THREAD_TIME_CONSTRAINT_POLICY_COUNT)
        }
    }

    if ret != KERN_SUCCESS {
        mach_error("thread_policy_set:", ret)
        exit(1)
    }
}
Then, to run the timer itself:
private func nanosToAbs(_ nanos: UInt64) -> UInt64 {
    return nanos * UInt64(timebaseInfo.denom) / UInt64(timebaseInfo.numer)
}

private func startMachTimer() {
    Thread.detachNewThread {
        autoreleasepool {
            self.configureThread()

            var when = mach_absolute_time()
            for _ in 0 ..< maxCount {          // maxCount: number of ticks to fire, defined elsewhere
                when += self.nanosToAbs(UInt64(0.05 * Double(NSEC_PER_SEC)))
                mach_wait_until(when)
                // ... do the precisely timed work here ...
            }
        }
    }
}
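To reproduce the accuracy figures quoted above, one could record the overshoot of each wait inside that loop; a sketch (the deviations array is hypothetical, added purely for measurement):

var deviations: [Double] = []            // how late each tick fired, in µs
let tick = UInt64(0.05 * Double(NSEC_PER_SEC))

var when = mach_absolute_time()
for _ in 0 ..< maxCount {
    when += nanosToAbs(tick)
    mach_wait_until(when)
    let lateAbs = mach_absolute_time() - when                                   // mach ticks past the deadline
    let lateNs  = lateAbs * UInt64(timebaseInfo.numer) / UInt64(timebaseInfo.denom)
    deviations.append(Double(lateNs) / Double(NSEC_PER_USEC))
}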
Note that I advance `when` by adding the interval to the previous deadline (rather than recomputing it from the current time after each wait), so that if one firing comes in slightly late, the next one still targets the correct absolute time and no drift accumulates.
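To make that concrete, the two styles differ like this (tick being the interval already converted to nanoseconds):

// Drift-free: the next deadline is derived from the previous deadline,
// so one late wake-up does not push every subsequent tick later.
when += nanosToAbs(tick)
mach_wait_until(when)

// Drifting: the next deadline is derived from "now", so any lateness
// in this tick is inherited by every tick that follows.
let next = mach_absolute_time() + nanosToAbs(tick)
mach_wait_until(next)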
That said, mach_wait_until on a time-constrained thread, like Timer and GCD timers, ties up a thread and has CPU/battery costs, so the usual dos and don'ts about power consumption apply; ask yourself whether you really need this level of precision.
Finally, if the goal is precisely timed audio, I'd look at Core Audio and its related frameworks, which let you schedule playback against the audio clock rather than relying on timers at all.
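For example, a higher-level option that sits on top of Core Audio is to schedule playback against the audio device's clock instead of firing a CPU timer; a sketch (the player is assumed to be created and configured elsewhere):

import AVFoundation

/// Starts playback a fixed delay from now, measured on the audio hardware clock.
func schedulePlayback(of player: AVAudioPlayer, after delay: TimeInterval) {
    player.prepareToPlay()
    player.play(atTime: player.deviceCurrentTime + delay)
}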