@@ -292,7 +292,7 @@ public class LDClient {
292292 */
293293 @available ( * , deprecated, message: " Use LDClient.identify(context: completion:) with non-optional completion parameter " )
294294 public func identify( context: LDContext , completion: ( ( ) -> Void ) ? = nil ) {
295- _identify ( context: context, sheddable: false ) { _ in
295+ _identify ( context: context, sheddable: false , useCache : . yes ) { _ in
296296 if let completion = completion {
297297 completion ( )
298298 }
@@ -315,19 +315,33 @@ public class LDClient {
315315 - parameter completion: Closure called when the embedded `setOnlineIdentify` call completes, subject to throttling delays.
316316 */
/// Sets the LDContext into the LDClient, reporting the outcome of the embedded
/// `setOnlineIdentify` call through `completion` (subject to throttling delays).
///
/// - parameter context: The LDContext set with the desired context.
/// - parameter completion: Closure invoked with the `IdentifyResult` when the call completes.
public func identify(context: LDContext, completion: @escaping (_ result: IdentifyResult) -> Void) {
    // Default cache usage is `.yes`, matching the behavior this method had
    // before the `useCache` parameter was introduced.
    _identify(context: context, sheddable: true, useCache: .yes) { result in
        completion(result)
    }
}
320+
/**
 Sets the LDContext into the LDClient in line with the behavior detailed in `LDClient.identify(context: completion:)`.
 Additionally, this method allows specifying how the flag cache should be handled when transitioning between
 contexts through the `useCache` parameter.

 To learn more about these cache transitions, refer to the `IdentifyCacheUsage` documentation.

 - parameter context: The LDContext set with the desired context.
 - parameter useCache: How to handle flag caches during identify transition.
 - parameter completion: Closure called when the embedded `setOnlineIdentify` call completes, subject to throttling delays.
 */
public func identify(context: LDContext, useCache: IdentifyCacheUsage, completion: @escaping (_ result: IdentifyResult) -> Void) {
    // `sheddable` is true here, matching `identify(context:completion:)`.
    _identify(context: context, sheddable: true, useCache: useCache) { completion($0) }
}
320334
321335 // Temporary helper method to allow code sharing between the sheddable and unsheddable identify methods. In the next major release, we will remove the deprecated identify method and inline
322336 // this implementation in the other one.
323- private func _identify( context: LDContext , sheddable: Bool , completion: @escaping ( _ result: IdentifyResult ) -> Void ) {
337+ private func _identify( context: LDContext , sheddable: Bool , useCache : IdentifyCacheUsage , completion: @escaping ( _ result: IdentifyResult ) -> Void ) {
324338 let work : TaskHandler = { taskCompletion in
325339 let dispatch = DispatchGroup ( )
326340
327341 LDClient . instancesQueue. sync ( flags: . barrier) {
328342 LDClient . instances? . forEach { _, instance in
329343 dispatch. enter ( )
330- instance. internalIdentify ( newContext: context, completion: dispatch. leave)
344+ instance. internalIdentify ( newContext: context, useCache : useCache , completion: dispatch. leave)
331345 }
332346 }
333347
@@ -354,6 +368,21 @@ public class LDClient {
354368 - parameter completion: Closure called when the embedded `setOnlineIdentify` call completes, subject to throttling delays.
355369 */
/// Sets the LDContext into the LDClient, invoking `completion` either with the
/// identify result or after `timeout` elapses, whichever comes first.
///
/// - parameter context: The LDContext set with the desired context.
/// - parameter timeout: The upper time limit before the `completion` callback will be invoked.
/// - parameter completion: Closure called when the embedded `setOnlineIdentify` call completes, subject to throttling delays.
public func identify(context: LDContext, timeout: TimeInterval, completion: @escaping ((_ result: IdentifyResult) -> Void)) {
    // Forward to the useCache-aware overload with the default cache usage (.yes).
    identify(context: context, timeout: timeout, useCache: .yes) { result in
        completion(result)
    }
}
373+
374+ /**
375+ Sets the LDContext into the LDClient in line with the behavior detailed in `LDClient.identify(context: timeout: completion:)`. Additionally,
376+ this method allows specifying how the flag cache should be handled when transitioning between contexts through the `useCache` parameter.
377+
378+ To learn more about these cache transitions, refer to the `IdentifyCacheUsage` documentation.
379+
380+ - parameter context: The LDContext set with the desired context.
381+ - parameter timeout: The upper time limit before the `completion` callback will be invoked.
382+ - parameter useCache: How to handle flag caches during identify transition.
383+ - parameter completion: Closure called when the embedded `setOnlineIdentify` call completes, subject to throttling delays.
384+ */
385+ public func identify( context: LDContext , timeout: TimeInterval , useCache: IdentifyCacheUsage , completion: @escaping ( ( _ result: IdentifyResult ) -> Void ) ) {
357386 if timeout > LDClient . longTimeoutInterval {
358387 os_log ( " %s LDClient.identify was called with a timeout greater than %f seconds. We recommend a timeout of less than %f seconds. " , log: config. logger, type: . info, self . typeName ( and: #function) , LDClient . longTimeoutInterval, LDClient . longTimeoutInterval)
359388 }
@@ -367,15 +396,15 @@ public class LDClient {
367396 completion ( . timeout)
368397 }
369398
370- identify ( context: context) { result in
399+ identify ( context: context, useCache : useCache ) { result in
371400 guard !cancel else { return }
372401
373402 cancel = true
374403 completion ( result)
375404 }
376405 }
377406
378- func internalIdentify( newContext: LDContext , completion: ( ( ) -> Void ) ? = nil ) {
407+ func internalIdentify( newContext: LDContext , useCache : IdentifyCacheUsage , completion: ( ( ) -> Void ) ? = nil ) {
379408 var updatedContext = newContext
380409 if config. autoEnvAttributes {
381410 updatedContext = AutoEnvContextModifier ( environmentReporter: environmentReporter, logger: config. logger) . modifyContext ( updatedContext)
@@ -394,27 +423,31 @@ public class LDClient {
394423 self . internalSetOnline ( false )
395424
396425 let cachedData = self . flagCache. getCachedData ( cacheKey: self . context. fullyQualifiedHashedKey ( ) , contextHash: self . context. contextHash ( ) )
397- let cachedContextFlags = cachedData. items ?? [ : ]
398- let oldItems = flagStore. storedItems. featureFlags
399-
400- // Here we prime the store with the last known values from the
401- // cache.
402- //
403- // Once the flag sync. process finishes, the new payload is
404- // compared to this, and if they are different, change listeners
405- // will be notified; otherwise, they aren't.
406- //
407- // This is problematic since the flag values really did change. So
408- // we should trigger the change listener when we set these cache
409- // values.
410- //
411- // However, if there are no cached values, we don't want to inform
412- // customers that we set their store to nothing. In that case, we
413- // will not trigger the change listener and instead relay on the
414- // payload comparsion to do that when the request has completed.
415- flagStore. replaceStore ( newStoredItems: cachedContextFlags)
416- if !cachedContextFlags. featureFlags. isEmpty {
417- flagChangeNotifier. notifyObservers ( oldFlags: oldItems, newFlags: flagStore. storedItems. featureFlags)
426+
427+ if useCache != . no {
428+ let oldItems = flagStore. storedItems
429+ let fallback = useCache == . yes ? [ : ] : oldItems
430+ let cachedContextFlags = cachedData. items ?? fallback
431+
432+ // Here we prime the store with the last known values from the
433+ // cache.
434+ //
435+ // Once the flag sync. process finishes, the new payload is
436+ // compared to this, and if they are different, change listeners
437+ // will be notified; otherwise, they aren't.
438+ //
439+ // This is problematic since the flag values really did change. So
440+ // we should trigger the change listener when we set these cache
441+ // values.
442+ //
443+ // However, if there are no cached values, we don't want to inform
444+ // customers that we set their store to nothing. In that case, we
445+ // will not trigger the change listener and instead rely on the
446+ // payload comparison to do that when the request has completed.
447+ flagStore. replaceStore ( newStoredItems: cachedContextFlags)
448+ if !cachedContextFlags. featureFlags. isEmpty {
449+ flagChangeNotifier. notifyObservers ( oldFlags: oldItems. featureFlags, newFlags: flagStore. storedItems. featureFlags)
450+ }
418451 }
419452
420453 self . service. context = self . context
0 commit comments