From 4505f04fa0777afa3018db712290e31a8347b877 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Wed, 3 Dec 2025 21:22:01 +0000 Subject: [PATCH 01/11] chore: Update tags and locations for private implementations --- lib/ldclient-rb/cache_store.rb | 2 +- lib/ldclient-rb/config.rb | 4 ++-- lib/ldclient-rb/context.rb | 2 +- lib/ldclient-rb/events.rb | 24 +++++++++---------- lib/ldclient-rb/expiring_cache.rb | 2 +- lib/ldclient-rb/flags_state.rb | 2 +- lib/ldclient-rb/impl.rb | 2 +- lib/ldclient-rb/in_memory_store.rb | 6 ++--- lib/ldclient-rb/integrations/test_data.rb | 10 ++++---- .../integrations/test_data/flag_builder.rb | 18 +++++++------- lib/ldclient-rb/memoized_value.rb | 2 +- lib/ldclient-rb/non_blocking_thread_pool.rb | 2 +- lib/ldclient-rb/polling.rb | 2 +- lib/ldclient-rb/requestor.rb | 4 ++-- lib/ldclient-rb/simple_lru_cache.rb | 2 +- lib/ldclient-rb/stream.rb | 12 +++++----- lib/ldclient-rb/util.rb | 2 +- 17 files changed, 49 insertions(+), 49 deletions(-) diff --git a/lib/ldclient-rb/cache_store.rb b/lib/ldclient-rb/cache_store.rb index b91b363d..50ca9dd0 100644 --- a/lib/ldclient-rb/cache_store.rb +++ b/lib/ldclient-rb/cache_store.rb @@ -5,7 +5,7 @@ module LaunchDarkly # A thread-safe in-memory store that uses the same semantics that Faraday would expect, although we # no longer use Faraday. This is used by Requestor, when we are not in a Rails environment. # - # @private + # @api private # class ThreadSafeMemoryStore # diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 5ec80393..c0fb197c 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -96,7 +96,7 @@ def initialize(opts = {}) # Custom data source implementations should integrate with this sink if # they want to provide support for data source status listeners. # - # @private + # @api private # attr_accessor :data_source_update_sink @@ -108,7 +108,7 @@ def initialize(opts = {}) # property is not supported; it is temporarily being exposed to maintain # backwards compatibility while the SDK structure is updated. 
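# For reference on the tag swap this commit makes throughout: both are YARD tags, but they
# behave differently. In stock YARD, `@private` hides an object outright when docs are built
# with --no-private, while `@api private` records filterable API metadata and renders a
# "private API" note rather than hiding the object. A minimal sketch (hypothetical class
# names, not part of the SDK):

# Excluded from generated docs when yardoc runs with --no-private.
# @private
class HiddenInternal; end

# Still rendered, but tagged so documentation tooling can flag or filter it as private API.
# @api private
class TaggedInternal; end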
# - # @private + # @api private # attr_accessor :instance_id diff --git a/lib/ldclient-rb/context.rb b/lib/ldclient-rb/context.rb index c9dd4618..081d61a2 100644 --- a/lib/ldclient-rb/context.rb +++ b/lib/ldclient-rb/context.rb @@ -48,7 +48,7 @@ class LDContext attr_reader :error # - # @private + # @api private # @param key [String, nil] # @param fully_qualified_key [String, nil] # @param kind [String, nil] diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 0ac068c4..17ef5438 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -70,24 +70,24 @@ def stop MAX_FLUSH_WORKERS = 5 private_constant :MAX_FLUSH_WORKERS - # @private + # @api private class NullEventProcessor include EventProcessorMethods end - # @private + # @api private class FlushMessage end - # @private + # @api private class FlushContextsMessage end - # @private + # @api private class DiagnosticEventMessage end - # @private + # @api private class SynchronousMessage def initialize @reply = Concurrent::Semaphore.new(0) @@ -102,15 +102,15 @@ def wait_for_completion end end - # @private + # @api private class TestSyncMessage < SynchronousMessage end - # @private + # @api private class StopMessage < SynchronousMessage end - # @private + # @api private class EventProcessor include EventProcessorMethods @@ -226,7 +226,7 @@ def wait_until_inactive end end - # @private + # @api private class EventDispatcher def initialize(inbox, sdk_key, config, diagnostic_accumulator, event_sender) @sdk_key = sdk_key @@ -414,10 +414,10 @@ def send_diagnostic_event(event, diagnostic_event_workers) end end - # @private + # @api private FlushPayload = Struct.new(:events, :summary) - # @private + # @api private class EventBuffer def initialize(capacity, logger) @capacity = capacity @@ -461,7 +461,7 @@ def clear end end - # @private + # @api private class EventOutputFormatter FEATURE_KIND = 'feature' IDENTIFY_KIND = 'identify' diff --git a/lib/ldclient-rb/expiring_cache.rb b/lib/ldclient-rb/expiring_cache.rb index fa6051c9..5067ffbb 100644 --- a/lib/ldclient-rb/expiring_cache.rb +++ b/lib/ldclient-rb/expiring_cache.rb @@ -6,7 +6,7 @@ module LaunchDarkly # * made thread-safe # * removed many unused methods # * reading a key does not reset its expiration time, only writing - # @private + # @api private class ExpiringCache def initialize(max_size, ttl) @max_size = max_size diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index f0c4b0c2..2261e565 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -15,7 +15,7 @@ def initialize(valid) end # Used internally to build the state map. - # @private + # @api private def add_flag(flag_state, with_reasons, details_only_if_tracked) key = flag_state[:key] @flag_values[key] = flag_state[:value] diff --git a/lib/ldclient-rb/impl.rb b/lib/ldclient-rb/impl.rb index b0d63ebe..5ed4eb00 100644 --- a/lib/ldclient-rb/impl.rb +++ b/lib/ldclient-rb/impl.rb @@ -5,7 +5,7 @@ module LaunchDarkly # and subject to change. # # @since 5.5.0 - # @private + # @api private # module Impl # code is in ldclient-rb/impl/ diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index 3cb9dc48..ccb18cae 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -10,13 +10,13 @@ module LaunchDarkly # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter # to ensure data consistency during non-atomic updates. 
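# An aside on the :priority note just above: for stores that cannot apply init() atomically,
# kinds are written in ascending priority so the data other kinds depend on lands first,
# i.e. segments (priority 0) before flags (priority 1). A rough sketch of that ordering,
# using a stand-in struct rather than the real Impl::DataStore::DataKind:
KindStandIn = Struct.new(:namespace, :priority, keyword_init: true)
kinds = [
  KindStandIn.new(namespace: "features", priority: 1),
  KindStandIn.new(namespace: "segments", priority: 0),
]
kinds.sort_by(&:priority).each do |kind|
  puts "writing #{kind.namespace}" # prints segments first, then features
end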
- # @private + # @api private FEATURES = Impl::DataStore::DataKind.new(namespace: "features", priority: 1).freeze - # @private + # @api private SEGMENTS = Impl::DataStore::DataKind.new(namespace: "segments", priority: 0).freeze - # @private + # @api private ALL_KINDS = [FEATURES, SEGMENTS].freeze # diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb index aa2ee107..6cc0da17 100644 --- a/lib/ldclient-rb/integrations/test_data.rb +++ b/lib/ldclient-rb/integrations/test_data.rb @@ -42,7 +42,7 @@ def self.data_source self.new end - # @private + # @api private def initialize @flag_builders = Hash.new @current_flags = Hash.new @@ -56,7 +56,7 @@ def initialize # Called internally by the SDK to determine what arguments to pass to call # You do not need to call this method. # - # @private + # @api private def arity 2 end @@ -65,7 +65,7 @@ def arity # Called internally by the SDK to associate this test data source with an {@code LDClient} instance. # You do not need to call this method. # - # @private + # @api private def call(_, config) impl = LaunchDarkly::Impl::Integrations::TestData::TestDataSource.new(config.feature_store, self) @instances_lock.with_write_lock { @instances.push(impl) } @@ -194,7 +194,7 @@ def use_preconfigured_segment(segment) end end - # @private + # @api private def make_init_data @lock.with_read_lock do { @@ -204,7 +204,7 @@ def make_init_data end end - # @private + # @api private def closed_instance(instance) @instances_lock.with_write_lock { @instances.delete(instance) } end diff --git a/lib/ldclient-rb/integrations/test_data/flag_builder.rb b/lib/ldclient-rb/integrations/test_data/flag_builder.rb index ac04ec89..7c30c004 100644 --- a/lib/ldclient-rb/integrations/test_data/flag_builder.rb +++ b/lib/ldclient-rb/integrations/test_data/flag_builder.rb @@ -12,14 +12,14 @@ class TestData class FlagBuilder attr_reader :key - # @private + # @api private def initialize(key) @key = key @on = true @variations = [] end - # @private + # @api private def initialize_copy(other) super(other) @variations = @variations.clone @@ -357,7 +357,7 @@ def clear_rules self end - # @private + # @api private def add_rule(rule) if @rules.nil? @rules = Array.new @@ -386,7 +386,7 @@ def boolean_flag end end - # @private + # @api private def build(version) res = { key: @key, version: version, @@ -486,16 +486,16 @@ def build # Finally, call {#then_return} to finish defining the rule. # class FlagRuleBuilder - # @private + # @api private FlagRuleClause = Struct.new(:contextKind, :attribute, :op, :values, :negate, keyword_init: true) # rubocop:disable Naming/MethodName: - # @private + # @api private def initialize(flag_builder) @flag_builder = flag_builder @clauses = Array.new end - # @private + # @api private def intialize_copy(other) super(other) @clauses = @clauses.clone @@ -612,7 +612,7 @@ def then_return(variation) end end - # @private + # @api private def build(ri) { id: 'rule' + ri.to_s, @@ -622,7 +622,7 @@ def build(ri) end end - # @private + # @api private def variation_for_boolean(variation) variation ? TRUE_VARIATION_INDEX : FALSE_VARIATION_INDEX end diff --git a/lib/ldclient-rb/memoized_value.rb b/lib/ldclient-rb/memoized_value.rb index 7a829f29..450924ee 100644 --- a/lib/ldclient-rb/memoized_value.rb +++ b/lib/ldclient-rb/memoized_value.rb @@ -3,7 +3,7 @@ module LaunchDarkly # Simple implementation of a thread-safe memoized value whose generator function will never be # run more than once, and whose value can be overridden by explicit assignment. 
# Note that we no longer use this class and it will be removed in a future version. - # @private + # @api private class MemoizedValue def initialize(&generator) @generator = generator diff --git a/lib/ldclient-rb/non_blocking_thread_pool.rb b/lib/ldclient-rb/non_blocking_thread_pool.rb index 5234bef1..f2c6174d 100644 --- a/lib/ldclient-rb/non_blocking_thread_pool.rb +++ b/lib/ldclient-rb/non_blocking_thread_pool.rb @@ -6,7 +6,7 @@ module LaunchDarkly # Simple wrapper for a FixedThreadPool that rejects new jobs if all the threads are busy, rather # than blocking. Also provides a way to wait for all jobs to finish without shutting down. - # @private + # @api private class NonBlockingThreadPool def initialize(capacity, name = 'LD/NonBlockingThreadPool') @capacity = capacity diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 69963d20..c60351be 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -5,7 +5,7 @@ require "thread" module LaunchDarkly - # @private + # @api private class PollingProcessor def initialize(config, requestor) @config = config diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index d1542890..9ecaade7 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -6,7 +6,7 @@ require "http" module LaunchDarkly - # @private + # @api private class UnexpectedResponseError < StandardError def initialize(status) @status = status @@ -18,7 +18,7 @@ def status end end - # @private + # @api private class Requestor CacheEntry = Struct.new(:etag, :body) diff --git a/lib/ldclient-rb/simple_lru_cache.rb b/lib/ldclient-rb/simple_lru_cache.rb index 4eda4e27..b2bb0e91 100644 --- a/lib/ldclient-rb/simple_lru_cache.rb +++ b/lib/ldclient-rb/simple_lru_cache.rb @@ -2,7 +2,7 @@ module LaunchDarkly # A non-thread-safe implementation of a LRU cache set with only add and reset methods. # Based on https://github.com/SamSaffron/lru_redux/blob/master/lib/lru_redux/cache.rb - # @private + # @api private class SimpleLRUCacheSet def initialize(capacity) @values = {} diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 7bc5311b..8290d02c 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -5,22 +5,22 @@ require "ld-eventsource" module LaunchDarkly - # @private + # @api private PUT = :put - # @private + # @api private PATCH = :patch - # @private + # @api private DELETE = :delete - # @private + # @api private READ_TIMEOUT_SECONDS = 300 # 5 minutes; the stream should send a ping every 3 minutes - # @private + # @api private KEY_PATHS = { FEATURES => "/flags/", SEGMENTS => "/segments/", } - # @private + # @api private class StreamProcessor def initialize(sdk_key, config, diagnostic_accumulator = nil) @sdk_key = sdk_key diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 5e8b40f2..865448ad 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -65,7 +65,7 @@ def success? end end - # @private + # @api private module Util # # Append the payload filter key query parameter to the provided URI. 
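# An aside on the payload-filter helper whose doc comment appears just above: the core
# operation is appending a URL-encoded query parameter to the polling or streaming URI. A
# minimal sketch, assuming the parameter is named "filter"; the real
# Util.add_payload_filter_key may handle edge cases differently:
require "cgi"

def append_filter_param(uri, payload_filter_key)
  return uri if payload_filter_key.nil? || payload_filter_key.empty?
  separator = uri.include?("?") ? "&" : "?"
  "#{uri}#{separator}filter=#{CGI.escape(payload_filter_key)}"
end

append_filter_param("https://sdk.launchdarkly.com/sdk/latest-all", "my filter")
# => "https://sdk.launchdarkly.com/sdk/latest-all?filter=my+filter"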
From f88e6ec75a83c8203111f836ee1e8930ed80444e Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Thu, 4 Dec 2025 16:55:04 +0000 Subject: [PATCH 02/11] move private files into impl folder --- lib/ldclient-rb/{ => impl}/cache_store.rb | 0 lib/ldclient-rb/{ => impl/data_source}/polling.rb | 0 lib/ldclient-rb/{ => impl/data_source}/requestor.rb | 0 lib/ldclient-rb/{ => impl/data_source}/stream.rb | 0 lib/ldclient-rb/{ => impl}/expiring_cache.rb | 0 lib/ldclient-rb/{ => impl}/memoized_value.rb | 0 lib/ldclient-rb/{ => impl}/non_blocking_thread_pool.rb | 0 lib/ldclient-rb/{ => impl}/simple_lru_cache.rb | 0 8 files changed, 0 insertions(+), 0 deletions(-) rename lib/ldclient-rb/{ => impl}/cache_store.rb (100%) rename lib/ldclient-rb/{ => impl/data_source}/polling.rb (100%) rename lib/ldclient-rb/{ => impl/data_source}/requestor.rb (100%) rename lib/ldclient-rb/{ => impl/data_source}/stream.rb (100%) rename lib/ldclient-rb/{ => impl}/expiring_cache.rb (100%) rename lib/ldclient-rb/{ => impl}/memoized_value.rb (100%) rename lib/ldclient-rb/{ => impl}/non_blocking_thread_pool.rb (100%) rename lib/ldclient-rb/{ => impl}/simple_lru_cache.rb (100%) diff --git a/lib/ldclient-rb/cache_store.rb b/lib/ldclient-rb/impl/cache_store.rb similarity index 100% rename from lib/ldclient-rb/cache_store.rb rename to lib/ldclient-rb/impl/cache_store.rb diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/impl/data_source/polling.rb similarity index 100% rename from lib/ldclient-rb/polling.rb rename to lib/ldclient-rb/impl/data_source/polling.rb diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/impl/data_source/requestor.rb similarity index 100% rename from lib/ldclient-rb/requestor.rb rename to lib/ldclient-rb/impl/data_source/requestor.rb diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/impl/data_source/stream.rb similarity index 100% rename from lib/ldclient-rb/stream.rb rename to lib/ldclient-rb/impl/data_source/stream.rb diff --git a/lib/ldclient-rb/expiring_cache.rb b/lib/ldclient-rb/impl/expiring_cache.rb similarity index 100% rename from lib/ldclient-rb/expiring_cache.rb rename to lib/ldclient-rb/impl/expiring_cache.rb diff --git a/lib/ldclient-rb/memoized_value.rb b/lib/ldclient-rb/impl/memoized_value.rb similarity index 100% rename from lib/ldclient-rb/memoized_value.rb rename to lib/ldclient-rb/impl/memoized_value.rb diff --git a/lib/ldclient-rb/non_blocking_thread_pool.rb b/lib/ldclient-rb/impl/non_blocking_thread_pool.rb similarity index 100% rename from lib/ldclient-rb/non_blocking_thread_pool.rb rename to lib/ldclient-rb/impl/non_blocking_thread_pool.rb diff --git a/lib/ldclient-rb/simple_lru_cache.rb b/lib/ldclient-rb/impl/simple_lru_cache.rb similarity index 100% rename from lib/ldclient-rb/simple_lru_cache.rb rename to lib/ldclient-rb/impl/simple_lru_cache.rb From fc751ab8ab2140dc9629ef498da8952e2fc7e402 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Thu, 4 Dec 2025 16:59:11 +0000 Subject: [PATCH 03/11] add correct module nesting for moved files --- lib/ldclient-rb/impl/cache_store.rb | 71 ++-- lib/ldclient-rb/impl/data_source/polling.rb | 171 ++++----- lib/ldclient-rb/impl/data_source/requestor.rb | 163 +++++---- lib/ldclient-rb/impl/data_source/stream.rb | 343 +++++++++--------- lib/ldclient-rb/impl/expiring_cache.rb | 109 +++--- lib/ldclient-rb/impl/memoized_value.rb | 47 +-- .../impl/non_blocking_thread_pool.rb | 65 ++-- lib/ldclient-rb/impl/simple_lru_cache.rb | 39 +- 8 files changed, 519 insertions(+), 489 deletions(-) diff --git a/lib/ldclient-rb/impl/cache_store.rb 
b/lib/ldclient-rb/impl/cache_store.rb index 50ca9dd0..7662ea04 100644 --- a/lib/ldclient-rb/impl/cache_store.rb +++ b/lib/ldclient-rb/impl/cache_store.rb @@ -1,45 +1,48 @@ require "concurrent/map" module LaunchDarkly - # - # A thread-safe in-memory store that uses the same semantics that Faraday would expect, although we - # no longer use Faraday. This is used by Requestor, when we are not in a Rails environment. - # - # @api private - # - class ThreadSafeMemoryStore + module Impl # - # Default constructor + # A thread-safe in-memory store that uses the same semantics that Faraday would expect, although we + # no longer use Faraday. This is used by Requestor, when we are not in a Rails environment. # - # @return [ThreadSafeMemoryStore] a new store - def initialize - @cache = Concurrent::Map.new - end - - # - # Read a value from the cache - # @param key [Object] the cache key + # @api private # - # @return [Object] the cache value - def read(key) - @cache[key] - end + class ThreadSafeMemoryStore + # + # Default constructor + # + # @return [ThreadSafeMemoryStore] a new store + def initialize + @cache = Concurrent::Map.new + end - # - # Store a value in the cache - # @param key [Object] the cache key - # @param value [Object] the value to associate with the key - # - # @return [Object] the value - def write(key, value) - @cache[key] = value - end + # + # Read a value from the cache + # @param key [Object] the cache key + # + # @return [Object] the cache value + def read(key) + @cache[key] + end - # - # Delete a value in the cache - # @param key [Object] the cache key - def delete(key) - @cache.delete(key) + # + # Store a value in the cache + # @param key [Object] the cache key + # @param value [Object] the value to associate with the key + # + # @return [Object] the value + def write(key, value) + @cache[key] = value + end + + # + # Delete a value in the cache + # @param key [Object] the cache key + def delete(key) + @cache.delete(key) + end end end end + diff --git a/lib/ldclient-rb/impl/data_source/polling.rb b/lib/ldclient-rb/impl/data_source/polling.rb index c60351be..8dc43534 100644 --- a/lib/ldclient-rb/impl/data_source/polling.rb +++ b/lib/ldclient-rb/impl/data_source/polling.rb @@ -5,98 +5,103 @@ require "thread" module LaunchDarkly - # @api private - class PollingProcessor - def initialize(config, requestor) - @config = config - @requestor = requestor - @initialized = Concurrent::AtomicBoolean.new(false) - @started = Concurrent::AtomicBoolean.new(false) - @ready = Concurrent::Event.new - @task = Impl::RepeatingTask.new(@config.poll_interval, 0, -> { self.poll }, @config.logger, 'LD/PollingDataSource') - end + module Impl + module DataSource + # @api private + class PollingProcessor + def initialize(config, requestor) + @config = config + @requestor = requestor + @initialized = Concurrent::AtomicBoolean.new(false) + @started = Concurrent::AtomicBoolean.new(false) + @ready = Concurrent::Event.new + @task = Impl::RepeatingTask.new(@config.poll_interval, 0, -> { self.poll }, @config.logger, 'LD/PollingDataSource') + end - def initialized? - @initialized.value - end + def initialized? 
+ @initialized.value + end - def start - return @ready unless @started.make_true - @config.logger.info { "[LDClient] Initializing polling connection" } - @task.start - @ready - end + def start + return @ready unless @started.make_true + @config.logger.info { "[LDClient] Initializing polling connection" } + @task.start + @ready + end - def stop - stop_with_error_info - end + def stop + stop_with_error_info + end - def poll - begin - all_data = @requestor.request_all_data - if all_data - update_sink_or_data_store.init(all_data) - if @initialized.make_true - @config.logger.info { "[LDClient] Polling connection initialized" } - @ready.set + def poll + begin + all_data = @requestor.request_all_data + if all_data + update_sink_or_data_store.init(all_data) + if @initialized.make_true + @config.logger.info { "[LDClient] Polling connection initialized" } + @ready.set + end + end + @config.data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::VALID, nil) + rescue JSON::ParserError => e + @config.logger.error { "[LDClient] JSON parsing failed for polling response." } + error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new( + LaunchDarkly::Interfaces::DataSource::ErrorInfo::INVALID_DATA, + 0, + e.to_s, + Time.now + ) + @config.data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, error_info) + rescue Impl::DataSource::UnexpectedResponseError => e + error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new( + LaunchDarkly::Interfaces::DataSource::ErrorInfo::ERROR_RESPONSE, e.status, nil, Time.now) + message = Util.http_error_message(e.status, "polling request", "will retry") + @config.logger.error { "[LDClient] #{message}" } + + if Util.http_error_recoverable?(e.status) + @config.data_source_update_sink&.update_status( + LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, + error_info + ) + else + @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set + stop_with_error_info error_info + end + rescue StandardError => e + Util.log_exception(@config.logger, "Exception while polling", e) + @config.data_source_update_sink&.update_status( + LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, + LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::UNKNOWN, 0, e.to_s, Time.now) + ) end end - @config.data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::VALID, nil) - rescue JSON::ParserError => e - @config.logger.error { "[LDClient] JSON parsing failed for polling response." 
} - error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new( - LaunchDarkly::Interfaces::DataSource::ErrorInfo::INVALID_DATA, - 0, - e.to_s, - Time.now - ) - @config.data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, error_info) - rescue UnexpectedResponseError => e - error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new( - LaunchDarkly::Interfaces::DataSource::ErrorInfo::ERROR_RESPONSE, e.status, nil, Time.now) - message = Util.http_error_message(e.status, "polling request", "will retry") - @config.logger.error { "[LDClient] #{message}" } - if Util.http_error_recoverable?(e.status) - @config.data_source_update_sink&.update_status( - LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, - error_info - ) - else - @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set - stop_with_error_info error_info + # + # The original implementation of this class relied on the feature store + # directly, which we are trying to move away from. Customers who might have + # instantiated this directly for some reason wouldn't know they have to set + # the config's sink manually, so we have to fall back to the store if the + # sink isn't present. + # + # The next major release should be able to simplify this structure and + # remove the need for fall back to the data store because the update sink + # should always be present. + # + private def update_sink_or_data_store + @config.data_source_update_sink || @config.feature_store end - rescue StandardError => e - Util.log_exception(@config.logger, "Exception while polling", e) - @config.data_source_update_sink&.update_status( - LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, - LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::UNKNOWN, 0, e.to_s, Time.now) - ) - end - end - - # - # The original implementation of this class relied on the feature store - # directly, which we are trying to move away from. Customers who might have - # instantiated this directly for some reason wouldn't know they have to set - # the config's sink manually, so we have to fall back to the store if the - # sink isn't present. - # - # The next major release should be able to simplify this structure and - # remove the need for fall back to the data store because the update sink - # should always be present. 
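# An aside on the fallback rationale spelled out above: when the update sink is configured it
# takes precedence over writing to the feature store directly, and it is also what carries
# data source status updates. A rough sketch of a hypothetical custom data source using the
# same pattern (the class and method names here are illustrative, not part of the SDK):
require "ldclient-rb"

class MyCustomDataSource
  def initialize(config)
    @sink = config.data_source_update_sink   # may be nil for older wiring
    @target = @sink || config.feature_store  # the same fallback used in this file
  end

  def refresh(all_data)
    @target.init(all_data)
    @sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::VALID, nil)
  end
end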
- # - private def update_sink_or_data_store - @config.data_source_update_sink || @config.feature_store - end - # - # @param [LaunchDarkly::Interfaces::DataSource::ErrorInfo, nil] error_info - # - private def stop_with_error_info(error_info = nil) - @task.stop - @config.logger.info { "[LDClient] Polling connection stopped" } - @config.data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::OFF, error_info) + # + # @param [LaunchDarkly::Interfaces::DataSource::ErrorInfo, nil] error_info + # + private def stop_with_error_info(error_info = nil) + @task.stop + @config.logger.info { "[LDClient] Polling connection stopped" } + @config.data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::OFF, error_info) + end + end end end end + diff --git a/lib/ldclient-rb/impl/data_source/requestor.rb b/lib/ldclient-rb/impl/data_source/requestor.rb index 9ecaade7..61ee6d55 100644 --- a/lib/ldclient-rb/impl/data_source/requestor.rb +++ b/lib/ldclient-rb/impl/data_source/requestor.rb @@ -6,97 +6,102 @@ require "http" module LaunchDarkly - # @api private - class UnexpectedResponseError < StandardError - def initialize(status) - @status = status - super("HTTP error #{status}") - end + module Impl + module DataSource + # @api private + class UnexpectedResponseError < StandardError + def initialize(status) + @status = status + super("HTTP error #{status}") + end - def status - @status - end - end + def status + @status + end + end - # @api private - class Requestor - CacheEntry = Struct.new(:etag, :body) + # @api private + class Requestor + CacheEntry = Struct.new(:etag, :body) - def initialize(sdk_key, config) - @sdk_key = sdk_key - @config = config - @http_client = LaunchDarkly::Util.new_http_client(config.base_uri, config) - .use(:auto_inflate) - .headers("Accept-Encoding" => "gzip") - @cache = @config.cache_store - end + def initialize(sdk_key, config) + @sdk_key = sdk_key + @config = config + @http_client = LaunchDarkly::Util.new_http_client(config.base_uri, config) + .use(:auto_inflate) + .headers("Accept-Encoding" => "gzip") + @cache = @config.cache_store + end - def request_all_data() - all_data = JSON.parse(make_request("/sdk/latest-all"), symbolize_names: true) - Impl::Model.make_all_store_data(all_data, @config.logger) - end + def request_all_data() + all_data = JSON.parse(make_request("/sdk/latest-all"), symbolize_names: true) + Impl::Model.make_all_store_data(all_data, @config.logger) + end - def stop - begin - @http_client.close - rescue - end - end + def stop + begin + @http_client.close + rescue + end + end - private + private - def make_request(path) - uri = URI( - Util.add_payload_filter_key(@config.base_uri + path, @config) - ) - headers = {} - Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| headers[k] = v } - headers["Connection"] = "keep-alive" - cached = @cache.read(uri) - unless cached.nil? - headers["If-None-Match"] = cached.etag - end - response = @http_client.request("GET", uri, { - headers: headers, - }) - status = response.status.code - # must fully read body for persistent connections - body = response.to_s - @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{response.headers.to_h}\n\tbody: #{body}" } - if status == 304 && !cached.nil? 
- body = cached.body - else - @cache.delete(uri) - if status < 200 || status >= 300 - raise UnexpectedResponseError.new(status) + def make_request(path) + uri = URI( + Util.add_payload_filter_key(@config.base_uri + path, @config) + ) + headers = {} + Impl::Util.default_http_headers(@sdk_key, @config).each { |k, v| headers[k] = v } + headers["Connection"] = "keep-alive" + cached = @cache.read(uri) + unless cached.nil? + headers["If-None-Match"] = cached.etag + end + response = @http_client.request("GET", uri, { + headers: headers, + }) + status = response.status.code + # must fully read body for persistent connections + body = response.to_s + @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{response.headers.to_h}\n\tbody: #{body}" } + if status == 304 && !cached.nil? + body = cached.body + else + @cache.delete(uri) + if status < 200 || status >= 300 + raise UnexpectedResponseError.new(status) + end + body = fix_encoding(body, response.headers["content-type"]) + etag = response.headers["etag"] + @cache.write(uri, CacheEntry.new(etag, body)) unless etag.nil? + end + body end - body = fix_encoding(body, response.headers["content-type"]) - etag = response.headers["etag"] - @cache.write(uri, CacheEntry.new(etag, body)) unless etag.nil? - end - body - end - def fix_encoding(body, content_type) - return body if content_type.nil? - media_type, charset = parse_content_type(content_type) - return body if charset.nil? - body.force_encoding(Encoding::find(charset)).encode(Encoding::UTF_8) - end + def fix_encoding(body, content_type) + return body if content_type.nil? + media_type, charset = parse_content_type(content_type) + return body if charset.nil? + body.force_encoding(Encoding::find(charset)).encode(Encoding::UTF_8) + end - def parse_content_type(value) - return [nil, nil] if value.nil? || value == '' - parts = value.split(/; */) - return [value, nil] if parts.count < 2 - charset = nil - parts.each do |part| - fields = part.split('=') - if fields.count >= 2 && fields[0] == 'charset' - charset = fields[1] - break + def parse_content_type(value) + return [nil, nil] if value.nil? 
|| value == '' + parts = value.split(/; */) + return [value, nil] if parts.count < 2 + charset = nil + parts.each do |part| + fields = part.split('=') + if fields.count >= 2 && fields[0] == 'charset' + charset = fields[1] + break + end + end + [parts[0], charset] end end - [parts[0], charset] end end end + diff --git a/lib/ldclient-rb/impl/data_source/stream.rb b/lib/ldclient-rb/impl/data_source/stream.rb index 8290d02c..6359679b 100644 --- a/lib/ldclient-rb/impl/data_source/stream.rb +++ b/lib/ldclient-rb/impl/data_source/stream.rb @@ -5,192 +5,197 @@ require "ld-eventsource" module LaunchDarkly - # @api private - PUT = :put - # @api private - PATCH = :patch - # @api private - DELETE = :delete - # @api private - READ_TIMEOUT_SECONDS = 300 # 5 minutes; the stream should send a ping every 3 minutes - - # @api private - KEY_PATHS = { - FEATURES => "/flags/", - SEGMENTS => "/segments/", - } - - # @api private - class StreamProcessor - def initialize(sdk_key, config, diagnostic_accumulator = nil) - @sdk_key = sdk_key - @config = config - @data_source_update_sink = config.data_source_update_sink - @feature_store = config.feature_store - @initialized = Concurrent::AtomicBoolean.new(false) - @started = Concurrent::AtomicBoolean.new(false) - @stopped = Concurrent::AtomicBoolean.new(false) - @ready = Concurrent::Event.new - @connection_attempt_start_time = 0 - end - - def initialized? - @initialized.value - end - - def start - return @ready unless @started.make_true + module Impl + module DataSource + # @api private + PUT = :put + # @api private + PATCH = :patch + # @api private + DELETE = :delete + # @api private + READ_TIMEOUT_SECONDS = 300 # 5 minutes; the stream should send a ping every 3 minutes + + # @api private + KEY_PATHS = { + FEATURES => "/flags/", + SEGMENTS => "/segments/", + } - @config.logger.info { "[LDClient] Initializing stream connection" } + # @api private + class StreamProcessor + def initialize(sdk_key, config, diagnostic_accumulator = nil) + @sdk_key = sdk_key + @config = config + @data_source_update_sink = config.data_source_update_sink + @feature_store = config.feature_store + @initialized = Concurrent::AtomicBoolean.new(false) + @started = Concurrent::AtomicBoolean.new(false) + @stopped = Concurrent::AtomicBoolean.new(false) + @ready = Concurrent::Event.new + @connection_attempt_start_time = 0 + end - headers = Impl::Util.default_http_headers(@sdk_key, @config) - opts = { - headers: headers, - read_timeout: READ_TIMEOUT_SECONDS, - logger: @config.logger, - socket_factory: @config.socket_factory, - reconnect_time: @config.initial_reconnect_delay, - } - log_connection_started - - uri = Util.add_payload_filter_key(@config.stream_uri + "/all", @config) - @es = SSE::Client.new(uri, **opts) do |conn| - conn.on_event { |event| process_message(event) } - conn.on_error { |err| - log_connection_result(false) - case err - when SSE::Errors::HTTPStatusError - status = err.status - error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new( - LaunchDarkly::Interfaces::DataSource::ErrorInfo::ERROR_RESPONSE, status, nil, Time.now) - message = Util.http_error_message(status, "streaming connection", "will retry") - @config.logger.error { "[LDClient] #{message}" } - - if Util.http_error_recoverable?(status) - @data_source_update_sink&.update_status( - LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, - error_info - ) - else - @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set - stop_with_error_info error_info - end - when 
SSE::Errors::HTTPContentTypeError, SSE::Errors::HTTPProxyError, SSE::Errors::ReadTimeoutError - @data_source_update_sink&.update_status( - LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, - LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::NETWORK_ERROR, 0, err.to_s, Time.now) - ) + def initialized? + @initialized.value + end - else - @data_source_update_sink&.update_status( - LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, - LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::UNKNOWN, 0, err.to_s, Time.now) - ) + def start + return @ready unless @started.make_true + + @config.logger.info { "[LDClient] Initializing stream connection" } + + headers = Impl::Util.default_http_headers(@sdk_key, @config) + opts = { + headers: headers, + read_timeout: READ_TIMEOUT_SECONDS, + logger: @config.logger, + socket_factory: @config.socket_factory, + reconnect_time: @config.initial_reconnect_delay, + } + log_connection_started + + uri = Util.add_payload_filter_key(@config.stream_uri + "/all", @config) + @es = SSE::Client.new(uri, **opts) do |conn| + conn.on_event { |event| process_message(event) } + conn.on_error { |err| + log_connection_result(false) + case err + when SSE::Errors::HTTPStatusError + status = err.status + error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new( + LaunchDarkly::Interfaces::DataSource::ErrorInfo::ERROR_RESPONSE, status, nil, Time.now) + message = Util.http_error_message(status, "streaming connection", "will retry") + @config.logger.error { "[LDClient] #{message}" } + + if Util.http_error_recoverable?(status) + @data_source_update_sink&.update_status( + LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, + error_info + ) + else + @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set + stop_with_error_info error_info + end + when SSE::Errors::HTTPContentTypeError, SSE::Errors::HTTPProxyError, SSE::Errors::ReadTimeoutError + @data_source_update_sink&.update_status( + LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, + LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::NETWORK_ERROR, 0, err.to_s, Time.now) + ) + + else + @data_source_update_sink&.update_status( + LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, + LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::UNKNOWN, 0, err.to_s, Time.now) + ) + end + } end - } - end - @ready - end + @ready + end - def stop - stop_with_error_info - end + def stop + stop_with_error_info + end - private + private - # - # @param [LaunchDarkly::Interfaces::DataSource::ErrorInfo, nil] error_info - # - def stop_with_error_info(error_info = nil) - if @stopped.make_true - @es.close - @data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::OFF, error_info) - @config.logger.info { "[LDClient] Stream connection stopped" } - end - end + # + # @param [LaunchDarkly::Interfaces::DataSource::ErrorInfo, nil] error_info + # + def stop_with_error_info(error_info = nil) + if @stopped.make_true + @es.close + @data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::OFF, error_info) + @config.logger.info { "[LDClient] Stream connection stopped" } + end + end - # - # The original implementation of this class relied on the feature store - # directly, which we are trying to move away from. 
Customers who might have - # instantiated this directly for some reason wouldn't know they have to set - # the config's sink manually, so we have to fall back to the store if the - # sink isn't present. - # - # The next major release should be able to simplify this structure and - # remove the need for fall back to the data store because the update sink - # should always be present. - # - def update_sink_or_data_store - @data_source_update_sink || @feature_store - end + # + # The original implementation of this class relied on the feature store + # directly, which we are trying to move away from. Customers who might have + # instantiated this directly for some reason wouldn't know they have to set + # the config's sink manually, so we have to fall back to the store if the + # sink isn't present. + # + # The next major release should be able to simplify this structure and + # remove the need for fall back to the data store because the update sink + # should always be present. + # + def update_sink_or_data_store + @data_source_update_sink || @feature_store + end - def process_message(message) - log_connection_result(true) - method = message.type - @config.logger.debug { "[LDClient] Stream received #{method} message: #{message.data}" } - - begin - if method == PUT - message = JSON.parse(message.data, symbolize_names: true) - all_data = Impl::Model.make_all_store_data(message[:data], @config.logger) - update_sink_or_data_store.init(all_data) - @initialized.make_true - @config.logger.info { "[LDClient] Stream initialized" } - @ready.set - elsif method == PATCH - data = JSON.parse(message.data, symbolize_names: true) - for kind in [FEATURES, SEGMENTS] - key = key_for_path(kind, data[:path]) - if key - item = Impl::Model.deserialize(kind, data[:data], @config.logger) - update_sink_or_data_store.upsert(kind, item) - break - end - end - elsif method == DELETE - data = JSON.parse(message.data, symbolize_names: true) - for kind in [FEATURES, SEGMENTS] - key = key_for_path(kind, data[:path]) - if key - update_sink_or_data_store.delete(kind, key, data[:version]) - break + def process_message(message) + log_connection_result(true) + method = message.type + @config.logger.debug { "[LDClient] Stream received #{method} message: #{message.data}" } + + begin + if method == PUT + message = JSON.parse(message.data, symbolize_names: true) + all_data = Impl::Model.make_all_store_data(message[:data], @config.logger) + update_sink_or_data_store.init(all_data) + @initialized.make_true + @config.logger.info { "[LDClient] Stream initialized" } + @ready.set + elsif method == PATCH + data = JSON.parse(message.data, symbolize_names: true) + for kind in [FEATURES, SEGMENTS] + key = key_for_path(kind, data[:path]) + if key + item = Impl::Model.deserialize(kind, data[:data], @config.logger) + update_sink_or_data_store.upsert(kind, item) + break + end + end + elsif method == DELETE + data = JSON.parse(message.data, symbolize_names: true) + for kind in [FEATURES, SEGMENTS] + key = key_for_path(kind, data[:path]) + if key + update_sink_or_data_store.delete(kind, key, data[:version]) + break + end + end + else + @config.logger.warn { "[LDClient] Unknown message received: #{method}" } end + + @data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::VALID, nil) + rescue JSON::ParserError => e + @config.logger.error { "[LDClient] JSON parsing failed for method #{method}. Ignoring event." 
} + error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new( + LaunchDarkly::Interfaces::DataSource::ErrorInfo::INVALID_DATA, + 0, + e.to_s, + Time.now + ) + @data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, error_info) + + # Re-raise the exception so the SSE implementation can catch it and restart the stream. + raise end - else - @config.logger.warn { "[LDClient] Unknown message received: #{method}" } end - @data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::VALID, nil) - rescue JSON::ParserError => e - @config.logger.error { "[LDClient] JSON parsing failed for method #{method}. Ignoring event." } - error_info = LaunchDarkly::Interfaces::DataSource::ErrorInfo.new( - LaunchDarkly::Interfaces::DataSource::ErrorInfo::INVALID_DATA, - 0, - e.to_s, - Time.now - ) - @data_source_update_sink&.update_status(LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, error_info) - - # Re-raise the exception so the SSE implementation can catch it and restart the stream. - raise - end - end - - def key_for_path(kind, path) - path.start_with?(KEY_PATHS[kind]) ? path[KEY_PATHS[kind].length..-1] : nil - end + def key_for_path(kind, path) + path.start_with?(KEY_PATHS[kind]) ? path[KEY_PATHS[kind].length..-1] : nil + end - def log_connection_started - @connection_attempt_start_time = Impl::Util::current_time_millis - end + def log_connection_started + @connection_attempt_start_time = Impl::Util::current_time_millis + end - def log_connection_result(is_success) - if !@diagnostic_accumulator.nil? && @connection_attempt_start_time > 0 - @diagnostic_accumulator.record_stream_init(@connection_attempt_start_time, !is_success, - Impl::Util::current_time_millis - @connection_attempt_start_time) - @connection_attempt_start_time = 0 + def log_connection_result(is_success) + if !@diagnostic_accumulator.nil? && @connection_attempt_start_time > 0 + @diagnostic_accumulator.record_stream_init(@connection_attempt_start_time, !is_success, + Impl::Util::current_time_millis - @connection_attempt_start_time) + @connection_attempt_start_time = 0 + end + end end end end end + diff --git a/lib/ldclient-rb/impl/expiring_cache.rb b/lib/ldclient-rb/impl/expiring_cache.rb index 5067ffbb..c397d565 100644 --- a/lib/ldclient-rb/impl/expiring_cache.rb +++ b/lib/ldclient-rb/impl/expiring_cache.rb @@ -1,77 +1,80 @@ module LaunchDarkly - # A thread-safe cache with maximum number of entries and TTL. - # Adapted from https://github.com/SamSaffron/lru_redux/blob/master/lib/lru_redux/ttl/cache.rb - # under MIT license with the following changes: - # * made thread-safe - # * removed many unused methods - # * reading a key does not reset its expiration time, only writing - # @api private - class ExpiringCache - def initialize(max_size, ttl) - @max_size = max_size - @ttl = ttl - @data_lru = {} - @data_ttl = {} - @lock = Mutex.new - end + module Impl + # A thread-safe cache with maximum number of entries and TTL. 
+ # Adapted from https://github.com/SamSaffron/lru_redux/blob/master/lib/lru_redux/ttl/cache.rb + # under MIT license with the following changes: + # * made thread-safe + # * removed many unused methods + # * reading a key does not reset its expiration time, only writing + # @api private + class ExpiringCache + def initialize(max_size, ttl) + @max_size = max_size + @ttl = ttl + @data_lru = {} + @data_ttl = {} + @lock = Mutex.new + end - def [](key) - @lock.synchronize do - ttl_evict - @data_lru[key] + def [](key) + @lock.synchronize do + ttl_evict + @data_lru[key] + end end - end - def []=(key, val) - @lock.synchronize do - ttl_evict + def []=(key, val) + @lock.synchronize do + ttl_evict + + @data_lru.delete(key) + @data_ttl.delete(key) - @data_lru.delete(key) - @data_ttl.delete(key) + @data_lru[key] = val + @data_ttl[key] = Time.now.to_f - @data_lru[key] = val - @data_ttl[key] = Time.now.to_f + if @data_lru.size > @max_size + key, _ = @data_lru.first # hashes have a FIFO ordering in Ruby - if @data_lru.size > @max_size - key, _ = @data_lru.first # hashes have a FIFO ordering in Ruby + @data_ttl.delete(key) + @data_lru.delete(key) + end - @data_ttl.delete(key) - @data_lru.delete(key) + val end - - val end - end - def delete(key) - @lock.synchronize do - ttl_evict + def delete(key) + @lock.synchronize do + ttl_evict - @data_lru.delete(key) - @data_ttl.delete(key) + @data_lru.delete(key) + @data_ttl.delete(key) + end end - end - def clear - @lock.synchronize do - @data_lru.clear - @data_ttl.clear + def clear + @lock.synchronize do + @data_lru.clear + @data_ttl.clear + end end - end - private + private - def ttl_evict - ttl_horizon = Time.now.to_f - @ttl - key, time = @data_ttl.first + def ttl_evict + ttl_horizon = Time.now.to_f - @ttl + key, time = @data_ttl.first - until time.nil? || time > ttl_horizon - @data_ttl.delete(key) - @data_lru.delete(key) + until time.nil? || time > ttl_horizon + @data_ttl.delete(key) + @data_lru.delete(key) - key, time = @data_ttl.first + key, time = @data_ttl.first + end end end end end + diff --git a/lib/ldclient-rb/impl/memoized_value.rb b/lib/ldclient-rb/impl/memoized_value.rb index 450924ee..2a297a0f 100644 --- a/lib/ldclient-rb/impl/memoized_value.rb +++ b/lib/ldclient-rb/impl/memoized_value.rb @@ -1,32 +1,35 @@ module LaunchDarkly - # Simple implementation of a thread-safe memoized value whose generator function will never be - # run more than once, and whose value can be overridden by explicit assignment. - # Note that we no longer use this class and it will be removed in a future version. - # @api private - class MemoizedValue - def initialize(&generator) - @generator = generator - @mutex = Mutex.new - @inited = false - @value = nil - end + module Impl + # Simple implementation of a thread-safe memoized value whose generator function will never be + # run more than once, and whose value can be overridden by explicit assignment. + # Note that we no longer use this class and it will be removed in a future version. 
+ # @api private + class MemoizedValue + def initialize(&generator) + @generator = generator + @mutex = Mutex.new + @inited = false + @value = nil + end - def get - @mutex.synchronize do - unless @inited - @value = @generator.call - @inited = true + def get + @mutex.synchronize do + unless @inited + @value = @generator.call + @inited = true + end end + @value end - @value - end - def set(value) - @mutex.synchronize do - @value = value - @inited = true + def set(value) + @mutex.synchronize do + @value = value + @inited = true + end end end end end + diff --git a/lib/ldclient-rb/impl/non_blocking_thread_pool.rb b/lib/ldclient-rb/impl/non_blocking_thread_pool.rb index f2c6174d..575761b0 100644 --- a/lib/ldclient-rb/impl/non_blocking_thread_pool.rb +++ b/lib/ldclient-rb/impl/non_blocking_thread_pool.rb @@ -4,43 +4,46 @@ require "thread" module LaunchDarkly - # Simple wrapper for a FixedThreadPool that rejects new jobs if all the threads are busy, rather - # than blocking. Also provides a way to wait for all jobs to finish without shutting down. - # @api private - class NonBlockingThreadPool - def initialize(capacity, name = 'LD/NonBlockingThreadPool') - @capacity = capacity - @pool = Concurrent::FixedThreadPool.new(capacity, name: name) - @semaphore = Concurrent::Semaphore.new(capacity) - end - - # Attempts to submit a job, but only if a worker is available. Unlike the regular post method, - # this returns a value: true if the job was submitted, false if all workers are busy. - def post - unless @semaphore.try_acquire(1) - return + module Impl + # Simple wrapper for a FixedThreadPool that rejects new jobs if all the threads are busy, rather + # than blocking. Also provides a way to wait for all jobs to finish without shutting down. + # @api private + class NonBlockingThreadPool + def initialize(capacity, name = 'LD/NonBlockingThreadPool') + @capacity = capacity + @pool = Concurrent::FixedThreadPool.new(capacity, name: name) + @semaphore = Concurrent::Semaphore.new(capacity) end - @pool.post do - begin - yield - ensure - @semaphore.release(1) + + # Attempts to submit a job, but only if a worker is available. Unlike the regular post method, + # this returns a value: true if the job was submitted, false if all workers are busy. + def post + unless @semaphore.try_acquire(1) + return + end + @pool.post do + begin + yield + ensure + @semaphore.release(1) + end end end - end - # Waits until no jobs are executing, without shutting down the pool. - def wait_all - @semaphore.acquire(@capacity) - @semaphore.release(@capacity) - end + # Waits until no jobs are executing, without shutting down the pool. + def wait_all + @semaphore.acquire(@capacity) + @semaphore.release(@capacity) + end - def shutdown - @pool.shutdown - end + def shutdown + @pool.shutdown + end - def wait_for_termination - @pool.wait_for_termination + def wait_for_termination + @pool.wait_for_termination + end end end end + diff --git a/lib/ldclient-rb/impl/simple_lru_cache.rb b/lib/ldclient-rb/impl/simple_lru_cache.rb index b2bb0e91..4578a38b 100644 --- a/lib/ldclient-rb/impl/simple_lru_cache.rb +++ b/lib/ldclient-rb/impl/simple_lru_cache.rb @@ -1,25 +1,28 @@ module LaunchDarkly - # A non-thread-safe implementation of a LRU cache set with only add and reset methods. 
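# An aside on the semantics described just above (this is the structure the event processor
# uses for its context-key cache): add() answers "was this value already present?", marking
# it recent either way, and the oldest entry is dropped once capacity is exceeded. A small
# usage sketch under the nested constant name this commit introduces:
require "ldclient-rb/impl/simple_lru_cache"

lru = LaunchDarkly::Impl::SimpleLRUCacheSet.new(2)
lru.add("a") # => false, first sighting
lru.add("a") # => true, already present
lru.add("b") # => false
lru.add("c") # => false, and capacity is exceeded so the oldest entry ("a") is evicted
lru.add("a") # => false, "a" has to be re-learned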
- # Based on https://github.com/SamSaffron/lru_redux/blob/master/lib/lru_redux/cache.rb - # @api private - class SimpleLRUCacheSet - def initialize(capacity) - @values = {} - @capacity = capacity - end + module Impl + # A non-thread-safe implementation of a LRU cache set with only add and reset methods. + # Based on https://github.com/SamSaffron/lru_redux/blob/master/lib/lru_redux/cache.rb + # @api private + class SimpleLRUCacheSet + def initialize(capacity) + @values = {} + @capacity = capacity + end - # Adds a value to the cache or marks it recent if it was already there. Returns true if already there. - def add(value) - found = true - @values.delete(value) { found = false } - @values[value] = true - @values.shift if @values.length > @capacity - found - end + # Adds a value to the cache or marks it recent if it was already there. Returns true if already there. + def add(value) + found = true + @values.delete(value) { found = false } + @values[value] = true + @values.shift if @values.length > @capacity + found + end - def clear - @values = {} + def clear + @values = {} + end end end end + From fe3c9f7c6aacb5e77d6ac6b8c5564a201fdcc754 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Thu, 4 Dec 2025 18:26:34 +0000 Subject: [PATCH 04/11] adjust requires and implementations --- lib/ldclient-rb.rb | 25 +++---- lib/ldclient-rb/config.rb | 3 +- lib/ldclient-rb/events.rb | 12 ++-- lib/ldclient-rb/impl/big_segments.rb | 8 +-- lib/ldclient-rb/impl/data_source/requestor.rb | 3 +- lib/ldclient-rb/impl/data_store.rb | 1 - lib/ldclient-rb/impl/evaluator.rb | 5 +- lib/ldclient-rb/impl/event_sender.rb | 7 +- .../impl/integrations/file_data_source.rb | 6 +- lib/ldclient-rb/impl/migrations/migrator.rb | 3 +- lib/ldclient-rb/impl/migrations/tracker.rb | 3 +- lib/ldclient-rb/impl/repeating_task.rb | 4 +- lib/ldclient-rb/impl/util.rb | 65 ++++++++++++++++++ lib/ldclient-rb/in_memory_store.rb | 1 + .../integrations/util/store_wrapper.rb | 5 +- lib/ldclient-rb/ldclient.rb | 8 ++- lib/ldclient-rb/util.rb | 68 +------------------ spec/config_spec.rb | 2 +- spec/polling_spec.rb | 8 +-- spec/requestor_spec.rb | 6 +- spec/simple_lru_cache_spec.rb | 4 +- spec/store_spec.rb | 4 +- spec/stream_spec.rb | 4 +- spec/util_spec.rb | 4 +- 24 files changed, 130 insertions(+), 129 deletions(-) diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index 81289fe2..8eaee8f7 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -5,23 +5,14 @@ module LaunchDarkly end -require "ldclient-rb/version" -require "ldclient-rb/interfaces" -require "ldclient-rb/util" -require "ldclient-rb/flags_state" -require "ldclient-rb/migrations" -require "ldclient-rb/ldclient" -require "ldclient-rb/cache_store" -require "ldclient-rb/expiring_cache" -require "ldclient-rb/memoized_value" -require "ldclient-rb/in_memory_store" +# Public APIs - these define the main interfaces users interact with require "ldclient-rb/config" require "ldclient-rb/context" -require "ldclient-rb/reference" -require "ldclient-rb/stream" -require "ldclient-rb/polling" -require "ldclient-rb/simple_lru_cache" -require "ldclient-rb/non_blocking_thread_pool" -require "ldclient-rb/events" -require "ldclient-rb/requestor" +require "ldclient-rb/flags_state" require "ldclient-rb/integrations" +require "ldclient-rb/interfaces" +require "ldclient-rb/ldclient" +require "ldclient-rb/migrations" +require "ldclient-rb/reference" +require "ldclient-rb/util" +require "ldclient-rb/version" diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index c0fb197c..ef556a05 100644 
--- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -1,4 +1,5 @@ require "logger" +require "ldclient-rb/impl/cache_store" module LaunchDarkly # @@ -477,7 +478,7 @@ def self.default_events_uri # @return [Object] the Rails cache if in Rails, or a simple in-memory implementation otherwise # def self.default_cache_store - defined?(Rails) && Rails.respond_to?(:cache) ? Rails.cache : ThreadSafeMemoryStore.new + defined?(Rails) && Rails.respond_to?(:cache) ? Rails.cache : Impl::ThreadSafeMemoryStore.new end # diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 17ef5438..311037af 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -3,6 +3,8 @@ require "ldclient-rb/impl/event_sender" require "ldclient-rb/impl/event_summarizer" require "ldclient-rb/impl/event_types" +require "ldclient-rb/impl/non_blocking_thread_pool" +require "ldclient-rb/impl/simple_lru_cache" require "ldclient-rb/impl/util" require "concurrent" @@ -141,7 +143,7 @@ def initialize(sdk_key, config, client = nil, diagnostic_accumulator = nil, test @inbox_full = Concurrent::AtomicBoolean.new(false) event_sender = (test_properties || {})[:event_sender] || - Impl::EventSender.new(sdk_key, config, client || Util.new_http_client(config.events_uri, config)) + Impl::EventSender.new(sdk_key, config, client || Impl::Util.new_http_client(config.events_uri, config)) @timestamp_fn = (test_properties || {})[:timestamp_fn] || proc { Impl::Util.current_time_millis } @omit_anonymous_contexts = config.omit_anonymous_contexts @@ -235,7 +237,7 @@ def initialize(inbox, sdk_key, config, diagnostic_accumulator, event_sender) @event_sender = event_sender @sampler = LaunchDarkly::Impl::Sampler.new(Random.new) - @context_keys = SimpleLRUCacheSet.new(config.context_keys_capacity) + @context_keys = Impl::SimpleLRUCacheSet.new(config.context_keys_capacity) @formatter = EventOutputFormatter.new(config) @disabled = Concurrent::AtomicBoolean.new(false) @last_known_past_time = Concurrent::AtomicReference.new(0) @@ -243,10 +245,10 @@ def initialize(inbox, sdk_key, config, diagnostic_accumulator, event_sender) @events_in_last_batch = 0 outbox = EventBuffer.new(config.capacity, config.logger) - flush_workers = NonBlockingThreadPool.new(MAX_FLUSH_WORKERS, 'LD/EventDispatcher/FlushWorkers') + flush_workers = Impl::NonBlockingThreadPool.new(MAX_FLUSH_WORKERS, 'LD/EventDispatcher/FlushWorkers') if !@diagnostic_accumulator.nil? 
- diagnostic_event_workers = NonBlockingThreadPool.new(1, 'LD/EventDispatcher/DiagnosticEventWorkers') + diagnostic_event_workers = Impl::NonBlockingThreadPool.new(1, 'LD/EventDispatcher/DiagnosticEventWorkers') init_event = @diagnostic_accumulator.create_init_event(config) send_diagnostic_event(init_event, diagnostic_event_workers) else @@ -383,7 +385,7 @@ def trigger_flush(outbox, flush_workers) @last_known_past_time.value = (result.time_from_server.to_f * 1000).to_i end rescue => e - Util.log_exception(@config.logger, "Unexpected error in event processor", e) + Impl::Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end outbox.clear if success # Reset our internal state, these events now belong to the flush worker diff --git a/lib/ldclient-rb/impl/big_segments.rb b/lib/ldclient-rb/impl/big_segments.rb index ec19537a..cdccbeaa 100644 --- a/lib/ldclient-rb/impl/big_segments.rb +++ b/lib/ldclient-rb/impl/big_segments.rb @@ -1,8 +1,8 @@ require "ldclient-rb/config" -require "ldclient-rb/expiring_cache" +require "ldclient-rb/impl/expiring_cache" require "ldclient-rb/impl/repeating_task" +require "ldclient-rb/impl/util" require "ldclient-rb/interfaces" -require "ldclient-rb/util" require "digest" @@ -45,7 +45,7 @@ def get_context_membership(context_key) membership = EMPTY_MEMBERSHIP if membership.nil? @cache[context_key] = membership rescue => e - LaunchDarkly::Util.log_exception(@logger, "Big Segment store membership query returned error", e) + Impl::Util.log_exception(@logger, "Big Segment store membership query returned error", e) return BigSegmentMembershipResult.new(nil, BigSegmentsStatus::STORE_ERROR) end end @@ -67,7 +67,7 @@ def poll_store_and_update_status metadata = @store.get_metadata new_status = Interfaces::BigSegmentStoreStatus.new(true, !metadata || stale?(metadata.last_up_to_date)) rescue => e - LaunchDarkly::Util.log_exception(@logger, "Big Segment store status query returned error", e) + Impl::Util.log_exception(@logger, "Big Segment store status query returned error", e) end end @last_status = new_status diff --git a/lib/ldclient-rb/impl/data_source/requestor.rb b/lib/ldclient-rb/impl/data_source/requestor.rb index 61ee6d55..2ce893ac 100644 --- a/lib/ldclient-rb/impl/data_source/requestor.rb +++ b/lib/ldclient-rb/impl/data_source/requestor.rb @@ -1,4 +1,5 @@ require "ldclient-rb/impl/model/serialization" +require "ldclient-rb/impl/util" require "concurrent/atomics" require "json" @@ -27,7 +28,7 @@ class Requestor def initialize(sdk_key, config) @sdk_key = sdk_key @config = config - @http_client = LaunchDarkly::Util.new_http_client(config.base_uri, config) + @http_client = Impl::Util.new_http_client(config.base_uri, config) .use(:auto_inflate) .headers("Accept-Encoding" => "gzip") @cache = @config.cache_store diff --git a/lib/ldclient-rb/impl/data_store.rb b/lib/ldclient-rb/impl/data_store.rb index 8897ccaa..c04a1b6f 100644 --- a/lib/ldclient-rb/impl/data_store.rb +++ b/lib/ldclient-rb/impl/data_store.rb @@ -4,7 +4,6 @@ module LaunchDarkly module Impl module DataStore - class DataKind FEATURES = "features".freeze SEGMENTS = "segments".freeze diff --git a/lib/ldclient-rb/impl/evaluator.rb b/lib/ldclient-rb/impl/evaluator.rb index fd895efa..932d9102 100644 --- a/lib/ldclient-rb/impl/evaluator.rb +++ b/lib/ldclient-rb/impl/evaluator.rb @@ -4,6 +4,7 @@ require "ldclient-rb/impl/evaluator_operators" require "ldclient-rb/impl/model/feature_flag" require "ldclient-rb/impl/model/segment" +require "ldclient-rb/impl/util" module LaunchDarkly module Impl 
@@ -152,11 +153,11 @@ def evaluate(flag, context) begin detail = eval_internal(flag, context, result, state) rescue EvaluationException => exn - LaunchDarkly::Util.log_exception(@logger, "Unexpected error when evaluating flag #{flag.key}", exn) + Impl::Util.log_exception(@logger, "Unexpected error when evaluating flag #{flag.key}", exn) result.detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(exn.error_kind)) return result, state rescue => exn - LaunchDarkly::Util.log_exception(@logger, "Unexpected error when evaluating flag #{flag.key}", exn) + Impl::Util.log_exception(@logger, "Unexpected error when evaluating flag #{flag.key}", exn) result.detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_EXCEPTION)) return result, state end diff --git a/lib/ldclient-rb/impl/event_sender.rb b/lib/ldclient-rb/impl/event_sender.rb index c730b04a..754aee6d 100644 --- a/lib/ldclient-rb/impl/event_sender.rb +++ b/lib/ldclient-rb/impl/event_sender.rb @@ -1,4 +1,5 @@ require "ldclient-rb/impl/unbounded_pool" +require "ldclient-rb/impl/util" require "securerandom" require "http" @@ -21,7 +22,7 @@ def initialize(sdk_key, config, http_client = nil, retry_interval = DEFAULT_RETR @logger = config.logger @retry_interval = retry_interval @http_client_pool = UnboundedPool.new( - lambda { LaunchDarkly::Util.new_http_client(@config.events_uri, @config) }, + lambda { Impl::Util.new_http_client(@config.events_uri, @config) }, lambda { |client| client.close }) end @@ -81,9 +82,9 @@ def send_event_data(event_data, description, is_diagnostic) end return EventSenderResult.new(true, false, res_time) end - must_shutdown = !LaunchDarkly::Util.http_error_recoverable?(status) + must_shutdown = !Impl::Util.http_error_recoverable?(status) can_retry = !must_shutdown && attempt == 0 - message = LaunchDarkly::Util.http_error_message(status, "event delivery", can_retry ? "will retry" : "some events were dropped") + message = Impl::Util.http_error_message(status, "event delivery", can_retry ? 
"will retry" : "some events were dropped") @logger.error { "[LDClient] #{message}" } if must_shutdown return EventSenderResult.new(false, true, nil) diff --git a/lib/ldclient-rb/impl/integrations/file_data_source.rb b/lib/ldclient-rb/impl/integrations/file_data_source.rb index e009dc7d..8c0b2476 100644 --- a/lib/ldclient-rb/impl/integrations/file_data_source.rb +++ b/lib/ldclient-rb/impl/integrations/file_data_source.rb @@ -1,5 +1,5 @@ require 'ldclient-rb/in_memory_store' -require 'ldclient-rb/util' +require 'ldclient-rb/impl/util' require 'concurrent/atomics' require 'json' @@ -82,7 +82,7 @@ def load_all begin load_file(path, all_data) rescue => exn - LaunchDarkly::Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn) + Impl::Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn) @data_source_update_sink&.update_status( LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::INVALID_DATA, 0, exn.to_s, Time.now) @@ -212,7 +212,7 @@ def initialize(resolved_paths, interval, reloader, logger) end reloader.call if changed rescue => exn - LaunchDarkly::Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn) + Impl::Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn) end end end diff --git a/lib/ldclient-rb/impl/migrations/migrator.rb b/lib/ldclient-rb/impl/migrations/migrator.rb index 6b882acb..e680e25b 100644 --- a/lib/ldclient-rb/impl/migrations/migrator.rb +++ b/lib/ldclient-rb/impl/migrations/migrator.rb @@ -1,4 +1,5 @@ require 'thread' +require 'ldclient-rb/impl/util' module LaunchDarkly module Impl @@ -274,7 +275,7 @@ def run() begin result = @fn.call(@payload) rescue => e - LaunchDarkly::Util.log_exception(@logger, "Unexpected error running method for '#{origin}' origin", e) + Impl::Util.log_exception(@logger, "Unexpected error running method for '#{origin}' origin", e) result = LaunchDarkly::Result.fail("'#{origin}' operation raised an exception", e) end diff --git a/lib/ldclient-rb/impl/migrations/tracker.rb b/lib/ldclient-rb/impl/migrations/tracker.rb index 761d5ed2..28fe5398 100644 --- a/lib/ldclient-rb/impl/migrations/tracker.rb +++ b/lib/ldclient-rb/impl/migrations/tracker.rb @@ -1,5 +1,6 @@ require "set" require "ldclient-rb/impl/sampler" +require "ldclient-rb/impl/util" require "logger" module LaunchDarkly @@ -67,7 +68,7 @@ def consistent(is_consistent) begin @consistent = is_consistent.call rescue => e - LaunchDarkly::Util.log_exception(@logger, "Exception raised during consistency check; failed to record measurement", e) + Impl::Util.log_exception(@logger, "Exception raised during consistency check; failed to record measurement", e) end end end diff --git a/lib/ldclient-rb/impl/repeating_task.rb b/lib/ldclient-rb/impl/repeating_task.rb index a6335ae0..5d272b20 100644 --- a/lib/ldclient-rb/impl/repeating_task.rb +++ b/lib/ldclient-rb/impl/repeating_task.rb @@ -1,4 +1,4 @@ -require "ldclient-rb/util" +require "ldclient-rb/impl/util" require "concurrent/atomics" @@ -26,7 +26,7 @@ def start begin @task.call rescue => e - LaunchDarkly::Util.log_exception(@logger, "Uncaught exception from repeating task", e) + Impl::Util.log_exception(@logger, "Uncaught exception from repeating task", e) end delta = @interval - (Time.now - started_at) if delta > 0 diff --git a/lib/ldclient-rb/impl/util.rb b/lib/ldclient-rb/impl/util.rb index 9e7faacc..46fc0b00 100644 --- 
a/lib/ldclient-rb/impl/util.rb +++ b/lib/ldclient-rb/impl/util.rb @@ -1,3 +1,6 @@ +require "uri" +require "http" + module LaunchDarkly module Impl module Util @@ -93,6 +96,68 @@ def self.validate_payload_filter_key(value, logger) } nil end + + # + # Append the payload filter key query parameter to the provided URI. + # + # @param uri [String] + # @param config [Config] + # @return [String] + # + def self.add_payload_filter_key(uri, config) + return uri if config.payload_filter_key.nil? + + begin + parsed = URI.parse(uri) + new_query_params = URI.decode_www_form(String(parsed.query)) << ["filter", config.payload_filter_key] + parsed.query = URI.encode_www_form(new_query_params) + parsed.to_s + rescue URI::InvalidURIError + config.logger.warn { "[LDClient] URI could not be parsed. No filtering will be applied." } + uri + end + end + + def self.new_http_client(uri_s, config) + http_client_options = {} + if config.socket_factory + http_client_options["socket_class"] = config.socket_factory + end + proxy = URI.parse(uri_s).find_proxy + unless proxy.nil? + http_client_options["proxy"] = { + proxy_address: proxy.host, + proxy_port: proxy.port, + proxy_username: proxy.user, + proxy_password: proxy.password, + } + end + HTTP::Client.new(http_client_options) + .timeout({ + read: config.read_timeout, + connect: config.connect_timeout, + }) + .persistent(uri_s) + end + + def self.log_exception(logger, message, exc) + logger.error { "[LDClient] #{message}: #{exc.inspect}" } + logger.debug { "[LDClient] Exception trace: #{exc.backtrace}" } + end + + def self.http_error_recoverable?(status) + if status >= 400 && status < 500 + status == 400 || status == 408 || status == 429 + else + true + end + end + + def self.http_error_message(status, context, recoverable_message) + desc = (status == 401 || status == 403) ? " (invalid SDK key)" : "" + message = http_error_recoverable?(status) ? 
recoverable_message : "giving up permanently" + "HTTP error #{status}#{desc} for #{context} - #{message}" + end end end end diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index ccb18cae..2e41f3d7 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -1,4 +1,5 @@ require "concurrent/atomics" +require "ldclient-rb/impl/data_store" module LaunchDarkly diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index 808e2e33..03e3a91f 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -1,6 +1,7 @@ require "concurrent/atomics" -require "ldclient-rb/expiring_cache" +require "ldclient-rb/impl/expiring_cache" +require "ldclient-rb/interfaces" module LaunchDarkly module Integrations @@ -37,7 +38,7 @@ def initialize(core, opts) expiration_seconds = opts[:expiration] || 15 if expiration_seconds > 0 capacity = opts[:capacity] || 1000 - @cache = ExpiringCache.new(capacity, expiration_seconds) + @cache = Impl::ExpiringCache.new(capacity, expiration_seconds) else @cache = nil end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 16f8a442..4097c843 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -5,6 +5,8 @@ require "ldclient-rb/impl/data_source/null_processor" require "ldclient-rb/impl/diagnostic_events" require "ldclient-rb/impl/evaluator" +require "ldclient-rb/events" +require "ldclient-rb/in_memory_store" require "ldclient-rb/impl/evaluation_with_hook_result" require "ldclient-rb/impl/flag_tracker" require "ldclient-rb/impl/store_client_wrapper" @@ -715,12 +717,12 @@ def create_default_data_source(sdk_key, config, diagnostic_accumulator) end raise ArgumentError, "sdk_key must not be nil" if sdk_key.nil? # see LDClient constructor comment on sdk_key if config.stream? - StreamProcessor.new(sdk_key, config, diagnostic_accumulator) + Impl::DataSource::StreamProcessor.new(sdk_key, config, diagnostic_accumulator) else config.logger.info { "Disabling streaming API" } config.logger.warn { "You should only disable the streaming API if instructed to do so by LaunchDarkly support" } - requestor = Requestor.new(sdk_key, config) - PollingProcessor.new(config, requestor) + requestor = Impl::DataSource::Requestor.new(sdk_key, config) + Impl::DataSource::PollingProcessor.new(config, requestor) end end diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 865448ad..242e5447 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -1,5 +1,4 @@ -require "uri" -require "http" +require "ldclient-rb/impl/util" module LaunchDarkly # @@ -64,69 +63,4 @@ def success? @exception = exception end end - - # @api private - module Util - # - # Append the payload filter key query parameter to the provided URI. - # - # @param uri [String] - # @param config [Config] - # @return [String] - # - def self.add_payload_filter_key(uri, config) - return uri if config.payload_filter_key.nil? - - begin - parsed = URI.parse(uri) - new_query_params = URI.decode_www_form(String(parsed.query)) << ["filter", config.payload_filter_key] - parsed.query = URI.encode_www_form(new_query_params) - parsed.to_s - rescue URI::InvalidURIError - config.logger.warn { "[LDClient] URI could not be parsed. No filtering will be applied." 
} - uri - end - end - - def self.new_http_client(uri_s, config) - http_client_options = {} - if config.socket_factory - http_client_options["socket_class"] = config.socket_factory - end - proxy = URI.parse(uri_s).find_proxy - unless proxy.nil? - http_client_options["proxy"] = { - proxy_address: proxy.host, - proxy_port: proxy.port, - proxy_username: proxy.user, - proxy_password: proxy.password, - } - end - HTTP::Client.new(http_client_options) - .timeout({ - read: config.read_timeout, - connect: config.connect_timeout, - }) - .persistent(uri_s) - end - - def self.log_exception(logger, message, exc) - logger.error { "[LDClient] #{message}: #{exc.inspect}" } - logger.debug { "[LDClient] Exception trace: #{exc.backtrace}" } - end - - def self.http_error_recoverable?(status) - if status >= 400 && status < 500 - status == 400 || status == 408 || status == 429 - else - true - end - end - - def self.http_error_message(status, context, recoverable_message) - desc = (status == 401 || status == 403) ? " (invalid SDK key)" : "" - message = Util.http_error_recoverable?(status) ? recoverable_message : "giving up permanently" - "HTTP error #{status}#{desc} for #{context} - #{message}" - end - end end diff --git a/spec/config_spec.rb b/spec/config_spec.rb index 63af36ee..c3f5939f 100644 --- a/spec/config_spec.rb +++ b/spec/config_spec.rb @@ -40,7 +40,7 @@ module LaunchDarkly expect(subject.default_cache_store).to eq :cache end it "uses memory store if Rails is not available" do - expect(subject.default_cache_store).to be_an_instance_of ThreadSafeMemoryStore + expect(subject.default_cache_store).to be_an_instance_of Impl::ThreadSafeMemoryStore end end describe ".default_logger" do diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index 2578dfe2..d1a8133e 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -4,8 +4,8 @@ require "spec_helper" module LaunchDarkly - describe PollingProcessor do - subject { PollingProcessor } + describe Impl::DataSource::PollingProcessor do + subject { Impl::DataSource::PollingProcessor } let(:executor) { SynchronousExecutor.new } let(:status_broadcaster) { Impl::Broadcaster.new(executor, $null_log) } let(:flag_change_broadcaster) { Impl::Broadcaster.new(executor, $null_log) } @@ -99,7 +99,7 @@ def with_processor(store, initialize_to_valid = false) describe 'HTTP errors' do def verify_unrecoverable_http_error(status) - allow(requestor).to receive(:request_all_data).and_raise(UnexpectedResponseError.new(status)) + allow(requestor).to receive(:request_all_data).and_raise(Impl::DataSource::UnexpectedResponseError.new(status)) listener = ListenerSpy.new status_broadcaster.add_listener(listener) @@ -118,7 +118,7 @@ def verify_unrecoverable_http_error(status) end def verify_recoverable_http_error(status) - allow(requestor).to receive(:request_all_data).and_raise(UnexpectedResponseError.new(status)) + allow(requestor).to receive(:request_all_data).and_raise(Impl::DataSource::UnexpectedResponseError.new(status)) listener = ListenerSpy.new status_broadcaster.add_listener(listener) diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 5f60b337..5eff1d58 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -3,11 +3,11 @@ require "spec_helper" module LaunchDarkly - describe Requestor do + describe Impl::DataSource::Requestor do let(:sdk_key) { "secret" } def with_requestor(base_uri, opts = {}) - r = Requestor.new(sdk_key, Config.new({ base_uri: base_uri, application: {id: "id", version: "version"} }.merge(opts))) + r = 
Impl::DataSource::Requestor.new(sdk_key, Config.new({ base_uri: base_uri, application: {id: "id", version: "version"} }.merge(opts))) begin yield r ensure @@ -188,7 +188,7 @@ def with_requestor(base_uri, opts = {}) server.setup_response("/") do |_, res| res.status = 400 end - expect { requestor.request_all_data }.to raise_error(UnexpectedResponseError) + expect { requestor.request_all_data }.to raise_error(LaunchDarkly::Impl::DataSource::UnexpectedResponseError) end end end diff --git a/spec/simple_lru_cache_spec.rb b/spec/simple_lru_cache_spec.rb index d7b507e0..8e7bc79e 100644 --- a/spec/simple_lru_cache_spec.rb +++ b/spec/simple_lru_cache_spec.rb @@ -1,8 +1,8 @@ require "spec_helper" module LaunchDarkly - describe SimpleLRUCacheSet do - subject { SimpleLRUCacheSet } + describe Impl::SimpleLRUCacheSet do + subject { Impl::SimpleLRUCacheSet } it "retains values up to capacity" do lru = subject.new(3) diff --git a/spec/store_spec.rb b/spec/store_spec.rb index 7a74231b..e213e871 100644 --- a/spec/store_spec.rb +++ b/spec/store_spec.rb @@ -1,8 +1,8 @@ require "spec_helper" module LaunchDarkly - describe ThreadSafeMemoryStore do - subject { ThreadSafeMemoryStore } + describe Impl::ThreadSafeMemoryStore do + subject { Impl::ThreadSafeMemoryStore } let(:store) { subject.new } it "can read and write" do store.write("key", "value") diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index c08edc0b..2aaf18d2 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -3,8 +3,8 @@ require "spec_helper" module LaunchDarkly - describe StreamProcessor do - subject { StreamProcessor } + describe Impl::DataSource::StreamProcessor do + subject { Impl::DataSource::StreamProcessor } let(:executor) { SynchronousExecutor.new } let(:status_broadcaster) { Impl::Broadcaster.new(executor, $null_log) } let(:flag_change_broadcaster) { Impl::Broadcaster.new(executor, $null_log) } diff --git a/spec/util_spec.rb b/spec/util_spec.rb index 2219ab3c..5b727915 100644 --- a/spec/util_spec.rb +++ b/spec/util_spec.rb @@ -1,7 +1,7 @@ require "spec_helper" module LaunchDarkly - describe Util do + describe Impl::Util do describe 'log_exception' do let(:logger) { double } @@ -11,7 +11,7 @@ module LaunchDarkly begin raise StandardError.new 'asdf' rescue StandardError => exn - Util.log_exception(logger, "message", exn) + Impl::Util.log_exception(logger, "message", exn) end end end From 69036c6efb6002ea241b19daa52e64eec8e73acb Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Thu, 4 Dec 2025 21:58:15 +0000 Subject: [PATCH 05/11] address imports in specs --- lib/ldclient-rb/impl/data_system/fdv1.rb | 11 ++++++----- lib/ldclient-rb/ldclient.rb | 14 +++++++++----- spec/expiring_cache_spec.rb | 7 ++++--- spec/impl/data_system/fdv1_spec.rb | 12 ++++++------ spec/polling_spec.rb | 1 + spec/requestor_spec.rb | 1 + spec/simple_lru_cache_spec.rb | 1 + spec/store_spec.rb | 1 + spec/stream_spec.rb | 1 + 9 files changed, 30 insertions(+), 19 deletions(-) diff --git a/lib/ldclient-rb/impl/data_system/fdv1.rb b/lib/ldclient-rb/impl/data_system/fdv1.rb index e5e20062..a2fcdd13 100644 --- a/lib/ldclient-rb/impl/data_system/fdv1.rb +++ b/lib/ldclient-rb/impl/data_system/fdv1.rb @@ -2,6 +2,9 @@ require 'ldclient-rb/impl/broadcaster' require 'ldclient-rb/impl/data_source' require 'ldclient-rb/impl/data_source/null_processor' +require 'ldclient-rb/impl/data_source/polling' +require 'ldclient-rb/impl/data_source/requestor' +require 'ldclient-rb/impl/data_source/stream' require 'ldclient-rb/impl/data_store' require 'ldclient-rb/impl/data_system' 
require 'ldclient-rb/impl/store_client_wrapper' @@ -149,14 +152,12 @@ def target_availability return LaunchDarkly::Impl::DataSource::NullUpdateProcessor.new if @config.offline? || @config.use_ldd? if @config.stream? - require 'ldclient-rb/stream' - return LaunchDarkly::StreamProcessor.new(@sdk_key, @config, @diagnostic_accumulator) + return LaunchDarkly::Impl::DataSource::StreamProcessor.new(@sdk_key, @config, @diagnostic_accumulator) end # Polling processor - require 'ldclient-rb/polling' - requestor = LaunchDarkly::Requestor.new(@sdk_key, @config) - LaunchDarkly::PollingProcessor.new(@config, requestor) + requestor = LaunchDarkly::Impl::DataSource::Requestor.new(@sdk_key, @config) + LaunchDarkly::Impl::DataSource::PollingProcessor.new(@config, requestor) end end end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 4097c843..af570dc8 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -1,16 +1,20 @@ require "ldclient-rb/impl/big_segments" require "ldclient-rb/impl/broadcaster" +require "ldclient-rb/impl/context" require "ldclient-rb/impl/data_source" -require "ldclient-rb/impl/data_store" require "ldclient-rb/impl/data_source/null_processor" +require "ldclient-rb/impl/data_source/polling" +require "ldclient-rb/impl/data_source/requestor" +require "ldclient-rb/impl/data_source/stream" +require "ldclient-rb/impl/data_store" require "ldclient-rb/impl/diagnostic_events" -require "ldclient-rb/impl/evaluator" -require "ldclient-rb/events" -require "ldclient-rb/in_memory_store" require "ldclient-rb/impl/evaluation_with_hook_result" +require "ldclient-rb/impl/evaluator" require "ldclient-rb/impl/flag_tracker" -require "ldclient-rb/impl/store_client_wrapper" require "ldclient-rb/impl/migrations/tracker" +require "ldclient-rb/impl/store_client_wrapper" +require "ldclient-rb/events" +require "ldclient-rb/in_memory_store" require "concurrent" require "concurrent/atomics" require "digest/sha1" diff --git a/spec/expiring_cache_spec.rb b/spec/expiring_cache_spec.rb index 9ce6f671..bd805351 100644 --- a/spec/expiring_cache_spec.rb +++ b/spec/expiring_cache_spec.rb @@ -1,9 +1,10 @@ +require 'spec_helper' require 'timecop' -require "ldclient-rb/expiring_cache" +require "ldclient-rb/impl/expiring_cache" module LaunchDarkly - describe ExpiringCache do - subject { ExpiringCache } + describe Impl::ExpiringCache do + subject { Impl::ExpiringCache } before(:each) do Timecop.freeze(Time.now) diff --git a/spec/impl/data_system/fdv1_spec.rb b/spec/impl/data_system/fdv1_spec.rb index 9c905a2f..40f8c349 100644 --- a/spec/impl/data_system/fdv1_spec.rb +++ b/spec/impl/data_system/fdv1_spec.rb @@ -24,18 +24,18 @@ module DataSystem end it "creates streaming processor by default" do - allow(LaunchDarkly::StreamProcessor).to receive(:new).and_call_original + allow(LaunchDarkly::Impl::DataSource::StreamProcessor).to receive(:new).and_call_original subject.start - expect(LaunchDarkly::StreamProcessor).to have_received(:new).with(sdk_key, config, nil) + expect(LaunchDarkly::Impl::DataSource::StreamProcessor).to have_received(:new).with(sdk_key, config, nil) end context "with polling mode" do let(:config) { LaunchDarkly::Config.new(stream: false) } it "creates polling processor" do - allow(LaunchDarkly::PollingProcessor).to receive(:new).and_call_original + allow(LaunchDarkly::Impl::DataSource::PollingProcessor).to receive(:new).and_call_original subject.start - expect(LaunchDarkly::PollingProcessor).to have_received(:new) + 
expect(LaunchDarkly::Impl::DataSource::PollingProcessor).to have_received(:new) end end @@ -249,7 +249,7 @@ module DataSystem diagnostic_accumulator = double("DiagnosticAccumulator") subject.set_diagnostic_accumulator(diagnostic_accumulator) - expect(LaunchDarkly::StreamProcessor).to receive(:new).with(sdk_key, config, diagnostic_accumulator).and_call_original + expect(LaunchDarkly::Impl::DataSource::StreamProcessor).to receive(:new).with(sdk_key, config, diagnostic_accumulator).and_call_original subject.start end @@ -261,7 +261,7 @@ module DataSystem subject.set_diagnostic_accumulator(diagnostic_accumulator) # PollingProcessor doesn't accept diagnostic_accumulator - expect(LaunchDarkly::PollingProcessor).to receive(:new).with(config, anything).and_call_original + expect(LaunchDarkly::Impl::DataSource::PollingProcessor).to receive(:new).with(config, anything).and_call_original subject.start end end diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index d1a8133e..6c961b9e 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/data_source/polling" require "ldclient-rb/impl/model/feature_flag" require "ldclient-rb/impl/model/segment" require 'ostruct' diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 5eff1d58..22230876 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -1,4 +1,5 @@ require "http_util" +require "ldclient-rb/impl/data_source/requestor" require "model_builders" require "spec_helper" diff --git a/spec/simple_lru_cache_spec.rb b/spec/simple_lru_cache_spec.rb index 8e7bc79e..27ac9527 100644 --- a/spec/simple_lru_cache_spec.rb +++ b/spec/simple_lru_cache_spec.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/simple_lru_cache" require "spec_helper" module LaunchDarkly diff --git a/spec/store_spec.rb b/spec/store_spec.rb index e213e871..5b577fd9 100644 --- a/spec/store_spec.rb +++ b/spec/store_spec.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/cache_store" require "spec_helper" module LaunchDarkly diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index 2aaf18d2..7f70ee9c 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/data_source/stream" require "ld-eventsource" require "model_builders" require "spec_helper" From e56145b208cd68c82bfb1381a5d351c0849c86b4 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Thu, 4 Dec 2025 22:17:53 +0000 Subject: [PATCH 06/11] update spec folder structure --- .../cache_store_spec.rb} | 1 + spec/{ => impl/data_source}/polling_spec.rb | 0 spec/{ => impl/data_source}/requestor_spec.rb | 0 spec/{ => impl/data_source}/stream_spec.rb | 0 spec/{ => impl}/expiring_cache_spec.rb | 41 +++++---- spec/{ => impl}/simple_lru_cache_spec.rb | 1 + spec/impl/util_spec.rb | 88 +++++++++++-------- spec/util_spec.rb | 19 ---- 8 files changed, 80 insertions(+), 70 deletions(-) rename spec/{store_spec.rb => impl/cache_store_spec.rb} (99%) rename spec/{ => impl/data_source}/polling_spec.rb (100%) rename spec/{ => impl/data_source}/requestor_spec.rb (100%) rename spec/{ => impl/data_source}/stream_spec.rb (100%) rename spec/{ => impl}/expiring_cache_spec.rb (76%) rename spec/{ => impl}/simple_lru_cache_spec.rb (99%) delete mode 100644 spec/util_spec.rb diff --git a/spec/store_spec.rb b/spec/impl/cache_store_spec.rb similarity index 99% rename from spec/store_spec.rb rename to spec/impl/cache_store_spec.rb index 5b577fd9..7aeee20e 100644 --- a/spec/store_spec.rb +++ b/spec/impl/cache_store_spec.rb @@ -11,3 +11,4 @@ module LaunchDarkly end 
end end + diff --git a/spec/polling_spec.rb b/spec/impl/data_source/polling_spec.rb similarity index 100% rename from spec/polling_spec.rb rename to spec/impl/data_source/polling_spec.rb diff --git a/spec/requestor_spec.rb b/spec/impl/data_source/requestor_spec.rb similarity index 100% rename from spec/requestor_spec.rb rename to spec/impl/data_source/requestor_spec.rb diff --git a/spec/stream_spec.rb b/spec/impl/data_source/stream_spec.rb similarity index 100% rename from spec/stream_spec.rb rename to spec/impl/data_source/stream_spec.rb diff --git a/spec/expiring_cache_spec.rb b/spec/impl/expiring_cache_spec.rb similarity index 76% rename from spec/expiring_cache_spec.rb rename to spec/impl/expiring_cache_spec.rb index bd805351..c5bb51de 100644 --- a/spec/expiring_cache_spec.rb +++ b/spec/impl/expiring_cache_spec.rb @@ -29,52 +29,63 @@ module LaunchDarkly end it "evicts entries based on max size" do - c = subject.new(2, 300) + c = subject.new(3, 300) c[:a] = 1 c[:b] = 2 c[:c] = 3 + c[:d] = 4 expect(c[:a]).to be nil expect(c[:b]).to eq 2 expect(c[:c]).to eq 3 + expect(c[:d]).to eq 4 end - it "does not reset LRU on get" do - c = subject.new(2, 300) + it "resets TTL on put" do + c = subject.new(3, 300) c[:a] = 1 c[:b] = 2 - c[:a] + + Timecop.freeze(Time.now + 250) + + c[:a] = 1.5 + + Timecop.freeze(Time.now + 100) + c[:c] = 3 - expect(c[:a]).to be nil - expect(c[:b]).to eq 2 + expect(c[:a]).to eq 1.5 + expect(c[:b]).to be nil expect(c[:c]).to eq 3 end it "resets LRU on put" do - c = subject.new(2, 300) + c = subject.new(3, 300) c[:a] = 1 c[:b] = 2 - c[:a] = 1 c[:c] = 3 + c[:a] = 1.5 + c[:d] = 4 - expect(c[:a]).to eq 1 + expect(c[:a]).to eq 1.5 expect(c[:b]).to be nil expect(c[:c]).to eq 3 + expect(c[:d]).to eq 4 end - it "resets TTL on put" do + it "does not reset LRU on get" do c = subject.new(3, 300) c[:a] = 1 c[:b] = 2 - - Timecop.freeze(Time.now + 330) - c[:a] = 1 c[:c] = 3 + c[:a] + c[:d] = 4 - expect(c[:a]).to eq 1 - expect(c[:b]).to be nil + expect(c[:a]).to be nil + expect(c[:b]).to eq 2 expect(c[:c]).to eq 3 + expect(c[:d]).to eq 4 end end end + diff --git a/spec/simple_lru_cache_spec.rb b/spec/impl/simple_lru_cache_spec.rb similarity index 99% rename from spec/simple_lru_cache_spec.rb rename to spec/impl/simple_lru_cache_spec.rb index 27ac9527..508fa661 100644 --- a/spec/simple_lru_cache_spec.rb +++ b/spec/impl/simple_lru_cache_spec.rb @@ -25,3 +25,4 @@ module LaunchDarkly end end end + diff --git a/spec/impl/util_spec.rb b/spec/impl/util_spec.rb index 3d01d62a..d1313a31 100644 --- a/spec/impl/util_spec.rb +++ b/spec/impl/util_spec.rb @@ -2,49 +2,65 @@ module LaunchDarkly module Impl - describe "payload filter key validation" do - let(:logger) { double } + describe Util do + describe 'log_exception' do + let(:logger) { double } - it "silently discards nil" do - expect(logger).not_to receive(:warn) - expect(Util.validate_payload_filter_key(nil, logger)).to be_nil + it "logs error data" do + expect(logger).to receive(:error) + expect(logger).to receive(:debug) + begin + raise StandardError.new 'asdf' + rescue StandardError => exn + Util.log_exception(logger, "message", exn) + end + end end - [true, 1, 1.0, [], {}].each do |value| - it "returns nil for invalid type #{value.class}" do - expect(logger).to receive(:warn) - expect(Util.validate_payload_filter_key(value, logger)).to be_nil + describe "payload filter key validation" do + let(:logger) { double } + + it "silently discards nil" do + expect(logger).not_to receive(:warn) + expect(Util.validate_payload_filter_key(nil, 
logger)).to be_nil end - end - [ - "", - "-cannot-start-with-dash", - "_cannot-start-with-underscore", - "-cannot-start-with-period", - "no spaces for you", - "org@special/characters", - ].each do |value| - it "returns nil for invalid value #{value}" do - expect(logger).to receive(:warn) - expect(Util.validate_payload_filter_key(value, logger)).to be_nil + [true, 1, 1.0, [], {}].each do |value| + it "returns nil for invalid type #{value.class}" do + expect(logger).to receive(:warn) + expect(Util.validate_payload_filter_key(value, logger)).to be_nil + end end - end - [ - "camelCase", - "snake_case", - "kebab-case", - "with.dots", - "with_underscores", - "with-hyphens", - "with1234numbers", - "with.many_1234-mixtures", - "1start-with-number", - ].each do |value| - it "passes for value #{value}" do - expect(logger).not_to receive(:warn) - expect(Util.validate_payload_filter_key(value, logger)).to eq(value) + [ + "", + "-cannot-start-with-dash", + "_cannot-start-with-underscore", + "-cannot-start-with-period", + "no spaces for you", + "org@special/characters", + ].each do |value| + it "returns nil for invalid value #{value}" do + expect(logger).to receive(:warn) + expect(Util.validate_payload_filter_key(value, logger)).to be_nil + end + end + + [ + "camelCase", + "snake_case", + "kebab-case", + "with.dots", + "with_underscores", + "with-hyphens", + "with1234numbers", + "with.many_1234-mixtures", + "1start-with-number", + ].each do |value| + it "passes for value #{value}" do + expect(logger).not_to receive(:warn) + expect(Util.validate_payload_filter_key(value, logger)).to eq(value) + end end end end diff --git a/spec/util_spec.rb b/spec/util_spec.rb deleted file mode 100644 index 5b727915..00000000 --- a/spec/util_spec.rb +++ /dev/null @@ -1,19 +0,0 @@ -require "spec_helper" - -module LaunchDarkly - describe Impl::Util do - describe 'log_exception' do - let(:logger) { double } - - it "logs error data" do - expect(logger).to receive(:error) - expect(logger).to receive(:debug) - begin - raise StandardError.new 'asdf' - rescue StandardError => exn - Impl::Util.log_exception(logger, "message", exn) - end - end - end - end -end From 4a2952b3b2d72cd9521771b1203f91444426b3cb Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Thu, 4 Dec 2025 23:19:57 +0000 Subject: [PATCH 07/11] fix missing updates --- lib/ldclient-rb/events.rb | 4 ++-- lib/ldclient-rb/impl/data_source/polling.rb | 3 ++- lib/ldclient-rb/ldclient.rb | 7 ++++--- spec/impl/util_spec.rb | 1 + 4 files changed, 9 insertions(+), 6 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 311037af..f7fa4336 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -283,7 +283,7 @@ def main_loop(inbox, outbox, flush_workers, diagnostic_event_workers) dispatch_event(message, outbox) end rescue => e - Util.log_exception(@config.logger, "Unexpected error in event processor", e) + Impl::Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end end @@ -410,7 +410,7 @@ def send_diagnostic_event(event, diagnostic_event_workers) begin @event_sender.send_event_data(event.to_json, "diagnostic event", true) rescue => e - Util.log_exception(@config.logger, "Unexpected error in event processor", e) + Impl::Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end end diff --git a/lib/ldclient-rb/impl/data_source/polling.rb b/lib/ldclient-rb/impl/data_source/polling.rb index 8dc43534..16d3c38b 100644 --- a/lib/ldclient-rb/impl/data_source/polling.rb +++ 
b/lib/ldclient-rb/impl/data_source/polling.rb @@ -1,4 +1,5 @@ require "ldclient-rb/impl/repeating_task" +require "ldclient-rb/impl/util" require "concurrent/atomics" require "json" @@ -69,7 +70,7 @@ def poll stop_with_error_info error_info end rescue StandardError => e - Util.log_exception(@config.logger, "Exception while polling", e) + Impl::Util.log_exception(@config.logger, "Exception while polling", e) @config.data_source_update_sink&.update_status( LaunchDarkly::Interfaces::DataSource::Status::INTERRUPTED, LaunchDarkly::Interfaces::DataSource::ErrorInfo.new(LaunchDarkly::Interfaces::DataSource::ErrorInfo::UNKNOWN, 0, e.to_s, Time.now) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index af570dc8..1ff9b183 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -13,6 +13,7 @@ require "ldclient-rb/impl/flag_tracker" require "ldclient-rb/impl/migrations/tracker" require "ldclient-rb/impl/store_client_wrapper" +require "ldclient-rb/impl/util" require "ldclient-rb/events" require "ldclient-rb/in_memory_store" require "concurrent" @@ -617,7 +618,7 @@ def all_flags_state(context, options={}) begin features = @store.all(FEATURES) rescue => exn - Util.log_exception(@config.logger, "Unable to read flags for all_flags_state", exn) + Impl::Util.log_exception(@config.logger, "Unable to read flags for all_flags_state", exn) return FeatureFlagsState.new(false) end @@ -634,7 +635,7 @@ def all_flags_state(context, options={}) detail = eval_result.detail rescue => exn detail = EvaluationDetail.new(nil, nil, EvaluationReason::error(EvaluationReason::ERROR_EXCEPTION)) - Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) + Impl::Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) end requires_experiment_data = experiment?(f, detail.reason) @@ -804,7 +805,7 @@ def evaluate_internal(key, context, default, with_reasons) record_flag_eval(feature, context, detail, default, with_reasons) [detail, feature, nil] rescue => exn - Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) + Impl::Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) detail = Evaluator.error_result(EvaluationReason::ERROR_EXCEPTION, default) record_flag_eval_error(feature, context, default, detail.reason, with_reasons) [detail, feature, exn.to_s] diff --git a/spec/impl/util_spec.rb b/spec/impl/util_spec.rb index d1313a31..a94df2ae 100644 --- a/spec/impl/util_spec.rb +++ b/spec/impl/util_spec.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/util" require "spec_helper" module LaunchDarkly From c4cf616a369430c5add5988536f7d335f1785c07 Mon Sep 17 00:00:00 2001 From: jsonbailey Date: Fri, 5 Dec 2025 17:09:02 +0000 Subject: [PATCH 08/11] Move private constants to datastore module --- lib/ldclient-rb/impl/data_source.rb | 6 +- lib/ldclient-rb/impl/data_source/stream.rb | 12 +- lib/ldclient-rb/impl/data_store.rb | 110 +++--------------- lib/ldclient-rb/impl/data_store/data_kind.rb | 108 +++++++++++++++++ lib/ldclient-rb/impl/dependency_tracker.rb | 6 +- .../impl/integrations/file_data_source.rb | 10 +- lib/ldclient-rb/impl/model/serialization.rb | 10 +- lib/ldclient-rb/in_memory_store.rb | 18 --- lib/ldclient-rb/integrations/test_data.rb | 12 +- lib/ldclient-rb/ldclient.rb | 8 +- spec/feature_store_spec_base.rb | 4 +- spec/impl/data_source/polling_spec.rb | 12 +- spec/impl/data_source/stream_spec.rb | 12 +- spec/impl/data_source_spec.rb | 36 +++--- 
spec/impl/data_store_spec.rb | 18 +-- spec/impl/evaluator_spec_base.rb | 4 +- spec/impl/model/preprocessed_data_spec.rb | 2 +- spec/impl/model/serialization_spec.rb | 8 +- spec/integrations/file_data_source_spec.rb | 26 ++--- spec/integrations/redis_stores_spec.rb | 16 +-- spec/integrations/test_data_spec.rb | 20 ++-- spec/ldclient_spec.rb | 16 +-- spec/model_builders.rb | 12 +- 23 files changed, 248 insertions(+), 238 deletions(-) create mode 100644 lib/ldclient-rb/impl/data_store/data_kind.rb diff --git a/lib/ldclient-rb/impl/data_source.rb b/lib/ldclient-rb/impl/data_source.rb index 44cf5eab..4eed9ecc 100644 --- a/lib/ldclient-rb/impl/data_source.rb +++ b/lib/ldclient-rb/impl/data_source.rb @@ -52,7 +52,7 @@ def init(all_data) monitor_store_update do if @flag_change_broadcaster.has_listeners? old_data = {} - LaunchDarkly::ALL_KINDS.each do |kind| + Impl::DataStore::ALL_KINDS.each do |kind| old_data[kind] = @data_store.all(kind) end end @@ -153,7 +153,7 @@ def update_status(new_state, new_error) private def compute_changed_items_for_full_data_set(old_data, new_data) affected_items = Set.new - LaunchDarkly::ALL_KINDS.each do |kind| + Impl::DataStore::ALL_KINDS.each do |kind| old_items = old_data[kind] || {} new_items = new_data[kind] || {} @@ -177,7 +177,7 @@ def update_status(new_state, new_error) # private def send_change_events(affected_items) affected_items.each do |item| - if item[:kind] == LaunchDarkly::FEATURES + if item[:kind] == Impl::DataStore::FEATURES @flag_change_broadcaster.broadcast(LaunchDarkly::Interfaces::FlagChange.new(item[:key])) end end diff --git a/lib/ldclient-rb/impl/data_source/stream.rb b/lib/ldclient-rb/impl/data_source/stream.rb index 6359679b..22b47fac 100644 --- a/lib/ldclient-rb/impl/data_source/stream.rb +++ b/lib/ldclient-rb/impl/data_source/stream.rb @@ -1,4 +1,6 @@ require "ldclient-rb/impl/model/serialization" +require "ldclient-rb/impl/util" +require "ldclient-rb/in_memory_store" require "concurrent/atomics" require "json" @@ -18,8 +20,8 @@ module DataSource # @api private KEY_PATHS = { - FEATURES => "/flags/", - SEGMENTS => "/segments/", + Impl::DataStore::FEATURES => "/flags/", + Impl::DataStore::SEGMENTS => "/segments/", } # @api private @@ -55,7 +57,7 @@ def start } log_connection_started - uri = Util.add_payload_filter_key(@config.stream_uri + "/all", @config) + uri = Impl::Util.add_payload_filter_key(@config.stream_uri + "/all", @config) @es = SSE::Client.new(uri, **opts) do |conn| conn.on_event { |event| process_message(event) } conn.on_error { |err| @@ -142,7 +144,7 @@ def process_message(message) @ready.set elsif method == PATCH data = JSON.parse(message.data, symbolize_names: true) - for kind in [FEATURES, SEGMENTS] + for kind in [Impl::DataStore::FEATURES, Impl::DataStore::SEGMENTS] key = key_for_path(kind, data[:path]) if key item = Impl::Model.deserialize(kind, data[:data], @config.logger) @@ -152,7 +154,7 @@ def process_message(message) end elsif method == DELETE data = JSON.parse(message.data, symbolize_names: true) - for kind in [FEATURES, SEGMENTS] + for kind in [Impl::DataStore::FEATURES, Impl::DataStore::SEGMENTS] key = key_for_path(kind, data[:path]) if key update_sink_or_data_store.delete(kind, key, data[:version]) diff --git a/lib/ldclient-rb/impl/data_store.rb b/lib/ldclient-rb/impl/data_store.rb index c04a1b6f..86ae9562 100644 --- a/lib/ldclient-rb/impl/data_store.rb +++ b/lib/ldclient-rb/impl/data_store.rb @@ -1,108 +1,26 @@ require 'concurrent' require "ldclient-rb/interfaces" +require "ldclient-rb/impl/data_store/data_kind" 
module LaunchDarkly module Impl module DataStore - class DataKind - FEATURES = "features".freeze - SEGMENTS = "segments".freeze - - FEATURE_PREREQ_FN = lambda { |flag| (flag[:prerequisites] || []).map { |p| p[:key] } }.freeze - - attr_reader :namespace - attr_reader :priority - - # - # @param namespace [String] - # @param priority [Integer] - # - def initialize(namespace:, priority:) - @namespace = namespace - @priority = priority - end - + # These constants denote the types of data that can be stored in the feature store. If + # we add another storable data type in the future, as long as it follows the same pattern + # (having "key", "version", and "deleted" properties), we only need to add a corresponding + # constant here and the existing store should be able to handle it. # - # Maintain the same behavior when these data kinds were standard ruby hashes. - # - # @param key [Symbol] - # @return [Object] - # - def [](key) - return priority if key == :priority - return namespace if key == :namespace - return get_dependency_keys_fn() if key == :get_dependency_keys - nil - end - - # - # Retrieve the dependency keys for a particular data kind. Right now, this is only defined for flags. - # - def get_dependency_keys_fn() - return nil unless @namespace == FEATURES - - FEATURE_PREREQ_FN - end - - def eql?(other) - other.is_a?(DataKind) && namespace == other.namespace && priority == other.priority - end - - def hash - [namespace, priority].hash - end - end - - class StatusProvider - include LaunchDarkly::Interfaces::DataStore::StatusProvider - - def initialize(store, update_sink) - # @type [LaunchDarkly::Impl::FeatureStoreClientWrapper] - @store = store - # @type [UpdateSink] - @update_sink = update_sink - end - - def status - @update_sink.last_status.get - end - - def monitoring_enabled? - @store.monitoring_enabled? - end - - def add_listener(listener) - @update_sink.broadcaster.add_listener(listener) - end - - def remove_listener(listener) - @update_sink.broadcaster.remove_listener(listener) - end - end - - class UpdateSink - include LaunchDarkly::Interfaces::DataStore::UpdateSink - - # @return [LaunchDarkly::Impl::Broadcaster] - attr_reader :broadcaster - - # @return [Concurrent::AtomicReference] - attr_reader :last_status + # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter + # to ensure data consistency during non-atomic updates. - def initialize(broadcaster) - @broadcaster = broadcaster - @last_status = Concurrent::AtomicReference.new( - LaunchDarkly::Interfaces::DataStore::Status.new(true, false) - ) - end + # @api private + FEATURES = DataKind.new(namespace: "features", priority: 1).freeze - def update_status(status) - return if status.nil? 
+ # @api private + SEGMENTS = DataKind.new(namespace: "segments", priority: 0).freeze - old_status = @last_status.get_and_set(status) - @broadcaster.broadcast(status) unless old_status == status - end - end + # @api private + ALL_KINDS = [FEATURES, SEGMENTS].freeze end end -end +end \ No newline at end of file diff --git a/lib/ldclient-rb/impl/data_store/data_kind.rb b/lib/ldclient-rb/impl/data_store/data_kind.rb new file mode 100644 index 00000000..c04a1b6f --- /dev/null +++ b/lib/ldclient-rb/impl/data_store/data_kind.rb @@ -0,0 +1,108 @@ +require 'concurrent' +require "ldclient-rb/interfaces" + +module LaunchDarkly + module Impl + module DataStore + class DataKind + FEATURES = "features".freeze + SEGMENTS = "segments".freeze + + FEATURE_PREREQ_FN = lambda { |flag| (flag[:prerequisites] || []).map { |p| p[:key] } }.freeze + + attr_reader :namespace + attr_reader :priority + + # + # @param namespace [String] + # @param priority [Integer] + # + def initialize(namespace:, priority:) + @namespace = namespace + @priority = priority + end + + # + # Maintain the same behavior when these data kinds were standard ruby hashes. + # + # @param key [Symbol] + # @return [Object] + # + def [](key) + return priority if key == :priority + return namespace if key == :namespace + return get_dependency_keys_fn() if key == :get_dependency_keys + nil + end + + # + # Retrieve the dependency keys for a particular data kind. Right now, this is only defined for flags. + # + def get_dependency_keys_fn() + return nil unless @namespace == FEATURES + + FEATURE_PREREQ_FN + end + + def eql?(other) + other.is_a?(DataKind) && namespace == other.namespace && priority == other.priority + end + + def hash + [namespace, priority].hash + end + end + + class StatusProvider + include LaunchDarkly::Interfaces::DataStore::StatusProvider + + def initialize(store, update_sink) + # @type [LaunchDarkly::Impl::FeatureStoreClientWrapper] + @store = store + # @type [UpdateSink] + @update_sink = update_sink + end + + def status + @update_sink.last_status.get + end + + def monitoring_enabled? + @store.monitoring_enabled? + end + + def add_listener(listener) + @update_sink.broadcaster.add_listener(listener) + end + + def remove_listener(listener) + @update_sink.broadcaster.remove_listener(listener) + end + end + + class UpdateSink + include LaunchDarkly::Interfaces::DataStore::UpdateSink + + # @return [LaunchDarkly::Impl::Broadcaster] + attr_reader :broadcaster + + # @return [Concurrent::AtomicReference] + attr_reader :last_status + + def initialize(broadcaster) + @broadcaster = broadcaster + @last_status = Concurrent::AtomicReference.new( + LaunchDarkly::Interfaces::DataStore::Status.new(true, false) + ) + end + + def update_status(status) + return if status.nil? 
+ + old_status = @last_status.get_and_set(status) + @broadcaster.broadcast(status) unless old_status == status + end + end + end + end +end diff --git a/lib/ldclient-rb/impl/dependency_tracker.rb b/lib/ldclient-rb/impl/dependency_tracker.rb index 0784c00c..faf12f30 100644 --- a/lib/ldclient-rb/impl/dependency_tracker.rb +++ b/lib/ldclient-rb/impl/dependency_tracker.rb @@ -39,7 +39,7 @@ def update_dependencies_from(from_kind, from_key, from_item) def self.segment_keys_from_clauses(clauses) clauses.flat_map do |clause| if clause.op == :segmentMatch - clause.values.map { |value| {kind: LaunchDarkly::SEGMENTS, key: value }} + clause.values.map { |value| {kind: DataStore::SEGMENTS, key: value }} else [] end @@ -54,13 +54,13 @@ def self.segment_keys_from_clauses(clauses) def self.compute_dependencies_from(from_kind, from_item) return Set.new if from_item.nil? - if from_kind == LaunchDarkly::FEATURES + if from_kind == DataStore::FEATURES prereq_keys = from_item.prerequisites.map { |prereq| {kind: from_kind, key: prereq.key} } segment_keys = from_item.rules.flat_map { |rule| DependencyTracker.segment_keys_from_clauses(rule.clauses) } results = Set.new(prereq_keys) results.merge(segment_keys) - elsif from_kind == LaunchDarkly::SEGMENTS + elsif from_kind == DataStore::SEGMENTS kind_and_keys = from_item.rules.flat_map do |rule| DependencyTracker.segment_keys_from_clauses(rule.clauses) end diff --git a/lib/ldclient-rb/impl/integrations/file_data_source.rb b/lib/ldclient-rb/impl/integrations/file_data_source.rb index 8c0b2476..398e90d0 100644 --- a/lib/ldclient-rb/impl/integrations/file_data_source.rb +++ b/lib/ldclient-rb/impl/integrations/file_data_source.rb @@ -75,8 +75,8 @@ def stop def load_all all_data = { - FEATURES => {}, - SEGMENTS => {}, + Impl::DataStore::FEATURES => {}, + Impl::DataStore::SEGMENTS => {}, } @paths.each do |path| begin @@ -105,14 +105,14 @@ def load_file(path, all_data) parsed = parse_content(IO.read(path)) (parsed[:flags] || {}).each do |key, flag| flag[:version] = version - add_item(all_data, FEATURES, flag) + add_item(all_data, Impl::DataStore::FEATURES, flag) end (parsed[:flagValues] || {}).each do |key, value| - add_item(all_data, FEATURES, make_flag_with_value(key.to_s, value, version)) + add_item(all_data, Impl::DataStore::FEATURES, make_flag_with_value(key.to_s, value, version)) end (parsed[:segments] || {}).each do |key, segment| segment[:version] = version - add_item(all_data, SEGMENTS, segment) + add_item(all_data, Impl::DataStore::SEGMENTS, segment) end end diff --git a/lib/ldclient-rb/impl/model/serialization.rb b/lib/ldclient-rb/impl/model/serialization.rb index 088112b8..ef3a3389 100644 --- a/lib/ldclient-rb/impl/model/serialization.rb +++ b/lib/ldclient-rb/impl/model/serialization.rb @@ -34,7 +34,7 @@ module Model # SDK code outside of Impl::Model should use this method instead of calling the model class # constructors directly, so as not to rely on implementation details. # - # @param kind [Hash] normally either FEATURES or SEGMENTS + # @param kind [Hash] normally either Impl::DataStore::FEATURES or Impl::DataStore::SEGMENTS # @param input [object] a JSON string or a parsed hash (or a data model object, in which case # we'll just return the original object) # @param logger [Logger|nil] logs errors if there are any data validation problems @@ -44,9 +44,9 @@ def self.deserialize(kind, input, logger = nil) return input if !input.is_a?(String) && !input.is_a?(Hash) data = input.is_a?(Hash) ? 
input : JSON.parse(input, symbolize_names: true) case kind - when FEATURES + when Impl::DataStore::FEATURES FeatureFlag.new(data, logger) - when SEGMENTS + when Impl::DataStore::SEGMENTS Segment.new(data, logger) else data @@ -63,8 +63,8 @@ def self.serialize(kind, item) # Translates a { flags: ..., segments: ... } object received from LaunchDarkly to the data store format. def self.make_all_store_data(received_data, logger = nil) { - FEATURES => (received_data[:flags] || {}).transform_values { |data| FeatureFlag.new(data, logger) }, - SEGMENTS => (received_data[:segments] || {}).transform_values { |data| Segment.new(data, logger) }, + Impl::DataStore::FEATURES => (received_data[:flags] || {}).transform_values { |data| FeatureFlag.new(data, logger) }, + Impl::DataStore::SEGMENTS => (received_data[:segments] || {}).transform_values { |data| Segment.new(data, logger) }, } end end diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index 2e41f3d7..200b629c 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -2,24 +2,6 @@ require "ldclient-rb/impl/data_store" module LaunchDarkly - - # These constants denote the types of data that can be stored in the feature store. If - # we add another storable data type in the future, as long as it follows the same pattern - # (having "key", "version", and "deleted" properties), we only need to add a corresponding - # constant here and the existing store should be able to handle it. - # - # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter - # to ensure data consistency during non-atomic updates. - - # @api private - FEATURES = Impl::DataStore::DataKind.new(namespace: "features", priority: 1).freeze - - # @api private - SEGMENTS = Impl::DataStore::DataKind.new(namespace: "segments", priority: 0).freeze - - # @api private - ALL_KINDS = [FEATURES, SEGMENTS].freeze - # # Default implementation of the LaunchDarkly client's feature store, using an in-memory # cache. This object holds feature flags and related data received from LaunchDarkly. 
diff --git a/lib/ldclient-rb/integrations/test_data.rb b/lib/ldclient-rb/integrations/test_data.rb
index 6cc0da17..880f4517 100644
--- a/lib/ldclient-rb/integrations/test_data.rb
+++ b/lib/ldclient-rb/integrations/test_data.rb
@@ -121,10 +121,10 @@ def update(flag_builder)
         if @current_flags[flag_key]
           version = @current_flags[flag_key][:version]
         end
-        new_flag = Impl::Model.deserialize(FEATURES, flag_builder.build(version+1))
+        new_flag = LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::Impl::DataStore::FEATURES, flag_builder.build(version+1))
         @current_flags[flag_key] = new_flag
       end
-      update_item(FEATURES, new_flag)
+      update_item(LaunchDarkly::Impl::DataStore::FEATURES, new_flag)
       self
     end

@@ -147,7 +147,7 @@ def update(flag_builder)
     # @return [TestData] the TestData instance
     #
     def use_preconfigured_flag(flag)
-      use_preconfigured_item(FEATURES, flag, @current_flags)
+      use_preconfigured_item(LaunchDarkly::Impl::DataStore::FEATURES, flag, @current_flags)
     end

     #
@@ -167,7 +167,7 @@ def use_preconfigured_flag(flag)
     # @return [TestData] the TestData instance
     #
     def use_preconfigured_segment(segment)
-      use_preconfigured_item(SEGMENTS, segment, @current_segments)
+      use_preconfigured_item(LaunchDarkly::Impl::DataStore::SEGMENTS, segment, @current_segments)
     end

     private def use_preconfigured_item(kind, item, current)
@@ -198,8 +198,8 @@ def use_preconfigured_segment(segment)
     def make_init_data
       @lock.with_read_lock do
         {
-          FEATURES => @current_flags.clone,
-          SEGMENTS => @current_segments.clone,
+          LaunchDarkly::Impl::DataStore::FEATURES => @current_flags.clone,
+          LaunchDarkly::Impl::DataStore::SEGMENTS => @current_segments.clone,
         }
       end
     end
diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb
index 1ff9b183..825813c4 100644
--- a/lib/ldclient-rb/ldclient.rb
+++ b/lib/ldclient-rb/ldclient.rb
@@ -121,8 +121,8 @@ def postfork(wait_for_sec = 5)
       @big_segment_store_manager = Impl::BigSegmentStoreManager.new(@config.big_segments, @config.logger)
       @big_segment_store_status_provider = @big_segment_store_manager.status_provider

-      get_flag = lambda { |key| @store.get(FEATURES, key) }
-      get_segment = lambda { |key| @store.get(SEGMENTS, key) }
+      get_flag = lambda { |key| @store.get(Impl::DataStore::FEATURES, key) }
+      get_segment = lambda { |key| @store.get(Impl::DataStore::SEGMENTS, key) }
       get_big_segments_membership = lambda { |key| @big_segment_store_manager.get_context_membership(key) }

       @evaluator = LaunchDarkly::Impl::Evaluator.new(get_flag, get_segment, get_big_segments_membership, @config.logger)
@@ -616,7 +616,7 @@ def all_flags_state(context, options={})
       end

       begin
-        features = @store.all(FEATURES)
+        features = @store.all(Impl::DataStore::FEATURES)
       rescue => exn
         Impl::Util.log_exception(@config.logger, "Unable to read flags for all_flags_state", exn)
         return FeatureFlagsState.new(false)
@@ -779,7 +779,7 @@ def evaluate_internal(key, context, default, with_reasons)
       end

       begin
-        feature = @store.get(FEATURES, key)
+        feature = @store.get(Impl::DataStore::FEATURES, key)
       rescue
         # Ignored
       end
diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb
index c5f72d40..82d34e11 100644
--- a/spec/feature_store_spec_base.rb
+++ b/spec/feature_store_spec_base.rb
@@ -191,8 +191,8 @@ def new_version_plus(f, delta_version, attrs = {})
         deleted: false,
       }
       with_inited_store([]) do |store|
-        store.upsert(LaunchDarkly::FEATURES, flag)
-        expect(store.get(LaunchDarkly::FEATURES, flag[:key])).to eq flag
+        store.upsert(LaunchDarkly::Impl::DataStore::FEATURES, flag)
+        expect(store.get(LaunchDarkly::Impl::DataStore::FEATURES, flag[:key])).to eq flag
       end
     end
   end
diff --git a/spec/impl/data_source/polling_spec.rb b/spec/impl/data_source/polling_spec.rb
index 6c961b9e..a4021112 100644
--- a/spec/impl/data_source/polling_spec.rb
+++ b/spec/impl/data_source/polling_spec.rb
@@ -36,10 +36,10 @@ def with_processor(store, initialize_to_valid = false)
   flag = Impl::Model::FeatureFlag.new({ key: 'flagkey', version: 1 })
   segment = Impl::Model::Segment.new({ key: 'segkey', version: 1 })
   all_data = {
-    FEATURES => {
+    Impl::DataStore::FEATURES => {
       flagkey: flag,
     },
-    SEGMENTS => {
+    Impl::DataStore::SEGMENTS => {
       segkey: segment,
     },
   }
@@ -50,8 +50,8 @@ def with_processor(store, initialize_to_valid = false)
       with_processor(store) do |processor|
         ready = processor.start
         ready.wait
-        expect(store.get(FEATURES, "flagkey")).to eq(flag)
-        expect(store.get(SEGMENTS, "segkey")).to eq(segment)
+        expect(store.get(Impl::DataStore::FEATURES, "flagkey")).to eq(flag)
+        expect(store.get(Impl::DataStore::SEGMENTS, "segkey")).to eq(segment)
       end
     end

@@ -75,8 +75,8 @@ def with_processor(store, initialize_to_valid = false)
       with_processor(store) do |processor|
         ready = processor.start
         ready.wait
-        expect(store.get(FEATURES, "flagkey")).to eq(flag)
-        expect(store.get(SEGMENTS, "segkey")).to eq(segment)
+        expect(store.get(Impl::DataStore::FEATURES, "flagkey")).to eq(flag)
+        expect(store.get(Impl::DataStore::SEGMENTS, "segkey")).to eq(segment)

         expect(listener.statuses.count).to eq(1)
         expect(listener.statuses[0].state).to eq(Interfaces::DataSource::Status::VALID)
diff --git a/spec/impl/data_source/stream_spec.rb b/spec/impl/data_source/stream_spec.rb
index 7f70ee9c..0232ac15 100644
--- a/spec/impl/data_source/stream_spec.rb
+++ b/spec/impl/data_source/stream_spec.rb
@@ -27,26 +27,26 @@ module LaunchDarkly
       it "will accept PUT methods" do
         processor.send(:process_message, put_message)
-        expect(config.feature_store.get(FEATURES, "asdf")).to eq(Flags.from_hash(key: "asdf"))
-        expect(config.feature_store.get(SEGMENTS, "segkey")).to eq(Segments.from_hash(key: "segkey"))
+        expect(config.feature_store.get(Impl::DataStore::FEATURES, "asdf")).to eq(Flags.from_hash(key: "asdf"))
+        expect(config.feature_store.get(Impl::DataStore::SEGMENTS, "segkey")).to eq(Segments.from_hash(key: "segkey"))
       end

       it "will accept PATCH methods for flags" do
         processor.send(:process_message, patch_flag_message)
-        expect(config.feature_store.get(FEATURES, "asdf")).to eq(Flags.from_hash(key: "asdf", version: 1))
+        expect(config.feature_store.get(Impl::DataStore::FEATURES, "asdf")).to eq(Flags.from_hash(key: "asdf", version: 1))
       end

       it "will accept PATCH methods for segments" do
         processor.send(:process_message, patch_seg_message)
-        expect(config.feature_store.get(SEGMENTS, "asdf")).to eq(Segments.from_hash(key: "asdf", version: 1))
+        expect(config.feature_store.get(Impl::DataStore::SEGMENTS, "asdf")).to eq(Segments.from_hash(key: "asdf", version: 1))
       end

       it "will accept DELETE methods for flags" do
         processor.send(:process_message, patch_flag_message)
         processor.send(:process_message, delete_flag_message)
-        expect(config.feature_store.get(FEATURES, "key")).to eq(nil)
+        expect(config.feature_store.get(Impl::DataStore::FEATURES, "key")).to eq(nil)
       end

       it "will accept DELETE methods for segments" do
         processor.send(:process_message, patch_seg_message)
         processor.send(:process_message, delete_seg_message)
-        expect(config.feature_store.get(SEGMENTS, "key")).to eq(nil)
+        expect(config.feature_store.get(Impl::DataStore::SEGMENTS, "key")).to eq(nil)
       end

       it "will log a warning if the method is not recognized" do
         expect(processor.instance_variable_get(:@config).logger).to receive :warn
diff --git a/spec/impl/data_source_spec.rb b/spec/impl/data_source_spec.rb
index 98e23468..0ca94ba9 100644
--- a/spec/impl/data_source_spec.rb
+++ b/spec/impl/data_source_spec.rb
@@ -49,7 +49,7 @@ module Impl
     describe "simple flag change listener" do
       let(:all_data) {
         {
-          LaunchDarkly::FEATURES => {
+          LaunchDarkly::Impl::DataStore::FEATURES => {
            flag1: LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag1', version: 1 }),
            flag2: LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag2', version: 1 }),
            flag3: LaunchDarkly::Impl::Model::FeatureFlag.new(
@@ -75,7 +75,7 @@ module Impl
            }
            ),
          },
-          LaunchDarkly::SEGMENTS => {
+          LaunchDarkly::Impl::DataStore::SEGMENTS => {
            segment1: LaunchDarkly::Impl::Model::Segment.new({ key: 'segment1', version: 1 }),
            segment2: LaunchDarkly::Impl::Model::Segment.new({ key: 'segment2', version: 1 }),
          },
@@ -89,7 +89,7 @@ module Impl
        flag_change_broadcaster.add_listener(listener)

        updated_data = {
-          LaunchDarkly::FEATURES => {
+          LaunchDarkly::Impl::DataStore::FEATURES => {
            flag1: LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag1', version: 2 }),
            flag4: LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag4', version: 1 }),
          },
@@ -110,10 +110,10 @@ module Impl
        listener = ListenerSpy.new
        flag_change_broadcaster.add_listener(listener)

-        sink.upsert(LaunchDarkly::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag1', version: 2 }))
+        sink.upsert(LaunchDarkly::Impl::DataStore::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag1', version: 2 }))
        # TODO(sc-197908): Once the store starts returning a success status on upsert, the flag change notification
        # can start ignoring duplicate requests like this.
-        # sink.upsert(LaunchDarkly::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag1', version: 2 }))
+        # sink.upsert(LaunchDarkly::Impl::DataStore::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag1', version: 2 }))

        expect(listener.statuses.count).to eq(1)
        expect(listener.statuses[0].key).to eq('flag1')
@@ -125,10 +125,10 @@ module Impl
        listener = ListenerSpy.new
        flag_change_broadcaster.add_listener(listener)

-        sink.delete(LaunchDarkly::FEATURES, "flag1", 2)
+        sink.delete(LaunchDarkly::Impl::DataStore::FEATURES, "flag1", 2)
        # TODO(sc-197908): Once the store starts returning a success status on delete, the flag change notification
        # can start ignoring duplicate requests like this.
-        # sink.delete(LaunchDarkly::FEATURES, :flag1, 2)
+        # sink.delete(LaunchDarkly::Impl::DataStore::FEATURES, :flag1, 2)

        expect(listener.statuses.count).to eq(1)
        expect(listener.statuses[0].key).to eq("flag1")
@@ -140,7 +140,7 @@ module Impl
        listener = ListenerSpy.new
        flag_change_broadcaster.add_listener(listener)

-        sink.upsert(LaunchDarkly::SEGMENTS, LaunchDarkly::Impl::Model::Segment.new({ key: 'segment2', version: 2 }))
+        sink.upsert(LaunchDarkly::Impl::DataStore::SEGMENTS, LaunchDarkly::Impl::Model::Segment.new({ key: 'segment2', version: 2 }))
        # TODO(sc-197908): Once the store starts returning a success status on upsert, the flag change notification
        # can start ignoring duplicate requests like this.
        # sink.upsert(LaunchDarkly::Impl::Model::Segment.new({ key: 'segment2', version: 2 }))
@@ -153,7 +153,7 @@ module Impl
     describe "prerequisite flag change listener" do
       let(:all_data) {
         {
-          LaunchDarkly::FEATURES => {
+          LaunchDarkly::Impl::DataStore::FEATURES => {
            flag1: LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag1', version: 1, prerequisites: [{key: 'flag2', variation: 0}] }),
            flag2: LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag2', version: 1, prerequisites: [{key: 'flag3', variation: 0}, {key: 'flag4', variation: 0}, {key: 'flag6', variation: 0}] }),
@@ -183,7 +183,7 @@ module Impl
            }
            ),
          },
-          LaunchDarkly::SEGMENTS => {
+          LaunchDarkly::Impl::DataStore::SEGMENTS => {
            segment1: LaunchDarkly::Impl::Model::Segment.new({ key: 'segment1', version: 1 }),
            segment2: LaunchDarkly::Impl::Model::Segment.new(
              {
@@ -218,7 +218,7 @@ module Impl
        listener = ListenerSpy.new
        flag_change_broadcaster.add_listener(listener)

-        sink.upsert(LaunchDarkly::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag4', version: 2 }))
+        sink.upsert(LaunchDarkly::Impl::DataStore::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag4', version: 2 }))
        expect(listener.statuses.count).to eq(3)
        expect(listener.statuses[0].key).to eq('flag4')
        expect(listener.statuses[1].key).to eq('flag2')
@@ -231,7 +231,7 @@ module Impl
        listener = ListenerSpy.new
        flag_change_broadcaster.add_listener(listener)

-        sink.upsert(LaunchDarkly::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag3', version: 2, prerequisities: [{key: 'flag4', variation: 0}] }))
+        sink.upsert(LaunchDarkly::Impl::DataStore::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag3', version: 2, prerequisities: [{key: 'flag4', variation: 0}] }))
        expect(listener.statuses.count).to eq(3)
        expect(listener.statuses[0].key).to eq('flag3')
        expect(listener.statuses[1].key).to eq('flag2')
@@ -244,7 +244,7 @@ module Impl
        listener = ListenerSpy.new
        flag_change_broadcaster.add_listener(listener)

-        sink.upsert(LaunchDarkly::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag2', version: 2, prerequisities: [{key: 'flag3', variation: 0}] }))
+        sink.upsert(LaunchDarkly::Impl::DataStore::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag2', version: 2, prerequisities: [{key: 'flag3', variation: 0}] }))
        expect(listener.statuses.count).to eq(2)
        expect(listener.statuses[0].key).to eq('flag2')
        expect(listener.statuses[1].key).to eq('flag1')
@@ -256,7 +256,7 @@ module Impl
        listener = ListenerSpy.new
        flag_change_broadcaster.add_listener(listener)

-        sink.delete(LaunchDarkly::FEATURES, "flag4", 2)
+        sink.delete(LaunchDarkly::Impl::DataStore::FEATURES, "flag4", 2)
        expect(listener.statuses.count).to eq(3)
        expect(listener.statuses[0].key).to eq('flag4')
        expect(listener.statuses[1].key).to eq('flag2')
@@ -269,10 +269,10 @@ module Impl
        listener = ListenerSpy.new
        flag_change_broadcaster.add_listener(listener)

-        sink.upsert(LaunchDarkly::SEGMENTS, LaunchDarkly::Impl::Model::Segment.new({ key: 'segment1', version: 2 }))
+        sink.upsert(LaunchDarkly::Impl::DataStore::SEGMENTS, LaunchDarkly::Impl::Model::Segment.new({ key: 'segment1', version: 2 }))
        # TODO(sc-197908): Once the store starts returning a success status on upsert, the flag change notification
        # can start ignoring duplicate requests like this.
-        # sink.upsert(LaunchDarkly::SEGMENTS, LaunchDarkly::Impl::Model::Segment.new({ key: 'segment1', version: 2 }))
+        # sink.upsert(LaunchDarkly::Impl::DataStore::SEGMENTS, LaunchDarkly::Impl::Model::Segment.new({ key: 'segment1', version: 2 }))

        expect(listener.statuses.count).to eq(3)
        expect(listener.statuses[0].key).to eq('flag6')
@@ -286,10 +286,10 @@ module Impl
        listener = ListenerSpy.new
        flag_change_broadcaster.add_listener(listener)

-        sink.delete(LaunchDarkly::SEGMENTS, 'segment2', 2)
+        sink.delete(LaunchDarkly::Impl::DataStore::SEGMENTS, 'segment2', 2)
        # TODO(sc-197908): Once the store starts returning a success status on upsert, the flag change notification
        # can start ignoring duplicate requests like this.
-        # sink.delete(LaunchDarkly::SEGMENTS, 'segment2', 2)
+        # sink.delete(LaunchDarkly::Impl::DataStore::SEGMENTS, 'segment2', 2)

        expect(listener.statuses.count).to eq(3)
        expect(listener.statuses[0].key).to eq('flag6')
diff --git a/spec/impl/data_store_spec.rb b/spec/impl/data_store_spec.rb
index 738c6fbb..cce0964e 100644
--- a/spec/impl/data_store_spec.rb
+++ b/spec/impl/data_store_spec.rb
@@ -6,15 +6,15 @@ module DataStore
      describe DataKind do
        describe "eql?" do
          it "constant instances are equal to themselves" do
-            expect(LaunchDarkly::FEATURES.eql?(LaunchDarkly::FEATURES)).to be true
-            expect(LaunchDarkly::SEGMENTS.eql?(LaunchDarkly::SEGMENTS)).to be true
+            expect(FEATURES.eql?(FEATURES)).to be true
+            expect(SEGMENTS.eql?(SEGMENTS)).to be true
          end

          it "same constructions are equal" do
-            expect(LaunchDarkly::FEATURES.eql?(DataKind.new(namespace: "features", priority: 1))).to be true
+            expect(FEATURES.eql?(DataKind.new(namespace: "features", priority: 1))).to be true
            expect(DataKind.new(namespace: "features", priority: 1).eql?(DataKind.new(namespace: "features", priority: 1))).to be true

-            expect(LaunchDarkly::SEGMENTS.eql?(DataKind.new(namespace: "segments", priority: 0))).to be true
+            expect(SEGMENTS.eql?(DataKind.new(namespace: "segments", priority: 0))).to be true
            expect(DataKind.new(namespace: "segments", priority: 0).eql?(DataKind.new(namespace: "segments", priority: 0))).to be true
          end

@@ -29,22 +29,22 @@ module DataStore
          it "handles non-DataKind objects" do
            ["example", true, 1, 1.0, [], {}].each do |obj|
-              expect(LaunchDarkly::FEATURES.eql?(obj)).to be false
+              expect(FEATURES.eql?(obj)).to be false
            end
          end
        end

        describe "hash" do
          it "constant instances are equal to themselves" do
-            expect(LaunchDarkly::FEATURES.hash).to be LaunchDarkly::FEATURES.hash
-            expect(LaunchDarkly::SEGMENTS.hash).to be LaunchDarkly::SEGMENTS.hash
+            expect(FEATURES.hash).to be FEATURES.hash
+            expect(SEGMENTS.hash).to be SEGMENTS.hash
          end

          it "same constructions are equal" do
-            expect(LaunchDarkly::FEATURES.hash).to be DataKind.new(namespace: "features", priority: 1).hash
+            expect(FEATURES.hash).to be DataKind.new(namespace: "features", priority: 1).hash
            expect(DataKind.new(namespace: "features", priority: 1).hash).to be DataKind.new(namespace: "features", priority: 1).hash

-            expect(LaunchDarkly::SEGMENTS.hash).to be DataKind.new(namespace: "segments", priority: 0).hash
+            expect(SEGMENTS.hash).to be DataKind.new(namespace: "segments", priority: 0).hash
            expect(DataKind.new(namespace: "segments", priority: 0).hash).to be DataKind.new(namespace: "segments", priority: 0).hash
          end
diff --git a/spec/impl/evaluator_spec_base.rb b/spec/impl/evaluator_spec_base.rb
index 4e041f94..17296fbe 100644
--- a/spec/impl/evaluator_spec_base.rb
+++ b/spec/impl/evaluator_spec_base.rb
@@ -17,7 +17,7 @@ def initialize(logger)
     end

     def with_flag(flag)
-      @flags[flag[:key]] = Model.deserialize(FEATURES, flag)
+      @flags[flag[:key]] = Model.deserialize(DataStore::FEATURES, flag)
       self
     end

@@ -27,7 +27,7 @@ def with_unknown_flag(key)
     end

     def with_segment(segment)
-      @segments[segment[:key]] = Model.deserialize(SEGMENTS, segment)
+      @segments[segment[:key]] = Model.deserialize(DataStore::SEGMENTS, segment)
       self
     end

diff --git a/spec/impl/model/preprocessed_data_spec.rb b/spec/impl/model/preprocessed_data_spec.rb
index 753590d5..bda9c31d 100644
--- a/spec/impl/model/preprocessed_data_spec.rb
+++ b/spec/impl/model/preprocessed_data_spec.rb
@@ -30,7 +30,7 @@ module DataModelPreprocessing
          ],
        }
        flag = Model::FeatureFlag.new(original_flag)
-        json = Model.serialize(FEATURES, flag)
+        json = Model.serialize(DataStore::FEATURES, flag)
        parsed = JSON.parse(json, symbolize_names: true)
        expect(parsed).to eq(original_flag)
      end
diff --git a/spec/impl/model/serialization_spec.rb b/spec/impl/model/serialization_spec.rb
index f2d364eb..360af825 100644
--- a/spec/impl/model/serialization_spec.rb
+++ b/spec/impl/model/serialization_spec.rb
@@ -7,27 +7,27 @@ module Model
      describe "model serialization" do
        it "serializes flag" do
          flag = FlagBuilder.new("flagkey").version(1).build
-          json = Model.serialize(FEATURES, flag)
+          json = Model.serialize(Impl::DataStore::FEATURES, flag)
          expect(JSON.parse(json, symbolize_names: true)).to eq flag.data
        end

        it "serializes segment" do
          segment = SegmentBuilder.new("segkey").version(1).build
-          json = Model.serialize(SEGMENTS, segment)
+          json = Model.serialize(Impl::DataStore::SEGMENTS, segment)
          expect(JSON.parse(json, symbolize_names: true)).to eq segment.data
        end

        it "deserializes flag with no rules or prerequisites" do
          flag_in = { key: "flagkey", version: 1 }
          json = flag_in.to_json
-          flag_out = Model.deserialize(FEATURES, json, nil)
+          flag_out = Model.deserialize(Impl::DataStore::FEATURES, json, nil)
          expect(flag_out.data).to eq flag_in
        end

        it "deserializes segment" do
          segment_in = { key: "segkey", version: 1 }
          json = segment_in.to_json
-          segment_out = Model.deserialize(SEGMENTS, json, nil)
+          segment_out = Model.deserialize(Impl::DataStore::SEGMENTS, json, nil)
          expect(segment_out.data).to eq segment_in
        end
      end
diff --git a/spec/integrations/file_data_source_spec.rb b/spec/integrations/file_data_source_spec.rb
index 8b22e902..c4d10137 100644
--- a/spec/integrations/file_data_source_spec.rb
+++ b/spec/integrations/file_data_source_spec.rb
@@ -146,8 +146,8 @@ def with_data_source(options, initialize_to_valid = false)
    file = make_temp_file('{"flagValues":{"key":"value"}}')
    with_data_source({ paths: [ file.path ] }) do |_|
      expect(@store.initialized?).to eq(false)
-      expect(@store.all(LaunchDarkly::FEATURES)).to eq({})
-      expect(@store.all(LaunchDarkly::SEGMENTS)).to eq({})
+      expect(@store.all(LaunchDarkly::Impl::DataStore::FEATURES)).to eq({})
+      expect(@store.all(LaunchDarkly::Impl::DataStore::SEGMENTS)).to eq({})
    end
  end

@@ -159,8 +159,8 @@ def with_data_source(options, initialize_to_valid = false)
      ds.start
      expect(@store.initialized?).to eq(true)
-      expect(@store.all(LaunchDarkly::FEATURES).keys).to eq(all_flag_keys)
-      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq(all_segment_keys)
+      expect(@store.all(LaunchDarkly::Impl::DataStore::FEATURES).keys).to eq(all_flag_keys)
+      expect(@store.all(LaunchDarkly::Impl::DataStore::SEGMENTS).keys).to eq(all_segment_keys)

      expect(listener.statuses.count).to eq(1)
      expect(listener.statuses[0].state).to eq(LaunchDarkly::Interfaces::DataSource::Status::VALID)
@@ -172,8 +172,8 @@ def with_data_source(options, initialize_to_valid = false)
    with_data_source({ paths: [ file.path ] }) do |ds|
      ds.start
      expect(@store.initialized?).to eq(true)
-      expect(@store.all(LaunchDarkly::FEATURES).keys).to eq(all_flag_keys)
-      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq(all_segment_keys)
+      expect(@store.all(LaunchDarkly::Impl::DataStore::FEATURES).keys).to eq(all_flag_keys)
+      expect(@store.all(LaunchDarkly::Impl::DataStore::SEGMENTS).keys).to eq(all_segment_keys)
    end
  end

@@ -214,8 +214,8 @@ def with_data_source(options, initialize_to_valid = false)
    with_data_source({ paths: [ file1.path, file2.path ] }) do |ds|
      ds.start
      expect(@store.initialized?).to eq(true)
-      expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([ full_flag_1_key.to_sym ])
-      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([ full_segment_1_key.to_sym ])
+      expect(@store.all(LaunchDarkly::Impl::DataStore::FEATURES).keys).to eq([ full_flag_1_key.to_sym ])
+      expect(@store.all(LaunchDarkly::Impl::DataStore::SEGMENTS).keys).to eq([ full_segment_1_key.to_sym ])
    end
  end

@@ -239,7 +239,7 @@ def with_data_source(options, initialize_to_valid = false)
    with_data_source({ paths: [ file1.path, file2.path ] }) do |ds|
      ds.start
      expect(@store.initialized?).to eq(false)
-      expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([])
+      expect(@store.all(LaunchDarkly::Impl::DataStore::FEATURES).keys).to eq([])
    end
  end

@@ -249,11 +249,11 @@ def with_data_source(options, initialize_to_valid = false)
    with_data_source({ paths: [ file.path ] }) do |ds|
      event = ds.start
      expect(event.set?).to eq(true)
-      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([])
+      expect(@store.all(LaunchDarkly::Impl::DataStore::SEGMENTS).keys).to eq([])

      IO.write(file, all_properties_json)
      sleep(0.5)
-      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([])
+      expect(@store.all(LaunchDarkly::Impl::DataStore::SEGMENTS).keys).to eq([])
    end
  end

@@ -264,13 +264,13 @@ def test_auto_reload(options)
    with_data_source(options) do |ds|
      event = ds.start
      expect(event.set?).to eq(true)
-      expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([])
+      expect(@store.all(LaunchDarkly::Impl::DataStore::SEGMENTS).keys).to eq([])

      sleep(1)
      IO.write(file, all_properties_json)

      max_time = 10
-      ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys }
+      ok = wait_for_condition(10) { @store.all(LaunchDarkly::Impl::DataStore::SEGMENTS).keys == all_segment_keys }
      expect(ok).to eq(true), "Waited #{max_time}s after modifying file and it did not reload"
    end
  end
diff --git a/spec/integrations/redis_stores_spec.rb b/spec/integrations/redis_stores_spec.rb
index 1f59469a..791ed64e 100644
--- a/spec/integrations/redis_stores_spec.rb
+++ b/spec/integrations/redis_stores_spec.rb
@@ -105,11 +105,11 @@ def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_ve
    tester = RedisStoreTester.new({ test_hook: test_hook, logger: $null_logger })

    ensure_stop(tester.create_feature_store) do |store|
-      store.init(LaunchDarkly::FEATURES => { flag[:key] => flag })
+      store.init(LaunchDarkly::Impl::DataStore::FEATURES => { flag[:key] => flag })

      my_ver = { key: "foo", version: 10 }
-      store.upsert(LaunchDarkly::FEATURES, my_ver)
-      result = store.get(LaunchDarkly::FEATURES, flag[:key])
+      store.upsert(LaunchDarkly::Impl::DataStore::FEATURES, my_ver)
+      result = store.get(LaunchDarkly::Impl::DataStore::FEATURES, flag[:key])
      expect(result[:version]).to eq 10
    end
  end
@@ -122,11 +122,11 @@ def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_ve
    tester = RedisStoreTester.new({ test_hook: test_hook, logger: $null_logger })

    ensure_stop(tester.create_feature_store) do |store|
-      store.init(LaunchDarkly::FEATURES => { flag[:key] => flag })
+      store.init(LaunchDarkly::Impl::DataStore::FEATURES => { flag[:key] => flag })

      my_ver = { key: "foo", version: 2 }
-      store.upsert(LaunchDarkly::FEATURES, my_ver)
-      result = store.get(LaunchDarkly::FEATURES, flag[:key])
+      store.upsert(LaunchDarkly::Impl::DataStore::FEATURES, my_ver)
+      result = store.get(LaunchDarkly::Impl::DataStore::FEATURES, flag[:key])
      expect(result[:version]).to eq 3
    end
  end
@@ -138,7 +138,7 @@ def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_ve
    store = tester.create_feature_store

    begin
-      store.init(LaunchDarkly::FEATURES => { })
+      store.init(LaunchDarkly::Impl::DataStore::FEATURES => { })
      store.stop

      expect { unowned_pool.with {} }.to raise_error(ConnectionPool::PoolShuttingDownError)
@@ -153,7 +153,7 @@ def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_ve
    store = tester.create_feature_store

    begin
-      store.init(LaunchDarkly::FEATURES => { })
+      store.init(LaunchDarkly::Impl::DataStore::FEATURES => { })
      store.stop

      expect { unowned_pool.with {} }.not_to raise_error
diff --git a/spec/integrations/test_data_spec.rb b/spec/integrations/test_data_spec.rb
index f576175d..3c28fd4e 100644
--- a/spec/integrations/test_data_spec.rb
+++ b/spec/integrations/test_data_spec.rb
@@ -7,7 +7,7 @@ module Integrations
        td = Integrations::TestData.data_source
        config = Config.new(send_events: false, data_source: td)
        client = LDClient.new('sdkKey', config)
-        expect(config.feature_store.all(FEATURES)).to eql({})
+        expect(config.feature_store.all(LaunchDarkly::Impl::DataStore::FEATURES)).to eql({})
        client.close
      end

@@ -16,7 +16,7 @@ module Integrations
        td.update(td.flag('flag'))
        config = Config.new(send_events: false, data_source: td)
        client = LDClient.new('sdkKey', config)
-        expect(config.feature_store.get(FEATURES, 'flag').data).to eql({
+        expect(config.feature_store.get(LaunchDarkly::Impl::DataStore::FEATURES, 'flag').data).to eql({
          key: 'flag',
          variations: [true, false],
          fallthrough: { variation: 0 },
@@ -35,7 +35,7 @@ module Integrations
        config2 = Config.new(send_events: false, data_source: td)
        client2 = LDClient.new('sdkKey', config2)

-        expect(config.feature_store.get(FEATURES, 'flag').data).to eql({
+        expect(config.feature_store.get(LaunchDarkly::Impl::DataStore::FEATURES, 'flag').data).to eql({
          key: 'flag',
          variations: [true, false],
          fallthrough: { variation: 0 },
@@ -43,7 +43,7 @@ module Integrations
          on: true,
          version: 1,
        })
-        expect(config2.feature_store.get(FEATURES, 'flag').data).to eql({
+        expect(config2.feature_store.get(LaunchDarkly::Impl::DataStore::FEATURES, 'flag').data).to eql({
          key: 'flag',
          variations: [true, false],
          fallthrough: { variation: 0 },
@@ -54,7 +54,7 @@ module Integrations

        td.update(td.flag('flag').variation_for_all(false))

-        expect(config.feature_store.get(FEATURES, 'flag').data).to eql({
+        expect(config.feature_store.get(LaunchDarkly::Impl::DataStore::FEATURES, 'flag').data).to eql({
          key: 'flag',
          variations: [true, false],
          fallthrough: { variation: 1 },
@@ -62,7 +62,7 @@ module Integrations
          on: true,
          version: 2,
        })
-        expect(config2.feature_store.get(FEATURES, 'flag').data).to eql({
+        expect(config2.feature_store.get(LaunchDarkly::Impl::DataStore::FEATURES, 'flag').data).to eql({
          key: 'flag',
          variations: [true, false],
          fallthrough: { variation: 1 },
@@ -83,22 +83,22 @@ module Integrations
        config = Config.new(send_events: false, data_source: td)
        client = LDClient.new('sdkKey', config)

-        expect(config.feature_store.get(FEATURES, 'my-flag').data).to eql({
+        expect(config.feature_store.get(LaunchDarkly::Impl::DataStore::FEATURES, 'my-flag').data).to eql({
          key: 'my-flag', version: 1000, on: true })
-        expect(config.feature_store.get(SEGMENTS, 'my-segment').data).to eql({
+        expect(config.feature_store.get(LaunchDarkly::Impl::DataStore::SEGMENTS, 'my-segment').data).to eql({
          key: 'my-segment', version: 2000 })

        td.use_preconfigured_flag({ key: 'my-flag', on: false })

-        expect(config.feature_store.get(FEATURES, 'my-flag').data).to eql({
+        expect(config.feature_store.get(LaunchDarkly::Impl::DataStore::FEATURES, 'my-flag').data).to eql({
          key: 'my-flag', version: 1001, on: false })

        td.use_preconfigured_segment({ key: 'my-segment', included: [ 'x' ] })

-        expect(config.feature_store.get(SEGMENTS, 'my-segment').data).to eql({
+        expect(config.feature_store.get(LaunchDarkly::Impl::DataStore::SEGMENTS, 'my-segment').data).to eql({
          key: 'my-segment', version: 2001, included: [ 'x' ] })
diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb
index 04778116..a97f8371 100644
--- a/spec/ldclient_spec.rb
+++ b/spec/ldclient_spec.rb
@@ -74,7 +74,7 @@ module LaunchDarkly
    context "feature store data ordering" do
      let(:dependency_ordering_test_data) {
        {
-          FEATURES => {
+          Impl::DataStore::FEATURES => {
            a: { key: "a", prerequisites: [ { key: "b" }, { key: "c" } ] },
            b: { key: "b", prerequisites: [ { key: "c" }, { key: "e" } ] },
            c: { key: "c" },
@@ -82,7 +82,7 @@ module LaunchDarkly
            e: { key: "e" },
            f: { key: "f" },
          },
-          SEGMENTS => {
+          Impl::DataStore::SEGMENTS => {
            o: { key: "o" },
          },
        }
@@ -91,8 +91,8 @@ module LaunchDarkly
      it "passes data set to feature store in correct order on init" do
        store = CapturingFeatureStore.new
        td = Integrations::TestData.data_source
-        dependency_ordering_test_data[FEATURES].each { |_, flag| td.use_preconfigured_flag(flag) }
-        dependency_ordering_test_data[SEGMENTS].each { |_, segment| td.use_preconfigured_segment(segment) }
+        dependency_ordering_test_data[Impl::DataStore::FEATURES].each { |_, flag| td.use_preconfigured_flag(flag) }
+        dependency_ordering_test_data[Impl::DataStore::SEGMENTS].each { |_, segment| td.use_preconfigured_segment(segment) }

        with_client(test_config(feature_store: store, data_source: td)) do |_|
          data = store.received_data
@@ -100,14 +100,14 @@ module LaunchDarkly
          expect(data.count).to eq(2)

          # Segments should always come first
-          expect(data.keys[0]).to be(SEGMENTS)
-          expect(data.values[0].count).to eq(dependency_ordering_test_data[SEGMENTS].count)
+          expect(data.keys[0]).to be(Impl::DataStore::SEGMENTS)
+          expect(data.values[0].count).to eq(dependency_ordering_test_data[Impl::DataStore::SEGMENTS].count)

          # Features should be ordered so that a flag always appears after its prerequisites, if any
-          expect(data.keys[1]).to be(FEATURES)
+          expect(data.keys[1]).to be(Impl::DataStore::FEATURES)
          flags_map = data.values[1]
          flags_list = flags_map.values
-          expect(flags_list.count).to eq(dependency_ordering_test_data[FEATURES].count)
+          expect(flags_list.count).to eq(dependency_ordering_test_data[Impl::DataStore::FEATURES].count)
          flags_list.each_with_index do |item, item_index|
            (item[:prerequisites] || []).each do |prereq|
              prereq = flags_map[prereq[:key].to_sym]
diff --git a/spec/model_builders.rb b/spec/model_builders.rb
index 110e184e..fd0114f3 100644
--- a/spec/model_builders.rb
+++ b/spec/model_builders.rb
@@ -4,7 +4,7 @@

 class Flags
   def self.from_hash(data)
-    LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::FEATURES, data)
+    LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::Impl::DataStore::FEATURES, data)
   end

   def self.boolean_flag_with_rules(*rules)
@@ -20,7 +20,7 @@ def self.boolean_flag_with_clauses(*clauses)

 class Segments
   def self.from_hash(data)
-    LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::SEGMENTS, data)
+    LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::Impl::DataStore::SEGMENTS, data)
   end
 end

@@ -226,21 +226,21 @@ def initialize
   end

   def flag(data)
-    f = LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::FEATURES, data)
+    f = LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::Impl::DataStore::FEATURES, data)
     @flags[f.key.to_sym] = f
     self
   end

   def segment(data)
-    s = LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::SEGMENTS, data)
+    s = LaunchDarkly::Impl::Model.deserialize(LaunchDarkly::Impl::DataStore::SEGMENTS, data)
     @segments[s.key.to_sym] = s
     self
   end

   def to_store_data
     {
-      LaunchDarkly::FEATURES => @flags,
-      LaunchDarkly::SEGMENTS => @segments,
+      LaunchDarkly::Impl::DataStore::FEATURES => @flags,
+      LaunchDarkly::Impl::DataStore::SEGMENTS => @segments,
     }
   end

From 674d3a92c7ba56c79629fe345ef3d39cf74db628 Mon Sep 17 00:00:00 2001
From: jsonbailey
Date: Fri, 5 Dec 2025 21:02:45 +0000
Subject: [PATCH 09/11] fix lint issue

---
 spec/impl/data_source_spec.rb | 8 ++++++--
 1 file changed, 6 insertions(+), 2 deletions(-)

diff --git a/spec/impl/data_source_spec.rb b/spec/impl/data_source_spec.rb
index 0ca94ba9..8ead6513 100644
--- a/spec/impl/data_source_spec.rb
+++ b/spec/impl/data_source_spec.rb
@@ -231,7 +231,9 @@ module Impl
        listener = ListenerSpy.new
        flag_change_broadcaster.add_listener(listener)

-        sink.upsert(LaunchDarkly::Impl::DataStore::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag3', version: 2, prerequisities: [{key: 'flag4', variation: 0}] }))
+        sink.upsert(LaunchDarkly::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({
+          key: 'flag3', version: 2, prerequisities: [{key: 'flag4', variation: 0}]
+        }))
        expect(listener.statuses.count).to eq(3)
        expect(listener.statuses[0].key).to eq('flag3')
        expect(listener.statuses[1].key).to eq('flag2')
@@ -244,7 +246,9 @@ module Impl
        listener = ListenerSpy.new
        flag_change_broadcaster.add_listener(listener)

-        sink.upsert(LaunchDarkly::Impl::DataStore::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({ key: 'flag2', version: 2, prerequisities: [{key: 'flag3', variation: 0}] }))
+        sink.upsert(LaunchDarkly::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({
+          key: 'flag2', version: 2, prerequisities: [{key: 'flag3', variation: 0}]
+        }))
        expect(listener.statuses.count).to eq(2)
        expect(listener.statuses[0].key).to eq('flag2')
        expect(listener.statuses[1].key).to eq('flag1')

From 26f64dc54dfede8759e1e89dde2d18659ee3f816 Mon Sep 17 00:00:00 2001
From: jsonbailey
Date: Fri, 5 Dec 2025 21:14:01 +0000
Subject: [PATCH 10/11] fix failing build

---
 spec/impl/data_source_spec.rb | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/spec/impl/data_source_spec.rb b/spec/impl/data_source_spec.rb
index 8ead6513..5d2d2847 100644
--- a/spec/impl/data_source_spec.rb
+++ b/spec/impl/data_source_spec.rb
@@ -231,7 +231,7 @@ module Impl
        listener = ListenerSpy.new
        flag_change_broadcaster.add_listener(listener)

-        sink.upsert(LaunchDarkly::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({
+        sink.upsert(LaunchDarkly::Impl::DataStore::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({
          key: 'flag3', version: 2, prerequisities: [{key: 'flag4', variation: 0}]
        }))
        expect(listener.statuses.count).to eq(3)
@@ -246,7 +246,7 @@ module Impl
        listener = ListenerSpy.new
        flag_change_broadcaster.add_listener(listener)

-        sink.upsert(LaunchDarkly::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({
+        sink.upsert(LaunchDarkly::Impl::DataStore::FEATURES, LaunchDarkly::Impl::Model::FeatureFlag.new({
          key: 'flag2', version: 2, prerequisities: [{key: 'flag3', variation: 0}]
        }))
        expect(listener.statuses.count).to eq(2)

From 99ae1d9caea87f008b55064d9be040f287004a03 Mon Sep 17 00:00:00 2001
From: jsonbailey
Date: Mon, 8 Dec 2025 21:42:12 +0000
Subject: [PATCH 11/11] remove unnecessary private comments for files in impl folder

---
 lib/ldclient-rb/impl.rb                          | 1 -
 lib/ldclient-rb/impl/cache_store.rb              | 4 ----
 lib/ldclient-rb/impl/data_source/polling.rb      | 1 -
 lib/ldclient-rb/impl/data_source/requestor.rb    | 2 --
 lib/ldclient-rb/impl/data_source/stream.rb       | 6 ------
 lib/ldclient-rb/impl/data_store.rb               | 3 ---
 lib/ldclient-rb/impl/expiring_cache.rb           | 1 -
 lib/ldclient-rb/impl/memoized_value.rb           | 1 -
 lib/ldclient-rb/impl/non_blocking_thread_pool.rb | 1 -
 lib/ldclient-rb/impl/simple_lru_cache.rb         | 1 -
 10 files changed, 21 deletions(-)

diff --git a/lib/ldclient-rb/impl.rb b/lib/ldclient-rb/impl.rb
index 5ed4eb00..14d9e608 100644
--- a/lib/ldclient-rb/impl.rb
+++ b/lib/ldclient-rb/impl.rb
@@ -6,7 +6,6 @@ module LaunchDarkly
   #
   # @since 5.5.0
   # @api private
-  #
   module Impl
     # code is in ldclient-rb/impl/
   end
diff --git a/lib/ldclient-rb/impl/cache_store.rb b/lib/ldclient-rb/impl/cache_store.rb
index 7662ea04..e32566c0 100644
--- a/lib/ldclient-rb/impl/cache_store.rb
+++ b/lib/ldclient-rb/impl/cache_store.rb
@@ -2,12 +2,8 @@

 module LaunchDarkly
   module Impl
-    #
     # A thread-safe in-memory store that uses the same semantics that Faraday would expect, although we
     # no longer use Faraday. This is used by Requestor, when we are not in a Rails environment.
-    #
-    # @api private
-    #
     class ThreadSafeMemoryStore
       #
       # Default constructor
diff --git a/lib/ldclient-rb/impl/data_source/polling.rb b/lib/ldclient-rb/impl/data_source/polling.rb
index 16d3c38b..502b87d7 100644
--- a/lib/ldclient-rb/impl/data_source/polling.rb
+++ b/lib/ldclient-rb/impl/data_source/polling.rb
@@ -8,7 +8,6 @@
 module LaunchDarkly
   module Impl
     module DataSource
-      # @api private
       class PollingProcessor
         def initialize(config, requestor)
           @config = config
diff --git a/lib/ldclient-rb/impl/data_source/requestor.rb b/lib/ldclient-rb/impl/data_source/requestor.rb
index 2ce893ac..b612df72 100644
--- a/lib/ldclient-rb/impl/data_source/requestor.rb
+++ b/lib/ldclient-rb/impl/data_source/requestor.rb
@@ -9,7 +9,6 @@
 module LaunchDarkly
   module Impl
     module DataSource
-      # @api private
       class UnexpectedResponseError < StandardError
         def initialize(status)
           @status = status
@@ -21,7 +20,6 @@ def status
         end
       end

-      # @api private
       class Requestor
         CacheEntry = Struct.new(:etag, :body)

diff --git a/lib/ldclient-rb/impl/data_source/stream.rb b/lib/ldclient-rb/impl/data_source/stream.rb
index 1ae7ac3b..bded4070 100644
--- a/lib/ldclient-rb/impl/data_source/stream.rb
+++ b/lib/ldclient-rb/impl/data_source/stream.rb
@@ -9,22 +9,16 @@

 module LaunchDarkly
   module Impl
     module DataSource
-      # @api private
       PUT = :put
-      # @api private
       PATCH = :patch
-      # @api private
       DELETE = :delete
-      # @api private
       READ_TIMEOUT_SECONDS = 300 # 5 minutes; the stream should send a ping every 3 minutes
-      # @api private
       KEY_PATHS = {
         Impl::DataStore::FEATURES => "/flags/",
         Impl::DataStore::SEGMENTS => "/segments/",
       }

-      # @api private
       class StreamProcessor
         def initialize(sdk_key, config, diagnostic_accumulator = nil)
           @sdk_key = sdk_key
diff --git a/lib/ldclient-rb/impl/data_store.rb b/lib/ldclient-rb/impl/data_store.rb
index 86ae9562..1a99fbb3 100644
--- a/lib/ldclient-rb/impl/data_store.rb
+++ b/lib/ldclient-rb/impl/data_store.rb
@@ -13,13 +13,10 @@ module DataStore
       # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter
       # to ensure data consistency during non-atomic updates.

-      # @api private
       FEATURES = DataKind.new(namespace: "features", priority: 1).freeze

-      # @api private
       SEGMENTS = DataKind.new(namespace: "segments", priority: 0).freeze

-      # @api private
       ALL_KINDS = [FEATURES, SEGMENTS].freeze
     end
   end
diff --git a/lib/ldclient-rb/impl/expiring_cache.rb b/lib/ldclient-rb/impl/expiring_cache.rb
index c397d565..97ea87e1 100644
--- a/lib/ldclient-rb/impl/expiring_cache.rb
+++ b/lib/ldclient-rb/impl/expiring_cache.rb
@@ -7,7 +7,6 @@ module Impl
     # * made thread-safe
     # * removed many unused methods
     # * reading a key does not reset its expiration time, only writing
-    # @api private
     class ExpiringCache
       def initialize(max_size, ttl)
         @max_size = max_size
diff --git a/lib/ldclient-rb/impl/memoized_value.rb b/lib/ldclient-rb/impl/memoized_value.rb
index 2a297a0f..8a69a552 100644
--- a/lib/ldclient-rb/impl/memoized_value.rb
+++ b/lib/ldclient-rb/impl/memoized_value.rb
@@ -4,7 +4,6 @@ module Impl
     # Simple implementation of a thread-safe memoized value whose generator function will never be
     # run more than once, and whose value can be overridden by explicit assignment.
     # Note that we no longer use this class and it will be removed in a future version.
-    # @api private
     class MemoizedValue
       def initialize(&generator)
         @generator = generator
diff --git a/lib/ldclient-rb/impl/non_blocking_thread_pool.rb b/lib/ldclient-rb/impl/non_blocking_thread_pool.rb
index 575761b0..a76b12d6 100644
--- a/lib/ldclient-rb/impl/non_blocking_thread_pool.rb
+++ b/lib/ldclient-rb/impl/non_blocking_thread_pool.rb
@@ -7,7 +7,6 @@ module LaunchDarkly
   module Impl
     # Simple wrapper for a FixedThreadPool that rejects new jobs if all the threads are busy, rather
     # than blocking. Also provides a way to wait for all jobs to finish without shutting down.
-    # @api private
     class NonBlockingThreadPool
       def initialize(capacity, name = 'LD/NonBlockingThreadPool')
         @capacity = capacity
diff --git a/lib/ldclient-rb/impl/simple_lru_cache.rb b/lib/ldclient-rb/impl/simple_lru_cache.rb
index 4578a38b..d378bea8 100644
--- a/lib/ldclient-rb/impl/simple_lru_cache.rb
+++ b/lib/ldclient-rb/impl/simple_lru_cache.rb
@@ -3,7 +3,6 @@ module LaunchDarkly
   module Impl
     # A non-thread-safe implementation of a LRU cache set with only add and reset methods.
     # Based on https://github.com/SamSaffron/lru_redux/blob/master/lib/lru_redux/cache.rb
-    # @api private
     class SimpleLRUCacheSet
       def initialize(capacity)
         @values = {}