From f5092af8b94a58c09ab74f7d3870229a6424776e Mon Sep 17 00:00:00 2001 From: Andrew Shannon Brown Date: Mon, 23 Jul 2018 17:09:29 -0700 Subject: [PATCH 001/135] Remove @ashanbrown from codeowners --- CODEOWNERS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CODEOWNERS b/CODEOWNERS index 44429ee1..8b137891 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -1 +1 @@ -* @ashanbrown + From fd63b2b84cd7806bbbacb094b0ac3ce2502fe94f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 1 Aug 2018 17:40:01 -0700 Subject: [PATCH 002/135] log exception stacktraces at debug level --- lib/ldclient-rb/events.rb | 4 ++-- lib/ldclient-rb/ldclient.rb | 12 +++--------- lib/ldclient-rb/util.rb | 5 +++++ spec/ldclient_spec.rb | 11 ----------- 4 files changed, 10 insertions(+), 22 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 0c9a0ece..202fc235 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -142,7 +142,7 @@ def main_loop(queue, buffer, flush_workers) message.completed end rescue => e - @config.logger.warn { "[LDClient] Unexpected error in event processor: #{e.inspect}. \nTrace: #{e.backtrace}" } + Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end end @@ -226,7 +226,7 @@ def trigger_flush(buffer, flush_workers) resp = EventPayloadSendTask.new.run(@sdk_key, @config, @client, payload, @formatter) handle_response(resp) if !resp.nil? rescue => e - @config.logger.warn { "[LDClient] Unexpected error in event processor: #{e.inspect}. \nTrace: #{e.backtrace}" } + Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end buffer.clear if success # Reset our internal state, these events now belong to the flush worker diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 5c0e872d..3f0f6d9a 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -162,7 +162,7 @@ def variation(key, user, default) @event_processor.add_event(make_feature_event(feature, user, res[:variation], value, default)) return value rescue => exn - @config.logger.warn { "[LDClient] Error evaluating feature flag: #{exn.inspect}. \nTrace: #{exn.backtrace}" } + Util.log_exception(@config.logger, "Error evaluating feature flag", exn) @event_processor.add_event(make_feature_event(feature, user, nil, default, default)) return default end @@ -210,7 +210,7 @@ def all_flags(user) # TODO rescue if necessary Hash[features.map{ |k, f| [k, evaluate(f, user, @store, @config.logger)[:value]] }] rescue => exn - @config.logger.warn { "[LDClient] Error evaluating all flags: #{exn.inspect}. 
\nTrace: #{exn.backtrace}" } + Util.log_exception(@config.logger, "Error evaluating all flags", exn) return Hash.new end end @@ -226,12 +226,6 @@ def close @store.stop end - def log_exception(caller, exn) - error_traceback = "#{exn.inspect} #{exn}\n\t#{exn.backtrace.join("\n\t")}" - error = "[LDClient] Unexpected exception in #{caller}: #{error_traceback}" - @config.logger.error { error } - end - def sanitize_user(user) if user[:key] user[:key] = user[:key].to_s @@ -252,7 +246,7 @@ def make_feature_event(flag, user, variation, value, default) } end - private :evaluate, :log_exception, :sanitize_user, :make_feature_event + private :evaluate, :sanitize_user, :make_feature_event end # diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 6ba70dbc..99ee2477 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -1,6 +1,11 @@ module LaunchDarkly module Util + def self.log_exception(logger, message, exc) + logger.warn { "[LDClient] #{message}: #{exc.inspect}" } + logger.debug { "[LDClient] Exception trace: #{exc.backtrace}" } + end + def self.http_error_recoverable?(status) if status >= 400 && status < 500 status == 400 || status == 408 || status == 429 diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 8e4b5eb5..68c57166 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -130,17 +130,6 @@ def event_processor end end - describe '#log_exception' do - it "log error data" do - expect(client.instance_variable_get(:@config).logger).to receive(:error) - begin - raise StandardError.new 'asdf' - rescue StandardError => exn - client.send(:log_exception, 'caller', exn) - end - end - end - describe 'with send_events: false' do let(:config) { LaunchDarkly::Config.new({offline: true, send_events: false, update_processor: update_processor}) } let(:client) { subject.new("secret", config) } From d4be186ed2026056dd9768fd2b265181f9353c72 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 1 Aug 2018 17:48:15 -0700 Subject: [PATCH 003/135] re-add minimal unit test --- spec/util_spec.rb | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) create mode 100644 spec/util_spec.rb diff --git a/spec/util_spec.rb b/spec/util_spec.rb new file mode 100644 index 00000000..25881aaa --- /dev/null +++ b/spec/util_spec.rb @@ -0,0 +1,17 @@ +require "spec_helper" + +describe LaunchDarkly::Util do + describe 'log_exception' do + let(:logger) { double() } + + it "logs error data" do + expect(logger).to receive(:warn) + expect(logger).to receive(:debug) + begin + raise StandardError.new 'asdf' + rescue StandardError => exn + LaunchDarkly::Util.log_exception(logger, "message", exn) + end + end + end +end From d73d66c19c03511905aa9eef827bb656b19791be Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 1 Aug 2018 17:51:32 -0700 Subject: [PATCH 004/135] log exceptions at error level --- lib/ldclient-rb/util.rb | 2 +- spec/util_spec.rb | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 99ee2477..707ba3ce 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -2,7 +2,7 @@ module LaunchDarkly module Util def self.log_exception(logger, message, exc) - logger.warn { "[LDClient] #{message}: #{exc.inspect}" } + logger.error { "[LDClient] #{message}: #{exc.inspect}" } logger.debug { "[LDClient] Exception trace: #{exc.backtrace}" } end diff --git a/spec/util_spec.rb b/spec/util_spec.rb index 25881aaa..50a72f76 100644 --- a/spec/util_spec.rb +++ b/spec/util_spec.rb @@ -5,7 +5,7 @@ let(:logger) { 
double() } it "logs error data" do - expect(logger).to receive(:warn) + expect(logger).to receive(:error) expect(logger).to receive(:debug) begin raise StandardError.new 'asdf' From ca15234e9214701061528d3ce702c20d34d3a9a9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 17 Aug 2018 16:30:19 -0700 Subject: [PATCH 005/135] add new version of all_flags that captures more metadata --- lib/ldclient-rb.rb | 1 + lib/ldclient-rb/flags_state.rb | 51 +++++++++++++++++++ lib/ldclient-rb/ldclient.rb | 44 ++++++++++++---- spec/ldclient_spec.rb | 91 ++++++++++++++++++++++++++++++++++ 4 files changed, 176 insertions(+), 11 deletions(-) create mode 100644 lib/ldclient-rb/flags_state.rb diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index ce9d0307..7264b220 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -1,6 +1,7 @@ require "ldclient-rb/version" require "ldclient-rb/util" require "ldclient-rb/evaluation" +require "ldclient-rb/flags_state" require "ldclient-rb/ldclient" require "ldclient-rb/cache_store" require "ldclient-rb/expiring_cache" diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb new file mode 100644 index 00000000..f68dc20b --- /dev/null +++ b/lib/ldclient-rb/flags_state.rb @@ -0,0 +1,51 @@ + +module LaunchDarkly + # + # A snapshot of the state of all feature flags with regard to a specific user, generated by + # calling the client's all_flags_state method. + # + class FeatureFlagsState + def initialize(valid) + @flag_values = {} + @flag_metadata = {} + @valid = valid + end + + # Used internally to build the state map. + def add_flag(flag, value, variation) + key = flag[:key] + @flag_values[key] = value + meta = { version: flag[:version], trackEvents: flag[:trackEvents] } + meta[:variation] = variation if !variation.nil? + meta[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] + @flag_metadata[key] = meta + end + + # Returns true if this object contains a valid snapshot of feature flag state, or false if the + # state could not be computed (for instance, because the client was offline or there was no user). + def valid? + @valid + end + + # Returns the value of an individual feature flag at the time the state was recorded. + # Returns nil if the flag returned the default value, or if there was no such flag. + def flag_value(key) + @flag_values[key] + end + + # Returns a map of flag keys to flag values. If a flag would have evaluated to the default value, + # its value will be nil. + def values_map + @flag_values + end + + # Returns a JSON string representation of the entire state map, in the format used by the + # LaunchDarkly JavaScript SDK. Use this method if you are passing data to the front end that + # will be used to "bootstrap" the JavaScript client. + def json_string + ret = @flag_values.clone + ret['$flagsState'] = @flag_metadata + ret.to_json + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 3f0f6d9a..5c64b7e7 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -193,26 +193,48 @@ def track(event_name, user, data) end # - # Returns all feature flag values for the given user + # Returns all feature flag values for the given user. This method is deprecated - please use + # all_flags_state instead. Current versions of the client-side SDK (2.0.0 and later) will not + # generate analytics events correctly if you pass the result of all_flags. # def all_flags(user) - sanitize_user(user) - return Hash.new if @config.offline? 
+ all_flags_state(user).values_map + end - unless user - @config.logger.error { "[LDClient] Must specify user in all_flags" } - return Hash.new + # + # Returns a FeatureFlagsState object that encapsulates the state of all feature flags for a given user, + # including the flag values and also metadata that can be used on the front end. This method does not + # send analytics events back to LaunchDarkly. + # + def all_flags_state(user) + return FeatureFlagsState.new(false) if @config.offline? + + unless user && !user[:key].nil? + @config.logger.error { "[LDClient] User and user key must be specified in all_flags_state" } + return FeatureFlagsState.new(false) end + sanitize_user(user) + begin features = @store.all(FEATURES) - - # TODO rescue if necessary - Hash[features.map{ |k, f| [k, evaluate(f, user, @store, @config.logger)[:value]] }] rescue => exn - Util.log_exception(@config.logger, "Error evaluating all flags", exn) - return Hash.new + Util.log_exception(@config.logger, "Unable to read flags for all_flags_state", exn) + return FeatureFlagsState.new(false) + end + + state = FeatureFlagsState.new(true) + features.each do |k, f| + begin + result = evaluate(f, user, @store, @config.logger) + state.add_flag(f, result[:value], result[:variation]) + rescue => exn + Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) + state.add_flag(f, nil, nil) + end end + + state end # diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 68c57166..9d13dee0 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -99,6 +99,97 @@ def event_processor end end + describe '#all_flags' do + let(:flag1) { { key: "key1", offVariation: 0, variations: [ 'value1' ] } } + let(:flag2) { { key: "key2", offVariation: 0, variations: [ 'value2' ] } } + + it "returns flag values" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = client.all_flags({ key: 'userkey' }) + expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + end + + it "returns empty map for nil user" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = client.all_flags(nil) + expect(result).to eq({}) + end + + it "returns empty map for nil user key" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = client.all_flags({}) + expect(result).to eq({}) + end + + it "returns empty map if offline" do + offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + result = offline_client.all_flags(nil) + expect(result).to eq({}) + end + end + + describe '#all_flags_state' do + let(:flag1) { { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } } + let(:flag2) { { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } } + + it "returns flags state" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + + result = JSON.parse(state.json_string) + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + '$flagsState' => { + 'key1' => { + 'variation' => 0, + 'version' => 100, + 'trackEvents' => false + }, + 'key2' => { + 'variation' => 
1, + 'version' => 200, + 'trackEvents' => true, + 'debugEventsUntilDate' => 1000 + } + } + }) + end + + it "returns empty state for nil user" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = client.all_flags_state(nil) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + + it "returns empty state for nil user key" do + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = client.all_flags_state({}) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + + it "returns empty state if offline" do + offline_config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) + + state = offline_client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be false + expect(state.values_map).to eq({}) + end + end + describe '#secure_mode_hash' do it "will return the expected value for a known message and secret" do result = client.secure_mode_hash({key: :Message}) From ed19523fd0d93306204929248e179945fdabf10f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 17 Aug 2018 16:37:43 -0700 Subject: [PATCH 006/135] add tests for FeatureFlagsState --- spec/flags_state_spec.rb | 56 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 spec/flags_state_spec.rb diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb new file mode 100644 index 00000000..9241028d --- /dev/null +++ b/spec/flags_state_spec.rb @@ -0,0 +1,56 @@ +require "spec_helper" + +describe LaunchDarkly::FeatureFlagsState do + subject { LaunchDarkly::FeatureFlagsState } + + it "can get flag value" do + state = subject.new(true) + flag = { key: 'key' } + state.add_flag(flag, 'value', 1) + + expect(state.flag_value('key')).to eq 'value' + end + + it "returns nil for unknown flag" do + state = subject.new(true) + + expect(state.flag_value('key')).to be nil + end + + it "can be converted to values map" do + state = subject.new(true) + flag1 = { key: 'key1' } + flag2 = { key: 'key2' } + state.add_flag(flag1, 'value1', 0) + state.add_flag(flag2, 'value2', 1) + + expect(state.values_map).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) + end + + it "can be converted to JSON string" do + state = subject.new(true) + flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } + flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } + state.add_flag(flag1, 'value1', 0) + state.add_flag(flag2, 'value2', 1) + + result = JSON.parse(state.json_string) + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + '$flagsState' => { + 'key1' => { + 'variation' => 0, + 'version' => 100, + 'trackEvents' => false + }, + 'key2' => { + 'variation' => 1, + 'version' => 200, + 'trackEvents' => true, + 'debugEventsUntilDate' => 1000 + } + } + }) + end +end From 73f2d892fa166b5ccf2b68f268f77c04a49462ee Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 20 Aug 2018 12:59:10 -0700 Subject: [PATCH 007/135] provide as_json method that returns a hash instead of just a string --- lib/ldclient-rb/flags_state.rb | 22 +++++++++++++++++----- lib/ldclient-rb/ldclient.rb | 4 ++-- spec/flags_state_spec.rb | 30 +++++++++++++++++++++--------- spec/ldclient_spec.rb | 16 ++++++++-------- 4 files changed, 48 insertions(+), 24 deletions(-) diff --git a/lib/ldclient-rb/flags_state.rb 
b/lib/ldclient-rb/flags_state.rb index f68dc20b..a5af6c5a 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -1,3 +1,4 @@ +require 'json' module LaunchDarkly # @@ -35,17 +36,28 @@ def flag_value(key) # Returns a map of flag keys to flag values. If a flag would have evaluated to the default value, # its value will be nil. + # + # Do not use this method if you are passing data to the front end to "bootstrap" the JavaScript client. + # Instead, use as_json. def values_map @flag_values end - # Returns a JSON string representation of the entire state map, in the format used by the - # LaunchDarkly JavaScript SDK. Use this method if you are passing data to the front end that - # will be used to "bootstrap" the JavaScript client. - def json_string + # Returns a hash that can be used as a JSON representation of the entire state map, in the format + # used by the LaunchDarkly JavaScript SDK. Use this method if you are passing data to the front end + # in order to "bootstrap" the JavaScript client. + # + # Do not rely on the exact shape of this data, as it may change in future to support the needs of + # the JavaScript client. + def as_json(*) # parameter is unused, but may be passed if we're using the json gem ret = @flag_values.clone ret['$flagsState'] = @flag_metadata - ret.to_json + ret + end + + # Same as as_json, but converts the JSON structure into a string. + def to_json(*a) + as_json.to_json(a) end end end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 5c64b7e7..c8addbca 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -194,8 +194,8 @@ def track(event_name, user, data) # # Returns all feature flag values for the given user. This method is deprecated - please use - # all_flags_state instead. Current versions of the client-side SDK (2.0.0 and later) will not - # generate analytics events correctly if you pass the result of all_flags. + # all_flags_state instead. Current versions of the client-side SDK will not generate analytics + # events correctly if you pass the result of all_flags. 
# def all_flags(user) all_flags_state(user).values_map diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb index 9241028d..e6e1c17c 100644 --- a/spec/flags_state_spec.rb +++ b/spec/flags_state_spec.rb @@ -27,30 +27,42 @@ expect(state.values_map).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) end - it "can be converted to JSON string" do + it "can be converted to JSON structure" do state = subject.new(true) flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } state.add_flag(flag1, 'value1', 0) state.add_flag(flag2, 'value2', 1) - result = JSON.parse(state.json_string) + result = state.as_json expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2', '$flagsState' => { 'key1' => { - 'variation' => 0, - 'version' => 100, - 'trackEvents' => false + :variation => 0, + :version => 100, + :trackEvents => false }, 'key2' => { - 'variation' => 1, - 'version' => 200, - 'trackEvents' => true, - 'debugEventsUntilDate' => 1000 + :variation => 1, + :version => 200, + :trackEvents => true, + :debugEventsUntilDate => 1000 } } }) end + + it "can be converted to JSON string" do + state = subject.new(true) + flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } + flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } + state.add_flag(flag1, 'value1', 0) + state.add_flag(flag2, 'value2', 1) + + object = state.as_json + str = state.to_json + expect(object.to_json).to eq(str) + end end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 9d13dee0..b5939ea1 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -145,21 +145,21 @@ def event_processor values = state.values_map expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2' }) - result = JSON.parse(state.json_string) + result = state.as_json expect(result).to eq({ 'key1' => 'value1', 'key2' => 'value2', '$flagsState' => { 'key1' => { - 'variation' => 0, - 'version' => 100, - 'trackEvents' => false + :variation => 0, + :version => 100, + :trackEvents => false }, 'key2' => { - 'variation' => 1, - 'version' => 200, - 'trackEvents' => true, - 'debugEventsUntilDate' => 1000 + :variation => 1, + :version => 200, + :trackEvents => true, + :debugEventsUntilDate => 1000 } } }) From ab896b1e801f944166c5525e6aa1d00cf333da0b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 20 Aug 2018 20:01:32 -0700 Subject: [PATCH 008/135] state can be serialized with JSON.generate --- lib/ldclient-rb/flags_state.rb | 5 ++++- spec/flags_state_spec.rb | 16 +++++++++++++++- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index a5af6c5a..09f88975 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -3,7 +3,9 @@ module LaunchDarkly # # A snapshot of the state of all feature flags with regard to a specific user, generated by - # calling the client's all_flags_state method. + # calling the client's all_flags_state method. Serializing this object to JSON using + # JSON.generate (or the to_json method) will produce the appropriate data structure for + # bootstrapping the LaunchDarkly JavaScript client. 
# class FeatureFlagsState def initialize(valid) @@ -52,6 +54,7 @@ def values_map def as_json(*) # parameter is unused, but may be passed if we're using the json gem ret = @flag_values.clone ret['$flagsState'] = @flag_metadata + ret['$valid'] = @valid ret end diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb index e6e1c17c..3d21029b 100644 --- a/spec/flags_state_spec.rb +++ b/spec/flags_state_spec.rb @@ -1,4 +1,5 @@ require "spec_helper" +require "json" describe LaunchDarkly::FeatureFlagsState do subject { LaunchDarkly::FeatureFlagsState } @@ -50,7 +51,8 @@ :trackEvents => true, :debugEventsUntilDate => 1000 } - } + }, + '$valid' => true }) end @@ -65,4 +67,16 @@ str = state.to_json expect(object.to_json).to eq(str) end + + it "uses our custom serializer with JSON.generate" do + state = subject.new(true) + flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } + flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true, debugEventsUntilDate: 1000 } + state.add_flag(flag1, 'value1', 0) + state.add_flag(flag2, 'value2', 1) + + stringFromToJson = state.to_json + stringFromGenerate = JSON.generate(state) + expect(stringFromGenerate).to eq(stringFromToJson) + end end From 00347c66ae17167910d316617e061d85f6793681 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 20 Aug 2018 20:02:41 -0700 Subject: [PATCH 009/135] add $valid --- spec/ldclient_spec.rb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index b5939ea1..5dbb8195 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -161,7 +161,8 @@ def event_processor :trackEvents => true, :debugEventsUntilDate => 1000 } - } + }, + '$valid' => true }) end From bdac27e1cf37e2c95c4455d705a99aaa2a948b28 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 21 Aug 2018 11:46:14 -0700 Subject: [PATCH 010/135] add ability to filter for only client-side flags --- lib/ldclient-rb/ldclient.rb | 17 +++++++++++++++-- spec/ldclient_spec.rb | 16 ++++++++++++++++ 2 files changed, 31 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index c8addbca..e9873679 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -194,9 +194,12 @@ def track(event_name, user, data) # # Returns all feature flag values for the given user. This method is deprecated - please use - # all_flags_state instead. Current versions of the client-side SDK will not generate analytics + # {#all_flags_state} instead. Current versions of the client-side SDK will not generate analytics # events correctly if you pass the result of all_flags. # + # @param user [Hash] The end user requesting the feature flags + # @return [Hash] a hash of feature flag keys to values + # def all_flags(user) all_flags_state(user).values_map end @@ -206,7 +209,13 @@ def all_flags(user) # including the flag values and also metadata that can be used on the front end. This method does not # send analytics events back to LaunchDarkly. # - def all_flags_state(user) + # @param user [Hash] The end user requesting the feature flags + # @param options={} [Hash] Optional parameters to control how the state is generated + # @option options [Boolean] :client_side_only (false) True if only flags marked for use with the + # client-side SDK should be included in the state. By default, all flags are included. 
+ # @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON + # + def all_flags_state(user, options={}) return FeatureFlagsState.new(false) if @config.offline? unless user && !user[:key].nil? @@ -224,7 +233,11 @@ def all_flags_state(user) end state = FeatureFlagsState.new(true) + client_only = options[:client_side_only] || false features.each do |k, f| + if client_only && !f[:clientSide] + next + end begin result = evaluate(f, user, @store, @config.logger) state.add_flag(f, result[:value], result[:variation]) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 5dbb8195..ae76a678 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -166,6 +166,22 @@ def event_processor }) end + it "can be filtered for only client-side flags" do + flag1 = { key: "server-side-1", offVariation: 0, variations: [ 'a' ], clientSide: false } + flag2 = { key: "server-side-2", offVariation: 0, variations: [ 'b' ], clientSide: false } + flag3 = { key: "client-side-1", offVariation: 0, variations: [ 'value1' ], clientSide: true } + flag4 = { key: "client-side-2", offVariation: 0, variations: [ 'value2' ], clientSide: true } + config.feature_store.init({ LaunchDarkly::FEATURES => { + flag1[:key] => flag1, flag2[:key] => flag2, flag3[:key] => flag3, flag4[:key] => flag4 + }}) + + state = client.all_flags_state({ key: 'userkey' }, client_side_only: true) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'client-side-1' => 'value1', 'client-side-2' => 'value2' }) + end + it "returns empty state for nil user" do config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) From cee4c18aa0a6330cd3e24f6c9b11914cae57d34d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 19:58:42 -0700 Subject: [PATCH 011/135] implement evaluation with explanations --- lib/ldclient-rb/evaluation.rb | 185 +++++++++++++++++---------- lib/ldclient-rb/events.rb | 1 + lib/ldclient-rb/flags_state.rb | 3 +- lib/ldclient-rb/ldclient.rb | 124 ++++++++++-------- spec/evaluation_spec.rb | 221 +++++++++++++++++++-------------- spec/ldclient_spec.rb | 98 ++++++++++++--- 6 files changed, 403 insertions(+), 229 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index b4dd796c..b803f4a2 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -2,6 +2,31 @@ require "semantic" module LaunchDarkly + # An object returned by `LDClient.variation_detail`, combining the result of a flag evaluation with + # an explanation of how it was calculated. + class EvaluationDetail + def initialize(value, variation, reason) + @value = value + @variation = variation + @reason = reason + end + + # @return [Object] The result of the flag evaluation. This will be either one of the flag's + # variations or the default value that was passed to the `variation` method. + attr_reader :value + + # @return [int|nil] The index of the returned value within the flag's list of variations, e.g. + # 0 for the first variation - or `nil` if the default value was returned. + attr_reader :variation + + # @return [Hash] An object describing the main factor that influenced the flag evaluation value. 
+ attr_reader :reason + + def ==(other) + @value == other.value && @variation == other.variation && @reason == other.reason + end + end + module Evaluation BUILTINS = [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] @@ -110,101 +135,109 @@ def self.comparator(converter) class EvaluationError < StandardError end - # Evaluates a feature flag, returning a hash containing the evaluation result and any events - # generated during prerequisite evaluation. Raises EvaluationError if the flag is not well-formed - # Will return nil, but not raise an exception, indicating that the rules (including fallthrough) did not match - # In that case, the caller should return the default value. - def evaluate(flag, user, store, logger) - if flag.nil? - raise EvaluationError, "Flag does not exist" - end + # Used internally to hold an evaluation result and the events that were generated from prerequisites. + EvalResult = Struct.new(:detail, :events) + + def error_result(errorKind, value = nil) + EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) + end + # Evaluates a feature flag and returns an EvalResult. The result.value will be nil if the flag returns + # the default value. Error conditions produce a result with an error reason, not an exception. + def evaluate(flag, user, store, logger) if user.nil? || user[:key].nil? - raise EvaluationError, "Invalid user" + return EvalResult.new(error_result('USER_NOT_SPECIFIED'), []) end events = [] if flag[:on] - res = eval_internal(flag, user, store, events, logger) - if !res.nil? - res[:events] = events - return res + detail = eval_internal(flag, user, store, events, logger) + return EvalResult.new(detail, events) + end + + return EvalResult.new(get_off_value(flag, { kind: 'OFF' }), events) + end + + + def eval_internal(flag, user, store, events, logger) + prereq_failure_reason = check_prerequisites(flag, user, store, events, logger) + if !prereq_failure_reason.nil? + return get_off_value(flag, prereq_failure_reason) + end + + # Check user target matches + (flag[:targets] || []).each do |target| + (target[:values] || []).each do |value| + if value == user[:key] + return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }) + end + end + end + + # Check custom rules + rules = flag[:rules] || [] + rules.each_index do |i| + rule = rules[i] + if rule_match_user(rule, user, store) + return get_value_for_variation_or_rollout(flag, rule, user, + { kind: 'RULE_MATCH', ruleIndex: i, ruleId: rule[:id] }, logger) end end - offVariation = flag[:offVariation] - if !offVariation.nil? && offVariation < flag[:variations].length - value = flag[:variations][offVariation] - return { variation: offVariation, value: value, events: events } + # Check the fallthrough rule + if !flag[:fallthrough].nil? + return get_value_for_variation_or_rollout(flag, flag[:fallthrough], user, + { kind: 'FALLTHROUGH' }, logger) end - { variation: nil, value: nil, events: events } + return EvaluationDetail.new(nil, nil, { kind: 'FALLTHROUGH' }) end - def eval_internal(flag, user, store, events, logger) - failed_prereq = false - # Evaluate prerequisites, if any + def check_prerequisites(flag, user, store, events, logger) + failed_prereqs = [] + (flag[:prerequisites] || []).each do |prerequisite| - prereq_flag = store.get(FEATURES, prerequisite[:key]) + prereq_ok = true + prereq_key = prerequisite[:key] + prereq_flag = store.get(FEATURES, prereq_key) if prereq_flag.nil? 
|| !prereq_flag[:on] - failed_prereq = true + logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag[:key]}\"" } + prereq_ok = false + elsif !prereq_flag[:on] + prereq_ok = false else begin prereq_res = eval_internal(prereq_flag, user, store, events, logger) event = { kind: "feature", - key: prereq_flag[:key], - variation: prereq_res.nil? ? nil : prereq_res[:variation], - value: prereq_res.nil? ? nil : prereq_res[:value], + key: prereq_key, + variation: prereq_res.variation, + value: prereq_res.value, version: prereq_flag[:version], prereqOf: flag[:key], trackEvents: prereq_flag[:trackEvents], debugEventsUntilDate: prereq_flag[:debugEventsUntilDate] } events.push(event) - if prereq_res.nil? || prereq_res[:variation] != prerequisite[:variation] - failed_prereq = true + if prereq_res.variation != prerequisite[:variation] + prereq_ok = false end rescue => exn - logger.error { "[LDClient] Error evaluating prerequisite: #{exn.inspect}" } - failed_prereq = true + Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"{flag[:key]}\"", exn) + prereq_ok = false end end - end - - if failed_prereq - return nil - end - # The prerequisites were satisfied. - # Now walk through the evaluation steps and get the correct - # variation index - eval_rules(flag, user, store) - end - - def eval_rules(flag, user, store) - # Check user target matches - (flag[:targets] || []).each do |target| - (target[:values] || []).each do |value| - if value == user[:key] - return { variation: target[:variation], value: get_variation(flag, target[:variation]) } - end + if !prereq_ok + failed_prereqs.push(prereq_key) end end - - # Check custom rules - (flag[:rules] || []).each do |rule| - return variation_for_user(rule, user, flag) if rule_match_user(rule, user, store) - end - # Check the fallthrough rule - if !flag[:fallthrough].nil? - return variation_for_user(flag[:fallthrough], user, flag) + if failed_prereqs.empty? + return nil end - - # Not even the fallthrough matched-- return the off variation or default - nil + { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: failed_prereqs } end def get_variation(flag, index) @@ -257,9 +290,9 @@ def clause_match_user_no_segments(clause, user) maybe_negate(clause, match_any(op, val, clause[:values])) end - def variation_for_user(rule, user, flag) + def variation_index_for_user(flag, rule, user) if !rule[:variation].nil? # fixed variation - return { variation: rule[:variation], value: get_variation(flag, rule[:variation]) } + return rule[:variation] elsif !rule[:rollout].nil? # percentage rollout rollout = rule[:rollout] bucket_by = rollout[:bucketBy].nil? ? 
"key" : rollout[:bucketBy] @@ -268,12 +301,12 @@ def variation_for_user(rule, user, flag) rollout[:variations].each do |variate| sum += variate[:weight].to_f / 100000.0 if bucket < sum - return { variation: variate[:variation], value: get_variation(flag, variate[:variation]) } + return variate[:variation] end end nil else # the rule isn't well-formed - raise EvaluationError, "Rule does not define a variation or rollout" + nil end end @@ -350,5 +383,31 @@ def match_any(op, value, values) end return false end + + :private + + def get_variation(flag, index, reason) + if index < 0 || index >= flag[:variations].length + logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") + return error_result('MALFORMED_FLAG') + end + EvaluationDetail.new(flag[:variations][index], index, reason) + end + + def get_off_value(flag, reason) + if flag[:offVariation].nil? # off variation unspecified - return default value + return EvaluationDetail.new(nil, nil, reason) + end + get_variation(flag, flag[:offVariation], reason) + end + + def get_value_for_variation_or_rollout(flag, vr, user, reason, logger) + index = variation_index_for_user(flag, vr, user) + if index.nil? + logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") + return error_result('MALFORMED_FLAG') + end + return get_variation(flag, index, reason) + end end end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 202fc235..e19d6b02 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -363,6 +363,7 @@ def make_output_event(event) else out[:userKey] = event[:user].nil? ? nil : event[:user][:key] end + out[:reason] = event[:reason] if !event[:reason].nil? out when "identify" { diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 09f88975..05079920 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -15,12 +15,13 @@ def initialize(valid) end # Used internally to build the state map. - def add_flag(flag, value, variation) + def add_flag(flag, value, variation, reason = nil) key = flag[:key] @flag_values[key] = value meta = { version: flag[:version], trackEvents: flag[:trackEvents] } meta[:variation] = variation if !variation.nil? meta[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] + meta[:reason] = reason if !reason.nil? @flag_metadata[key] = meta end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index e9873679..8efd422a 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -120,52 +120,11 @@ def initialized? # @return the variation to show the user, or the # default value if there's an an error def variation(key, user, default) - return default if @config.offline? - - if !initialized? - if @store.initialized? - @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } - else - @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } - @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user) - return default - end - end - - sanitize_user(user) if !user.nil? - feature = @store.get(FEATURES, key) - - if feature.nil? - @config.logger.info { "[LDClient] Unknown feature flag #{key}. 
Returning default value" } - @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user) - return default - end - - unless user - @config.logger.error { "[LDClient] Must specify user" } - @event_processor.add_event(make_feature_event(feature, user, nil, default, default)) - return default - end + evaluate_internal(key, user, default, false).value + end - begin - res = evaluate(feature, user, @store, @config.logger) - if !res[:events].nil? - res[:events].each do |event| - @event_processor.add_event(event) - end - end - value = res[:value] - if value.nil? - @config.logger.debug { "[LDClient] Result value is null in toggle" } - value = default - end - @event_processor.add_event(make_feature_event(feature, user, res[:variation], value, default)) - return value - rescue => exn - Util.log_exception(@config.logger, "Error evaluating feature flag", exn) - @event_processor.add_event(make_feature_event(feature, user, nil, default, default)) - return default - end + def variation_detail(key, user, default) + evaluate_internal(key, user, default, true) end # @@ -213,6 +172,8 @@ def all_flags(user) # @param options={} [Hash] Optional parameters to control how the state is generated # @option options [Boolean] :client_side_only (false) True if only flags marked for use with the # client-side SDK should be included in the state. By default, all flags are included. + # @option options [Boolean] :with_reasons (false) True if evaluation reasons should be included + # in the state. By default, they are not included. # @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON # def all_flags_state(user, options={}) @@ -234,16 +195,17 @@ def all_flags_state(user, options={}) state = FeatureFlagsState.new(true) client_only = options[:client_side_only] || false + with_reasons = options[:with_reasons] || false features.each do |k, f| if client_only && !f[:clientSide] next end begin result = evaluate(f, user, @store, @config.logger) - state.add_flag(f, result[:value], result[:variation]) + state.add_flag(f, result.detail.value, result.detail.variation, with_reasons ? result.detail.reason : nil) rescue => exn Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) - state.add_flag(f, nil, nil) + state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil) end end @@ -261,27 +223,83 @@ def close @store.stop end + :private + + # @return [EvaluationDetail] + def evaluate_internal(key, user, default, include_reasons_in_events) + if @config.offline? + return error_result('CLIENT_NOT_READY', default) + end + + if !initialized? + if @store.initialized? + @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } + else + @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } + @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user) + return error_result('CLIENT_NOT_READY', default) + end + end + + sanitize_user(user) if !user.nil? + feature = @store.get(FEATURES, key) + + if feature.nil? + @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". Returning default value" } + detail = error_result('FLAG_NOT_FOUND', default) + @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user, + reason: include_reasons_in_events ? 
detail.reason : nil) + return detail + end + + unless user + @config.logger.error { "[LDClient] Must specify user" } + detail = error_result('USER_NOT_SPECIFIED', default) + @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + return detail + end + + begin + res = evaluate(feature, user, @store, @config.logger) + if !res.events.nil? + res.events.each do |event| + @event_processor.add_event(event) + end + end + detail = res.detail + if detail.variation.nil? + detail = EvaluationDetail.new(default, nil, detail.reason) + end + @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + return detail + rescue => exn + Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) + detail = error_result('EXCEPTION', default) + @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + return detail + end + end + def sanitize_user(user) if user[:key] user[:key] = user[:key].to_s end end - def make_feature_event(flag, user, variation, value, default) + def make_feature_event(flag, user, detail, default, with_reasons) { kind: "feature", key: flag[:key], user: user, - variation: variation, - value: value, + variation: detail.variation, + value: detail.value, default: default, version: flag[:version], trackEvents: flag[:trackEvents], - debugEventsUntilDate: flag[:debugEventsUntilDate] + debugEventsUntilDate: flag[:debugEventsUntilDate], + reason: with_reasons ? detail.reason : nil } end - - private :evaluate, :sanitize_user, :make_feature_event end # diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index a8d980ae..d5ee1097 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -2,6 +2,9 @@ describe LaunchDarkly::Evaluation do subject { LaunchDarkly::Evaluation } + + include LaunchDarkly::Evaluation + let(:features) { LaunchDarkly::InMemoryFeatureStore.new } let(:user) { @@ -14,7 +17,13 @@ let(:logger) { LaunchDarkly::Config.default_logger } - include LaunchDarkly::Evaluation + def boolean_flag_with_rules(rules) + { key: 'feature', on: true, rules: rules, fallthrough: { variation: 0 }, variations: [ false, true ] } + end + + def boolean_flag_with_clauses(clauses) + boolean_flag_with_rules([{ id: 'ruleid', clauses: clauses, variation: 1 }]) + end describe "evaluate" do it "returns off variation if flag is off" do @@ -26,7 +35,10 @@ variations: ['a', 'b', 'c'] } user = { key: 'x' } - expect(evaluate(flag, user, features, logger)).to eq({variation: 1, value: 'b', events: []}) + detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'OFF' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end it "returns nil if flag is off and off variation is unspecified" do @@ -37,7 +49,10 @@ variations: ['a', 'b', 'c'] } user = { key: 'x' } - expect(evaluate(flag, user, features, logger)).to eq({variation: nil, value: nil, events: []}) + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'OFF' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end it "returns off variation if prerequisite is not found" do @@ -50,7 +65,11 @@ variations: ['a', 'b', 'c'] } user = { key: 'x' } - expect(evaluate(flag, user, features, logger)).to eq({variation: 1, value: 'b', events: []}) + detail = LaunchDarkly::EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITES_FAILED', 
prerequisiteKeys: ['badfeature'] }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end it "returns off variation and event if prerequisite of a prerequisite is not found" do @@ -73,11 +92,15 @@ } features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['feature1'] }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: nil, value: nil, version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] - expect(evaluate(flag, user, features, logger)).to eq({variation: 1, value: 'b', events: events_should_be}) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) end it "returns off variation and event if prerequisite is not met" do @@ -99,11 +122,15 @@ } features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['feature1'] }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: 0, value: 'd', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] - expect(evaluate(flag, user, features, logger)).to eq({variation: 1, value: 'b', events: events_should_be}) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) end it "returns fallthrough variation and event if prerequisite is met and there are no rules" do @@ -125,11 +152,14 @@ } features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] - expect(evaluate(flag, user, features, logger)).to eq({variation: 0, value: 'a', events: events_should_be}) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) end it "matches user from targets" do @@ -144,57 +174,96 @@ variations: ['a', 'b', 'c'] } user = { key: 'userkey' } - expect(evaluate(flag, user, features, logger)).to eq({variation: 2, value: 'c', events: []}) + detail = LaunchDarkly::EvaluationDetail.new('c', 2, { kind: 'TARGET_MATCH' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end it "matches user from rules" do - flag = { - key: 'feature0', - on: true, - rules: [ - { - clauses: [ - { - attribute: 'key', - op: 'in', - values: [ 'userkey' ] - } - ], - variation: 2 - } - ], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: ['a', 'b', 'c'] - } + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: 1 } + flag = boolean_flag_with_rules([rule]) user = { key: 'userkey' } - expect(evaluate(flag, user, features, logger)).to eq({variation: 2, value: 'c', events: []}) + detail = LaunchDarkly::EvaluationDetail.new(true, 1, + { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if rule variation is too high" do + rule = { id: 'ruleid', clauses: [{ attribute: 
'key', op: 'in', values: ['userkey'] }], variation: 999 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if rule variation is negative" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], variation: -1 } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if rule has neither variation nor rollout" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }] } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if rule has a rollout with no variations" do + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { variations: [] } } + flag = boolean_flag_with_rules([rule]) + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) end end - describe "clause_match_user" do + describe "clause" do it "can match built-in attribute" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'] } - expect(clause_match_user(clause, user, features)).to be true + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be true end it "can match custom attribute" do user = { key: 'x', name: 'Bob', custom: { legs: 4 } } clause = { attribute: 'legs', op: 'in', values: [4] } - expect(clause_match_user(clause, user, features)).to be true + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be true end it "returns false for missing attribute" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'legs', op: 'in', values: [4] } - expect(clause_match_user(clause, user, features)).to be false + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be false end it "can be negated" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } - expect(clause_match_user(clause, user, features)).to be false + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be false end it "retrieves segment from segment store for segmentMatch operator" do @@ -208,23 +277,24 @@ user = { key: 'userkey' } clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - - expect(clause_match_user(clause, user, features)).to be true + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be true end it "falls through with no errors if referenced segment is not 
found" do user = { key: 'userkey' } clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } - - expect(clause_match_user(clause, user, features)).to be false + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be false end it "can be negated" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'] } + flag = boolean_flag_with_clauses([clause]) expect { clause[:negate] = true - }.to change {clause_match_user(clause, user, features)}.from(true).to(false) + }.to change {evaluate(flag, user, features, logger).detail.value}.from(true).to(false) end end @@ -326,7 +396,8 @@ it "should return #{shouldBe} for #{value1} #{op} #{value2}" do user = { key: 'x', custom: { foo: value1 } } clause = { attribute: 'foo', op: op, values: [value2] } - expect(clause_match_user(clause, user, features)).to be shouldBe + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be shouldBe end end end @@ -385,17 +456,6 @@ end end - def make_flag(key) - { - key: key, - rules: [], - variations: [ false, true ], - on: true, - fallthrough: { variation: 0 }, - version: 1 - } - end - def make_segment(key) { key: key, @@ -424,35 +484,30 @@ def make_user_matching_clause(user, attr) end describe 'segment matching' do - it 'explicitly includes user' do - segment = make_segment('segkey') - segment[:included] = [ user[:key] ] + def test_segment_match(segment) features.upsert(LaunchDarkly::SEGMENTS, segment) clause = make_segment_match_clause(segment) + flag = boolean_flag_with_clauses([clause]) + evaluate(flag, user, features, logger).detail.value + end - result = clause_match_user(clause, user, features) - expect(result).to be true + it 'explicitly includes user' do + segment = make_segment('segkey') + segment[:included] = [ user[:key] ] + expect(test_segment_match(segment)).to be true end it 'explicitly excludes user' do segment = make_segment('segkey') segment[:excluded] = [ user[:key] ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be false + expect(test_segment_match(segment)).to be false end it 'both includes and excludes user; include takes priority' do segment = make_segment('segkey') segment[:included] = [ user[:key] ] segment[:excluded] = [ user[:key] ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it 'matches user by rule when weight is absent' do @@ -462,11 +517,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it 'matches user by rule when weight is nil' do @@ -477,11 +528,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it 'matches user with full rollout' do @@ -492,11 
+539,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it "doesn't match user with zero rollout" do @@ -507,11 +550,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be false + expect(test_segment_match(segment)).to be false end it "matches user with multiple clauses" do @@ -522,11 +561,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be true + expect(test_segment_match(segment)).to be true end it "doesn't match user with multiple clauses if a clause doesn't match" do @@ -538,11 +573,7 @@ def make_user_matching_clause(user, attr) } segment = make_segment('segkey') segment[:rules] = [ segRule ] - features.upsert(LaunchDarkly::SEGMENTS, segment) - clause = make_segment_match_clause(segment) - - result = clause_match_user(clause, user, features) - expect(result).to be false + expect(test_segment_match(segment)).to be false end end end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index ae76a678..efaa1438 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -34,11 +34,18 @@ def event_processor end describe '#variation' do - it "will return the default value if the client is offline" do + feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, + trackEvents: true, debugEventsUntilDate: 1000 } + + it "returns the default value if the client is offline" do result = offline_client.variation("doesntmatter", user, "default") expect(result).to eq "default" end + it "returns the default value for an unknown feature" do + expect(client.variation("badkey", user, "default")).to eq "default" + end + it "queues a feature request event for an unknown feature" do expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", key: "badkey", user: user, value: "default", default: "default" @@ -46,56 +53,113 @@ def event_processor client.variation("badkey", user, "default") end + it "returns the value for an existing feature" do + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + expect(client.variation("key", user, "default")).to eq "value" + end + it "queues a feature request event for an existing feature" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", - key: feature[:key], - version: feature[:version], + key: "key", + version: 100, user: user, variation: 0, - value: true, + value: "value", default: "default", trackEvents: true, - debugEventsUntilDate: nil + debugEventsUntilDate: 1000 )) - client.variation(feature[:key], user, "default") + 
client.variation("key", user, "default") end it "queues a feature event for an existing feature when user is nil" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", - key: feature[:key], - version: feature[:version], + key: "key", + version: 100, user: nil, variation: nil, value: "default", default: "default", trackEvents: true, - debugEventsUntilDate: nil + debugEventsUntilDate: 1000 )) - client.variation(feature[:key], nil, "default") + client.variation("key", nil, "default") end it "queues a feature event for an existing feature when user key is nil" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) - config.feature_store.upsert(LaunchDarkly::FEATURES, feature) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) bad_user = { name: "Bob" } expect(event_processor).to receive(:add_event).with(hash_including( kind: "feature", - key: feature[:key], - version: feature[:version], + key: "key", + version: 100, user: bad_user, variation: nil, value: "default", default: "default", trackEvents: true, - debugEventsUntilDate: nil + debugEventsUntilDate: 1000 + )) + client.variation("key", bad_user, "default") + end + end + + describe '#variation_detail' do + feature_with_value = { key: "key", on: false, offVariation: 0, variations: ["value"], version: 100, + trackEvents: true, debugEventsUntilDate: 1000 } + + it "returns the default value if the client is offline" do + result = offline_client.variation_detail("doesntmatter", user, "default") + expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'ERROR', errorKind: 'CLIENT_NOT_READY' }) + expect(result).to eq expected + end + + it "returns the default value for an unknown feature" do + result = client.variation_detail("badkey", user, "default") + expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'ERROR', errorKind: 'FLAG_NOT_FOUND'}) + expect(result).to eq expected + end + + it "queues a feature request event for an unknown feature" do + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "feature", key: "badkey", user: user, value: "default", default: "default", + reason: { kind: 'ERROR', errorKind: 'FLAG_NOT_FOUND' } + )) + client.variation_detail("badkey", user, "default") + end + + it "returns a value for an existing feature" do + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + result = client.variation_detail("key", user, "default") + expected = LaunchDarkly::EvaluationDetail.new("value", 0, { kind: 'OFF' }) + expect(result).to eq expected + end + + it "queues a feature request event for an existing feature" do + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "feature", + key: "key", + version: 100, + user: user, + variation: 0, + value: "value", + default: "default", + trackEvents: true, + debugEventsUntilDate: 1000, + reason: { kind: "OFF" } )) - client.variation(feature[:key], bad_user, "default") + client.variation_detail("key", user, "default") end end From d2c2ab81abd6e19934a2e444993cef1e1285e069 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 
20:03:36 -0700 Subject: [PATCH 012/135] misc cleanup --- lib/ldclient-rb/evaluation.rb | 21 +++++++-------------- 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index b803f4a2..7a316aca 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -156,21 +156,21 @@ def evaluate(flag, user, store, logger) return EvalResult.new(detail, events) end - return EvalResult.new(get_off_value(flag, { kind: 'OFF' }), events) + return EvalResult.new(get_off_value(flag, { kind: 'OFF' }, logger), events) end def eval_internal(flag, user, store, events, logger) prereq_failure_reason = check_prerequisites(flag, user, store, events, logger) if !prereq_failure_reason.nil? - return get_off_value(flag, prereq_failure_reason) + return get_off_value(flag, prereq_failure_reason, logger) end # Check user target matches (flag[:targets] || []).each do |target| (target[:values] || []).each do |value| if value == user[:key] - return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }) + return get_variation(flag, target[:variation], { kind: 'TARGET_MATCH' }, logger) end end end @@ -240,13 +240,6 @@ def check_prerequisites(flag, user, store, events, logger) { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: failed_prereqs } end - def get_variation(flag, index) - if index >= flag[:variations].length - raise EvaluationError, "Invalid variation index" - end - flag[:variations][index] - end - def rule_match_user(rule, user, store) return false if !rule[:clauses] @@ -386,7 +379,7 @@ def match_any(op, value, values) :private - def get_variation(flag, index, reason) + def get_variation(flag, index, reason, logger) if index < 0 || index >= flag[:variations].length logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": invalid variation index") return error_result('MALFORMED_FLAG') @@ -394,11 +387,11 @@ def get_variation(flag, index, reason) EvaluationDetail.new(flag[:variations][index], index, reason) end - def get_off_value(flag, reason) + def get_off_value(flag, reason, logger) if flag[:offVariation].nil? # off variation unspecified - return default value return EvaluationDetail.new(nil, nil, reason) end - get_variation(flag, flag[:offVariation], reason) + get_variation(flag, flag[:offVariation], reason, logger) end def get_value_for_variation_or_rollout(flag, vr, user, reason, logger) @@ -407,7 +400,7 @@ def get_value_for_variation_or_rollout(flag, vr, user, reason, logger) logger.error("[LDClient] Data inconsistency in feature flag \"#{flag[:key]}\": variation/rollout object with no variation or rollout") return error_result('MALFORMED_FLAG') end - return get_variation(flag, index, reason) + return get_variation(flag, index, reason, logger) end end end From 64a00a1a9388e85cb26e5650da97fa2029198d64 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 20:14:37 -0700 Subject: [PATCH 013/135] misc cleanup, more error checking --- lib/ldclient-rb/evaluation.rb | 6 +- spec/evaluation_spec.rb | 111 +++++++++++++++++++++++++++++++++- 2 files changed, 111 insertions(+), 6 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 7a316aca..7dfbc3db 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -132,9 +132,6 @@ def self.comparator(converter) end } - class EvaluationError < StandardError - end - # Used internally to hold an evaluation result and the events that were generated from prerequisites. 
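  # Here `detail` holds the EvaluationDetail for the flag being evaluated, and `events`
  # collects the analytics event hashes generated while evaluating any prerequisite
  # flags (see check_prerequisites below).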
EvalResult = Struct.new(:detail, :events) @@ -268,9 +265,8 @@ def clause_match_user_no_segments(clause, user) return false if val.nil? op = OPERATORS[clause[:op].to_sym] - if op.nil? - raise EvaluationError, "Unsupported operator #{clause[:op]} in evaluation" + return false end if val.is_a? Enumerable diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index d5ee1097..9cb148ff 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -55,6 +55,38 @@ def boolean_flag_with_clauses(clauses) expect(result.events).to eq([]) end + it "returns an error if off variation is too high" do + flag = { + key: 'feature', + on: false, + offVariation: 999, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if off variation is negative" do + flag = { + key: 'feature', + on: false, + offVariation: -1, + fallthrough: { variation: 0 }, + variations: ['a', 'b', 'c'] + } + user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, + { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + it "returns off variation if prerequisite is not found" do flag = { key: 'feature0', @@ -162,9 +194,69 @@ def boolean_flag_with_clauses(clauses) expect(result.events).to eq(events_should_be) end + it "returns an error if fallthrough variation is too high" do + flag = { + key: 'feature', + on: true, + fallthrough: { variation: 999 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if fallthrough variation is negative" do + flag = { + key: 'feature', + on: true, + fallthrough: { variation: -1 }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if fallthrough has no variation or rollout" do + flag = { + key: 'feature', + on: true, + fallthrough: { }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + + it "returns an error if fallthrough has a rollout with no variations" do + flag = { + key: 'feature', + on: true, + fallthrough: { rollout: { variations: [] } }, + offVariation: 1, + variations: ['a', 'b', 'c'] + } + user = { key: 'userkey' } + detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq([]) + end + it "matches user from targets" do flag = { - key: 'feature0', + key: 'feature', on: 
true, targets: [ { values: [ 'whoever', 'userkey' ], variation: 2 } @@ -259,6 +351,23 @@ def boolean_flag_with_clauses(clauses) expect(evaluate(flag, user, features, logger).detail.value).to be false end + it "returns false for unknown operator" do + user = { key: 'x', name: 'Bob' } + clause = { attribute: 'name', op: 'unknown', values: [4] } + flag = boolean_flag_with_clauses([clause]) + expect(evaluate(flag, user, features, logger).detail.value).to be false + end + + it "does not stop evaluating rules after clause with unknown operator" do + user = { key: 'x', name: 'Bob' } + clause0 = { attribute: 'name', op: 'unknown', values: [4] } + rule0 = { clauses: [ clause0 ], variation: 1 } + clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } + rule1 = { clauses: [ clause1 ], variation: 1 } + flag = boolean_flag_with_rules([rule0, rule1]) + expect(evaluate(flag, user, features, logger).detail.value).to be true + end + it "can be negated" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } From 46b642b0c0498bfba69577a544226a33f9095cd6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 20:49:58 -0700 Subject: [PATCH 014/135] don't keep evaluating prerequisites if one fails --- lib/ldclient-rb/evaluation.rb | 10 ++-------- spec/evaluation_spec.rb | 6 +++--- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 7dfbc3db..51cf3c66 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -192,8 +192,6 @@ def eval_internal(flag, user, store, events, logger) end def check_prerequisites(flag, user, store, events, logger) - failed_prereqs = [] - (flag[:prerequisites] || []).each do |prerequisite| prereq_ok = true prereq_key = prerequisite[:key] @@ -227,14 +225,10 @@ def check_prerequisites(flag, user, store, events, logger) end end if !prereq_ok - failed_prereqs.push(prereq_key) + return { kind: 'PREREQUISITE_FAILED', prerequisiteKey: prereq_key } end end - - if failed_prereqs.empty? 
- return nil - end - { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: failed_prereqs } + nil end def rule_match_user(rule, user, store) diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 9cb148ff..7f0c82b4 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -98,7 +98,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['badfeature'] }) + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'badfeature' }) result = evaluate(flag, user, features, logger) expect(result.detail).to eq(detail) expect(result.events).to eq([]) @@ -125,7 +125,7 @@ def boolean_flag_with_clauses(clauses) features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['feature1'] }) + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: nil, value: nil, version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil @@ -155,7 +155,7 @@ def boolean_flag_with_clauses(clauses) features.upsert(LaunchDarkly::FEATURES, flag1) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('b', 1, - { kind: 'PREREQUISITES_FAILED', prerequisiteKeys: ['feature1'] }) + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ kind: 'feature', key: 'feature1', variation: 0, value: 'd', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil From 855c4e2be634b475957d46cda6870d1c52b326ed Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 22:28:28 -0700 Subject: [PATCH 015/135] doc comment --- lib/ldclient-rb/ldclient.rb | 40 ++++++++++++++++++++++++++++++++++++- 1 file changed, 39 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 8efd422a..1d5c23a1 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -115,7 +115,7 @@ def initialized? # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard # @param user [Hash] a hash containing parameters for the end user requesting the flag - # @param default=false the default value of the flag + # @param default the default value of the flag # # @return the variation to show the user, or the # default value if there's an an error @@ -123,6 +123,44 @@ def variation(key, user, default) evaluate_internal(key, user, default, false).value end + # + # Determines the variation of a feature flag for a user, like `variation`, but also + # provides additional information about how this value was calculated. + # + # The return value of `variation_detail` is an `EvaluationDetail` object, which has + # three properties: + # + # `value`: the value that was calculated for this user (same as the return value + # of `variation`) + # + # `variation`: the positional index of this value in the flag, e.g. 0 for the first + # variation - or `nil` if it is the default value + # + # `reason`: a hash describing the main reason why this value was selected. 
Its `:kind` + # property will be one of the following: + # + # * `'OFF'`: the flag was off and therefore returned its configured off value + # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules + # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag + # * `'RULE_MATCH'`: the user matched one of the flag's rules; the `:ruleIndex` and + # `:ruleId` properties indicate the positional index and unique identifier of the rule + # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one + # prerequisite flag that either was off or did not return the desired variation; the + # `:prerequisiteKey` property indicates the key of the prerequisite that failed + # * `'ERROR'`: the flag could not be evaluated, e.g. because it does not exist or due + # to an unexpected error, and therefore returned the default value; the `:errorKind` + # property describes the nature of the error, such as `'FLAG_NOT_FOUND'` + # + # The `reason` will also be included in analytics events, if you are capturing + # detailed event data for this flag. + # + # @param key [String] the unique feature key for the feature flag, as shown + # on the LaunchDarkly dashboard + # @param user [Hash] a hash containing parameters for the end user requesting the flag + # @param default the default value of the flag + # + # @return an `EvaluationDetail` object describing the result + # def variation_detail(key, user, default) evaluate_internal(key, user, default, true) end From a0f002f3c1e1cdb8313b5f116d9ba909e4d0e17d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 22:34:25 -0700 Subject: [PATCH 016/135] rename variation to variation_index --- lib/ldclient-rb/evaluation.rb | 12 ++++++------ lib/ldclient-rb/ldclient.rb | 10 +++++----- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 51cf3c66..bd4544dc 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -5,9 +5,9 @@ module LaunchDarkly # An object returned by `LDClient.variation_detail`, combining the result of a flag evaluation with # an explanation of how it was calculated. class EvaluationDetail - def initialize(value, variation, reason) + def initialize(value, variation_index, reason) @value = value - @variation = variation + @variation_index = variation_index @reason = reason end @@ -17,13 +17,13 @@ def initialize(value, variation, reason) # @return [int|nil] The index of the returned value within the flag's list of variations, e.g. # 0 for the first variation - or `nil` if the default value was returned. - attr_reader :variation + attr_reader :variation_index # @return [Hash] An object describing the main factor that influenced the flag evaluation value. 
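  # An illustrative sketch (not part of this changeset) of reading these properties in
  # application code - the flag key, user hash, and reason contents are hypothetical:
  #
  #   detail = client.variation_detail("my-flag-key", { key: "user-key" }, false)
  #   detail.value            # the evaluated value, or the default if evaluation failed
  #   detail.variation_index  # e.g. 0 for the first variation, or nil for the default value
  #   detail.reason           # e.g. { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'abc123' }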
attr_reader :reason def ==(other) - @value == other.value && @variation == other.variation && @reason == other.reason + @value == other.value && @variation_index == other.variation_index && @reason == other.reason end end @@ -208,7 +208,7 @@ def check_prerequisites(flag, user, store, events, logger) event = { kind: "feature", key: prereq_key, - variation: prereq_res.variation, + variation: prereq_res.variation_index, value: prereq_res.value, version: prereq_flag[:version], prereqOf: flag[:key], @@ -216,7 +216,7 @@ def check_prerequisites(flag, user, store, events, logger) debugEventsUntilDate: prereq_flag[:debugEventsUntilDate] } events.push(event) - if prereq_res.variation != prerequisite[:variation] + if prereq_res.variation_index != prerequisite[:variation] prereq_ok = false end rescue => exn diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 1d5c23a1..177b91a2 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -133,8 +133,8 @@ def variation(key, user, default) # `value`: the value that was calculated for this user (same as the return value # of `variation`) # - # `variation`: the positional index of this value in the flag, e.g. 0 for the first - # variation - or `nil` if it is the default value + # `variation_index`: the positional index of this value in the flag, e.g. 0 for the + # first variation - or `nil` if the default value was returned # # `reason`: a hash describing the main reason why this value was selected. Its `:kind` # property will be one of the following: @@ -240,7 +240,7 @@ def all_flags_state(user, options={}) end begin result = evaluate(f, user, @store, @config.logger) - state.add_flag(f, result.detail.value, result.detail.variation, with_reasons ? result.detail.reason : nil) + state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil) rescue => exn Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil) @@ -305,7 +305,7 @@ def evaluate_internal(key, user, default, include_reasons_in_events) end end detail = res.detail - if detail.variation.nil? + if detail.variation_index.nil? detail = EvaluationDetail.new(default, nil, detail.reason) end @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) @@ -329,7 +329,7 @@ def make_feature_event(flag, user, detail, default, with_reasons) kind: "feature", key: flag[:key], user: user, - variation: detail.variation, + variation: detail.variation_index, value: detail.value, default: default, version: flag[:version], From 4ec43db7e4b7d58ad04bf5f9dde015f0eed0a816 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 22 Aug 2018 22:44:24 -0700 Subject: [PATCH 017/135] comment --- lib/ldclient-rb/ldclient.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 177b91a2..1c2d2257 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -211,7 +211,7 @@ def all_flags(user) # @option options [Boolean] :client_side_only (false) True if only flags marked for use with the # client-side SDK should be included in the state. By default, all flags are included. # @option options [Boolean] :with_reasons (false) True if evaluation reasons should be included - # in the state. By default, they are not included. + # in the state (see `variation_detail`). 
By default, they are not included. # @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON # def all_flags_state(user, options={}) From 9622e0116f5b4a513e705630a19603842d07cd75 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 23 Aug 2018 17:11:29 -0700 Subject: [PATCH 018/135] more test coverage, convenience method --- lib/ldclient-rb/evaluation.rb | 6 ++++++ lib/ldclient-rb/ldclient.rb | 2 +- spec/ldclient_spec.rb | 17 +++++++++++++++++ 3 files changed, 24 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index bd4544dc..4f6cbb0e 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -22,6 +22,12 @@ def initialize(value, variation_index, reason) # @return [Hash] An object describing the main factor that influenced the flag evaluation value. attr_reader :reason + # @return [boolean] True if the flag evaluated to the default value rather than to one of its + # variations. + def default_value? + variation_index.nil? + end + def ==(other) @value == other.value && @variation_index == other.variation_index && @reason == other.reason end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 1c2d2257..a87344ed 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -305,7 +305,7 @@ def evaluate_internal(key, user, default, include_reasons_in_events) end end detail = res.detail - if detail.variation_index.nil? + if detail.default_value? detail = EvaluationDetail.new(default, nil, detail.reason) end @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index efaa1438..d76f7834 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -59,6 +59,13 @@ def event_processor expect(client.variation("key", user, "default")).to eq "value" end + it "returns the default value if a feature evaluates to nil" do + empty_feature = { key: "key", on: false, offVariation: nil } + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) + expect(client.variation("key", user, "default")).to eq "default" + end + it "queues a feature request event for an existing feature" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) @@ -144,6 +151,16 @@ def event_processor expect(result).to eq expected end + it "returns the default value if a feature evaluates to nil" do + empty_feature = { key: "key", on: false, offVariation: nil } + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, empty_feature) + result = client.variation_detail("key", user, "default") + expected = LaunchDarkly::EvaluationDetail.new("default", nil, { kind: 'OFF' }) + expect(result).to eq expected + expect(result.default_value?).to be true + end + it "queues a feature request event for an existing feature" do config.feature_store.init({ LaunchDarkly::FEATURES => {} }) config.feature_store.upsert(LaunchDarkly::FEATURES, feature_with_value) From 084d9eacf32a6cc36ff1a150dc3bef9190ba2b64 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 29 Aug 2018 11:25:31 -0700 Subject: [PATCH 019/135] fix event generation for a prerequisite that is off --- lib/ldclient-rb/evaluation.rb | 26 ++++++++++++-------------- spec/evaluation_spec.rb | 32 ++++++++++++++++++++++++++++++++ 2 
files changed, 44 insertions(+), 14 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 4f6cbb0e..aa4eb20d 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -153,17 +153,15 @@ def evaluate(flag, user, store, logger) end events = [] - - if flag[:on] - detail = eval_internal(flag, user, store, events, logger) - return EvalResult.new(detail, events) - end - - return EvalResult.new(get_off_value(flag, { kind: 'OFF' }, logger), events) + detail = eval_internal(flag, user, store, events, logger) + return EvalResult.new(detail, events) end - def eval_internal(flag, user, store, events, logger) + if !flag[:on] + return get_off_value(flag, { kind: 'OFF' }, logger) + end + prereq_failure_reason = check_prerequisites(flag, user, store, events, logger) if !prereq_failure_reason.nil? return get_off_value(flag, prereq_failure_reason, logger) @@ -203,14 +201,17 @@ def check_prerequisites(flag, user, store, events, logger) prereq_key = prerequisite[:key] prereq_flag = store.get(FEATURES, prereq_key) - if prereq_flag.nil? || !prereq_flag[:on] + if prereq_flag.nil? logger.error { "[LDClient] Could not retrieve prerequisite flag \"#{prereq_key}\" when evaluating \"#{flag[:key]}\"" } prereq_ok = false - elsif !prereq_flag[:on] - prereq_ok = false else begin prereq_res = eval_internal(prereq_flag, user, store, events, logger) + # Note that if the prerequisite flag is off, we don't consider it a match no matter what its + # off variation was. But we still need to evaluate it in order to generate an event. + if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] + prereq_ok = false + end event = { kind: "feature", key: prereq_key, @@ -222,9 +223,6 @@ def check_prerequisites(flag, user, store, events, logger) debugEventsUntilDate: prereq_flag[:debugEventsUntilDate] } events.push(event) - if prereq_res.variation_index != prerequisite[:variation] - prereq_ok = false - end rescue => exn Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"{flag[:key]}\"", exn) prereq_ok = false diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 7f0c82b4..3af960c6 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -135,6 +135,38 @@ def boolean_flag_with_clauses(clauses) expect(result.events).to eq(events_should_be) end + it "returns off variation and event if prerequisite is off" do + flag = { + key: 'feature0', + on: true, + prerequisites: [{key: 'feature1', variation: 1}], + fallthrough: { variation: 0 }, + offVariation: 1, + variations: ['a', 'b', 'c'], + version: 1 + } + flag1 = { + key: 'feature1', + on: false, + # note that even though it returns the desired variation, it is still off and therefore not a match + offVariation: 1, + fallthrough: { variation: 0 }, + variations: ['d', 'e'], + version: 2 + } + features.upsert(LaunchDarkly::FEATURES, flag1) + user = { key: 'x' } + detail = LaunchDarkly::EvaluationDetail.new('b', 1, + { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) + events_should_be = [{ + kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', + trackEvents: nil, debugEventsUntilDate: nil + }] + result = evaluate(flag, user, features, logger) + expect(result.detail).to eq(detail) + expect(result.events).to eq(events_should_be) + end + it "returns off variation and event if prerequisite is not met" do flag = { key: 'feature0', From 02b5712c434c7a4e6524d6e3752c09be4437feca Mon Sep 17 00:00:00 
2001 From: Eli Bishop Date: Wed, 29 Aug 2018 11:27:26 -0700 Subject: [PATCH 020/135] fix private --- lib/ldclient-rb/evaluation.rb | 2 +- lib/ldclient-rb/ldclient.rb | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 4f6cbb0e..a16d9adb 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -373,7 +373,7 @@ def match_any(op, value, values) return false end - :private + private def get_variation(flag, index, reason, logger) if index < 0 || index >= flag[:variations].length diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index a87344ed..7e86662b 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -261,7 +261,7 @@ def close @store.stop end - :private + private # @return [EvaluationDetail] def evaluate_internal(key, user, default, include_reasons_in_events) From 88676380bed1f147d04c8852f58ddb4f294e0eb5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 4 Oct 2018 19:04:17 -0700 Subject: [PATCH 021/135] add option to reduce front-end metadata for untracked flags --- lib/ldclient-rb/flags_state.rb | 10 ++++++--- lib/ldclient-rb/ldclient.rb | 10 +++++++-- spec/flags_state_spec.rb | 3 +-- spec/ldclient_spec.rb | 41 ++++++++++++++++++++++++++++++++-- 4 files changed, 55 insertions(+), 9 deletions(-) diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 05079920..a6036bde 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -15,13 +15,17 @@ def initialize(valid) end # Used internally to build the state map. - def add_flag(flag, value, variation, reason = nil) + def add_flag(flag, value, variation, reason = nil, details_only_if_tracked = false) key = flag[:key] @flag_values[key] = value - meta = { version: flag[:version], trackEvents: flag[:trackEvents] } + meta = {} + if !details_only_if_tracked || flag[:trackEvents] || flag[:debugEventsUntilDate] + meta[:version] = flag[:version] + meta[:reason] = reason if !reason.nil? + end meta[:variation] = variation if !variation.nil? + meta[:trackEvents] = true if flag[:trackEvents] meta[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] - meta[:reason] = reason if !reason.nil? @flag_metadata[key] = meta end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 7e86662b..4ad7928e 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -212,6 +212,10 @@ def all_flags(user) # client-side SDK should be included in the state. By default, all flags are included. # @option options [Boolean] :with_reasons (false) True if evaluation reasons should be included # in the state (see `variation_detail`). By default, they are not included. + # @option options [Boolean] :details_only_for_tracked_flags (false) True if any flag metadata that is + # normally only used for event generation - such as flag versions and evaluation reasons - should be + # omitted for any flag that does not have event tracking or debugging turned on. This reduces the size + # of the JSON data if you are passing the flag state to the front end. 
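  # A sketch of typical use with these options - the user hash is hypothetical, and the
  # resulting JSON is what you would hand to the front-end SDK for bootstrapping:
  #
  #   state = client.all_flags_state({ key: "user-key" },
  #     { client_side_only: true, details_only_for_tracked_flags: true })
  #   bootstrap_json = state.as_json if state.valid?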
# @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON # def all_flags_state(user, options={}) @@ -234,16 +238,18 @@ def all_flags_state(user, options={}) state = FeatureFlagsState.new(true) client_only = options[:client_side_only] || false with_reasons = options[:with_reasons] || false + details_only_if_tracked = options[:details_only_for_tracked_flags] || false features.each do |k, f| if client_only && !f[:clientSide] next end begin result = evaluate(f, user, @store, @config.logger) - state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil) + state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil, + details_only_if_tracked) rescue => exn Util.log_exception(@config.logger, "Error evaluating flag \"#{k}\" in all_flags_state", exn) - state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil) + state.add_flag(f, nil, nil, with_reasons ? { kind: 'ERROR', errorKind: 'EXCEPTION' } : nil, details_only_if_tracked) end end diff --git a/spec/flags_state_spec.rb b/spec/flags_state_spec.rb index 3d21029b..bda55b11 100644 --- a/spec/flags_state_spec.rb +++ b/spec/flags_state_spec.rb @@ -42,8 +42,7 @@ '$flagsState' => { 'key1' => { :variation => 0, - :version => 100, - :trackEvents => false + :version => 100 }, 'key2' => { :variation => 1, diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index d76f7834..6b923775 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -233,8 +233,7 @@ def event_processor '$flagsState' => { 'key1' => { :variation => 0, - :version => 100, - :trackEvents => false + :version => 100 }, 'key2' => { :variation => 1, @@ -263,6 +262,44 @@ def event_processor expect(values).to eq({ 'client-side-1' => 'value1', 'client-side-2' => 'value2' }) end + it "can omit details for untracked flags" do + flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } + flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } + flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: 1000 } + + config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) + + state = client.all_flags_state({ key: 'userkey' }) + expect(state.valid?).to be true + + values = state.values_map + expect(values).to eq({ 'key1' => 'value1', 'key2' => 'value2', 'key3' => 'value3' }) + + result = state.as_json + expect(result).to eq({ + 'key1' => 'value1', + 'key2' => 'value2', + 'key3' => 'value3', + '$flagsState' => { + 'key1' => { + :variation => 0, + :version => 100 + }, + 'key2' => { + :variation => 1, + :version => 200, + :trackEvents => true + }, + 'key3' => { + :variation => 1, + :version => 300, + :debugEventsUntilDate => 1000 + } + }, + '$valid' => true + }) + end + it "returns empty state for nil user" do config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2 } }) From 9ea43e022a331d7c5ad577aad0b6d68d59ca22bd Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 8 Oct 2018 16:42:06 -0700 Subject: [PATCH 022/135] fix logic for whether a flag is tracked in all_flags_state --- lib/ldclient-rb/flags_state.rb | 6 +++++- spec/ldclient_spec.rb | 10 +++++----- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index 
a6036bde..b761149c 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -19,7 +19,11 @@ def add_flag(flag, value, variation, reason = nil, details_only_if_tracked = fal key = flag[:key] @flag_values[key] = value meta = {} - if !details_only_if_tracked || flag[:trackEvents] || flag[:debugEventsUntilDate] + with_details = !details_only_if_tracked || flag[:trackEvents] + if !with_details && flag[:debugEventsUntilDate] + with_details = flag[:debugEventsUntilDate] > (Time.now.to_f * 1000).to_i + end + if with_details meta[:version] = flag[:version] meta[:reason] = reason if !reason.nil? end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 6b923775..262f53f9 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -263,13 +263,14 @@ def event_processor end it "can omit details for untracked flags" do + future_time = (Time.now.to_f * 1000).to_i + 100000 flag1 = { key: "key1", version: 100, offVariation: 0, variations: [ 'value1' ], trackEvents: false } flag2 = { key: "key2", version: 200, offVariation: 1, variations: [ 'x', 'value2' ], trackEvents: true } - flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: 1000 } + flag3 = { key: "key3", version: 300, offVariation: 1, variations: [ 'x', 'value3' ], debugEventsUntilDate: future_time } config.feature_store.init({ LaunchDarkly::FEATURES => { 'key1' => flag1, 'key2' => flag2, 'key3' => flag3 } }) - state = client.all_flags_state({ key: 'userkey' }) + state = client.all_flags_state({ key: 'userkey' }, { details_only_for_tracked_flags: true }) expect(state.valid?).to be true values = state.values_map @@ -282,8 +283,7 @@ def event_processor 'key3' => 'value3', '$flagsState' => { 'key1' => { - :variation => 0, - :version => 100 + :variation => 0 }, 'key2' => { :variation => 1, @@ -293,7 +293,7 @@ def event_processor 'key3' => { :variation => 1, :version => 300, - :debugEventsUntilDate => 1000 + :debugEventsUntilDate => future_time } }, '$valid' => true From cce8e84964835b8d6d02ddff612a1af1e179e1c9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 10:23:48 -0700 Subject: [PATCH 023/135] implement file data source --- ldclient-rb.gemspec | 1 + lib/ldclient-rb.rb | 1 + lib/ldclient-rb/config.rb | 10 +- lib/ldclient-rb/file_data_source.rb | 209 ++++++++++++++++++++++++++++ lib/ldclient-rb/ldclient.rb | 33 +++-- spec/file_data_source_spec.rb | 167 ++++++++++++++++++++++ 6 files changed, 404 insertions(+), 17 deletions(-) create mode 100644 lib/ldclient-rb/file_data_source.rb create mode 100644 spec/file_data_source_spec.rb diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index a9bbfb23..9e7d5d04 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -40,4 +40,5 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "hashdiff", "~> 0.2" spec.add_runtime_dependency "http_tools", '~> 0.4.5' spec.add_runtime_dependency "socketry", "~> 0.5.1" + spec.add_runtime_dependency "listen", "~> 3.0" end diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index 7264b220..d3ee6ffc 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -18,3 +18,4 @@ require "ldclient-rb/events" require "ldclient-rb/redis_store" require "ldclient-rb/requestor" +require "ldclient-rb/file_data_source" diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 3b62b2a3..dc89d30a 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -61,8 +61,11 @@ class Config # @option opts [Boolean] :inline_users_in_events 
(false) Whether to include full user details in every # analytics event. By default, events will only include the user key, except for one "index" event # that provides the full details for the user. - # @option opts [Object] :update_processor An object that will receive feature flag data from LaunchDarkly. - # Defaults to either the streaming or the polling processor, can be customized for tests. + # @option opts [Object] :update_processor (DEPRECATED) An object that will receive feature flag data from + # LaunchDarkly. Defaults to either the streaming or the polling processor, can be customized for tests. + # @option opts [Object] :update_processor_factory A function that takes the SDK and configuration object + # as parameters, and returns an object that can obtain feature flag data and put it into the feature + # store. Defaults to creating either the streaming or the polling processor, can be customized for tests. # @return [type] [description] # rubocop:disable Metrics/AbcSize, Metrics/PerceivedComplexity def initialize(opts = {}) @@ -88,6 +91,7 @@ def initialize(opts = {}) @user_keys_flush_interval = opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval @inline_users_in_events = opts[:inline_users_in_events] || false @update_processor = opts[:update_processor] + @update_processor_factory = opts[:update_processor_factory] end # @@ -218,6 +222,8 @@ def offline? attr_reader :update_processor + attr_reader :update_processor_factory + # # The default LaunchDarkly client configuration. This configuration sets # reasonable defaults for most users. diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb new file mode 100644 index 00000000..65ba0735 --- /dev/null +++ b/lib/ldclient-rb/file_data_source.rb @@ -0,0 +1,209 @@ +require 'concurrent/atomics' +require 'json' +require 'yaml' +require 'listen' +require 'pathname' + +module LaunchDarkly + + # + # Provides a way to use local files as a source of feature flag state. This would typically be + # used in a test environment, to operate using a predetermined feature flag state without an + # actual LaunchDarkly connection. + # + # To use this component, call `FileDataSource.factory`, and store its return value in the + # `update_processor_factory` property of your LaunchDarkly client configuration. In the options + # to `factory`, set `paths` to the file path(s) of your data file(s): + # + # config.update_processor_factory = FileDataSource.factory(paths: [ myFilePath ]) + # + # This will cause the client not to connect to LaunchDarkly to get feature flags. The + # client may still make network connections to send analytics events, unless you have disabled + # this with Config.send_events or Config.offline. + # + # Flag data files can be either JSON or YAML. They contain an object with three possible + # properties: + # + # - "flags": Feature flag definitions. + # - "flagValues": Simplified feature flags that contain only a value. + # - "segments": User segment definitions. + # + # The format of the data in "flags" and "segments" is defined by the LaunchDarkly application + # and is subject to change. Rather than trying to construct these objects yourself, it is simpler + # to request existing flags directly from the LaunchDarkly server in JSON format, and use this + # output as the starting point for your file. 
In Linux you would do this: + # + # curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all + # + # The output will look something like this (but with many more properties): + # + # { + # "flags": { + # "flag-key-1": { + # "key": "flag-key-1", + # "on": true, + # "variations": [ "a", "b" ] + # } + # }, + # "segments": { + # "segment-key-1": { + # "key": "segment-key-1", + # "includes": [ "user-key-1" ] + # } + # } + # } + # + # Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported + # by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to + # set specific flag keys to specific values. For that, you can use a much simpler format: + # + # { + # "flagValues": { + # "my-string-flag-key": "value-1", + # "my-boolean-flag-key": true, + # "my-integer-flag-key": 3 + # } + # } + # + # Or, in YAML: + # + # flagValues: + # my-string-flag-key: "value-1" + # my-boolean-flag-key: true + # my-integer-flag-key: 1 + # + # It is also possible to specify both "flags" and "flagValues", if you want some flags + # to have simple values and others to have complex behavior. However, it is an error to use the + # same flag key or segment key more than once, either in a single file or across multiple files. + # + # If the data source encounters any error in any file-- malformed content, a missing file, or a + # duplicate key-- it will not load flags from any of the files. + # + class FileDataSource + def self.factory(options={}) + return Proc.new do |sdk_key, config| + FileDataSourceImpl.new(config.feature_store, config.logger, options) + end + end + end + + class FileDataSourceImpl + def initialize(feature_store, logger, options={}) + @feature_store = feature_store + @logger = logger + @paths = options[:paths] || [] + @auto_update = options[:auto_update] + @initialized = Concurrent::AtomicBoolean.new(false) + @ready = Concurrent::Event.new + end + + def initialized? + @initialized.value + end + + def start + ready = Concurrent::Event.new + + # We will return immediately regardless of whether the file load succeeded or failed - + # the difference can be detected by checking "initialized?" + ready.set + + load_all + + if @auto_update + # If we're going to watch files, then the start event will be set the first time we get + # a successful load. + @listener = start_listener + end + + ready + end + + def stop + @listener.stop if !@listener.nil? + end + + private + + def load_all + all_data = { + FEATURES => {}, + SEGMENTS => {} + } + @paths.each do |path| + begin + load_file(path, all_data) + rescue => exn + Util.log_exception(@logger, "Unable to load flag data from \"#{path}\"", exn) + return + end + end + @feature_store.init(all_data) + @initialized.make_true + end + + def load_file(path, all_data) + parsed = parse_content(IO.read(path)) + (parsed[:flags] || {}).each do |key, flag| + add_item(all_data, FEATURES, flag) + end + (parsed[:flagValues] || {}).each do |key, value| + add_item(all_data, FEATURES, make_flag_with_value(key.to_s, value)) + end + (parsed[:segments] || {}).each do |key, segment| + add_item(all_data, SEGMENTS, segment) + end + end + + def parse_content(content) + if content.strip.start_with?("{") + JSON.parse(content, symbolize_names: true) + else + symbolize_all_keys(YAML.load(content)) + end + end + + def symbolize_all_keys(value) + # This is necessary because YAML.load doesn't have an option for parsing keys as symbols, and + # the SDK expects all objects to be formatted that way. 
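      # For example, { "flags" => { "flag1" => { "on" => true } } } becomes
      # { flags: { flag1: { on: true } } } (illustrative input only).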
+ if value.is_a?(Hash) + value.map{ |k, v| [k.to_sym, symbolize_all_keys(v)] }.to_h + elsif value.is_a?(Array) + value.map{ |v| symbolize_all_keys(v) } + else + value + end + end + + def add_item(all_data, kind, item) + items = all_data[kind] || {} + if !items[item[:key]].nil? + raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" + end + items[item[:key]] = item + end + + def make_flag_with_value(key, value) + { + key: key, + on: true, + fallthrough: { variation: 0 }, + variations: [ value ] + } + end + + def start_listener + resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s } + path_set = resolved_paths.to_set + dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq + l = Listen.to(*dir_paths) do |modified, added, removed| + paths = modified + added + removed + if paths.any? { |p| path_set.include?(p) } + load_all + end + end + l.start + l + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 4ad7928e..94c24229 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -39,22 +39,11 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) return # requestor and update processor are not used in this mode end - requestor = Requestor.new(sdk_key, config) - - if @config.offline? - @update_processor = NullUpdateProcessor.new + if @config.update_processor + @update_processor = @config.update_processor else - if @config.update_processor.nil? - if @config.stream? - @update_processor = StreamProcessor.new(sdk_key, config, requestor) - else - @config.logger.info { "Disabling streaming API" } - @config.logger.warn { "You should only disable the streaming API if instructed to do so by LaunchDarkly support" } - @update_processor = PollingProcessor.new(config, requestor) - end - else - @update_processor = @config.update_processor - end + factory = @config.update_processor || self.method(:create_default_update_processor) + @update_processor = factory.call(sdk_key, config) end ready = @update_processor.start @@ -269,6 +258,20 @@ def close private + def create_default_update_processor(sdk_key, config) + if config.offline? + return NullUpdateProcessor.new + end + requestor = Requestor.new(sdk_key, config) + if config.stream? + StreamProcessor.new(sdk_key, config, requestor) + else + config.logger.info { "Disabling streaming API" } + config.logger.warn { "You should only disable the streaming API if instructed to do so by LaunchDarkly support" } + PollingProcessor.new(config, requestor) + end + end + # @return [EvaluationDetail] def evaluate_internal(key, user, default, include_reasons_in_events) if @config.offline? 
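The `update_processor_factory` hook used above accepts any callable that takes the SDK key and configuration and returns an object exposing `start` (returning a `Concurrent::Event` that is set once data is available), `stop`, and `initialized?`. A minimal sketch of a custom data source built on that assumption follows; the class name and the canned flag are hypothetical, not part of this patch:

    require "concurrent/atomics"
    require "ldclient-rb"

    # Serves a single hard-coded flag and never contacts LaunchDarkly.
    class CannedDataSource
      def initialize(feature_store)
        @feature_store = feature_store
        @ready = Concurrent::Event.new
      end

      def start
        flag = { key: "canned", on: true, fallthrough: { variation: 0 }, variations: [ true ] }
        @feature_store.init({ LaunchDarkly::FEATURES => { canned: flag } })
        @ready.set  # data is available immediately
        @ready
      end

      def stop; end

      def initialized?
        true
      end
    end

    factory = lambda { |sdk_key, config| CannedDataSource.new(config.feature_store) }
    config = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory)
    client = LaunchDarkly::LDClient.new("sdk-key", config)
    client.variation("canned", { key: "user-key" }, false)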
diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb new file mode 100644 index 00000000..c163d385 --- /dev/null +++ b/spec/file_data_source_spec.rb @@ -0,0 +1,167 @@ +require "spec_helper" +require "tempfile" + +describe LaunchDarkly::FileDataSource do + let(:full_flag_1_key) { "flag1" } + let(:flag_value_1_key) { "flag2" } + let(:all_flag_keys) { [ full_flag_1_key, flag_value_1_key ] } + let(:full_segment_1_key) { "seg1" } + let(:all_segment_keys) { [ full_segment_1_key ] } + + let(:flag_only_json) { <<-EOF + { + "flags": { + "flag1": { + "key": "flag1", + "on": true + } + } + } +EOF +} + + let(:all_properties_json) { <<-EOF + { + "flags": { + "flag1": { + "key": "flag1", + "on": true + } + }, + "flagValues": { + "flag2": "value2" + }, + "segments": { + "seg1": { + "key": "seg1", + "include": ["user1"] + } + } + } +EOF + } + + let(:all_properties_yaml) { <<-EOF +--- +flags: + flag1: + key: flag1 + "on": true +flagValues: + flag2: value2 +segments: + seg1: + key: seg1 + include: ["user1"] +EOF + } + + let(:bad_file_path) { "no-such-file" } + + before do + @config = LaunchDarkly::Config.new + @store = @config.feature_store + end + + def make_temp_file(content) + file = Tempfile.new('flags') + IO.write(file, content) + file + end + + def with_data_source(options) + factory = LaunchDarkly::FileDataSource.factory(options) + ds = factory.call('', @config) + begin + yield ds + ensure + ds.stop + end + end + + it "doesn't load flags prior to start" do + file = make_temp_file('{"flagValues":{"key":"value"}}') + with_data_source({ paths: [ file.path ] }) do |ds| + expect(@store.initialized?).to eq(false) + expect(@store.all(LaunchDarkly::FEATURES)).to eq({}) + expect(@store.all(LaunchDarkly::SEGMENTS)).to eq({}) + end + end + + it "loads flags on start - from JSON" do + file = make_temp_file(all_properties_json) + with_data_source({ paths: [ file.path ] }) do |ds| + ds.start + expect(@store.initialized?).to eq(true) + expect(@store.all(LaunchDarkly::FEATURES).keys).to eq(all_flag_keys) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq(all_segment_keys) + end + end + + it "loads flags on start - from YAML" do + file = make_temp_file(all_properties_yaml) + with_data_source({ paths: [ file.path ] }) do |ds| + ds.start + expect(@store.initialized?).to eq(true) + expect(@store.all(LaunchDarkly::FEATURES).keys).to eq(all_flag_keys) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq(all_segment_keys) + end + end + + it "sets start event and initialized on successful load" do + file = make_temp_file(all_properties_json) + with_data_source({ paths: [ file.path ] }) do |ds| + event = ds.start + expect(event.set?).to eq(true) + expect(ds.initialized?).to eq(true) + end + end + + it "sets start event and does not set initialized on unsuccessful load" do + with_data_source({ paths: [ bad_file_path ] }) do |ds| + event = ds.start + expect(event.set?).to eq(true) + expect(ds.initialized?).to eq(false) + end + end + + it "does not reload modified file if auto-update is off" do + file = make_temp_file(flag_only_json) + + with_data_source({ paths: [ file.path ] }) do |ds| + event = ds.start + expect(event.set?).to eq(true) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) + + IO.write(file, all_properties_json) + sleep(0.5) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) + end + end + + it "reloads modified file if auto-update is on" do + file = make_temp_file(flag_only_json) + + with_data_source({ auto_update: true, paths: [ file.path ] }) do |ds| + event = 
ds.start + expect(event.set?).to eq(true) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) + + sleep(1) + IO.write(file, all_properties_json) + + max_time = 10 + ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys } + expect(ok).to eq(true), "Waited #{max_time}s after modifying file and it did not reload" + end + end + + def wait_for_condition(max_time) + deadline = Time.now + max_time + while Time.now < deadline + return true if yield + sleep(0.1) + end + false + end +end From 22ebdeddf21c3d7cf9602add1442e934ead6b43d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 11:03:21 -0700 Subject: [PATCH 024/135] add poll interval param, tolerate single file path string, add doc comments --- lib/ldclient-rb/file_data_source.rb | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 65ba0735..c61ddcf9 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -80,6 +80,20 @@ module LaunchDarkly # duplicate key-- it will not load flags from any of the files. # class FileDataSource + # + # Returns a factory for the file data source component. + # + # @param options [Hash] the configuration options + # @option options [Array] :paths The paths of the source files for loading flag data. These + # may be absolute paths or relative to the current working directory. + # @option options [Boolean] :auto_update True if the data source should watch for changes to + # the source file(s) and reload flags whenever there is a change. Note that auto-updating + # will only work if all of the files you specified have valid directory paths at startup time. + # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for + # file modifications - used only if auto_update is true. On Linux and Mac platforms, you do + # not need to set this as there is a native OS mechanism for detecting file changes; on other + # platforms, the default interval is one second. + # def self.factory(options={}) return Proc.new do |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) @@ -92,7 +106,11 @@ def initialize(feature_store, logger, options={}) @feature_store = feature_store @logger = logger @paths = options[:paths] || [] + if @paths.is_a? String + @paths = [ @paths ] + end @auto_update = options[:auto_update] + @poll_interval = options[:poll_interval] @initialized = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new end @@ -196,7 +214,11 @@ def start_listener resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s } path_set = resolved_paths.to_set dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq - l = Listen.to(*dir_paths) do |modified, added, removed| + opts = {} + if !@poll_interval.nil? + opts[:latency] = @poll_interval + end + l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed if paths.any? 
{ |p| path_set.include?(p) } load_all From b864390a2079c6588e3fae0d8f8cfce359136cb6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 12:02:29 -0700 Subject: [PATCH 025/135] make listen dependency optional --- ldclient-rb.gemspec | 2 +- lib/ldclient-rb/file_data_source.rb | 23 ++++++++++++++++++++--- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 9e7d5d04..0b8f4f9d 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -29,6 +29,7 @@ Gem::Specification.new do |spec| spec.add_development_dependency "rake", "~> 10.0" spec.add_development_dependency "rspec_junit_formatter", "~> 0.3.0" spec.add_development_dependency "timecop", "~> 0.9.1" + spec.add_development_dependency "listen", "~> 3.0" # see file_data_source.rb spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "faraday", [">= 0.9", "< 2"] @@ -40,5 +41,4 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "hashdiff", "~> 0.2" spec.add_runtime_dependency "http_tools", '~> 0.4.5' spec.add_runtime_dependency "socketry", "~> 0.5.1" - spec.add_runtime_dependency "listen", "~> 3.0" end diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index c61ddcf9..833d6ec3 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -1,10 +1,21 @@ require 'concurrent/atomics' require 'json' require 'yaml' -require 'listen' require 'pathname' module LaunchDarkly + # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the + # file data source or who don't need auto-updating, we only enable auto-update if the 'listen' + # gem has been provided by the host app. + @@have_listen = false + begin + require 'listen' + @@have_listen = true + rescue + end + def self.can_watch_files? + @@have_listen + end # # Provides a way to use local files as a source of feature flag state. This would typically be @@ -87,8 +98,10 @@ class FileDataSource # @option options [Array] :paths The paths of the source files for loading flag data. These # may be absolute paths or relative to the current working directory. # @option options [Boolean] :auto_update True if the data source should watch for changes to - # the source file(s) and reload flags whenever there is a change. Note that auto-updating - # will only work if all of the files you specified have valid directory paths at startup time. + # the source file(s) and reload flags whenever there is a change. In order to use this + # feature, you must install the 'listen' gem - it is not included by default to avoid adding + # unwanted dependencies to the SDK. Note that auto-updating will only work if all of the files + # you specified have valid directory paths at startup time. # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true. On Linux and Mac platforms, you do # not need to set this as there is a native OS mechanism for detecting file changes; on other @@ -110,6 +123,10 @@ def initialize(feature_store, logger, options={}) @paths = [ @paths ] end @auto_update = options[:auto_update] + if @auto_update && !LaunchDarkly::can_watch_files? + @logger.error { "[LDClient] To use the auto_update option for FileDataSource, you must install the 'listen' gem." 
} + @auto_update = false + end @poll_interval = options[:poll_interval] @initialized = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new From 789b5a4b54de8d84802af0579bacabbd07f92169 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 12:04:07 -0700 Subject: [PATCH 026/135] readme --- README.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/README.md b/README.md index 1790b2d4..ead2bb6b 100644 --- a/README.md +++ b/README.md @@ -121,6 +121,10 @@ else end ``` +Using flag data from a file +--------------------------- +For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.rb`](https://github.com/launchdarkly/ruby-client/blob/master/lib/ldclient-rb/file_data_source.rb) for more details. + Learn more ----------- From 31a62c59a8f2209dbd758ca27fe113825b2a2943 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 31 Oct 2018 13:20:02 -0700 Subject: [PATCH 027/135] fix key handling and client integration, add tests --- lib/ldclient-rb/file_data_source.rb | 2 +- lib/ldclient-rb/ldclient.rb | 2 +- spec/file_data_source_spec.rb | 46 ++++++++++++++++++++++++++--- 3 files changed, 44 insertions(+), 6 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 833d6ec3..10588b5d 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -215,7 +215,7 @@ def add_item(all_data, kind, item) if !items[item[:key]].nil? raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" end - items[item[:key]] = item + items[item[:key].to_sym] = item end def make_flag_with_value(key, value) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 94c24229..f8a75780 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -42,7 +42,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) if @config.update_processor @update_processor = @config.update_processor else - factory = @config.update_processor || self.method(:create_default_update_processor) + factory = @config.update_processor_factory || self.method(:create_default_update_processor) @update_processor = factory.call(sdk_key, config) end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index c163d385..cf5d52ad 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -3,17 +3,23 @@ describe LaunchDarkly::FileDataSource do let(:full_flag_1_key) { "flag1" } + let(:full_flag_1_value) { "on" } let(:flag_value_1_key) { "flag2" } - let(:all_flag_keys) { [ full_flag_1_key, flag_value_1_key ] } + let(:flag_value_1) { "value2" } + let(:all_flag_keys) { [ full_flag_1_key.to_sym, flag_value_1_key.to_sym ] } let(:full_segment_1_key) { "seg1" } - let(:all_segment_keys) { [ full_segment_1_key ] } + let(:all_segment_keys) { [ full_segment_1_key.to_sym ] } let(:flag_only_json) { <<-EOF { "flags": { "flag1": { "key": "flag1", - "on": true + "on": true, + "fallthrough": { + "variation": 2 + }, + "variations": [ "fall", "off", "on" ] } } } @@ -25,7 +31,11 @@ "flags": { "flag1": { "key": "flag1", - "on": true + "on": true, + "fallthrough": { + "variation": 2 + }, + "variations": [ "fall", "off", "on" ] } }, "flagValues": { @@ -156,6 +166,34 @@ def with_data_source(options) end end + it "evaluates simplified flag with client as expected" do + file = make_temp_file(all_properties_json) + factory = LaunchDarkly::FileDataSource.factory({ 
paths: file.path }) + config = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory) + client = LaunchDarkly::LDClient.new('sdkKey', config) + + begin + value = client.variation(flag_value_1_key, { key: 'user' }, '') + expect(value).to eq(flag_value_1) + ensure + client.close + end + end + + it "evaluates full flag with client as expected" do + file = make_temp_file(all_properties_json) + factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) + config = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory) + client = LaunchDarkly::LDClient.new('sdkKey', config) + + begin + value = client.variation(full_flag_1_key, { key: 'user' }, '') + expect(value).to eq(full_flag_1_value) + ensure + client.close + end + end + def wait_for_condition(max_time) deadline = Time.now + max_time while Time.now < deadline From 778cb6dc5e4c2c367ccd2c1c7399a1338ec5196a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 12:08:46 -0700 Subject: [PATCH 028/135] debugging --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..58c754ba 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -22,7 +22,7 @@ ruby-docker-template: &ruby-docker-template - run: gem install bundler - run: bundle install - run: mkdir ./rspec - - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec + - run: LISTEN_GEM_DEBUGGING=2 bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: path: ./rspec - store_artifacts: From 20dbef28105da9a1eca453ee86f2ff90267f4793 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 12:13:58 -0700 Subject: [PATCH 029/135] debugging --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 58c754ba..05bc4746 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -22,7 +22,7 @@ ruby-docker-template: &ruby-docker-template - run: gem install bundler - run: bundle install - run: mkdir ./rspec - - run: LISTEN_GEM_DEBUGGING=2 bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec + - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: path: ./rspec - store_artifacts: @@ -93,5 +93,5 @@ jobs: do rvm use $i; cp "Gemfile.lock.$i" Gemfile.lock; - bundle exec rspec spec; + LISTEN_GEM_DEBUGGING=2 bundle exec rspec spec; done From f1c00b1616a6767dd350c44497ba71d6b03e4bff Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 15:47:10 -0700 Subject: [PATCH 030/135] add fallback polling logic, fix tests --- lib/ldclient-rb/file_data_source.rb | 85 ++++++++++++++++++++++------- spec/file_data_source_spec.rb | 23 +++++++- 2 files changed, 86 insertions(+), 22 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 10588b5d..ae19bea8 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -13,7 +13,7 @@ module LaunchDarkly @@have_listen = true rescue end - def self.can_watch_files? + def self.have_listen? @@have_listen end @@ -23,10 +23,10 @@ def self.can_watch_files? # actual LaunchDarkly connection. 
# # To use this component, call `FileDataSource.factory`, and store its return value in the - # `update_processor_factory` property of your LaunchDarkly client configuration. In the options + # `update_processor_class` property of your LaunchDarkly client configuration. In the options # to `factory`, set `paths` to the file path(s) of your data file(s): # - # config.update_processor_factory = FileDataSource.factory(paths: [ myFilePath ]) + # config.update_processor_class = FileDataSource.factory(paths: [ myFilePath ]) # # This will cause the client not to connect to LaunchDarkly to get feature flags. The # client may still make network connections to send analytics events, unless you have disabled @@ -98,14 +98,15 @@ class FileDataSource # @option options [Array] :paths The paths of the source files for loading flag data. These # may be absolute paths or relative to the current working directory. # @option options [Boolean] :auto_update True if the data source should watch for changes to - # the source file(s) and reload flags whenever there is a change. In order to use this - # feature, you must install the 'listen' gem - it is not included by default to avoid adding - # unwanted dependencies to the SDK. Note that auto-updating will only work if all of the files - # you specified have valid directory paths at startup time. + # the source file(s) and reload flags whenever there is a change. Note that the default + # implementation of this feature is based on polling the filesystem, which may not perform + # well. If you install the 'listen' gem (not included by default, to avoid adding unwanted + # dependencies to the SDK), its native file watching mechanism will be used instead. Note + # that auto-updating will only work if all of the files you specified have valid directory + # paths at startup time. # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for - # file modifications - used only if auto_update is true. On Linux and Mac platforms, you do - # not need to set this as there is a native OS mechanism for detecting file changes; on other - # platforms, the default interval is one second. + # file modifications - used only if auto_update is true, and if the native file-watching + # mechanism from 'listen' is not being used. # def self.factory(options={}) return Proc.new do |sdk_key, config| @@ -123,11 +124,8 @@ def initialize(feature_store, logger, options={}) @paths = [ @paths ] end @auto_update = options[:auto_update] - if @auto_update && !LaunchDarkly::can_watch_files? - @logger.error { "[LDClient] To use the auto_update option for FileDataSource, you must install the 'listen' gem." } - @auto_update = false - end - @poll_interval = options[:poll_interval] + @use_listen = @auto_update && LaunchDarkly.have_listen? && !options[:force_polling] # force_polling is used only for tests + @poll_interval = options[:poll_interval] || 1 @initialized = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new end @@ -229,12 +227,17 @@ def make_flag_with_value(key, value) def start_listener resolved_paths = @paths.map { |p| Pathname.new(File.absolute_path(p)).realpath.to_s } + if @use_listen + start_listener_with_listen_gem(resolved_paths) + else + FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all)) + end + end + + def start_listener_with_listen_gem(resolved_paths) path_set = resolved_paths.to_set dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq - opts = {} - if !@poll_interval.nil? 
- opts[:latency] = @poll_interval - end + opts = { latency: @poll_interval } l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed if paths.any? { |p| path_set.include?(p) } @@ -244,5 +247,49 @@ def start_listener l.start l end + + # + # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available. + # + class FileDataSourcePoller + def initialize(resolved_paths, interval, reloader) + @stopped = Concurrent::AtomicBoolean.new(false) + get_file_times = Proc.new do + ret = {} + resolved_paths.each do |path| + begin + ret[path] = File.mtime(path) + rescue + ret[path] = nil + end + end + ret + end + last_times = get_file_times.call + @thread = Thread.new do + while true + sleep interval + break if @stopped.value + new_times = get_file_times.call + changed = false + last_times.each do |path, old_time| + new_time = new_times[path] + if !new_time.nil? && new_time != old_time + changed = true + break + end + end + if changed + reloader.call + end + end + end + end + + def stop + @stopped.make_true + @thread.run # wakes it up if it's sleeping + end + end end end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index cf5d52ad..5267a5f2 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -71,10 +71,18 @@ before do @config = LaunchDarkly::Config.new @store = @config.feature_store + @tmp_dir = Dir.mktmpdir + end + + after do + FileUtils.remove_dir(@tmp_dir) end def make_temp_file(content) - file = Tempfile.new('flags') + # Note that we don't create our files in the default temp file directory, but rather in an empty directory + # that we made. That's because (depending on the platform) the temp file directory may contain huge numbers + # of files, which can make the file watcher perform poorly enough to break the tests. 
+ file = Tempfile.new('flags', @tmp_dir) IO.write(file, content) file end @@ -149,10 +157,11 @@ def with_data_source(options) end end - it "reloads modified file if auto-update is on" do + def test_auto_reload(options) file = make_temp_file(flag_only_json) + options[:paths] = [ file.path ] - with_data_source({ auto_update: true, paths: [ file.path ] }) do |ds| + with_data_source(options) do |ds| event = ds.start expect(event.set?).to eq(true) expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) @@ -166,6 +175,14 @@ def with_data_source(options) end end + it "reloads modified file if auto-update is on" do + test_auto_reload({ auto_update: true }) + end + + it "reloads modified file in polling mode" do + test_auto_reload({ auto_update: true, force_polling: true, poll_interval: 0.1 }) + end + it "evaluates simplified flag with client as expected" do file = make_temp_file(all_properties_json) factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) From 198b843bba00fe92e9cfa9ef658c2649ce09be2f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 16:02:22 -0700 Subject: [PATCH 031/135] rm debugging --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 05bc4746..df9dac51 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -93,5 +93,5 @@ jobs: do rvm use $i; cp "Gemfile.lock.$i" Gemfile.lock; - LISTEN_GEM_DEBUGGING=2 bundle exec rspec spec; + bundle exec rspec spec; done From c5d1823372044bd067049fed90fb8e1f13428d94 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:25:16 -0700 Subject: [PATCH 032/135] debugging --- spec/file_data_source_spec.rb | 2 ++ 1 file changed, 2 insertions(+) diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 5267a5f2..194ebc2c 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -68,6 +68,8 @@ let(:bad_file_path) { "no-such-file" } + Thread.report_on_exception = true + before do @config = LaunchDarkly::Config.new @store = @config.feature_store From 9baffe35cf84bbfdbf77f01989437620f4124bc7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:38:15 -0700 Subject: [PATCH 033/135] debugging --- .circleci/config.yml | 2 +- spec/file_data_source_spec.rb | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..05bc4746 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -93,5 +93,5 @@ jobs: do rvm use $i; cp "Gemfile.lock.$i" Gemfile.lock; - bundle exec rspec spec; + LISTEN_GEM_DEBUGGING=2 bundle exec rspec spec; done diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 194ebc2c..5267a5f2 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -68,8 +68,6 @@ let(:bad_file_path) { "no-such-file" } - Thread.report_on_exception = true - before do @config = LaunchDarkly::Config.new @store = @config.feature_store From 4d8121592756df99aefbef4c0aeb78032f544046 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:47:03 -0700 Subject: [PATCH 034/135] debugging --- lib/ldclient-rb/file_data_source.rb | 2 ++ spec/file_data_source_spec.rb | 1 + 2 files changed, 3 insertions(+) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index ae19bea8..de8ef34e 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -238,8 +238,10 @@ def 
start_listener_with_listen_gem(resolved_paths) path_set = resolved_paths.to_set dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq opts = { latency: @poll_interval } + puts('*** starting listener') l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed + puts('*** got listener notification: #{paths}') if paths.any? { |p| path_set.include?(p) } load_all end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 5267a5f2..f06c19f9 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -168,6 +168,7 @@ def test_auto_reload(options) sleep(1) IO.write(file, all_properties_json) + puts('*** modified the file') max_time = 10 ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys } From 30d0cd270acf6518555e126bad28c689177ebb1d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:48:09 -0700 Subject: [PATCH 035/135] debugging --- lib/ldclient-rb/file_data_source.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index de8ef34e..9a63e56b 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -241,7 +241,7 @@ def start_listener_with_listen_gem(resolved_paths) puts('*** starting listener') l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed - puts('*** got listener notification: #{paths}') + puts("*** got listener notification: #{paths}") if paths.any? { |p| path_set.include?(p) } load_all end From 8cb2ed9adc1a7ac486f077eeb37d0100fa9d9bb5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 1 Nov 2018 17:51:27 -0700 Subject: [PATCH 036/135] comment correction --- lib/ldclient-rb/file_data_source.rb | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 9a63e56b..71f3a8be 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -23,10 +23,11 @@ def self.have_listen? # actual LaunchDarkly connection. # # To use this component, call `FileDataSource.factory`, and store its return value in the - # `update_processor_class` property of your LaunchDarkly client configuration. In the options + # `update_processor_factory` property of your LaunchDarkly client configuration. In the options # to `factory`, set `paths` to the file path(s) of your data file(s): # - # config.update_processor_class = FileDataSource.factory(paths: [ myFilePath ]) + # factory = FileDataSource.factory(paths: [ myFilePath ]) + # config = LaunchDarkly::Config.new(update_processor_factory: factory) # # This will cause the client not to connect to LaunchDarkly to get feature flags. The # client may still make network connections to send analytics events, unless you have disabled From a10f973ad98f033bd480e2ca9568041e826cd02b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:15:29 -0700 Subject: [PATCH 037/135] documentation --- lib/ldclient-rb/file_data_source.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 71f3a8be..721eff75 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -107,7 +107,7 @@ class FileDataSource # paths at startup time. 
# @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true, and if the native file-watching - # mechanism from 'listen' is not being used. + # mechanism from 'listen' is not being used. The default value is 1 second. # def self.factory(options={}) return Proc.new do |sdk_key, config| From 16cf9c086c06344d352b6e85bb6e02449af44cc1 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:15:54 -0700 Subject: [PATCH 038/135] always use YAML parser --- lib/ldclient-rb/file_data_source.rb | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 721eff75..a607923d 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -190,11 +190,10 @@ def load_file(path, all_data) end def parse_content(content) - if content.strip.start_with?("{") - JSON.parse(content, symbolize_names: true) - else - symbolize_all_keys(YAML.load(content)) - end + # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while + # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least + # for all the samples of actual flag data that we've tested). + symbolize_all_keys(YAML.load(content)) end def symbolize_all_keys(value) From 27d954e7f5f84ba4b87573ff80e9304a4eedab3b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:21:29 -0700 Subject: [PATCH 039/135] report internal error that shouldn't happen --- lib/ldclient-rb/file_data_source.rb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index a607923d..fae68123 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -209,7 +209,8 @@ def symbolize_all_keys(value) end def add_item(all_data, kind, item) - items = all_data[kind] || {} + items = all_data[kind] + raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash if !items[item[:key]].nil? 
raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" end From fd308a9de3142b8fd493a995411d320a42664932 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:24:28 -0700 Subject: [PATCH 040/135] add test for multiple files --- spec/file_data_source_spec.rb | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index f06c19f9..c0af4c67 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -24,7 +24,19 @@ } } EOF -} + } + + let(:segment_only_json) { <<-EOF + { + "segments": { + "seg1": { + "key": "seg1", + "include": ["user1"] + } + } + } +EOF + } let(:all_properties_json) { <<-EOF { @@ -143,6 +155,16 @@ def with_data_source(options) end end + it "can load multiple files" do + file1 = make_temp_file(flag_only_json) + file2 = make_temp_file(segment_only_json) + with_data_source({ paths: [ file1.path, file2.path ] }) do |ds| + ds.start + expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([ full_flag_1_key.to_sym ]) + expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([ full_segment_1_key.to_sym ]) + end + end + it "does not reload modified file if auto-update is off" do file = make_temp_file(flag_only_json) From 1d016bfc9349000c8ddffce20b48634e1e20d6b3 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:26:10 -0700 Subject: [PATCH 041/135] fix duplicate key checking (string vs. symbol problem) --- lib/ldclient-rb/file_data_source.rb | 5 +++-- spec/file_data_source_spec.rb | 11 +++++++++++ 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index fae68123..aebd9709 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -211,10 +211,11 @@ def symbolize_all_keys(value) def add_item(all_data, kind, item) items = all_data[kind] raise ArgumentError, "Received unknown item kind #{kind} in add_data" if items.nil? # shouldn't be possible since we preinitialize the hash - if !items[item[:key]].nil? + key = item[:key].to_sym + if !items[key].nil? 
raise ArgumentError, "#{kind[:namespace]} key \"#{item[:key]}\" was used more than once" end - items[item[:key].to_sym] = item + items[key] = item end def make_flag_with_value(key, value) diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index c0af4c67..10e49e3c 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -160,11 +160,22 @@ def with_data_source(options) file2 = make_temp_file(segment_only_json) with_data_source({ paths: [ file1.path, file2.path ] }) do |ds| ds.start + expect(@store.initialized?).to eq(true) expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([ full_flag_1_key.to_sym ]) expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([ full_segment_1_key.to_sym ]) end end + it "does not allow duplicate keys" do + file1 = make_temp_file(flag_only_json) + file2 = make_temp_file(flag_only_json) + with_data_source({ paths: [ file1.path, file2.path ] }) do |ds| + ds.start + expect(@store.initialized?).to eq(false) + expect(@store.all(LaunchDarkly::FEATURES).keys).to eq([]) + end + end + it "does not reload modified file if auto-update is off" do file = make_temp_file(flag_only_json) From c3e66d35c64909084d6d879fa485497fddf6c4a4 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:44:09 -0700 Subject: [PATCH 042/135] Don't use 'listen' in JRuby 9.1 --- lib/ldclient-rb/file_data_source.rb | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index aebd9709..23834be4 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -99,12 +99,12 @@ class FileDataSource # @option options [Array] :paths The paths of the source files for loading flag data. These # may be absolute paths or relative to the current working directory. # @option options [Boolean] :auto_update True if the data source should watch for changes to - # the source file(s) and reload flags whenever there is a change. Note that the default - # implementation of this feature is based on polling the filesystem, which may not perform - # well. If you install the 'listen' gem (not included by default, to avoid adding unwanted - # dependencies to the SDK), its native file watching mechanism will be used instead. Note - # that auto-updating will only work if all of the files you specified have valid directory - # paths at startup time. + # the source file(s) and reload flags whenever there is a change. Auto-updating will only + # work if all of the files you specified have valid directory paths at startup time. + # Note that the default implementation of this feature is based on polling the filesystem, + # which may not perform well. If you install the 'listen' gem (not included by default, to + # avoid adding unwanted dependencies to the SDK), its native file watching mechanism will be + # used instead. However, 'listen' will not be used in JRuby 9.1 due to a known instability. # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true, and if the native file-watching # mechanism from 'listen' is not being used. The default value is 1 second. @@ -125,7 +125,15 @@ def initialize(feature_store, logger, options={}) @paths = [ @paths ] end @auto_update = options[:auto_update] - @use_listen = @auto_update && LaunchDarkly.have_listen? 
&& !options[:force_polling] # force_polling is used only for tests + if @auto_update && LaunchDarkly.have_listen? && !options[:force_polling] # force_polling is used only for tests + # We have seen unreliable behavior in the 'listen' gem in JRuby 9.1 (https://github.com/guard/listen/issues/449). + # Therefore, on that platform we'll fall back to file polling instead. + if defined?(JRUBY_VERSION) && JRUBY_VERSION.start_with?("9.1.") + @use_listen = false + else + @use_listen = true + end + end @poll_interval = options[:poll_interval] || 1 @initialized = Concurrent::AtomicBoolean.new(false) @ready = Concurrent::Event.new From 1a36fd86ab5b867ad265e89f13d9c8e839278b39 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 11:50:22 -0700 Subject: [PATCH 043/135] rm debugging --- .circleci/config.yml | 2 +- lib/ldclient-rb/file_data_source.rb | 2 -- spec/file_data_source_spec.rb | 1 - 3 files changed, 1 insertion(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 05bc4746..df9dac51 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -93,5 +93,5 @@ jobs: do rvm use $i; cp "Gemfile.lock.$i" Gemfile.lock; - LISTEN_GEM_DEBUGGING=2 bundle exec rspec spec; + bundle exec rspec spec; done diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 23834be4..1549f6ec 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -248,10 +248,8 @@ def start_listener_with_listen_gem(resolved_paths) path_set = resolved_paths.to_set dir_paths = resolved_paths.map{ |p| File.dirname(p) }.uniq opts = { latency: @poll_interval } - puts('*** starting listener') l = Listen.to(*dir_paths, opts) do |modified, added, removed| paths = modified + added + removed - puts("*** got listener notification: #{paths}") if paths.any? { |p| path_set.include?(p) } load_all end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 10e49e3c..60107e26 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -201,7 +201,6 @@ def test_auto_reload(options) sleep(1) IO.write(file, all_properties_json) - puts('*** modified the file') max_time = 10 ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys } From 78ba8150b1a486b2a568ff7ac59f8b589fdfe98e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 12:02:32 -0700 Subject: [PATCH 044/135] better error handling in poll thread --- lib/ldclient-rb/file_data_source.rb | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 1549f6ec..c5207afb 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -240,7 +240,7 @@ def start_listener if @use_listen start_listener_with_listen_gem(resolved_paths) else - FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all)) + FileDataSourcePoller.new(resolved_paths, @poll_interval, self.method(:load_all), @logger) end end @@ -262,14 +262,14 @@ def start_listener_with_listen_gem(resolved_paths) # Used internally by FileDataSource to track data file changes if the 'listen' gem is not available. 
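  # (FileDataSource#start_listener prefers the native file-watching support from the 'listen' gem when it
  # is present; this class is the fallback, and it simply compares File.mtime for each watched path once
  # per poll interval, calling the reloader when any modification time changes.)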
# class FileDataSourcePoller - def initialize(resolved_paths, interval, reloader) + def initialize(resolved_paths, interval, reloader, logger) @stopped = Concurrent::AtomicBoolean.new(false) get_file_times = Proc.new do ret = {} resolved_paths.each do |path| begin ret[path] = File.mtime(path) - rescue + rescue Errno::ENOENT ret[path] = nil end end @@ -280,17 +280,19 @@ def initialize(resolved_paths, interval, reloader) while true sleep interval break if @stopped.value - new_times = get_file_times.call - changed = false - last_times.each do |path, old_time| - new_time = new_times[path] - if !new_time.nil? && new_time != old_time - changed = true - break + begin + new_times = get_file_times.call + changed = false + last_times.each do |path, old_time| + new_time = new_times[path] + if !new_time.nil? && new_time != old_time + changed = true + break + end end - end - if changed - reloader.call + reloader.call if changed + rescue => exn + Util.log_exception(logger, "Unexpected exception in FileDataSourcePoller", exn) end end end From 38f534fd3b5968a7d6f75cf5f214be768f810f9f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 2 Nov 2018 12:51:09 -0700 Subject: [PATCH 045/135] don't use Thread.raise to stop PollingProcessor thread; add test for PollingProcessor.stop --- lib/ldclient-rb/polling.rb | 3 +- spec/polling_spec.rb | 81 ++++++++++++++++++++++++++------------ 2 files changed, 58 insertions(+), 26 deletions(-) diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 15965201..4ecd93f8 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -26,7 +26,8 @@ def start def stop if @stopped.make_true if @worker && @worker.alive? - @worker.raise "shutting down client" + @worker.run # causes the thread to wake up if it's currently in a sleep + @worker.join end @config.logger.info { "[LDClient] Polling connection stopped" } end diff --git a/spec/polling_spec.rb b/spec/polling_spec.rb index 8183b8c3..690147d0 100644 --- a/spec/polling_spec.rb +++ b/spec/polling_spec.rb @@ -3,10 +3,17 @@ describe LaunchDarkly::PollingProcessor do subject { LaunchDarkly::PollingProcessor } - let(:store) { LaunchDarkly::InMemoryFeatureStore.new } - let(:config) { LaunchDarkly::Config.new(feature_store: store) } let(:requestor) { double() } - let(:processor) { subject.new(config, requestor) } + + def with_processor(store) + config = LaunchDarkly::Config.new(feature_store: store) + processor = subject.new(config, requestor) + begin + yield processor + ensure + processor.stop + end + end describe 'successful request' do flag = { key: 'flagkey', version: 1 } @@ -22,47 +29,60 @@ it 'puts feature data in store' do allow(requestor).to receive(:request_all_data).and_return(all_data) - ready = processor.start - ready.wait - expect(store.get(LaunchDarkly::FEATURES, "flagkey")).to eq(flag) - expect(store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(segment) + store = LaunchDarkly::InMemoryFeatureStore.new + with_processor(store) do |processor| + ready = processor.start + ready.wait + expect(store.get(LaunchDarkly::FEATURES, "flagkey")).to eq(flag) + expect(store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(segment) + end end it 'sets initialized to true' do allow(requestor).to receive(:request_all_data).and_return(all_data) - ready = processor.start - ready.wait - expect(processor.initialized?).to be true - expect(store.initialized?).to be true + store = LaunchDarkly::InMemoryFeatureStore.new + with_processor(store) do |processor| + ready = processor.start + ready.wait + 
expect(processor.initialized?).to be true + expect(store.initialized?).to be true + end end end describe 'connection error' do it 'does not cause immediate failure, does not set initialized' do allow(requestor).to receive(:request_all_data).and_raise(StandardError.new("test error")) - ready = processor.start - finished = ready.wait(0.2) - expect(finished).to be false - expect(processor.initialized?).to be false - expect(store.initialized?).to be false + store = LaunchDarkly::InMemoryFeatureStore.new + with_processor(store) do |processor| + ready = processor.start + finished = ready.wait(0.2) + expect(finished).to be false + expect(processor.initialized?).to be false + expect(store.initialized?).to be false + end end end describe 'HTTP errors' do def verify_unrecoverable_http_error(status) allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - ready = processor.start - finished = ready.wait(0.2) - expect(finished).to be true - expect(processor.initialized?).to be false + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + ready = processor.start + finished = ready.wait(0.2) + expect(finished).to be true + expect(processor.initialized?).to be false + end end def verify_recoverable_http_error(status) allow(requestor).to receive(:request_all_data).and_raise(LaunchDarkly::UnexpectedResponseError.new(status)) - ready = processor.start - finished = ready.wait(0.2) - expect(finished).to be false - expect(processor.initialized?).to be false + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + ready = processor.start + finished = ready.wait(0.2) + expect(finished).to be false + expect(processor.initialized?).to be false + end end it 'stops immediately for error 401' do @@ -85,5 +105,16 @@ def verify_recoverable_http_error(status) verify_recoverable_http_error(503) end end -end + describe 'stop' do + it 'stops promptly rather than continuing to wait for poll interval' do + with_processor(LaunchDarkly::InMemoryFeatureStore.new) do |processor| + sleep(1) # somewhat arbitrary, but should ensure that it has started polling + start_time = Time.now + processor.stop + end_time = Time.now + expect(end_time - start_time).to be <(LaunchDarkly::Config.default_poll_interval - 5) + end + end + end +end From 5a875c8db7fff721c60040334c6da2df1133c9d7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 10 Dec 2018 12:24:24 -0800 Subject: [PATCH 046/135] test on most recent patch version of each Ruby minor version --- .circleci/config.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..7fe98354 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -32,22 +32,22 @@ jobs: test-2.2: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.2.9-jessie + - image: circleci/ruby:2.2.10-jessie - image: redis test-2.3: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.3.6-jessie + - image: circleci/ruby:2.3.7-jessie - image: redis test-2.4: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.4.4-stretch + - image: circleci/ruby:2.4.5-stretch - image: redis test-2.5: <<: *ruby-docker-template docker: - - image: circleci/ruby:2.5.1-stretch + - image: circleci/ruby:2.5.3-stretch - image: redis test-jruby-9.2: <<: *ruby-docker-template From cf7d9002e1adac2335d50ccb20b278dfedce4ad6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 17:47:40 -0800 Subject: [PATCH 047/135] factor common logic out of 
RedisFeatureStore, add integrations module --- lib/ldclient-rb.rb | 2 + lib/ldclient-rb/in_memory_store.rb | 2 + lib/ldclient-rb/integrations.rb | 257 +++++++++++++++++++++++ lib/ldclient-rb/interfaces.rb | 102 +++++++++ lib/ldclient-rb/redis_store.rb | 327 +++++++++++++---------------- spec/feature_store_spec_base.rb | 2 +- spec/integrations_helpers_spec.rb | 276 ++++++++++++++++++++++++ spec/redis_feature_store_spec.rb | 28 +-- 8 files changed, 803 insertions(+), 193 deletions(-) create mode 100644 lib/ldclient-rb/integrations.rb create mode 100644 lib/ldclient-rb/interfaces.rb create mode 100644 spec/integrations_helpers_spec.rb diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index d3ee6ffc..a1d7ffd9 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -1,4 +1,5 @@ require "ldclient-rb/version" +require "ldclient-rb/interfaces" require "ldclient-rb/util" require "ldclient-rb/evaluation" require "ldclient-rb/flags_state" @@ -17,5 +18,6 @@ require "ldclient-rb/event_summarizer" require "ldclient-rb/events" require "ldclient-rb/redis_store" +require "ldclient-rb/integrations" require "ldclient-rb/requestor" require "ldclient-rb/file_data_source" diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index e3e85879..4814c85d 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -20,6 +20,8 @@ module LaunchDarkly # streaming API. # class InMemoryFeatureStore + include LaunchDarkly::Interfaces::FeatureStore + def initialize @items = Hash.new @lock = Concurrent::ReadWriteLock.new diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb new file mode 100644 index 00000000..21910b09 --- /dev/null +++ b/lib/ldclient-rb/integrations.rb @@ -0,0 +1,257 @@ +require "concurrent/atomics" + +module LaunchDarkly + module Integrations + module Redis + # + # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of + # Redis running at `localhost` with its default port. + # + # @return [String] the default Redis URL + # + def default_redis_url + 'redis://localhost:6379/0' + end + + # + # Default value for the `prefix` option for {new_feature_store}. + # + # @return [String] the default key prefix + # + def default_prefix + 'launchdarkly' + end + + # + # Creates a Redis-backed persistent feature store. + # + # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, + # put the object returned by this method into the `feature_store` property of your + # client configuration ({LaunchDarkly::Config}). 
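+      #
+      # A usage sketch (the URL below is a placeholder for your own Redis instance):
+      #
+      #     store = LaunchDarkly::Integrations::Redis.new_feature_store(redis_url: "redis://my-redis:6379/0")
+      #     config = LaunchDarkly::Config.new(feature_store: store)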
+ # + # @param opts [Hash] the configuration options + # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) + # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) + # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :max_connections size of the Redis connection pool + # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # @option opts [Object] :pool custom connection pool, if desired + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # + def new_feature_store(opts) + return RedisFeatureStore.new(opts) + end + end + + module Helpers + # + # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} + # pattern that delegates part of its behavior to another object, while providing optional caching + # behavior and other logic that would otherwise be repeated in every feature store implementation. + # This makes it easier to create new database integrations by implementing only the database-specific + # logic. + # + # The mixin {FeatureStoreCore} describes the methods that need to be supported by the inner + # implementation object. + # + class CachingStoreWrapper + include LaunchDarkly::Interfaces::FeatureStore + + INITED_CACHE_KEY = "$inited" + + private_constant :INITED_CACHE_KEY + + # + # Creates a new store wrapper instance. + # + # @param core [Object] an object that implements the {FeatureStoreCore} methods + # @param opts [Hash] a hash that may include cache-related options; all others will be ignored + # @option opts [Float] :expiration_seconds (15) cache TTL; zero means no caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # + def initialize(core, opts) + @core = core + + expiration_seconds = opts[:expiration] || 15 + capacity = opts[:capacity] || 1000 + if expiration_seconds > 0 + @cache = ExpiringCache.new(capacity, expiration_seconds) + else + @cache = nil + end + + @inited = Concurrent::AtomicBoolean.new(false) + end + + def init(all_data) + @core.init_internal(all_data) + @inited.make_true + + if !@cache.nil? + @cache.clear + all_data.each do |kind, items| + @cache[kind] = items_if_not_deleted(items) + items.each do |key, item| + @cache[item_cache_key(kind, key)] = [item] + end + end + end + end + + def get(kind, key) + if !@cache.nil? + cache_key = item_cache_key(kind, key) + cached = @cache[cache_key] # note, item entries in the cache are wrapped in an array so we can cache nil values + return item_if_not_deleted(cached[0]) if !cached.nil? + end + + item = @core.get_internal(kind, key) + + if !@cache.nil? + @cache[cache_key] = [item] + end + + item_if_not_deleted(item) + end + + def all(kind) + if !@cache.nil? + items = @cache[all_cache_key(kind)] + return items if !items.nil? + end + + items = items_if_not_deleted(@core.get_all_internal(kind)) + @cache[all_cache_key(kind)] = items if !@cache.nil? + items + end + + def upsert(kind, item) + new_state = @core.upsert_internal(kind, item) + + if !@cache.nil? 
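+            # update the cached copy of this item, and drop the cached "all items" list for this kind since it is now stale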
+ @cache[item_cache_key(kind, item[:key])] = [new_state] + @cache.delete(all_cache_key(kind)) + end + end + + def delete(kind, key, version) + upsert(kind, { key: key, version: version, deleted: true }) + end + + def initialized? + return true if @inited.value + + if @cache.nil? + result = @core.initialized_internal? + else + result = @cache[INITED_CACHE_KEY] + if result.nil? + result = @core.initialized_internal? + @cache[INITED_CACHE_KEY] = result + end + end + + @inited.make_true if result + result + end + + def stop + @core.stop + end + + private + + def all_cache_key(kind) + kind + end + + def item_cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + def item_if_not_deleted(item) + (item.nil? || item[:deleted]) ? nil : item + end + + def items_if_not_deleted(items) + items.select { |key, item| !item[:deleted] } + end + end + + # + # This module describes the methods that you must implement on your own object in order to + # use {CachingStoreWrapper}. + # + module FeatureStoreCore + # + # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, + # but the wrapper will take care of updating the cache if caching is enabled. + # + # @param all_data [Hash] a hash where each key is one of the data kind objects, and each + # value is in turn a hash of string keys to entities + # + def init_internal(all_data) + end + + # + # Retrieves a single entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#get} + # except that 1. the wrapper will take care of filtering out deleted entities by checking the + # `:deleted` property, so you can just return exactly what was in the data store, and 2. the + # wrapper will take care of checking and updating the cache if caching is enabled. + # + # @param kind [Object] the kind of entity to get + # @param key [String] the unique key of the entity to get + # @return [Hash] the entity; nil if the key was not found + # + def get_internal(kind, key) + end + + # + # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::FeatureStore#all} + # except that 1. the wrapper will take care of filtering out deleted entities by checking the + # `:deleted` property, so you can just return exactly what was in the data store, and 2. the + # wrapper will take care of checking and updating the cache if caching is enabled. + # + # @param kind [Object] the kind of entity to get + # @return [Hash] a hash where each key is the entity's `:key` property and each value + # is the entity + # + def get_all_internal(kind) + end + + # + # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#upsert} + # except that 1. the wrapper will take care of updating the cache if caching is enabled, and 2. + # the method is expected to return the final state of the entity (i.e. either the `item` + # parameter if the update succeeded, or the previously existing entity in the store if the + # update failed; this is used for the caching logic). + # + # @param kind [Object] the kind of entity to add or update + # @param item [Hash] the entity to add or update + # @return [Hash] the entity as it now exists in the store after the update + # + def upsert_internal(kind, item) + end + + # + # Checks whether this store has been initialized. 
This is the same as + # {LaunchDarkly::Interfaces::FeatureStore#initialized?} except that there is less of a concern + # for efficiency, because the wrapper will use caching and memoization in order to call the method + # as little as possible. + # + # @return [Boolean] true if the store is in an initialized state + # + def initialized_internal? + end + + # + # Performs any necessary cleanup to shut down the store when the client is being shut down. + # + def stop + end + end + end + end +end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb new file mode 100644 index 00000000..09e7797d --- /dev/null +++ b/lib/ldclient-rb/interfaces.rb @@ -0,0 +1,102 @@ + +module LaunchDarkly + module Interfaces + # + # Mixin that defines the required methods of a feature store implementation. The LaunchDarkly + # client uses the feature store to persist feature flags and related objects received from + # the LaunchDarkly service. Implementations must support concurrent access and updates. + # For more about how feature stores can be used, see: + # [Using a persistent feature store](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # + # An entity that can be stored in a feature store is a hash that can be converted to and from + # JSON, and that has at a minimum the following properties: `:key`, a string that is unique + # among entities of the same kind; `:version`, an integer that is higher for newer data; + # `:deleted`, a boolean (optional, defaults to false) that if true means this is a + # placeholder for a deleted entity. + # + # Examples of a "kind" are feature flags and segments; each of these is associated with an + # object such as {LaunchDarkly::FEATURES} and {LaunchDarkly::SEGMENTS}. The "kind" objects are + # hashes with a single property, `:namespace`, which is a short string unique to that kind. + # + # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations + # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new + # implementation, see {LaunchDarkly::Integrations::Helpers} for tools that can make this task + # simpler. + # + module FeatureStore + # + # Initializes (or re-initializes) the store with the specified set of entities. Any + # existing entries will be removed. Implementations can assume that this data set is up to + # date-- there is no need to perform individual version comparisons between the existing + # objects and the supplied features. + # + # @param all_data [Hash] a hash where each key is one of the data kind objects, and each + # value is in turn a hash of string keys to entities + # + def init(all_data) + end + + # + # Returns the entity to which the specified key is mapped, if any. + # + # @param kind [Object] the kind of entity to get + # @param key [String] the unique key of the entity to get + # @return [Hash] the entity; nil if the key was not found, or if the stored entity's + # `:deleted` property was true + # + def get(kind, key) + end + + # + # Returns all stored entities of the specified kind, not including deleted entities. + # + # @param kind [Object] the kind of entity to get + # @return [Hash] a hash where each key is the entity's `:key` property and each value + # is the entity + # + def all(kind) + end + + # + # Attempt to add an entity, or update an existing entity with the same key. An update + # should only succeed if the new item's `:version` is greater than the old one; + # otherwise, the method should do nothing. 
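+      #
+      # For example (hypothetical data): if the store currently holds an entity with `version: 2`, an
+      # upsert of the same key with `version: 1` should be a no-op, while `version: 3` should replace it.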
+ # + # @param kind [Object] the kind of entity to add or update + # @param item [Hash] the entity to add or update + # + def upsert(kind, item) + end + + # + # Attempt to delete an entity if it exists. Deletion should only succeed if the + # `version` parameter is greater than the existing entity's `:version`; otherwise, the + # method should do nothing. + # + # @param kind [Object] the kind of entity to delete + # @param key [String] the unique key of the entity + # @param version [Integer] the entity must have a lower version than this to be deleted + # + def delete(kind, key, version) + end + + # + # Checks whether this store has been initialized. That means that `init` has been called + # either by this process, or (if the store can be shared) by another process. This + # method will be called frequently, so it should be efficient. You can assume that if it + # has returned true once, it can continue to return true, i.e. a store cannot become + # uninitialized again. + # + # @return [Boolean] true if the store is in an initialized state + # + def initialized? + end + + # + # Performs any necessary cleanup to shut down the store when the client is being shut down. + # + def stop + end + end + end +end diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 3729ca6b..99912f5f 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -1,6 +1,5 @@ require "concurrent/atomics" require "json" -require "thread_safe" module LaunchDarkly # @@ -13,6 +12,9 @@ module LaunchDarkly # installed. Then, create an instance and store it in the `feature_store` property # of your client configuration. # + # @deprecated Use {LaunchDarkly::Integrations::Redis#new_feature_store} instead. This specific + # implementation class may change in the future. + # class RedisFeatureStore begin require "redis" @@ -22,6 +24,139 @@ class RedisFeatureStore REDIS_ENABLED = false end + include LaunchDarkly::Interfaces::FeatureStore + + # + # Internal implementation of the Redis feature store. We put a CachingStoreWrapper around this. 
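+    # (Concretely, the constructor further down builds roughly
+    # `CachingStoreWrapper.new(RedisFeatureStoreCore.new(opts), opts)`: the wrapper supplies caching and
+    # deleted-item filtering, while this core class only performs the Redis operations.)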
+ # + class RedisFeatureStoreCore + def initialize(opts) + @redis_opts = opts[:redis_opts] || Hash.new + if opts[:redis_url] + @redis_opts[:url] = opts[:redis_url] + end + if !@redis_opts.include?(:url) + @redis_opts[:url] = LaunchDarkly::Integrations::Redis.default_redis_url + end + max_connections = opts[:max_connections] || 16 + @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do + Redis.new(@redis_opts) + end + @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis.default_prefix + @logger = opts[:logger] || Config.default_logger + @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented + + @stopped = Concurrent::AtomicBoolean.new(false) + + with_connection do |redis| + @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ + and prefix: #{@prefix}") + end + end + + def init_internal(all_data) + count = 0 + with_connection do |redis| + all_data.each do |kind, items| + redis.multi do |multi| + multi.del(items_key(kind)) + count = count + items.count + items.each { |key, item| + redis.hset(items_key(kind), key, item.to_json) + } + end + end + end + @logger.info { "RedisFeatureStore: initialized with #{count} items" } + end + + def get_internal(kind, key) + with_connection do |redis| + get_redis(redis, kind, key) + end + end + + def get_all_internal(kind) + fs = {} + with_connection do |redis| + hashfs = redis.hgetall(items_key(kind)) + hashfs.each do |k, json_item| + f = JSON.parse(json_item, symbolize_names: true) + fs[k.to_sym] = f + end + end + fs + end + + def upsert_internal(kind, new_item) + base_key = items_key(kind) + key = new_item[:key] + try_again = true + final_item = new_item + while try_again + try_again = false + with_connection do |redis| + redis.watch(base_key) do + old_item = get_redis(redis, kind, key) + before_update_transaction(base_key, key) + if old_item.nil? || old_item[:version] < new_item[:version] + result = redis.multi do |multi| + multi.hset(base_key, key, new_item.to_json) + end + if result.nil? + @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } + try_again = true + end + else + final_item = old_item + action = new_item[:deleted] ? "delete" : "update" + @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ +in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } + end + redis.unwatch + end + end + end + final_item + end + + def initialized_internal? + with_connection { |redis| redis.exists(items_key(FEATURES)) } + end + + def stop + if @stopped.make_true + @pool.shutdown { |redis| redis.close } + end + end + + private + + # exposed for testing + def before_update_transaction(base_key, key) + @test_hook.before_update_transaction(base_key, key) if !@test_hook.nil? + end + + def items_key(kind) + @prefix + ":" + kind[:namespace] + end + + def cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + def with_connection + @pool.with { |redis| yield(redis) } + end + + def get_redis(redis, kind, key) + json_item = redis.hget(items_key(kind), key) + json_item.nil? ? nil : JSON.parse(json_item, symbolize_names: true) + end + end + + private_constant :RedisFeatureStoreCore + # # Constructor for a RedisFeatureStore instance. 
# @@ -31,45 +166,17 @@ class RedisFeatureStore # @option opts [String] :prefix namespace prefix to add to all hash keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :max_connections size of the Redis connection pool - # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :expiration_seconds expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally - # @option opts [Object] :pool custom connection pool, used for testing only + # @option opts [Object] :pool custom connection pool, if desired # def initialize(opts = {}) if !REDIS_ENABLED raise RuntimeError.new("can't use RedisFeatureStore because one of these gems is missing: redis, connection_pool") end - @redis_opts = opts[:redis_opts] || Hash.new - if opts[:redis_url] - @redis_opts[:url] = opts[:redis_url] - end - if !@redis_opts.include?(:url) - @redis_opts[:url] = RedisFeatureStore.default_redis_url - end - max_connections = opts[:max_connections] || 16 - @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do - Redis.new(@redis_opts) - end - @prefix = opts[:prefix] || RedisFeatureStore.default_prefix - @logger = opts[:logger] || Config.default_logger - - expiration_seconds = opts[:expiration] || 15 - capacity = opts[:capacity] || 1000 - if expiration_seconds > 0 - @cache = ExpiringCache.new(capacity, expiration_seconds) - else - @cache = nil - end - @stopped = Concurrent::AtomicBoolean.new(false) - @inited = MemoizedValue.new { - query_inited - } - - with_connection do |redis| - @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ -and prefix: #{@prefix}") - end + @core = RedisFeatureStoreCore.new(opts) + @wrapper = LaunchDarkly::Integrations::Helpers::CachingStoreWrapper.new(@core, opts) end # @@ -77,178 +184,42 @@ def initialize(opts = {}) # running at `localhost` with its default port. # def self.default_redis_url - 'redis://localhost:6379/0' + LaunchDarkly::Integrations::Redis.default_redis_url end # # Default value for the `prefix` constructor parameter. # def self.default_prefix - 'launchdarkly' + LaunchDarkly::Integrations::Redis.default_prefix end def get(kind, key) - f = @cache.nil? ? nil : @cache[cache_key(kind, key)] - if f.nil? - @logger.debug { "RedisFeatureStore: no cache hit for #{key} in '#{kind[:namespace]}', requesting from Redis" } - f = with_connection do |redis| - begin - get_redis(kind, redis, key.to_sym) - rescue => e - @logger.error { "RedisFeatureStore: could not retrieve #{key} from Redis in '#{kind[:namespace]}', with error: #{e}" } - nil - end - end - end - if f.nil? 
- @logger.debug { "RedisFeatureStore: #{key} not found in '#{kind[:namespace]}'" } - nil - elsif f[:deleted] - @logger.debug { "RedisFeatureStore: #{key} was deleted in '#{kind[:namespace]}', returning nil" } - nil - else - f - end + @wrapper.get(kind, key) end def all(kind) - fs = {} - with_connection do |redis| - begin - hashfs = redis.hgetall(items_key(kind)) - rescue => e - @logger.error { "RedisFeatureStore: could not retrieve all '#{kind[:namespace]}' items from Redis with error: #{e}; returning none" } - hashfs = {} - end - hashfs.each do |k, jsonItem| - f = JSON.parse(jsonItem, symbolize_names: true) - if !f[:deleted] - fs[k.to_sym] = f - end - end - end - fs + @wrapper.all(kind) end def delete(kind, key, version) - update_with_versioning(kind, { key: key, version: version, deleted: true }) + @wrapper.delete(kind, key, version) end def init(all_data) - @cache.clear if !@cache.nil? - count = 0 - with_connection do |redis| - all_data.each do |kind, items| - begin - redis.multi do |multi| - multi.del(items_key(kind)) - count = count + items.count - items.each { |key, item| - redis.hset(items_key(kind), key, item.to_json) - } - end - items.each { |key, item| - put_cache(kind, key.to_sym, item) - } - rescue => e - @logger.error { "RedisFeatureStore: could not initialize '#{kind[:namespace]}' in Redis, error: #{e}" } - end - end - end - @inited.set(true) - @logger.info { "RedisFeatureStore: initialized with #{count} items" } + @wrapper.init(all_data) end def upsert(kind, item) - update_with_versioning(kind, item) + @wrapper.upsert(kind, item) end def initialized? - @inited.get + @wrapper.initialized? end def stop - if @stopped.make_true - @pool.shutdown { |redis| redis.close } - @cache.clear if !@cache.nil? - end - end - - private - - # exposed for testing - def before_update_transaction(base_key, key) - end - - def items_key(kind) - @prefix + ":" + kind[:namespace] - end - - def cache_key(kind, key) - kind[:namespace] + ":" + key.to_s - end - - def with_connection - @pool.with { |redis| yield(redis) } - end - - def get_redis(kind, redis, key) - begin - json_item = redis.hget(items_key(kind), key) - if json_item - item = JSON.parse(json_item, symbolize_names: true) - put_cache(kind, key, item) - item - else - nil - end - rescue => e - @logger.error { "RedisFeatureStore: could not retrieve #{key} from Redis, error: #{e}" } - nil - end - end - - def put_cache(kind, key, value) - @cache[cache_key(kind, key)] = value if !@cache.nil? - end - - def update_with_versioning(kind, new_item) - base_key = items_key(kind) - key = new_item[:key] - try_again = true - while try_again - try_again = false - with_connection do |redis| - redis.watch(base_key) do - old_item = get_redis(kind, redis, key) - before_update_transaction(base_key, key) - if old_item.nil? || old_item[:version] < new_item[:version] - begin - result = redis.multi do |multi| - multi.hset(base_key, key, new_item.to_json) - end - if result.nil? - @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } - try_again = true - else - put_cache(kind, key.to_sym, new_item) - end - rescue => e - @logger.error { "RedisFeatureStore: could not store #{key} in Redis, error: #{e}" } - end - else - action = new_item[:deleted] ? 
"delete" : "update" - @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ - in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } - end - redis.unwatch - end - end - end - end - - def query_inited - with_connection { |redis| redis.exists(items_key(FEATURES)) } + @wrapper.stop end end end diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index d6c1cedc..0e0f1ca9 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -1,6 +1,6 @@ require "spec_helper" -RSpec.shared_examples "feature_store" do |create_store_method| +shared_examples "feature_store" do |create_store_method| let(:feature0) { { diff --git a/spec/integrations_helpers_spec.rb b/spec/integrations_helpers_spec.rb new file mode 100644 index 00000000..24404a72 --- /dev/null +++ b/spec/integrations_helpers_spec.rb @@ -0,0 +1,276 @@ +require "spec_helper" + +describe LaunchDarkly::Integrations::Helpers::CachingStoreWrapper do + subject { LaunchDarkly::Integrations::Helpers::CachingStoreWrapper } + + THINGS = { namespace: "things" } + + shared_examples "tests" do |cached| + opts = cached ? { expiration: 30 } : { expiration: 0 } + + it "gets item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq itemv1 + + core.force_set(THINGS, itemv2) + expect(wrapper.get(THINGS, key)).to eq (cached ? itemv1 : itemv2) # if cached, we will not see the new underlying value yet + end + + it "gets deleted item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1, deleted: true } + itemv2 = { key: key, version: 2, deleted: false } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq nil # item is filtered out because deleted is true + + core.force_set(THINGS, itemv2) + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv2) # if cached, we will not see the new underlying value yet + end + + it "gets missing item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + item = { key: key, version: 1 } + + expect(wrapper.get(THINGS, key)).to eq nil + + core.force_set(THINGS, item) + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : item) # the cache can retain a nil result + end + + it "gets all items" do + core = MockCore.new + wrapper = subject.new(core, opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + core.force_set(THINGS, item1) + core.force_set(THINGS, item2) + expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1, item2[:key] => item2 }) + + core.force_remove(THINGS, item2[:key]) + expect(wrapper.all(THINGS)).to eq (cached ? 
+ { item1[:key] => item1, item2[:key] => item2 } : + { item1[:key] => item1 }) + end + + it "gets all items filtering out deleted items" do + core = MockCore.new + wrapper = subject.new(core, opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1, deleted: true } + + core.force_set(THINGS, item1) + core.force_set(THINGS, item2) + expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1 }) + end + + it "upserts item successfully" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + wrapper.upsert(THINGS, itemv1) + expect(core.data[THINGS][key]).to eq itemv1 + + wrapper.upsert(THINGS, itemv2) + expect(core.data[THINGS][key]).to eq itemv2 + + # if we have a cache, verify that the new item is now cached by writing a different value + # to the underlying data - Get should still return the cached item + if cached + itemv3 = { key: key, version: 3 } + core.force_set(THINGS, itemv3) + end + + expect(wrapper.get(THINGS, key)).to eq itemv2 + end + + it "deletes item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2, deleted: true } + itemv3 = { key: key, version: 3 } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq itemv1 + + wrapper.delete(THINGS, key, 2) + expect(core.data[THINGS][key]).to eq itemv2 + + core.force_set(THINGS, itemv3) # make a change that bypasses the cache + + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv3) + end + end + + context "cached" do + include_examples "tests", true + + cached_opts = { expiration: 30 } + + it "get uses values from init" do + core = MockCore.new + wrapper = subject.new(core, cached_opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) + core.force_remove(THINGS, item1[:key]) + + expect(wrapper.get(THINGS, item1[:key])).to eq item1 + end + + it "get all uses values from init" do + core = MockCore.new + wrapper = subject.new(core, cached_opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) + core.force_remove(THINGS, item1[:key]) + + expect(wrapper.all(THINGS)).to eq ({ item1[:key] => item1, item2[:key] => item2 }) + end + + it "upsert doesn't update cache if unsuccessful" do + # This is for an upsert where the data in the store has a higher version. In an uncached + # store, this is just a no-op as far as the wrapper is concerned so there's nothing to + # test here. In a cached store, we need to verify that the cache has been refreshed + # using the data that was found in the store. + core = MockCore.new + wrapper = subject.new(core, cached_opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + wrapper.upsert(THINGS, itemv2) + expect(core.data[THINGS][key]).to eq itemv2 + + wrapper.upsert(THINGS, itemv1) + expect(core.data[THINGS][key]).to eq itemv2 # value in store remains the same + + itemv3 = { key: key, version: 3 } + core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache + expect(wrapper.get(THINGS, key)).to eq itemv2 + end + + it "initialized? 
can cache false result" do + core = MockCore.new + wrapper = subject.new(core, { expiration: 0.2 }) # use a shorter cache TTL for this test + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + core.inited = true + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + sleep(0.5) + + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + + # From this point on it should remain true and the method should not be called + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + end + end + + context "uncached" do + include_examples "tests", false + + uncached_opts = { expiration: 0 } + + it "queries internal initialized state only if not already inited" do + core = MockCore.new + wrapper = subject.new(core, uncached_opts) + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + core.inited = true + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + + core.inited = false + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + end + + it "does not query internal initialized state if init has been called" do + core = MockCore.new + wrapper = subject.new(core, uncached_opts) + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + wrapper.init({}) + + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 1 + end + end + + class MockCore + def initialize + @data = {} + @inited = false + @inited_query_count = 0 + end + + attr_reader :data + attr_reader :inited_query_count + attr_accessor :inited + + def force_set(kind, item) + @data[kind] = {} if !@data.has_key?(kind) + @data[kind][item[:key]] = item + end + + def force_remove(kind, key) + @data[kind].delete(key) if @data.has_key?(kind) + end + + def init_internal(all_data) + @data = all_data + @inited = true + end + + def get_internal(kind, key) + items = @data[kind] + items.nil? ? nil : items[key] + end + + def get_all_internal(kind) + @data[kind] + end + + def upsert_internal(kind, item) + @data[kind] = {} if !@data.has_key?(kind) + old_item = @data[kind][item[:key]] + return old_item if !old_item.nil? && old_item[:version] >= item[:version] + @data[kind][item[:key]] = item + item + end + + def initialized_internal? 
+ @inited_query_count = @inited_query_count + 1 + @inited + end + end +end diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index d27cdb39..d5ccfb65 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -10,12 +10,12 @@ $null_log.level = ::Logger::FATAL -def create_redis_store() - LaunchDarkly::RedisFeatureStore.new(prefix: $my_prefix, logger: $null_log, expiration: 60) +def create_redis_store(opts = {}) + LaunchDarkly::RedisFeatureStore.new(opts.merge({ prefix: $my_prefix, logger: $null_log, expiration: 60 })) end -def create_redis_store_uncached() - LaunchDarkly::RedisFeatureStore.new(prefix: $my_prefix, logger: $null_log, expiration: 0) +def create_redis_store_uncached(opts = {}) + LaunchDarkly::RedisFeatureStore.new(opts.merge({ prefix: $my_prefix, logger: $null_log, expiration: 0 })) end @@ -32,9 +32,10 @@ def create_redis_store_uncached() include_examples "feature_store", method(:create_redis_store_uncached) end - def add_concurrent_modifier(store, other_client, flag, start_version, end_version) + def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) + test_hook = Object.new version_counter = start_version - expect(store).to receive(:before_update_transaction) { |base_key, key| + expect(test_hook).to receive(:before_update_transaction) { |base_key, key| if version_counter <= end_version new_flag = flag.clone new_flag[:version] = version_counter @@ -42,18 +43,18 @@ def add_concurrent_modifier(store, other_client, flag, start_version, end_versio version_counter = version_counter + 1 end }.at_least(:once) + test_hook end it "handles upsert race condition against external client with lower version" do - store = create_redis_store other_client = Redis.new({ url: "redis://localhost:6379" }) + flag = { key: "foo", version: 1 } + test_hook = make_concurrent_modifier_test_hook(other_client, flag, 2, 4) + store = create_redis_store({ test_hook: test_hook }) begin - flag = { key: "foo", version: 1 } store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) - add_concurrent_modifier(store, other_client, flag, 2, 4) - my_ver = { key: "foo", version: 10 } store.upsert(LaunchDarkly::FEATURES, my_ver) result = store.get(LaunchDarkly::FEATURES, flag[:key]) @@ -64,15 +65,14 @@ def add_concurrent_modifier(store, other_client, flag, start_version, end_versio end it "handles upsert race condition against external client with higher version" do - store = create_redis_store other_client = Redis.new({ url: "redis://localhost:6379" }) + flag = { key: "foo", version: 1 } + test_hook = make_concurrent_modifier_test_hook(other_client, flag, 3, 3) + store = create_redis_store({ test_hook: test_hook }) begin - flag = { key: "foo", version: 1 } store.init(LaunchDarkly::FEATURES => { flag[:key] => flag }) - add_concurrent_modifier(store, other_client, flag, 3, 3) - my_ver = { key: "foo", version: 2 } store.upsert(LaunchDarkly::FEATURES, my_ver) result = store.get(LaunchDarkly::FEATURES, flag[:key]) From 4d34bc4811d1ee4ae7c65aa58e5d9c6e0ec0b28a Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 17:53:42 -0800 Subject: [PATCH 048/135] fix method reference --- lib/ldclient-rb/integrations.rb | 6 +++--- lib/ldclient-rb/redis_store.rb | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 21910b09..8f806fbb 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -9,7 +9,7 @@ module 
Redis # # @return [String] the default Redis URL # - def default_redis_url + def self.default_redis_url 'redis://localhost:6379/0' end @@ -18,7 +18,7 @@ def default_redis_url # # @return [String] the default key prefix # - def default_prefix + def self.default_prefix 'launchdarkly' end @@ -40,7 +40,7 @@ def default_prefix # @option opts [Object] :pool custom connection pool, if desired # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # - def new_feature_store(opts) + def self.new_feature_store(opts) return RedisFeatureStore.new(opts) end end diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 99912f5f..6a429ce0 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -36,13 +36,13 @@ def initialize(opts) @redis_opts[:url] = opts[:redis_url] end if !@redis_opts.include?(:url) - @redis_opts[:url] = LaunchDarkly::Integrations::Redis.default_redis_url + @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url end max_connections = opts[:max_connections] || 16 @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do Redis.new(@redis_opts) end - @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis.default_prefix + @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix @logger = opts[:logger] || Config.default_logger @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented @@ -184,14 +184,14 @@ def initialize(opts = {}) # running at `localhost` with its default port. # def self.default_redis_url - LaunchDarkly::Integrations::Redis.default_redis_url + LaunchDarkly::Integrations::Redis::default_redis_url end # # Default value for the `prefix` constructor parameter. # def self.default_prefix - LaunchDarkly::Integrations::Redis.default_prefix + LaunchDarkly::Integrations::Redis::default_prefix end def get(kind, key) From 19182adce2bfa73a4e7fb9fd7edccd604edc7ac6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 18:14:04 -0800 Subject: [PATCH 049/135] misc cleanup --- lib/ldclient-rb/integrations.rb | 32 ++-- lib/ldclient-rb/interfaces.rb | 2 +- lib/ldclient-rb/redis_store.rb | 4 +- spec/integrations_helpers_spec.rb | 276 ------------------------------ 4 files changed, 25 insertions(+), 289 deletions(-) delete mode 100644 spec/integrations_helpers_spec.rb diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 8f806fbb..4d49d1c4 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,7 +1,13 @@ require "concurrent/atomics" module LaunchDarkly + # + # Tools for connecting the LaunchDarkly client to other software. + # module Integrations + # + # Integration with [Redis](https://redis.io/). + # module Redis # # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of @@ -45,7 +51,10 @@ def self.new_feature_store(opts) end end - module Helpers + # + # Support code that may be useful for integrations. + # + module Util # # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} # pattern that delegates part of its behavior to another object, while providing optional caching @@ -59,10 +68,6 @@ module Helpers class CachingStoreWrapper include LaunchDarkly::Interfaces::FeatureStore - INITED_CACHE_KEY = "$inited" - - private_constant :INITED_CACHE_KEY - # # Creates a new store wrapper instance. 
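The "fix method reference" commit above is needed because a bare def inside a Ruby module defines an instance method, which is not callable as LaunchDarkly::Integrations::Redis.default_redis_url; only def self.* (or module_function) puts the method on the module itself. A tiny illustration with throwaway module names:

module BrokenDefaults
  def default_url          # instance method: only usable where the module is mixed in
    "redis://localhost:6379/0"
  end
end

module WorkingDefaults
  def self.default_url     # module-level method: callable directly on the module
    "redis://localhost:6379/0"
  end
end

WorkingDefaults.default_url    # => "redis://localhost:6379/0"
begin
  BrokenDefaults.default_url
rescue NoMethodError => e
  puts "fails as expected: #{e.class}"
end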
# @@ -75,8 +80,8 @@ def initialize(core, opts) @core = core expiration_seconds = opts[:expiration] || 15 - capacity = opts[:capacity] || 1000 if expiration_seconds > 0 + capacity = opts[:capacity] || 1000 @cache = ExpiringCache.new(capacity, expiration_seconds) else @cache = nil @@ -146,10 +151,10 @@ def initialized? if @cache.nil? result = @core.initialized_internal? else - result = @cache[INITED_CACHE_KEY] + result = @cache[inited_cache_key] if result.nil? result = @core.initialized_internal? - @cache[INITED_CACHE_KEY] = result + @cache[inited_cache_key] = result end end @@ -163,12 +168,19 @@ def stop private + # We use just one cache for 3 kinds of objects. Individual entities use a key like 'features:my-flag'. + def item_cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + # The result of a call to get_all_internal is cached using the "kind" object as a key. def all_cache_key(kind) kind end - def item_cache_key(kind, key) - kind[:namespace] + ":" + key.to_s + # The result of initialized_internal? is cached using this key. + def inited_cache_key + "$inited" end def item_if_not_deleted(item) diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 09e7797d..6226cbe1 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -20,7 +20,7 @@ module Interfaces # # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new - # implementation, see {LaunchDarkly::Integrations::Helpers} for tools that can make this task + # implementation, see {LaunchDarkly::Integrations::Util} for tools that can make this task # simpler. # module FeatureStore diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 6a429ce0..97cec272 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -12,7 +12,7 @@ module LaunchDarkly # installed. Then, create an instance and store it in the `feature_store` property # of your client configuration. # - # @deprecated Use {LaunchDarkly::Integrations::Redis#new_feature_store} instead. This specific + # @deprecated Use the factory method in {LaunchDarkly::Integrations::Redis} instead. This specific # implementation class may change in the future. # class RedisFeatureStore @@ -176,7 +176,7 @@ def initialize(opts = {}) end @core = RedisFeatureStoreCore.new(opts) - @wrapper = LaunchDarkly::Integrations::Helpers::CachingStoreWrapper.new(@core, opts) + @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(@core, opts) end # diff --git a/spec/integrations_helpers_spec.rb b/spec/integrations_helpers_spec.rb deleted file mode 100644 index 24404a72..00000000 --- a/spec/integrations_helpers_spec.rb +++ /dev/null @@ -1,276 +0,0 @@ -require "spec_helper" - -describe LaunchDarkly::Integrations::Helpers::CachingStoreWrapper do - subject { LaunchDarkly::Integrations::Helpers::CachingStoreWrapper } - - THINGS = { namespace: "things" } - - shared_examples "tests" do |cached| - opts = cached ? { expiration: 30 } : { expiration: 0 } - - it "gets item" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - itemv1 = { key: key, version: 1 } - itemv2 = { key: key, version: 2 } - - core.force_set(THINGS, itemv1) - expect(wrapper.get(THINGS, key)).to eq itemv1 - - core.force_set(THINGS, itemv2) - expect(wrapper.get(THINGS, key)).to eq (cached ? 
itemv1 : itemv2) # if cached, we will not see the new underlying value yet - end - - it "gets deleted item" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - itemv1 = { key: key, version: 1, deleted: true } - itemv2 = { key: key, version: 2, deleted: false } - - core.force_set(THINGS, itemv1) - expect(wrapper.get(THINGS, key)).to eq nil # item is filtered out because deleted is true - - core.force_set(THINGS, itemv2) - expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv2) # if cached, we will not see the new underlying value yet - end - - it "gets missing item" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - item = { key: key, version: 1 } - - expect(wrapper.get(THINGS, key)).to eq nil - - core.force_set(THINGS, item) - expect(wrapper.get(THINGS, key)).to eq (cached ? nil : item) # the cache can retain a nil result - end - - it "gets all items" do - core = MockCore.new - wrapper = subject.new(core, opts) - item1 = { key: "flag1", version: 1 } - item2 = { key: "flag2", version: 1 } - - core.force_set(THINGS, item1) - core.force_set(THINGS, item2) - expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1, item2[:key] => item2 }) - - core.force_remove(THINGS, item2[:key]) - expect(wrapper.all(THINGS)).to eq (cached ? - { item1[:key] => item1, item2[:key] => item2 } : - { item1[:key] => item1 }) - end - - it "gets all items filtering out deleted items" do - core = MockCore.new - wrapper = subject.new(core, opts) - item1 = { key: "flag1", version: 1 } - item2 = { key: "flag2", version: 1, deleted: true } - - core.force_set(THINGS, item1) - core.force_set(THINGS, item2) - expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1 }) - end - - it "upserts item successfully" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - itemv1 = { key: key, version: 1 } - itemv2 = { key: key, version: 2 } - - wrapper.upsert(THINGS, itemv1) - expect(core.data[THINGS][key]).to eq itemv1 - - wrapper.upsert(THINGS, itemv2) - expect(core.data[THINGS][key]).to eq itemv2 - - # if we have a cache, verify that the new item is now cached by writing a different value - # to the underlying data - Get should still return the cached item - if cached - itemv3 = { key: key, version: 3 } - core.force_set(THINGS, itemv3) - end - - expect(wrapper.get(THINGS, key)).to eq itemv2 - end - - it "deletes item" do - core = MockCore.new - wrapper = subject.new(core, opts) - key = "flag" - itemv1 = { key: key, version: 1 } - itemv2 = { key: key, version: 2, deleted: true } - itemv3 = { key: key, version: 3 } - - core.force_set(THINGS, itemv1) - expect(wrapper.get(THINGS, key)).to eq itemv1 - - wrapper.delete(THINGS, key, 2) - expect(core.data[THINGS][key]).to eq itemv2 - - core.force_set(THINGS, itemv3) # make a change that bypasses the cache - - expect(wrapper.get(THINGS, key)).to eq (cached ? 
nil : itemv3) - end - end - - context "cached" do - include_examples "tests", true - - cached_opts = { expiration: 30 } - - it "get uses values from init" do - core = MockCore.new - wrapper = subject.new(core, cached_opts) - item1 = { key: "flag1", version: 1 } - item2 = { key: "flag2", version: 1 } - - wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) - core.force_remove(THINGS, item1[:key]) - - expect(wrapper.get(THINGS, item1[:key])).to eq item1 - end - - it "get all uses values from init" do - core = MockCore.new - wrapper = subject.new(core, cached_opts) - item1 = { key: "flag1", version: 1 } - item2 = { key: "flag2", version: 1 } - - wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) - core.force_remove(THINGS, item1[:key]) - - expect(wrapper.all(THINGS)).to eq ({ item1[:key] => item1, item2[:key] => item2 }) - end - - it "upsert doesn't update cache if unsuccessful" do - # This is for an upsert where the data in the store has a higher version. In an uncached - # store, this is just a no-op as far as the wrapper is concerned so there's nothing to - # test here. In a cached store, we need to verify that the cache has been refreshed - # using the data that was found in the store. - core = MockCore.new - wrapper = subject.new(core, cached_opts) - key = "flag" - itemv1 = { key: key, version: 1 } - itemv2 = { key: key, version: 2 } - - wrapper.upsert(THINGS, itemv2) - expect(core.data[THINGS][key]).to eq itemv2 - - wrapper.upsert(THINGS, itemv1) - expect(core.data[THINGS][key]).to eq itemv2 # value in store remains the same - - itemv3 = { key: key, version: 3 } - core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache - expect(wrapper.get(THINGS, key)).to eq itemv2 - end - - it "initialized? 
can cache false result" do - core = MockCore.new - wrapper = subject.new(core, { expiration: 0.2 }) # use a shorter cache TTL for this test - - expect(wrapper.initialized?).to eq false - expect(core.inited_query_count).to eq 1 - - core.inited = true - expect(wrapper.initialized?).to eq false - expect(core.inited_query_count).to eq 1 - - sleep(0.5) - - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 2 - - # From this point on it should remain true and the method should not be called - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 2 - end - end - - context "uncached" do - include_examples "tests", false - - uncached_opts = { expiration: 0 } - - it "queries internal initialized state only if not already inited" do - core = MockCore.new - wrapper = subject.new(core, uncached_opts) - - expect(wrapper.initialized?).to eq false - expect(core.inited_query_count).to eq 1 - - core.inited = true - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 2 - - core.inited = false - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 2 - end - - it "does not query internal initialized state if init has been called" do - core = MockCore.new - wrapper = subject.new(core, uncached_opts) - - expect(wrapper.initialized?).to eq false - expect(core.inited_query_count).to eq 1 - - wrapper.init({}) - - expect(wrapper.initialized?).to eq true - expect(core.inited_query_count).to eq 1 - end - end - - class MockCore - def initialize - @data = {} - @inited = false - @inited_query_count = 0 - end - - attr_reader :data - attr_reader :inited_query_count - attr_accessor :inited - - def force_set(kind, item) - @data[kind] = {} if !@data.has_key?(kind) - @data[kind][item[:key]] = item - end - - def force_remove(kind, key) - @data[kind].delete(key) if @data.has_key?(kind) - end - - def init_internal(all_data) - @data = all_data - @inited = true - end - - def get_internal(kind, key) - items = @data[kind] - items.nil? ? nil : items[key] - end - - def get_all_internal(kind) - @data[kind] - end - - def upsert_internal(kind, item) - @data[kind] = {} if !@data.has_key?(kind) - old_item = @data[kind][item[:key]] - return old_item if !old_item.nil? && old_item[:version] >= item[:version] - @data[kind][item[:key]] = item - item - end - - def initialized_internal? - @inited_query_count = @inited_query_count + 1 - @inited - end - end -end From 5941638a33c7ecf703a565eabab0584871da8670 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 18:23:17 -0800 Subject: [PATCH 050/135] comment --- lib/ldclient-rb/integrations.rb | 3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 4d49d1c4..2df5e04c 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -240,6 +240,9 @@ def get_all_internal(kind) # parameter if the update succeeded, or the previously existing entity in the store if the # update failed; this is used for the caching logic). # + # Note that FeatureStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} + # implements `delete` by simply calling `upsert` with an item whose `:deleted` property is true. 
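The comment added in this commit records a deliberate design choice: FeatureStoreCore needs no delete method because the wrapper expresses deletion as an upsert of a tombstone item whose :deleted flag is true, so the normal version comparison still protects against out-of-order updates. In the wrapper itself this is simply:

def delete(kind, key, version)
  upsert(kind, { key: key, version: version, deleted: true })
end

A later get for that key then returns nil, because readers filter out items whose :deleted flag is set, and an upsert carrying an older version loses to the tombstone exactly as it would lose to any newer item.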
+ # # @param kind [Object] the kind of entity to add or update # @param item [Hash] the entity to add or update # @return [Hash] the entity as it now exists in the store after the update From b4cf610105cba3f2e540d5c933b4826bb8a85b77 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 18:25:04 -0800 Subject: [PATCH 051/135] re-add test file --- spec/integrations_util_spec.rb | 276 +++++++++++++++++++++++++++++++++ 1 file changed, 276 insertions(+) create mode 100644 spec/integrations_util_spec.rb diff --git a/spec/integrations_util_spec.rb b/spec/integrations_util_spec.rb new file mode 100644 index 00000000..e7890802 --- /dev/null +++ b/spec/integrations_util_spec.rb @@ -0,0 +1,276 @@ +require "spec_helper" + +describe LaunchDarkly::Integrations::Util::CachingStoreWrapper do + subject { LaunchDarkly::Integrations::Util::CachingStoreWrapper } + + THINGS = { namespace: "things" } + + shared_examples "tests" do |cached| + opts = cached ? { expiration: 30 } : { expiration: 0 } + + it "gets item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq itemv1 + + core.force_set(THINGS, itemv2) + expect(wrapper.get(THINGS, key)).to eq (cached ? itemv1 : itemv2) # if cached, we will not see the new underlying value yet + end + + it "gets deleted item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1, deleted: true } + itemv2 = { key: key, version: 2, deleted: false } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq nil # item is filtered out because deleted is true + + core.force_set(THINGS, itemv2) + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv2) # if cached, we will not see the new underlying value yet + end + + it "gets missing item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + item = { key: key, version: 1 } + + expect(wrapper.get(THINGS, key)).to eq nil + + core.force_set(THINGS, item) + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : item) # the cache can retain a nil result + end + + it "gets all items" do + core = MockCore.new + wrapper = subject.new(core, opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + core.force_set(THINGS, item1) + core.force_set(THINGS, item2) + expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1, item2[:key] => item2 }) + + core.force_remove(THINGS, item2[:key]) + expect(wrapper.all(THINGS)).to eq (cached ? 
+ { item1[:key] => item1, item2[:key] => item2 } : + { item1[:key] => item1 }) + end + + it "gets all items filtering out deleted items" do + core = MockCore.new + wrapper = subject.new(core, opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1, deleted: true } + + core.force_set(THINGS, item1) + core.force_set(THINGS, item2) + expect(wrapper.all(THINGS)).to eq({ item1[:key] => item1 }) + end + + it "upserts item successfully" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + wrapper.upsert(THINGS, itemv1) + expect(core.data[THINGS][key]).to eq itemv1 + + wrapper.upsert(THINGS, itemv2) + expect(core.data[THINGS][key]).to eq itemv2 + + # if we have a cache, verify that the new item is now cached by writing a different value + # to the underlying data - Get should still return the cached item + if cached + itemv3 = { key: key, version: 3 } + core.force_set(THINGS, itemv3) + end + + expect(wrapper.get(THINGS, key)).to eq itemv2 + end + + it "deletes item" do + core = MockCore.new + wrapper = subject.new(core, opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2, deleted: true } + itemv3 = { key: key, version: 3 } + + core.force_set(THINGS, itemv1) + expect(wrapper.get(THINGS, key)).to eq itemv1 + + wrapper.delete(THINGS, key, 2) + expect(core.data[THINGS][key]).to eq itemv2 + + core.force_set(THINGS, itemv3) # make a change that bypasses the cache + + expect(wrapper.get(THINGS, key)).to eq (cached ? nil : itemv3) + end + end + + context "cached" do + include_examples "tests", true + + cached_opts = { expiration: 30 } + + it "get uses values from init" do + core = MockCore.new + wrapper = subject.new(core, cached_opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) + core.force_remove(THINGS, item1[:key]) + + expect(wrapper.get(THINGS, item1[:key])).to eq item1 + end + + it "get all uses values from init" do + core = MockCore.new + wrapper = subject.new(core, cached_opts) + item1 = { key: "flag1", version: 1 } + item2 = { key: "flag2", version: 1 } + + wrapper.init({ THINGS => { item1[:key] => item1, item2[:key] => item2 } }) + core.force_remove(THINGS, item1[:key]) + + expect(wrapper.all(THINGS)).to eq ({ item1[:key] => item1, item2[:key] => item2 }) + end + + it "upsert doesn't update cache if unsuccessful" do + # This is for an upsert where the data in the store has a higher version. In an uncached + # store, this is just a no-op as far as the wrapper is concerned so there's nothing to + # test here. In a cached store, we need to verify that the cache has been refreshed + # using the data that was found in the store. + core = MockCore.new + wrapper = subject.new(core, cached_opts) + key = "flag" + itemv1 = { key: key, version: 1 } + itemv2 = { key: key, version: 2 } + + wrapper.upsert(THINGS, itemv2) + expect(core.data[THINGS][key]).to eq itemv2 + + wrapper.upsert(THINGS, itemv1) + expect(core.data[THINGS][key]).to eq itemv2 # value in store remains the same + + itemv3 = { key: key, version: 3 } + core.force_set(THINGS, itemv3) # bypasses cache so we can verify that itemv2 is in the cache + expect(wrapper.get(THINGS, key)).to eq itemv2 + end + + it "initialized? 
can cache false result" do + core = MockCore.new + wrapper = subject.new(core, { expiration: 0.2 }) # use a shorter cache TTL for this test + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + core.inited = true + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + sleep(0.5) + + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + + # From this point on it should remain true and the method should not be called + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + end + end + + context "uncached" do + include_examples "tests", false + + uncached_opts = { expiration: 0 } + + it "queries internal initialized state only if not already inited" do + core = MockCore.new + wrapper = subject.new(core, uncached_opts) + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + core.inited = true + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + + core.inited = false + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 2 + end + + it "does not query internal initialized state if init has been called" do + core = MockCore.new + wrapper = subject.new(core, uncached_opts) + + expect(wrapper.initialized?).to eq false + expect(core.inited_query_count).to eq 1 + + wrapper.init({}) + + expect(wrapper.initialized?).to eq true + expect(core.inited_query_count).to eq 1 + end + end + + class MockCore + def initialize + @data = {} + @inited = false + @inited_query_count = 0 + end + + attr_reader :data + attr_reader :inited_query_count + attr_accessor :inited + + def force_set(kind, item) + @data[kind] = {} if !@data.has_key?(kind) + @data[kind][item[:key]] = item + end + + def force_remove(kind, key) + @data[kind].delete(key) if @data.has_key?(kind) + end + + def init_internal(all_data) + @data = all_data + @inited = true + end + + def get_internal(kind, key) + items = @data[kind] + items.nil? ? nil : items[key] + end + + def get_all_internal(kind) + @data[kind] + end + + def upsert_internal(kind, item) + @data[kind] = {} if !@data.has_key?(kind) + old_item = @data[kind][item[:key]] + return old_item if !old_item.nil? && old_item[:version] >= item[:version] + @data[kind][item[:key]] = item + item + end + + def initialized_internal? 
+ @inited_query_count = @inited_query_count + 1 + @inited + end + end +end From 3f9ef3b9166832d2f3bb551d348822697d4a7d38 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 19:05:59 -0800 Subject: [PATCH 052/135] misc cleanup --- lib/ldclient-rb.rb | 3 +- lib/ldclient-rb/impl.rb | 10 + .../impl/integrations/redis_impl.rb | 153 ++++++++++ lib/ldclient-rb/integrations.rb | 262 +----------------- lib/ldclient-rb/integrations/redis.rb | 48 ++++ .../integrations/util/store_wrapper.rb | 222 +++++++++++++++ lib/ldclient-rb/interfaces.rb | 3 + lib/ldclient-rb/redis_store.rb | 153 +--------- .../store_wrapper_spec.rb} | 0 9 files changed, 454 insertions(+), 400 deletions(-) create mode 100644 lib/ldclient-rb/impl.rb create mode 100644 lib/ldclient-rb/impl/integrations/redis_impl.rb create mode 100644 lib/ldclient-rb/integrations/redis.rb create mode 100644 lib/ldclient-rb/integrations/util/store_wrapper.rb rename spec/{integrations_util_spec.rb => integrations/store_wrapper_spec.rb} (100%) diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index a1d7ffd9..e355a304 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -17,7 +17,6 @@ require "ldclient-rb/non_blocking_thread_pool" require "ldclient-rb/event_summarizer" require "ldclient-rb/events" -require "ldclient-rb/redis_store" -require "ldclient-rb/integrations" require "ldclient-rb/requestor" require "ldclient-rb/file_data_source" +require "ldclient-rb/integrations" diff --git a/lib/ldclient-rb/impl.rb b/lib/ldclient-rb/impl.rb new file mode 100644 index 00000000..85079baf --- /dev/null +++ b/lib/ldclient-rb/impl.rb @@ -0,0 +1,10 @@ + +module LaunchDarkly + # + # Low-level implementation classes. Everything in this module should be considered non-public + # and subject to change with any release. + # + module Impl + # code is in ldclient-rb/impl/ + end +end diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb new file mode 100644 index 00000000..325b936e --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -0,0 +1,153 @@ +require "concurrent/atomics" +require "json" + +require "ldclient-rb/integrations/util/store_wrapper" +require "ldclient-rb/redis_store" # eventually that file should be moved inside this one + +module LaunchDarkly + module Impl + module Integrations + module Redis + # + # Internal implementation of the Redis feature store, intended to be used with CachingStoreWrapper. 
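This cleanup moves the Redis store implementation under the non-public LaunchDarkly::Impl namespace while the supported entry point stays in LaunchDarkly::Integrations::Redis. For an application nothing changes; a rough usage sketch, assuming the redis and connection_pool gems are installed and using arbitrary option values (note that the wrapper code reads the cache TTL from :expiration, even though some doc comments in these patches call it :expiration_seconds):

require "ldclient-rb"

# Build a Redis-backed store: a CachingStoreWrapper around the Redis core shown above.
store = LaunchDarkly::Integrations::Redis.new_feature_store({
  redis_url: "redis://localhost:6379/0",
  prefix: "launchdarkly",
  expiration: 30     # seconds of local caching; 0 disables the in-memory cache
})

config = LaunchDarkly::Config.new({ feature_store: store })
client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)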
+ # + class RedisFeatureStoreCore + begin + require "redis" + require "connection_pool" + REDIS_ENABLED = true + rescue ScriptError, StandardError + REDIS_ENABLED = false + end + + def initialize(opts) + if !REDIS_ENABLED + raise RuntimeError.new("can't use Redis feature store because one of these gems is missing: redis, connection_pool") + end + + @redis_opts = opts[:redis_opts] || Hash.new + if opts[:redis_url] + @redis_opts[:url] = opts[:redis_url] + end + if !@redis_opts.include?(:url) + @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url + end + max_connections = opts[:max_connections] || 16 + @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do + ::Redis.new(@redis_opts) + end + @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix + @logger = opts[:logger] || Config.default_logger + @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented + + @stopped = Concurrent::AtomicBoolean.new(false) + + with_connection do |redis| + @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ + and prefix: #{@prefix}") + end + end + + def init_internal(all_data) + count = 0 + with_connection do |redis| + all_data.each do |kind, items| + redis.multi do |multi| + multi.del(items_key(kind)) + count = count + items.count + items.each { |key, item| + redis.hset(items_key(kind), key, item.to_json) + } + end + end + end + @logger.info { "RedisFeatureStore: initialized with #{count} items" } + end + + def get_internal(kind, key) + with_connection do |redis| + get_redis(redis, kind, key) + end + end + + def get_all_internal(kind) + fs = {} + with_connection do |redis| + hashfs = redis.hgetall(items_key(kind)) + hashfs.each do |k, json_item| + f = JSON.parse(json_item, symbolize_names: true) + fs[k.to_sym] = f + end + end + fs + end + + def upsert_internal(kind, new_item) + base_key = items_key(kind) + key = new_item[:key] + try_again = true + final_item = new_item + while try_again + try_again = false + with_connection do |redis| + redis.watch(base_key) do + old_item = get_redis(redis, kind, key) + before_update_transaction(base_key, key) + if old_item.nil? || old_item[:version] < new_item[:version] + result = redis.multi do |multi| + multi.hset(base_key, key, new_item.to_json) + end + if result.nil? + @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } + try_again = true + end + else + final_item = old_item + action = new_item[:deleted] ? "delete" : "update" + @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ + in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } + end + redis.unwatch + end + end + end + final_item + end + + def initialized_internal? + with_connection { |redis| redis.exists(items_key(FEATURES)) } + end + + def stop + if @stopped.make_true + @pool.shutdown { |redis| redis.close } + end + end + + private + + def before_update_transaction(base_key, key) + @test_hook.before_update_transaction(base_key, key) if !@test_hook.nil? + end + + def items_key(kind) + @prefix + ":" + kind[:namespace] + end + + def cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + def with_connection + @pool.with { |redis| yield(redis) } + end + + def get_redis(redis, kind, key) + json_item = redis.hget(items_key(kind), key) + json_item.nil? ? 
nil : JSON.parse(json_item, symbolize_names: true) + end + end + end + end + end +end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 2df5e04c..02b2d435 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,4 +1,4 @@ -require "concurrent/atomics" +require "ldclient-rb/integrations/redis" module LaunchDarkly # @@ -8,265 +8,19 @@ module Integrations # # Integration with [Redis](https://redis.io/). # + # @since 5.5.0 + # module Redis - # - # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of - # Redis running at `localhost` with its default port. - # - # @return [String] the default Redis URL - # - def self.default_redis_url - 'redis://localhost:6379/0' - end - - # - # Default value for the `prefix` option for {new_feature_store}. - # - # @return [String] the default key prefix - # - def self.default_prefix - 'launchdarkly' - end - - # - # Creates a Redis-backed persistent feature store. - # - # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, - # put the object returned by this method into the `feature_store` property of your - # client configuration ({LaunchDarkly::Config}). - # - # @param opts [Hash] the configuration options - # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) - # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) - # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly - # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` - # @option opts [Integer] :max_connections size of the Redis connection pool - # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching - # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @option opts [Object] :pool custom connection pool, if desired - # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object - # - def self.new_feature_store(opts) - return RedisFeatureStore.new(opts) - end + # code is in ldclient-rb/impl/integrations/redis_impl end # - # Support code that may be useful for integrations. + # Support code that may be helpful in creating integrations. + # + # @since 5.5.0 # module Util - # - # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} - # pattern that delegates part of its behavior to another object, while providing optional caching - # behavior and other logic that would otherwise be repeated in every feature store implementation. - # This makes it easier to create new database integrations by implementing only the database-specific - # logic. - # - # The mixin {FeatureStoreCore} describes the methods that need to be supported by the inner - # implementation object. - # - class CachingStoreWrapper - include LaunchDarkly::Interfaces::FeatureStore - - # - # Creates a new store wrapper instance. 
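The Util module kept here is the extension point for new database integrations: implement the FeatureStoreCore methods on any object and hand it to CachingStoreWrapper, which then supplies caching, deleted-item filtering, and memoization of the initialized state. A stripped-down in-memory core in the spirit of the MockCore used by the specs above (purely illustrative, not a real integration):

class HashCore
  def initialize
    @data = {}
    @inited = false
  end

  def init_internal(all_data)
    @data = all_data
    @inited = true
  end

  def get_internal(kind, key)
    (@data[kind] || {})[key]
  end

  def get_all_internal(kind)
    @data[kind] || {}
  end

  def upsert_internal(kind, item)
    @data[kind] ||= {}
    old_item = @data[kind][item[:key]]
    return old_item if !old_item.nil? && old_item[:version] >= item[:version]
    @data[kind][item[:key]] = item   # returning the item signals a successful update
  end

  def initialized_internal?
    @inited
  end

  def stop
  end
end

store = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(HashCore.new, { expiration: 15 })
store.upsert(LaunchDarkly::FEATURES, { key: "my-flag", version: 1 })
store.get(LaunchDarkly::FEATURES, "my-flag")   # => { key: "my-flag", version: 1 }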
- # - # @param core [Object] an object that implements the {FeatureStoreCore} methods - # @param opts [Hash] a hash that may include cache-related options; all others will be ignored - # @option opts [Float] :expiration_seconds (15) cache TTL; zero means no caching - # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # - def initialize(core, opts) - @core = core - - expiration_seconds = opts[:expiration] || 15 - if expiration_seconds > 0 - capacity = opts[:capacity] || 1000 - @cache = ExpiringCache.new(capacity, expiration_seconds) - else - @cache = nil - end - - @inited = Concurrent::AtomicBoolean.new(false) - end - - def init(all_data) - @core.init_internal(all_data) - @inited.make_true - - if !@cache.nil? - @cache.clear - all_data.each do |kind, items| - @cache[kind] = items_if_not_deleted(items) - items.each do |key, item| - @cache[item_cache_key(kind, key)] = [item] - end - end - end - end - - def get(kind, key) - if !@cache.nil? - cache_key = item_cache_key(kind, key) - cached = @cache[cache_key] # note, item entries in the cache are wrapped in an array so we can cache nil values - return item_if_not_deleted(cached[0]) if !cached.nil? - end - - item = @core.get_internal(kind, key) - - if !@cache.nil? - @cache[cache_key] = [item] - end - - item_if_not_deleted(item) - end - - def all(kind) - if !@cache.nil? - items = @cache[all_cache_key(kind)] - return items if !items.nil? - end - - items = items_if_not_deleted(@core.get_all_internal(kind)) - @cache[all_cache_key(kind)] = items if !@cache.nil? - items - end - - def upsert(kind, item) - new_state = @core.upsert_internal(kind, item) - - if !@cache.nil? - @cache[item_cache_key(kind, item[:key])] = [new_state] - @cache.delete(all_cache_key(kind)) - end - end - - def delete(kind, key, version) - upsert(kind, { key: key, version: version, deleted: true }) - end - - def initialized? - return true if @inited.value - - if @cache.nil? - result = @core.initialized_internal? - else - result = @cache[inited_cache_key] - if result.nil? - result = @core.initialized_internal? - @cache[inited_cache_key] = result - end - end - - @inited.make_true if result - result - end - - def stop - @core.stop - end - - private - - # We use just one cache for 3 kinds of objects. Individual entities use a key like 'features:my-flag'. - def item_cache_key(kind, key) - kind[:namespace] + ":" + key.to_s - end - - # The result of a call to get_all_internal is cached using the "kind" object as a key. - def all_cache_key(kind) - kind - end - - # The result of initialized_internal? is cached using this key. - def inited_cache_key - "$inited" - end - - def item_if_not_deleted(item) - (item.nil? || item[:deleted]) ? nil : item - end - - def items_if_not_deleted(items) - items.select { |key, item| !item[:deleted] } - end - end - - # - # This module describes the methods that you must implement on your own object in order to - # use {CachingStoreWrapper}. - # - module FeatureStoreCore - # - # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, - # but the wrapper will take care of updating the cache if caching is enabled. - # - # @param all_data [Hash] a hash where each key is one of the data kind objects, and each - # value is in turn a hash of string keys to entities - # - def init_internal(all_data) - end - - # - # Retrieves a single entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#get} - # except that 1. 
the wrapper will take care of filtering out deleted entities by checking the - # `:deleted` property, so you can just return exactly what was in the data store, and 2. the - # wrapper will take care of checking and updating the cache if caching is enabled. - # - # @param kind [Object] the kind of entity to get - # @param key [String] the unique key of the entity to get - # @return [Hash] the entity; nil if the key was not found - # - def get_internal(kind, key) - end - - # - # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::FeatureStore#all} - # except that 1. the wrapper will take care of filtering out deleted entities by checking the - # `:deleted` property, so you can just return exactly what was in the data store, and 2. the - # wrapper will take care of checking and updating the cache if caching is enabled. - # - # @param kind [Object] the kind of entity to get - # @return [Hash] a hash where each key is the entity's `:key` property and each value - # is the entity - # - def get_all_internal(kind) - end - - # - # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#upsert} - # except that 1. the wrapper will take care of updating the cache if caching is enabled, and 2. - # the method is expected to return the final state of the entity (i.e. either the `item` - # parameter if the update succeeded, or the previously existing entity in the store if the - # update failed; this is used for the caching logic). - # - # Note that FeatureStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} - # implements `delete` by simply calling `upsert` with an item whose `:deleted` property is true. - # - # @param kind [Object] the kind of entity to add or update - # @param item [Hash] the entity to add or update - # @return [Hash] the entity as it now exists in the store after the update - # - def upsert_internal(kind, item) - end - - # - # Checks whether this store has been initialized. This is the same as - # {LaunchDarkly::Interfaces::FeatureStore#initialized?} except that there is less of a concern - # for efficiency, because the wrapper will use caching and memoization in order to call the method - # as little as possible. - # - # @return [Boolean] true if the store is in an initialized state - # - def initialized_internal? - end - - # - # Performs any necessary cleanup to shut down the store when the client is being shut down. - # - def stop - end - end + # code is in ldclient-rb/integrations/util/ end end end diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb new file mode 100644 index 00000000..54221f76 --- /dev/null +++ b/lib/ldclient-rb/integrations/redis.rb @@ -0,0 +1,48 @@ +require "ldclient-rb/impl/integrations/redis_impl" + +module LaunchDarkly + module Integrations + module Redis + # + # Default value for the `redis_url` option for {new_feature_store}. This points to an instance of + # Redis running at `localhost` with its default port. + # + # @return [String] the default Redis URL + # + def self.default_redis_url + 'redis://localhost:6379/0' + end + + # + # Default value for the `prefix` option for {new_feature_store}. + # + # @return [String] the default key prefix + # + def self.default_prefix + 'launchdarkly' + end + + # + # Creates a Redis-backed persistent feature store. + # + # To use this method, you must first have the `redis` and `connection-pool` gems installed. 
Then, + # put the object returned by this method into the `feature_store` property of your + # client configuration ({LaunchDarkly::Config}). + # + # @param opts [Hash] the configuration options + # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) + # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) + # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :max_connections size of the Redis connection pool + # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # @option opts [Object] :pool custom connection pool, if desired + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # + def self.new_feature_store(opts) + return RedisFeatureStore.new(opts) + end + end + end +end diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb new file mode 100644 index 00000000..58ecb2c4 --- /dev/null +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -0,0 +1,222 @@ +require "concurrent/atomics" + +require "ldclient-rb/expiring_cache" + +module LaunchDarkly + module Integrations + module Util + # + # CachingStoreWrapper is a partial implementation of the {LaunchDarkly::Interfaces::FeatureStore} + # pattern that delegates part of its behavior to another object, while providing optional caching + # behavior and other logic that would otherwise be repeated in every feature store implementation. + # This makes it easier to create new database integrations by implementing only the database-specific + # logic. + # + # The mixin {FeatureStoreCore} describes the methods that need to be supported by the inner + # implementation object. + # + class CachingStoreWrapper + include LaunchDarkly::Interfaces::FeatureStore + + # + # Creates a new store wrapper instance. + # + # @param core [Object] an object that implements the {FeatureStoreCore} methods + # @param opts [Hash] a hash that may include cache-related options; all others will be ignored + # @option opts [Float] :expiration_seconds (15) cache TTL; zero means no caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # + def initialize(core, opts) + @core = core + + expiration_seconds = opts[:expiration] || 15 + if expiration_seconds > 0 + capacity = opts[:capacity] || 1000 + @cache = ExpiringCache.new(capacity, expiration_seconds) + else + @cache = nil + end + + @inited = Concurrent::AtomicBoolean.new(false) + end + + def init(all_data) + @core.init_internal(all_data) + @inited.make_true + + if !@cache.nil? + @cache.clear + all_data.each do |kind, items| + @cache[kind] = items_if_not_deleted(items) + items.each do |key, item| + @cache[item_cache_key(kind, key)] = [item] + end + end + end + end + + def get(kind, key) + if !@cache.nil? + cache_key = item_cache_key(kind, key) + cached = @cache[cache_key] # note, item entries in the cache are wrapped in an array so we can cache nil values + return item_if_not_deleted(cached[0]) if !cached.nil? + end + + item = @core.get_internal(kind, key) + + if !@cache.nil? 
+ @cache[cache_key] = [item] + end + + item_if_not_deleted(item) + end + + def all(kind) + if !@cache.nil? + items = @cache[all_cache_key(kind)] + return items if !items.nil? + end + + items = items_if_not_deleted(@core.get_all_internal(kind)) + @cache[all_cache_key(kind)] = items if !@cache.nil? + items + end + + def upsert(kind, item) + new_state = @core.upsert_internal(kind, item) + + if !@cache.nil? + @cache[item_cache_key(kind, item[:key])] = [new_state] + @cache.delete(all_cache_key(kind)) + end + end + + def delete(kind, key, version) + upsert(kind, { key: key, version: version, deleted: true }) + end + + def initialized? + return true if @inited.value + + if @cache.nil? + result = @core.initialized_internal? + else + result = @cache[inited_cache_key] + if result.nil? + result = @core.initialized_internal? + @cache[inited_cache_key] = result + end + end + + @inited.make_true if result + result + end + + def stop + @core.stop + end + + private + + # We use just one cache for 3 kinds of objects. Individual entities use a key like 'features:my-flag'. + def item_cache_key(kind, key) + kind[:namespace] + ":" + key.to_s + end + + # The result of a call to get_all_internal is cached using the "kind" object as a key. + def all_cache_key(kind) + kind + end + + # The result of initialized_internal? is cached using this key. + def inited_cache_key + "$inited" + end + + def item_if_not_deleted(item) + (item.nil? || item[:deleted]) ? nil : item + end + + def items_if_not_deleted(items) + items.select { |key, item| !item[:deleted] } + end + end + + # + # This module describes the methods that you must implement on your own object in order to + # use {CachingStoreWrapper}. + # + module FeatureStoreCore + # + # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, + # but the wrapper will take care of updating the cache if caching is enabled. + # + # @param all_data [Hash] a hash where each key is one of the data kind objects, and each + # value is in turn a hash of string keys to entities + # + def init_internal(all_data) + end + + # + # Retrieves a single entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#get} + # except that 1. the wrapper will take care of filtering out deleted entities by checking the + # `:deleted` property, so you can just return exactly what was in the data store, and 2. the + # wrapper will take care of checking and updating the cache if caching is enabled. + # + # @param kind [Object] the kind of entity to get + # @param key [String] the unique key of the entity to get + # @return [Hash] the entity; nil if the key was not found + # + def get_internal(kind, key) + end + + # + # Retrieves all entities of the specified kind. This is the same as {LaunchDarkly::Interfaces::FeatureStore#all} + # except that 1. the wrapper will take care of filtering out deleted entities by checking the + # `:deleted` property, so you can just return exactly what was in the data store, and 2. the + # wrapper will take care of checking and updating the cache if caching is enabled. + # + # @param kind [Object] the kind of entity to get + # @return [Hash] a hash where each key is the entity's `:key` property and each value + # is the entity + # + def get_all_internal(kind) + end + + # + # Attempts to add or update an entity. This is the same as {LaunchDarkly::Interfaces::FeatureStore#upsert} + # except that 1. the wrapper will take care of updating the cache if caching is enabled, and 2. 
+ # the method is expected to return the final state of the entity (i.e. either the `item` + # parameter if the update succeeded, or the previously existing entity in the store if the + # update failed; this is used for the caching logic). + # + # Note that FeatureStoreCore does not have a `delete` method. This is because {CachingStoreWrapper} + # implements `delete` by simply calling `upsert` with an item whose `:deleted` property is true. + # + # @param kind [Object] the kind of entity to add or update + # @param item [Hash] the entity to add or update + # @return [Hash] the entity as it now exists in the store after the update + # + def upsert_internal(kind, item) + end + + # + # Checks whether this store has been initialized. This is the same as + # {LaunchDarkly::Interfaces::FeatureStore#initialized?} except that there is less of a concern + # for efficiency, because the wrapper will use caching and memoization in order to call the method + # as little as possible. + # + # @return [Boolean] true if the store is in an initialized state + # + def initialized_internal? + end + + # + # Performs any necessary cleanup to shut down the store when the client is being shut down. + # + def stop + end + end + end + end +end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 6226cbe1..510e1636 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -1,5 +1,8 @@ module LaunchDarkly + # + # Mixins that define the required methods of various pluggable components used by the client. + # module Interfaces # # Mixin that defines the required methods of a feature store implementation. The LaunchDarkly diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 97cec272..32a9507d 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -1,6 +1,9 @@ require "concurrent/atomics" require "json" +require "ldclient-rb/interfaces" +require "ldclient-rb/impl/integrations/redis_impl" + module LaunchDarkly # # An implementation of the LaunchDarkly client's feature store that uses a Redis @@ -16,146 +19,12 @@ module LaunchDarkly # implementation class may change in the future. # class RedisFeatureStore - begin - require "redis" - require "connection_pool" - REDIS_ENABLED = true - rescue ScriptError, StandardError - REDIS_ENABLED = false - end - include LaunchDarkly::Interfaces::FeatureStore - # - # Internal implementation of the Redis feature store. We put a CachingStoreWrapper around this. 
- # - class RedisFeatureStoreCore - def initialize(opts) - @redis_opts = opts[:redis_opts] || Hash.new - if opts[:redis_url] - @redis_opts[:url] = opts[:redis_url] - end - if !@redis_opts.include?(:url) - @redis_opts[:url] = LaunchDarkly::Integrations::Redis::default_redis_url - end - max_connections = opts[:max_connections] || 16 - @pool = opts[:pool] || ConnectionPool.new(size: max_connections) do - Redis.new(@redis_opts) - end - @prefix = opts[:prefix] || LaunchDarkly::Integrations::Redis::default_prefix - @logger = opts[:logger] || Config.default_logger - @test_hook = opts[:test_hook] # used for unit tests, deliberately undocumented - - @stopped = Concurrent::AtomicBoolean.new(false) - - with_connection do |redis| - @logger.info("RedisFeatureStore: using Redis instance at #{redis.connection[:host]}:#{redis.connection[:port]} \ - and prefix: #{@prefix}") - end - end - - def init_internal(all_data) - count = 0 - with_connection do |redis| - all_data.each do |kind, items| - redis.multi do |multi| - multi.del(items_key(kind)) - count = count + items.count - items.each { |key, item| - redis.hset(items_key(kind), key, item.to_json) - } - end - end - end - @logger.info { "RedisFeatureStore: initialized with #{count} items" } - end - - def get_internal(kind, key) - with_connection do |redis| - get_redis(redis, kind, key) - end - end - - def get_all_internal(kind) - fs = {} - with_connection do |redis| - hashfs = redis.hgetall(items_key(kind)) - hashfs.each do |k, json_item| - f = JSON.parse(json_item, symbolize_names: true) - fs[k.to_sym] = f - end - end - fs - end - - def upsert_internal(kind, new_item) - base_key = items_key(kind) - key = new_item[:key] - try_again = true - final_item = new_item - while try_again - try_again = false - with_connection do |redis| - redis.watch(base_key) do - old_item = get_redis(redis, kind, key) - before_update_transaction(base_key, key) - if old_item.nil? || old_item[:version] < new_item[:version] - result = redis.multi do |multi| - multi.hset(base_key, key, new_item.to_json) - end - if result.nil? - @logger.debug { "RedisFeatureStore: concurrent modification detected, retrying" } - try_again = true - end - else - final_item = old_item - action = new_item[:deleted] ? "delete" : "update" - @logger.warn { "RedisFeatureStore: attempted to #{action} #{key} version: #{old_item[:version]} \ -in '#{kind[:namespace]}' with a version that is the same or older: #{new_item[:version]}" } - end - redis.unwatch - end - end - end - final_item - end - - def initialized_internal? - with_connection { |redis| redis.exists(items_key(FEATURES)) } - end - - def stop - if @stopped.make_true - @pool.shutdown { |redis| redis.close } - end - end - - private - - # exposed for testing - def before_update_transaction(base_key, key) - @test_hook.before_update_transaction(base_key, key) if !@test_hook.nil? - end - - def items_key(kind) - @prefix + ":" + kind[:namespace] - end - - def cache_key(kind, key) - kind[:namespace] + ":" + key.to_s - end - - def with_connection - @pool.with { |redis| yield(redis) } - end - - def get_redis(redis, kind, key) - json_item = redis.hget(items_key(kind), key) - json_item.nil? ? nil : JSON.parse(json_item, symbolize_names: true) - end - end - - private_constant :RedisFeatureStoreCore + # Note that this class is now just a facade around CachingStoreWrapper, which is in turn delegating + # to RedisFeatureStoreCore where the actual database logic is. 
This class was retained for historical + # reasons, so that existing code can still call RedisFeatureStore.new. In the future, we will migrate + # away from exposing these concrete classes and use factory methods instead. # # Constructor for a RedisFeatureStore instance. @@ -171,12 +40,8 @@ def get_redis(redis, kind, key) # @option opts [Object] :pool custom connection pool, if desired # def initialize(opts = {}) - if !REDIS_ENABLED - raise RuntimeError.new("can't use RedisFeatureStore because one of these gems is missing: redis, connection_pool") - end - - @core = RedisFeatureStoreCore.new(opts) - @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(@core, opts) + core = LaunchDarkly::Impl::Integrations::Redis::RedisFeatureStoreCore.new(opts) + @wrapper = LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) end # diff --git a/spec/integrations_util_spec.rb b/spec/integrations/store_wrapper_spec.rb similarity index 100% rename from spec/integrations_util_spec.rb rename to spec/integrations/store_wrapper_spec.rb From fa831f9a3fc6db3bf1eabff4030eaa13ae11d03c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 14 Dec 2018 19:18:27 -0800 Subject: [PATCH 053/135] misc cleanup --- lib/ldclient-rb/impl/integrations/redis_impl.rb | 3 --- lib/ldclient-rb/integrations.rb | 1 + lib/ldclient-rb/integrations/redis.rb | 2 +- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 325b936e..497b01c5 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -1,9 +1,6 @@ require "concurrent/atomics" require "json" -require "ldclient-rb/integrations/util/store_wrapper" -require "ldclient-rb/redis_store" # eventually that file should be moved inside this one - module LaunchDarkly module Impl module Integrations diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 02b2d435..c48074a0 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,4 +1,5 @@ require "ldclient-rb/integrations/redis" +require "ldclient-rb/integrations/util/store_wrapper" module LaunchDarkly # diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 54221f76..b81097c6 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -1,4 +1,4 @@ -require "ldclient-rb/impl/integrations/redis_impl" +require "ldclient-rb/redis_store" # eventually we will just refer to impl/integrations/redis_impl directly module LaunchDarkly module Integrations From ea68da433cc5eaeaeac8c557364c94a20a21d93f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 10:33:52 -0800 Subject: [PATCH 054/135] initial DynamoDB implementation --- .circleci/config.yml | 11 + ldclient-rb.gemspec | 1 + .../impl/integrations/dynamodb_impl.rb | 231 ++++++++++++++++++ lib/ldclient-rb/integrations.rb | 10 + lib/ldclient-rb/integrations/dynamodb.rb | 31 +++ .../dynamodb_feature_store_spec.rb | 77 ++++++ 6 files changed, 361 insertions(+) create mode 100644 lib/ldclient-rb/impl/integrations/dynamodb_impl.rb create mode 100644 lib/ldclient-rb/integrations/dynamodb.rb create mode 100644 spec/integrations/dynamodb_feature_store_spec.rb diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..f19ae7bc 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -34,26 +34,31 @@ jobs: docker: - image: 
circleci/ruby:2.2.9-jessie - image: redis + - image: amazon/dynamodb-local test-2.3: <<: *ruby-docker-template docker: - image: circleci/ruby:2.3.6-jessie - image: redis + - image: amazon/dynamodb-local test-2.4: <<: *ruby-docker-template docker: - image: circleci/ruby:2.4.4-stretch - image: redis + - image: amazon/dynamodb-local test-2.5: <<: *ruby-docker-template docker: - image: circleci/ruby:2.5.1-stretch - image: redis + - image: amazon/dynamodb-local test-jruby-9.2: <<: *ruby-docker-template docker: - image: circleci/jruby:9-jdk - image: redis + - image: amazon/dynamodb-local # The following very slow job uses an Ubuntu container to run the Ruby versions that # CircleCI doesn't provide Docker images for. @@ -63,8 +68,11 @@ jobs: environment: - RUBIES: "jruby-9.1.17.0" steps: + - run: sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" - run: sudo apt-get -q update - run: sudo apt-get -qy install redis-server + - run: sudo apt-cache policy docker-ce + - run: sudo apt-get -qy install docker-ce - checkout - run: name: install all Ruby versions @@ -84,6 +92,9 @@ jobs: bundle install; mv Gemfile.lock "Gemfile.lock.$i" done + - run: + command: docker run -p 8000:8000 amazon/dynamodb-local + background: true - run: name: run tests for all versions shell: /bin/bash -leo pipefail diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 0b8f4f9d..8b1f4cc7 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -21,6 +21,7 @@ Gem::Specification.new do |spec| spec.require_paths = ["lib"] spec.extensions = 'ext/mkrf_conf.rb' + spec.add_development_dependency "aws-sdk-dynamodb", "~> 1.18" spec.add_development_dependency "bundler", "~> 1.7" spec.add_development_dependency "rspec", "~> 3.2" spec.add_development_dependency "codeclimate-test-reporter", "~> 0" diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb new file mode 100644 index 00000000..8eb1dd2a --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -0,0 +1,231 @@ +require "concurrent/atomics" +require "json" + +module LaunchDarkly + module Impl + module Integrations + module DynamoDB + # + # Internal implementation of the DynamoDB feature store, intended to be used with CachingStoreWrapper. + # + class DynamoDBFeatureStoreCore + begin + require "aws-sdk-dynamodb" + AWS_SDK_ENABLED = true + rescue ScriptError, StandardError + begin + require "aws-sdk" + AWS_SDK_ENABLED = true + rescue ScriptError, StandardError + AWS_SDK_ENABLED = false + end + end + + PARTITION_KEY = "namespace" + SORT_KEY = "key" + + VERSION_ATTRIBUTE = "version" + ITEM_JSON_ATTRIBUTE = "item" + + def initialize(table_name, opts) + if !AWS_SDK_ENABLED + raise RuntimeError.new("can't use DynamoDB feature store without the aws-sdk or aws-sdk-dynamodb gem") + end + + @table_name = table_name + @prefix = opts[:prefix] + @logger = opts[:logger] || Config.default_logger + + @stopped = Concurrent::AtomicBoolean.new(false) + + if !opts[:existing_client].nil? + @client = opts[:existing_client] + else + @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts]) + end + + @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"") + end + + def init_internal(all_data) + # Start by reading the existing keys; we will later delete any of these that weren't in all_data. 
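+        # (Keys that remain in this set after the loop below correspond to items that are no longer
+        # present in all_data; they are turned into delete requests further down.)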
+ unused_old_keys = read_existing_keys(all_data.keys) + + requests = [] + num_items = 0 + + # Insert or update every provided item + all_data.each do |kind, items| + items.values.each do |item| + requests.push({ put_request: { item: marshal_item(kind, item) } }) + unused_old_keys.delete([ namespace_for_kind(kind), item[:key] ]) + num_items = num_items + 1 + end + end + + # Now delete any previously existing items whose keys were not in the current data + unused_old_keys.each do |tuple| + del_item = make_keys_hash(tuple[0], tuple[1]) + requests.push({ delete_request: { key: del_item } }) + end + + # Now set the special key that we check in initialized_internal? + inited_item = make_keys_hash(inited_key, inited_key) + requests.push({ put_request: { item: inited_item } }) + + DynamoDBUtil.batch_write_requests(@client, @table_name, requests) + + @logger.info { "Initialized table #{@table_name} with #{num_items} items" } + end + + def get_internal(kind, key) + resp = get_item_by_keys(namespace_for_kind(kind), key) + unmarshal_item(resp.item) + end + + def get_all_internal(kind) + items_out = {} + req = make_query_for_kind(kind) + while true + resp = @client.query(req) + resp.items.each do |item| + item_out = unmarshal_item(item) + items_out[item_out[:key].to_sym] = item_out + end + break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0 + req.exclusive_start_key = resp.last_evaluated_key + end + items_out + end + + def upsert_internal(kind, new_item) + encoded_item = marshal_item(kind, new_item) + begin + @client.put_item({ + table_name: @table_name, + item: encoded_item, + condition_expression: "attribute_not_exists(#namespace) or attribute_not_exists(#key) or :version > #version", + expression_attribute_names: { + "#namespace" => PARTITION_KEY, + "#key" => SORT_KEY, + "#version" => VERSION_ATTRIBUTE + }, + expression_attribute_values: { + ":version" => new_item[:version] + } + }) + new_item + rescue Aws::DynamoDB::Errors::ConditionalCheckFailedException + # The item was not updated because there's a newer item in the database. + # We must now read the item that's in the database and return it, so CachingStoreWrapper can cache it. + get_internal(kind, new_item[:key]) + end + end + + def initialized_internal? + resp = get_item_by_keys(inited_key, inited_key) + !resp.item.nil? && resp.item.length > 0 + end + + def stop + # AWS client doesn't seem to have a close method + end + + private + + def prefixed_namespace(base_str) + (@prefix.nil? || @prefix == "") ? 
base_str : "#{@prefix}:#{base_str}" + end + + def namespace_for_kind(kind) + prefixed_namespace(kind[:namespace]) + end + + def inited_key + prefixed_namespace("$inited") + end + + def make_keys_hash(namespace, key) + { + PARTITION_KEY => namespace, + SORT_KEY => key + } + end + + def make_query_for_kind(kind) + { + table_name: @table_name, + consistent_read: true, + key_conditions: { + PARTITION_KEY => { + comparison_operator: "EQ", + attribute_value_list: [ namespace_for_kind(kind) ] + } + } + } + end + + def get_item_by_keys(namespace, key) + @client.get_item({ + table_name: @table_name, + key: make_keys_hash(namespace, key) + }) + end + + def read_existing_keys(kinds) + keys = Set.new + kinds.each do |kind| + req = make_query_for_kind(kind).merge({ + projection_expression: "#namespace, #key", + expression_attribute_names: { + "#namespace" => PARTITION_KEY, + "#key" => SORT_KEY + } + }) + while true + resp = @client.query(req) + resp.items.each do |item| + namespace = item[PARTITION_KEY] + key = item[SORT_KEY] + keys.add([ namespace, key ]) + end + break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0 + req.exclusive_start_key = resp.last_evaluated_key + end + end + keys + end + + def marshal_item(kind, item) + make_keys_hash(namespace_for_kind(kind), item[:key]).merge({ + VERSION_ATTRIBUTE => item[:version], + ITEM_JSON_ATTRIBUTE => item.to_json + }) + end + + def unmarshal_item(item) + return nil if item.nil? || item.length == 0 + json_attr = item[ITEM_JSON_ATTRIBUTE] + raise RuntimeError.new("DynamoDB map did not contain expected item string") if json_attr.nil? + JSON.parse(json_attr, symbolize_names: true) + end + end + + class DynamoDBUtil + # + # Calls client.batch_write_item as many times as necessary to submit all of the given requests. + # The requests array is consumed. + # + def self.batch_write_requests(client, table, requests) + batch_size = 25 + while true + chunk = requests.shift(batch_size) + break if chunk.empty? + client.batch_write_item({ request_items: { table => chunk } }) + end + end + end + end + end + end +end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index c48074a0..029c4243 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/integrations/dynamodb" require "ldclient-rb/integrations/redis" require "ldclient-rb/integrations/util/store_wrapper" @@ -6,6 +7,15 @@ module LaunchDarkly # Tools for connecting the LaunchDarkly client to other software. # module Integrations + # + # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). + # + # @since 5.5.0 + # + module DynamoDB + # code is in ldclient-rb/impl/integrations/dynamodb_impl + end + # # Integration with [Redis](https://redis.io/). # diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb new file mode 100644 index 00000000..553f54e9 --- /dev/null +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -0,0 +1,31 @@ +require "ldclient-rb/impl/integrations/dynamodb_impl" +require "ldclient-rb/integrations/util/store_wrapper" + +module LaunchDarkly + module Integrations + module DynamoDB + # + # Creates a DynamoDB-backed persistent feature store. + # + # To use this method, you must first have the `aws_sdk` gem installed. Then, + # put the object returned by this method into the `feature_store` property of your + # client configuration ({LaunchDarkly::Config}). 
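+      #
+      # A minimal usage sketch (the table name, prefix, and SDK key shown here are only placeholders):
+      #
+      #     store = LaunchDarkly::Integrations::DynamoDB.new_feature_store("my-table", { prefix: "my-prefix" })
+      #     config = LaunchDarkly::Config.new(feature_store: store)
+      #     client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)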
+ # + # @param opts [Hash] the configuration options + # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) + # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) + # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :max_connections size of the Redis connection pool + # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # @option opts [Object] :pool custom connection pool, if desired + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # + def self.new_feature_store(table_name, opts) + core = LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBFeatureStoreCore.new(table_name, opts) + return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) + end + end + end +end diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb new file mode 100644 index 00000000..4a0e3cbf --- /dev/null +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -0,0 +1,77 @@ +require "feature_store_spec_base" +require "aws-sdk-dynamodb" +require "spec_helper" + + +$table_name = 'LD_DYNAMODB_TEST_TABLE' +$endpoint = 'http://localhost:8000' +$my_prefix = 'testprefix' +$null_log = ::Logger.new($stdout) +$null_log.level = ::Logger::FATAL + +$dynamodb_opts = { + credentials: Aws::Credentials.new("key", "secret"), + region: "us-east-1", + endpoint: $endpoint +} + +$base_opts = { + dynamodb_opts: $dynamodb_opts, + prefix: $my_prefix, + logger: $null_log +} + +def create_dynamodb_store(opts = {}) + LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, + opts.merge($base_opts).merge({ expiration: 60 })) +end + +def create_dynamodb_store_uncached(opts = {}) + LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, + opts.merge($base_opts).merge({ expiration: 0 })) +end + +def create_table_if_necessary + client = create_test_client + begin + client.describe_table({ table_name: $table_name }) + return # no error, table exists + rescue Blahbhbhba + # fall through to code below - we'll create the table + end + + req = { + table_name: $table_name, + key_schema: [ + { attribute_name: "namespace", key_type: "HASH" }, + { attribute_name: "key", key_type: "RANGE" } + ], + attribute_definitions: [ + { attribute_name: "namespace", attribute_type: "S" }, + { attribute_name: "key", attribute_type: "S" } + ] + } + client.create_table(req) + + # When DynamoDB creates a table, it may not be ready to use immediately +end + +def create_test_client + Aws::DynamoDB::Client.new($dynamodb_opts) +end + + +describe "DynamoDB feature store" do + + # These tests will all fail if there isn't a local DynamoDB instance running. 
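+  # One way to run an instance locally is the same image that the CI configuration above uses:
+  #   docker run -p 8000:8000 amazon/dynamodb-local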
+ + create_table_if_necessary + + context "with local cache" do + include_examples "feature_store", method(:create_dynamodb_store) + end + + context "without local cache" do + include_examples "feature_store", method(:create_dynamodb_store_uncached) + end +end From bde227450dee5c868e099fbc5c20de7c80b272ee Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 10:56:38 -0800 Subject: [PATCH 055/135] fix exception name --- spec/integrations/dynamodb_feature_store_spec.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 4a0e3cbf..98e32ed6 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -36,7 +36,7 @@ def create_table_if_necessary begin client.describe_table({ table_name: $table_name }) return # no error, table exists - rescue Blahbhbhba + rescue Aws::DynamoDB::Errors::ResourceNotFoundException # fall through to code below - we'll create the table end From 4e493172c97a5cbf745176167d3b4a5aec637e45 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 10:59:17 -0800 Subject: [PATCH 056/135] fix test setup --- spec/integrations/dynamodb_feature_store_spec.rb | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 98e32ed6..38104fb3 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -49,7 +49,11 @@ def create_table_if_necessary attribute_definitions: [ { attribute_name: "namespace", attribute_type: "S" }, { attribute_name: "key", attribute_type: "S" } - ] + ], + provisioned_throughput: { + read_capacity_units: 1, + write_capacity_units: 1 + } } client.create_table(req) From c71bbec59a1b76f933c21f9acc7d55860d1b4303 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 11:32:30 -0800 Subject: [PATCH 057/135] comments --- lib/ldclient-rb/integrations.rb | 6 ++++++ lib/ldclient-rb/integrations/dynamodb.rb | 6 +++--- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index 029c4243..bfaed2eb 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -10,6 +10,9 @@ module Integrations # # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). # + # Note that in order to use this integration, you must first install one of the AWS SDK gems: either + # `aws-sdk-dynamodb`, or the full `aws-sdk`. + # # @since 5.5.0 # module DynamoDB @@ -19,6 +22,9 @@ module DynamoDB # # Integration with [Redis](https://redis.io/). # + # Note that in order to use this integration, you must first install the `redis` and `connection-pool` + # gems. + # # @since 5.5.0 # module Redis diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index 553f54e9..66d3b583 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -7,9 +7,9 @@ module DynamoDB # # Creates a DynamoDB-backed persistent feature store. # - # To use this method, you must first have the `aws_sdk` gem installed. Then, - # put the object returned by this method into the `feature_store` property of your - # client configuration ({LaunchDarkly::Config}). 
+ # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or + # the full `aws-sdk`. Then, put the object returned by this method into the `feature_store` property + # of your client configuration ({LaunchDarkly::Config}). # # @param opts [Hash] the configuration options # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) From cfe3b188df3ef64139310bc73dce03e9891c5883 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 11:48:04 -0800 Subject: [PATCH 058/135] readme --- README.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index ead2bb6b..43819554 100644 --- a/README.md +++ b/README.md @@ -121,6 +121,11 @@ else end ``` +Database integrations +--------------------- + +Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `LaunchDarkly::Integrations::Redis` and `LaunchDarkly::Integrations::DynamoDB` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [source code](https://github.com/launchdarkly/ruby-client-private/tree/master/lib/ldclient-rb/integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. + Using flag data from a file --------------------------- For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.rb`](https://github.com/launchdarkly/ruby-client/blob/master/lib/ldclient-rb/file_data_source.rb) for more details. @@ -153,9 +158,9 @@ About LaunchDarkly * [JavaScript](http://docs.launchdarkly.com/docs/js-sdk-reference "LaunchDarkly JavaScript SDK") * [PHP](http://docs.launchdarkly.com/docs/php-sdk-reference "LaunchDarkly PHP SDK") * [Python](http://docs.launchdarkly.com/docs/python-sdk-reference "LaunchDarkly Python SDK") - * [Python Twisted](http://docs.launchdarkly.com/docs/python-twisted-sdk-reference "LaunchDarkly Python Twisted SDK") * [Go](http://docs.launchdarkly.com/docs/go-sdk-reference "LaunchDarkly Go SDK") * [Node.JS](http://docs.launchdarkly.com/docs/node-sdk-reference "LaunchDarkly Node SDK") + * [Electron](http://docs.launchdarkly.com/docs/electron-sdk-reference "LaunchDarkly Electron SDK") * [.NET](http://docs.launchdarkly.com/docs/dotnet-sdk-reference "LaunchDarkly .Net SDK") * [Ruby](http://docs.launchdarkly.com/docs/ruby-sdk-reference "LaunchDarkly Ruby SDK") * [iOS](http://docs.launchdarkly.com/docs/ios-sdk-reference "LaunchDarkly iOS SDK") From 69cf890825ab41a5529242b0f4cb90f46bb81a5b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 12:08:41 -0800 Subject: [PATCH 059/135] fix doc comment --- lib/ldclient-rb/integrations/dynamodb.rb | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index 66d3b583..c9ded019 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -12,14 +12,12 @@ module DynamoDB # of your client configuration ({LaunchDarkly::Config}). 
# # @param opts [Hash] the configuration options - # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) - # @option opts [Hash] :redis_opts options to pass to the Redis constructor (if you want to specify more than just `redis_url`) - # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly + # @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`) + # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use + # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` - # @option opts [Integer] :max_connections size of the Redis connection pool # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache - # @option opts [Object] :pool custom connection pool, if desired # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # def self.new_feature_store(table_name, opts) From 321eb6eeb247764437233f8478b5ac3c1f9e6492 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 14:45:13 -0800 Subject: [PATCH 060/135] greatly improve documentation comments --- lib/ldclient-rb.rb | 7 + lib/ldclient-rb/cache_store.rb | 1 + lib/ldclient-rb/config.rb | 291 ++++++++++++------ lib/ldclient-rb/evaluation.rb | 64 +++- lib/ldclient-rb/event_summarizer.rb | 3 + lib/ldclient-rb/events.rb | 16 + lib/ldclient-rb/expiring_cache.rb | 1 + lib/ldclient-rb/file_data_source.rb | 8 +- lib/ldclient-rb/flags_state.rb | 5 +- lib/ldclient-rb/impl.rb | 2 + .../integrations/util/store_wrapper.rb | 3 + lib/ldclient-rb/interfaces.rb | 42 +++ lib/ldclient-rb/ldclient.rb | 116 ++++--- lib/ldclient-rb/memoized_value.rb | 2 + lib/ldclient-rb/newrelic.rb | 1 + lib/ldclient-rb/non_blocking_thread_pool.rb | 6 +- lib/ldclient-rb/polling.rb | 1 + lib/ldclient-rb/requestor.rb | 3 +- lib/ldclient-rb/simple_lru_cache.rb | 1 + lib/ldclient-rb/stream.rb | 8 + lib/ldclient-rb/user_filter.rb | 1 + lib/ldclient-rb/util.rb | 1 + lib/sse_client/sse_client.rb | 7 + scripts/gendocs.sh | 9 + 24 files changed, 446 insertions(+), 153 deletions(-) create mode 100755 scripts/gendocs.sh diff --git a/lib/ldclient-rb.rb b/lib/ldclient-rb.rb index e355a304..e5477ecb 100644 --- a/lib/ldclient-rb.rb +++ b/lib/ldclient-rb.rb @@ -1,3 +1,10 @@ + +# +# Namespace for the LaunchDarkly Ruby SDK. +# +module LaunchDarkly +end + require "ldclient-rb/version" require "ldclient-rb/interfaces" require "ldclient-rb/util" diff --git a/lib/ldclient-rb/cache_store.rb b/lib/ldclient-rb/cache_store.rb index 0677da65..a0a50fbf 100644 --- a/lib/ldclient-rb/cache_store.rb +++ b/lib/ldclient-rb/cache_store.rb @@ -7,6 +7,7 @@ module LaunchDarkly # # @see https://github.com/plataformatec/faraday-http-cache # @see https://github.com/ruby-concurrency/thread_safe + # @private # class ThreadSafeMemoryStore # diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index dc89d30a..e16e998a 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -8,66 +8,35 @@ module LaunchDarkly # # class Config + # rubocop:disable Metrics/AbcSize, Metrics/PerceivedComplexity + # # Constructor for creating custom LaunchDarkly configurations. 
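+    #
+    # @example A minimal custom configuration (the option values shown are arbitrary)
+    #   config = LaunchDarkly::Config.new(capacity: 5000, flush_interval: 10)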
# # @param opts [Hash] the configuration options - # @option opts [Logger] :logger A logger to use for messages from the - # LaunchDarkly client. Defaults to the Rails logger in a Rails - # environment, or stdout otherwise. - # @option opts [String] :base_uri ("https://app.launchdarkly.com") The base - # URL for the LaunchDarkly server. Most users should use the default value. - # @option opts [String] :stream_uri ("https://stream.launchdarkly.com") The - # URL for the LaunchDarkly streaming events server. Most users should use the default value. - # @option opts [String] :events_uri ("https://events.launchdarkly.com") The - # URL for the LaunchDarkly events server. Most users should use the default value. - # @option opts [Integer] :capacity (10000) The capacity of the events - # buffer. The client buffers up to this many events in memory before - # flushing. If the capacity is exceeded before the buffer is flushed, - # events will be discarded. - # @option opts [Float] :flush_interval (30) The number of seconds between - # flushes of the event buffer. - # @option opts [Float] :read_timeout (10) The read timeout for network - # connections in seconds. - # @option opts [Float] :connect_timeout (2) The connect timeout for network - # connections in seconds. - # @option opts [Object] :cache_store A cache store for the Faraday HTTP caching - # library. Defaults to the Rails cache in a Rails environment, or a - # thread-safe in-memory store otherwise. - # @option opts [Object] :feature_store A store for feature flags and related data. Defaults to an in-memory - # cache, or you can use RedisFeatureStore. - # @option opts [Boolean] :use_ldd (false) Whether you are using the LaunchDarkly relay proxy in - # daemon mode. In this configuration, the client will not use a streaming connection to listen - # for updates, but instead will get feature state from a Redis instance. The `stream` and - # `poll_interval` options will be ignored if this option is set to true. - # @option opts [Boolean] :offline (false) Whether the client should be initialized in - # offline mode. In offline mode, default values are returned for all flags and no - # remote network requests are made. - # @option opts [Float] :poll_interval (30) The number of seconds between polls for flag updates - # if streaming is off. - # @option opts [Boolean] :stream (true) Whether or not the streaming API should be used to receive flag updates. - # Streaming should only be disabled on the advice of LaunchDarkly support. - # @option opts [Boolean] all_attributes_private (false) If true, all user attributes (other than the key) - # will be private, not just the attributes specified in `private_attribute_names`. - # @option opts [Array] :private_attribute_names Marks a set of attribute names private. Any users sent to - # LaunchDarkly with this configuration active will have attributes with these names removed. - # @option opts [Boolean] :send_events (true) Whether or not to send events back to LaunchDarkly. - # This differs from `offline` in that it affects only the sending of client-side events, not - # streaming or polling for events from the server. - # @option opts [Integer] :user_keys_capacity (1000) The number of user keys that the event processor - # can remember at any one time, so that duplicate user details will not be sent in analytics events. - # @option opts [Float] :user_keys_flush_interval (300) The interval in seconds at which the event - # processor will reset its set of known user keys. 
- # @option opts [Boolean] :inline_users_in_events (false) Whether to include full user details in every - # analytics event. By default, events will only include the user key, except for one "index" event - # that provides the full details for the user. - # @option opts [Object] :update_processor (DEPRECATED) An object that will receive feature flag data from - # LaunchDarkly. Defaults to either the streaming or the polling processor, can be customized for tests. - # @option opts [Object] :update_processor_factory A function that takes the SDK and configuration object - # as parameters, and returns an object that can obtain feature flag data and put it into the feature - # store. Defaults to creating either the streaming or the polling processor, can be customized for tests. - # @return [type] [description] - # rubocop:disable Metrics/AbcSize, Metrics/PerceivedComplexity + # @option opts [Logger] :logger See {#logger}. + # @option opts [String] :base_uri ("https://app.launchdarkly.com") See {#base_uri}. + # @option opts [String] :stream_uri ("https://stream.launchdarkly.com") See {#stream_uri}. + # @option opts [String] :events_uri ("https://events.launchdarkly.com") See {#events_uri}. + # @option opts [Integer] :capacity (10000) See {#capacity}. + # @option opts [Float] :flush_interval (30) See {#flush_interval}. + # @option opts [Float] :read_timeout (10) See {#read_timeout}. + # @option opts [Float] :connect_timeout (2) See {#connect_timeout}. + # @option opts [Object] :cache_store See {#cache_store}. + # @option opts [Object] :feature_store See {#feature_store}. + # @option opts [Boolean] :use_ldd (false) See {#use_ldd?}. + # @option opts [Boolean] :offline (false) See {#offline?}. + # @option opts [Float] :poll_interval (30) See {#poll_interval}. + # @option opts [Boolean] :stream (true) See {#stream?}. + # @option opts [Boolean] all_attributes_private (false) See {#all_attributes_private}. + # @option opts [Array] :private_attribute_names See {#private_attribute_names}. + # @option opts [Boolean] :send_events (true) See {#send_events}. + # @option opts [Integer] :user_keys_capacity (1000) See {#user_keys_capacity}. + # @option opts [Float] :user_keys_flush_interval (300) See {#user_keys_flush_interval}. + # @option opts [Boolean] :inline_users_in_events (false) See {#inline_users_in_events}. + # @option opts [Object] :update_processor See {#update_processor}. + # @option opts [Object] :update_processor_factory See {#update_processor_factory}. + # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @stream_uri = (opts[:stream_uri] || Config.default_stream_uri).chomp("/") @@ -95,43 +64,56 @@ def initialize(opts = {}) end # - # The base URL for the LaunchDarkly server. + # The base URL for the LaunchDarkly server. This is configurable mainly for testing + # purposes; most users should use the default value. + # @return [String] # - # @return [String] The configured base URL for the LaunchDarkly server. attr_reader :base_uri # - # The base URL for the LaunchDarkly streaming server. + # The base URL for the LaunchDarkly streaming server. This is configurable mainly for testing + # purposes; most users should use the default value. + # @return [String] # - # @return [String] The configured base URL for the LaunchDarkly streaming server. attr_reader :stream_uri # - # The base URL for the LaunchDarkly events server. + # The base URL for the LaunchDarkly events server. 
This is configurable mainly for testing + # purposes; most users should use the default value. + # @return [String] # - # @return [String] The configured base URL for the LaunchDarkly events server. attr_reader :events_uri # # Whether streaming mode should be enabled. Streaming mode asynchronously updates - # feature flags in real-time using server-sent events. + # feature flags in real-time using server-sent events. Streaming is enabled by default, and + # should only be disabled on the advice of LaunchDarkly support. + # @return [Boolean] # - # @return [Boolean] True if streaming mode should be enabled def stream? @stream end # - # Whether to use the LaunchDarkly relay proxy in daemon mode. In this mode, we do - # not use polling or streaming to get feature flag updates from the server, but instead - # read them from a Redis instance that is updated by the proxy. + # Whether to use the LaunchDarkly relay proxy in daemon mode. In this mode, the client does not + # use polling or streaming to get feature flag updates from the server, but instead reads them + # from the {#feature_store feature store}, which is assumed to be a database that is populated by + # a LaunchDarkly relay proxy. For more information, see ["The relay proxy"](https://docs.launchdarkly.com/v2.0/docs/the-relay-proxy) + # and ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # + # All other properties related to streaming or polling are ignored if this option is set to true. + # + # @return [Boolean] # - # @return [Boolean] True if using the LaunchDarkly relay proxy in daemon mode def use_ldd? @use_ldd end - # TODO docs + # + # Whether the client should be initialized in offline mode. In offline mode, default values are + # returned for all flags and no remote network requests are made. + # @return [Boolean] + # def offline? @offline end @@ -139,20 +121,23 @@ def offline? # # The number of seconds between flushes of the event buffer. Decreasing the flush interval means # that the event buffer is less likely to reach capacity. + # @return [Float] # - # @return [Float] The configured number of seconds between flushes of the event buffer. attr_reader :flush_interval # # The number of seconds to wait before polling for feature flag updates. This option has no - # effect unless streaming is disabled + # effect unless streaming is disabled. + # @return [Float] + # attr_reader :poll_interval # # The configured logger for the LaunchDarkly client. The client library uses the log to - # print warning and error messages. + # print warning and error messages. If not specified, this defaults to the Rails logger + # in a Rails environment, or stdout otherwise. + # @return [Logger] # - # @return [Logger] The configured logger attr_reader :logger # @@ -161,114 +146,208 @@ def offline? # the buffer is flushed, events will be discarded. # Increasing the capacity means that events are less likely to be discarded, # at the cost of consuming more memory. + # @return [Integer] # - # @return [Integer] The configured capacity of the event buffer attr_reader :capacity # - # The store for the Faraday HTTP caching library. Stores should respond to - # 'read' and 'write' requests. + # A store for HTTP caching. This must support the semantics used by the + # [`faraday-http-cache`](https://github.com/plataformatec/faraday-http-cache) gem. Defaults + # to the Rails cache in a Rails environment, or a thread-safe in-memory store otherwise. 
+ # @return [Object] # - # @return [Object] The configured store for the Faraday HTTP caching library. attr_reader :cache_store # - # The read timeout for network connections in seconds. + # The read timeout for network connections in seconds. This does not apply to the streaming + # connection, which uses a longer timeout since the server does not send data constantly. + # @return [Float] # - # @return [Float] The read timeout in seconds. attr_reader :read_timeout # # The connect timeout for network connections in seconds. + # @return [Float] # - # @return [Float] The connect timeout in seconds. attr_reader :connect_timeout # - # A store for feature flag configuration rules. + # A store for feature flags and related data. The client uses it to store all data received + # from LaunchDarkly, and uses the last stored data when evaluating flags. Defaults to + # {InMemoryFeatureStore}; for other implementations, see {LaunchDarkly::Integrations}. + # + # For more information, see ["Using a persistent feature store"](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). + # + # @return [LaunchDarkly::Interfaces::FeatureStore] # attr_reader :feature_store - # The proxy configuration string + # + # The proxy configuration string. + # @return [String] # attr_reader :proxy + # + # True if all user attributes (other than the key) should be considered private. This means + # that the attribute values will not be sent to LaunchDarkly in analytics events and will not + # appear on the LaunchDarkly dashboard. + # @return [Boolean] + # @see #private_attribute_names + # attr_reader :all_attributes_private + # + # A list of user attribute names that should always be considered private. This means that the + # attribute values will not be sent to LaunchDarkly in analytics events and will not appear on + # the LaunchDarkly dashboard. + # + # You can also specify the same behavior for an individual flag evaluation by storing an array + # of attribute names in the `:privateAttributeNames` property (note camelcase name) of the + # user object. + # + # @return [Array] + # @see #all_attributes_private + # attr_reader :private_attribute_names # - # Whether to send events back to LaunchDarkly. + # Whether to send events back to LaunchDarkly. This differs from {#offline?} in that it affects + # only the sending of client-side events, not streaming or polling for events from the server. + # @return [Boolean] # attr_reader :send_events # - # The number of user keys that the event processor can remember at any one time, so that - # duplicate user details will not be sent in analytics events. + # The number of user keys that the event processor can remember at any one time. This reduces the + # amount of duplicate user details sent in analytics events. + # @return [Integer] + # @see #user_keys_flush_interval # attr_reader :user_keys_capacity # # The interval in seconds at which the event processor will reset its set of known user keys. + # @return [Float] + # @see #user_keys_capacity # attr_reader :user_keys_flush_interval # - # Whether to include full user details in every - # analytics event. By default, events will only include the user key, except for one "index" event - # that provides the full details for the user. + # Whether to include full user details in every analytics event. By default, events will only + # include the user key, except for one "index" event that provides the full details for the user. + # The only reason to change this is if you are using the Analytics Data Stream. 
+ # @return [Boolean] # attr_reader :inline_users_in_events + # + # An object that is responsible for receiving feature flag data from LaunchDarkly. By default, + # the client uses its standard polling or streaming implementation; this is customizable for + # testing purposes. + # @return [LaunchDarkly::Interfaces::UpdateProcessor] + # @deprecated The preferred way to set this is now with {#update_processor_factory}. + # attr_reader :update_processor + # + # Factory for an object that is responsible for receiving feature flag data from LaunchDarkly + # By default, the client uses its standard polling or streaming implementation; this is + # customizable for testing purposes. + # + # The factory is a lambda or Proc that takes two parameters: the SDK key and the {Config}. It + # must return an object that conforms to {LaunchDarkly::Interfaces::UpdateProcessor}. + # + # @return [lambda] + # @see FileDataSource + # attr_reader :update_processor_factory - + # # The default LaunchDarkly client configuration. This configuration sets # reasonable defaults for most users. - # # @return [Config] The default LaunchDarkly configuration. + # def self.default Config.new end + # + # The default value for {#capacity}. + # @return [Integer] 10000 + # def self.default_capacity 10000 end + # + # The default value for {#base_uri}. + # @return [String] "https://app.launchdarkly.com" + # def self.default_base_uri "https://app.launchdarkly.com" end + # + # The default value for {#stream_uri}. + # @return [String] "https://stream.launchdarkly.com" + # def self.default_stream_uri "https://stream.launchdarkly.com" end + # + # The default value for {#events_uri}. + # @return [String] "https://events.launchdarkly.com" + # def self.default_events_uri "https://events.launchdarkly.com" end + # + # The default value for {#cache_store}. + # @return [Object] the Rails cache if in Rails, or a simple in-memory implementation otherwise + # def self.default_cache_store defined?(Rails) && Rails.respond_to?(:cache) ? Rails.cache : ThreadSafeMemoryStore.new end + # + # The default value for {#flush_interval}. + # @return [Float] 10 + # def self.default_flush_interval 10 end + # + # The default value for {#read_timeout}. + # @return [Float] 10 + # def self.default_read_timeout 10 end + # + # The default value for {#connect_timeout}. + # @return [Float] 10 + # def self.default_connect_timeout 2 end + # + # The default value for {#proxy}. + # @return [String] nil + # def self.default_proxy nil end + # + # The default value for {#logger}. + # @return [::Logger] the Rails logger if in Rails, or a default [::Logger] at WARN level otherwise + # def self.default_logger if defined?(Rails) && Rails.respond_to?(:logger) Rails.logger @@ -279,34 +358,66 @@ def self.default_logger end end + # + # The default value for {#stream?}. + # @return [Boolean] true + # def self.default_stream true end + # + # The default value for {#use_ldd?}. + # @return [Boolean] false + # def self.default_use_ldd false end + # + # The default value for {#feature_store}. + # @return [LaunchDarkly::Interfaces::FeatureStore] an {InMemoryFeatureStore} + # def self.default_feature_store InMemoryFeatureStore.new end + # + # The default value for {#offline?}. + # @return [Boolean] false + # def self.default_offline false end + # + # The default value for {#poll_interval}. + # @return [Float] 30 + # def self.default_poll_interval 30 end + # + # The default value for {#send_events}. 
+ # @return [Boolean] true + # def self.default_send_events true end + # + # The default value for {#user_keys_capacity}. + # @return [Integer] 1000 + # def self.default_user_keys_capacity 1000 end + # + # The default value for {#user_keys_flush_interval}. + # @return [Float] 300 + # def self.default_user_keys_flush_interval 300 end diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index f73eb1ed..f873a6e3 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -2,7 +2,7 @@ require "semantic" module LaunchDarkly - # An object returned by `LDClient.variation_detail`, combining the result of a flag evaluation with + # An object returned by {LDClient#variation_detail}, combining the result of a flag evaluation with # an explanation of how it was calculated. class EvaluationDetail def initialize(value, variation_index, reason) @@ -11,19 +11,66 @@ def initialize(value, variation_index, reason) @reason = reason end - # @return [Object] The result of the flag evaluation. This will be either one of the flag's - # variations or the default value that was passed to the `variation` method. + # + # The result of the flag evaluation. This will be either one of the flag's variations, or the + # default value that was passed to {LDClient#variation_detail}. It is the same as the return + # value of {LDClient#variation}. + # + # @return [Object] + # attr_reader :value - # @return [int|nil] The index of the returned value within the flag's list of variations, e.g. - # 0 for the first variation - or `nil` if the default value was returned. + # + # The index of the returned value within the flag's list of variations. The first variation is + # 0, the second is 1, etc. This is `nil` if the default value was returned. + # + # @return [int|nil] + # attr_reader :variation_index - # @return [Hash] An object describing the main factor that influenced the flag evaluation value. + # + # An object describing the main factor that influenced the flag evaluation value. + # + # This object is currently represented as a Hash, which may have the following keys: + # + # `:kind`: The general category of reason. Possible values: + # + # * `'OFF'`: the flag was off and therefore returned its configured off value + # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules + # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag + # * `'RULE_MATCH'`: the user matched one of the flag's rules + # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one + # prerequisite flag that either was off or did not return the desired variation + # * `'ERROR'`: the flag could not be evaluated, so the default value was returned + # + # `:ruleIndex`: If the kind was `RULE_MATCH`, this is the positional index of the + # matched rule (0 for the first rule). + # + # `:ruleId`: If the kind was `RULE_MATCH`, this is the rule's unique identifier. + # + # `:prerequisiteKey`: If the kind was `PREREQUISITE_FAILED`, this is the flag key of + # the prerequisite flag that failed. + # + # `:errorKind`: If the kind was `ERROR`, this indicates the type of error: + # + # * `'CLIENT_NOT_READY'`: the caller tried to evaluate a flag before the client had + # successfully initialized + # * `'FLAG_NOT_FOUND'`: the caller provided a flag key that did not match any known flag + # * `'MALFORMED_FLAG'`: there was an internal inconsistency in the flag data, e.g. 
a + # rule specified a nonexistent variation + # * `'USER_NOT_SPECIFIED'`: the user object or user key was not provied + # * `'EXCEPTION'`: an unexpected exception stopped flag evaluation + # + # @return [Hash] + # attr_reader :reason - # @return [boolean] True if the flag evaluated to the default value rather than to one of its - # variations. + # + # Tests whether the flag evaluation returned a default value. This is the same as checking + # whether {#variation_index} is nil. + # + # @return [Boolean] + # def default_value? variation_index.nil? end @@ -33,6 +80,7 @@ def ==(other) end end + # @private module Evaluation BUILTINS = [:key, :ip, :country, :email, :firstName, :lastName, :avatar, :name, :anonymous] diff --git a/lib/ldclient-rb/event_summarizer.rb b/lib/ldclient-rb/event_summarizer.rb index 1c55b524..c48a400f 100644 --- a/lib/ldclient-rb/event_summarizer.rb +++ b/lib/ldclient-rb/event_summarizer.rb @@ -1,11 +1,14 @@ module LaunchDarkly + # @private EventSummary = Struct.new(:start_date, :end_date, :counters) # Manages the state of summarizable information for the EventProcessor, including the # event counters and user deduplication. Note that the methods of this class are # deliberately not thread-safe; the EventProcessor is responsible for enforcing # synchronization across both the summarizer and the event queue. + # + # @private class EventSummarizer def initialize clear diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index e19d6b02..cbae5ac5 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -9,6 +9,10 @@ module LaunchDarkly MAX_FLUSH_WORKERS = 5 CURRENT_SCHEMA_VERSION = 3 + private_constant :MAX_FLUSH_WORKERS + private_constant :CURRENT_SCHEMA_VERSION + + # @private class NullEventProcessor def add_event(event) end @@ -20,6 +24,7 @@ def stop end end + # @private class EventMessage def initialize(event) @event = event @@ -27,12 +32,15 @@ def initialize(event) attr_reader :event end + # @private class FlushMessage end + # @private class FlushUsersMessage end + # @private class SynchronousMessage def initialize @reply = Concurrent::Semaphore.new(0) @@ -47,12 +55,15 @@ def wait_for_completion end end + # @private class TestSyncMessage < SynchronousMessage end + # @private class StopMessage < SynchronousMessage end + # @private class EventProcessor def initialize(sdk_key, config, client = nil) @queue = Queue.new @@ -99,6 +110,7 @@ def wait_until_inactive end end + # @private class EventDispatcher def initialize(queue, sdk_key, config, client) @sdk_key = sdk_key @@ -252,8 +264,10 @@ def handle_response(res) end end + # @private FlushPayload = Struct.new(:events, :summary) + # @private class EventBuffer def initialize(capacity, logger) @capacity = capacity @@ -290,6 +304,7 @@ def clear end end + # @private class EventPayloadSendTask def run(sdk_key, config, client, payload, formatter) events_out = formatter.make_output_events(payload.events, payload.summary) @@ -327,6 +342,7 @@ def run(sdk_key, config, client, payload, formatter) end end + # @private class EventOutputFormatter def initialize(config) @inline_users = config.inline_users_in_events diff --git a/lib/ldclient-rb/expiring_cache.rb b/lib/ldclient-rb/expiring_cache.rb index 6d8c48f8..fa6051c9 100644 --- a/lib/ldclient-rb/expiring_cache.rb +++ b/lib/ldclient-rb/expiring_cache.rb @@ -6,6 +6,7 @@ module LaunchDarkly # * made thread-safe # * removed many unused methods # * reading a key does not reset its expiration time, only writing + # @private class ExpiringCache def 
initialize(max_size, ttl) @max_size = max_size diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index da80f26a..120276fc 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -7,12 +7,15 @@ module LaunchDarkly # To avoid pulling in 'listen' and its transitive dependencies for people who aren't using the # file data source or who don't need auto-updating, we only enable auto-update if the 'listen' # gem has been provided by the host app. + # @private @@have_listen = false begin require 'listen' @@have_listen = true rescue LoadError end + + # @private def self.have_listen? @@have_listen end @@ -45,7 +48,7 @@ def self.have_listen? # to request existing flags directly from the LaunchDarkly server in JSON format, and use this # output as the starting point for your file. In Linux you would do this: # - # curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all + # curl -H "Authorization: YOUR_SDK_KEY" https://app.launchdarkly.com/sdk/latest-all # # The output will look something like this (but with many more properties): # @@ -92,6 +95,8 @@ def self.have_listen? # duplicate key-- it will not load flags from any of the files. # class FileDataSource + include LaunchDarkly::Interfaces::UpdateProcessor + # # Returns a factory for the file data source component. # @@ -116,6 +121,7 @@ def self.factory(options={}) end end + # @private class FileDataSourceImpl def initialize(feature_store, logger, options={}) @feature_store = feature_store diff --git a/lib/ldclient-rb/flags_state.rb b/lib/ldclient-rb/flags_state.rb index b761149c..4efe1404 100644 --- a/lib/ldclient-rb/flags_state.rb +++ b/lib/ldclient-rb/flags_state.rb @@ -3,8 +3,8 @@ module LaunchDarkly # # A snapshot of the state of all feature flags with regard to a specific user, generated by - # calling the client's all_flags_state method. Serializing this object to JSON using - # JSON.generate (or the to_json method) will produce the appropriate data structure for + # calling the {LDClient#all_flags_state}. Serializing this object to JSON using + # `JSON.generate` (or the `to_json` method) will produce the appropriate data structure for # bootstrapping the LaunchDarkly JavaScript client. # class FeatureFlagsState @@ -15,6 +15,7 @@ def initialize(valid) end # Used internally to build the state map. + # @private def add_flag(flag, value, variation, reason = nil, details_only_if_tracked = false) key = flag[:key] @flag_values[key] = value diff --git a/lib/ldclient-rb/impl.rb b/lib/ldclient-rb/impl.rb index 85079baf..3df0d7e3 100644 --- a/lib/ldclient-rb/impl.rb +++ b/lib/ldclient-rb/impl.rb @@ -4,6 +4,8 @@ module LaunchDarkly # Low-level implementation classes. Everything in this module should be considered non-public # and subject to change with any release. # + # @since 5.5.0 + # module Impl # code is in ldclient-rb/impl/ end diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index 58ecb2c4..46a648c1 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -153,6 +153,7 @@ module FeatureStoreCore # # @param all_data [Hash] a hash where each key is one of the data kind objects, and each # value is in turn a hash of string keys to entities + # @return [void] # def init_internal(all_data) end @@ -214,6 +215,8 @@ def initialized_internal? 
# # Performs any necessary cleanup to shut down the store when the client is being shut down. # + # @return [void] + # def stop end end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 510e1636..c9c38cfe 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -35,6 +35,7 @@ module FeatureStore # # @param all_data [Hash] a hash where each key is one of the data kind objects, and each # value is in turn a hash of string keys to entities + # @return [void] # def init(all_data) end @@ -67,6 +68,7 @@ def all(kind) # # @param kind [Object] the kind of entity to add or update # @param item [Hash] the entity to add or update + # @return [void] # def upsert(kind, item) end @@ -79,6 +81,7 @@ def upsert(kind, item) # @param kind [Object] the kind of entity to delete # @param key [String] the unique key of the entity # @param version [Integer] the entity must have a lower version than this to be deleted + # @return [void] # def delete(kind, key, version) end @@ -98,6 +101,45 @@ def initialized? # # Performs any necessary cleanup to shut down the store when the client is being shut down. # + # @return [void] + # + def stop + end + end + + # + # Mixin that defines the required methods of an update processor implementation. This is + # the component that delivers feature flag data from LaunchDarkly to the LDClient by putting + # the data in the {FeatureStore}. It is expected to run concurrently on its own thread. + # + # The client has its own standard implementation, which uses either a streaming connection or + # polling depending on your configuration. Normally you will not need to use another one + # except for testing purposes. {FileDataSource} provides one such test fixture. + # + module UpdateProcessor + # + # Checks whether the processor has finished initializing. Initialization is considered done + # once it has received one complete data set from LaunchDarkly. + # + # @return [Boolean] true if initialization is complete + # + def initialized? + end + + # + # Puts the processor into an active state. Normally this means it will make its first + # connection attempt to LaunchDarkly. If `start` has already been called, calling it again + # should simply return the same value as the first call. + # + # @return [Concurrent::Event] an Event which will be set once initialization is complete + # + def start + end + + # + # Puts the processor into an inactive state and releases all of its resources. + # This state should be considered permanent (`start` does not have to work after `stop`). + # def stop end end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index f8a75780..ffd82084 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -10,7 +10,6 @@ module LaunchDarkly # A client for LaunchDarkly. Client instances are thread-safe. Users # should create a single client instance for the lifetime of the application. # - # class LDClient include Evaluation # @@ -18,7 +17,6 @@ class LDClient # configuration parameter can also supplied to specify advanced options, # but for most use cases, the default configuration is appropriate. # - # # @param sdk_key [String] the SDK key for your LaunchDarkly account # @param config [Config] an optional client configuration object # @@ -57,15 +55,41 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) end end + # + # Tells the client that all pending analytics events should be delivered as soon as possible. 
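+    # For example, a batch job might force delivery of its remaining events before it exits.
+    # (This is an illustrative sketch; the event key and user shown here are arbitrary.)
+    #
+    #     client.track("batch-job-finished", {key: "job-runner"}, nil)
+    #     client.flush   # deliver the queued event now instead of waiting for the next interval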
+ # + # When the LaunchDarkly client generates analytics events (from {#variation}, {#variation_detail}, + # {#identify}, or {#track}), they are queued on a worker thread. The event thread normally + # sends all queued events to LaunchDarkly at regular intervals, controlled by the + # {Config#flush_interval} option. Calling `flush` triggers a send without waiting for the + # next interval. + # + # Flushing is asynchronous, so this method will return before it is complete. However, if you + # call {#close}, events are guaranteed to be sent before that method returns. + # def flush @event_processor.flush end - def toggle?(key, user, default = False) + # + # @param key [String] the feature flag key + # @param user [Hash] the user properties + # @param default [Boolean] (false) the value to use if the flag cannot be evaluated + # @return [Boolean] the flag value + # @deprecated Use {#variation} instead. + # + def toggle?(key, user, default = false) @config.logger.warn { "[LDClient] toggle? is deprecated. Use variation instead" } variation(key, user, default) end + # + # Creates a hash string that can be used by the JavaScript SDK to identify a user. + # For more information, see ["Secure mode"](https://docs.launchdarkly.com/docs/js-sdk-reference#section-secure-mode). + # + # @param user [Hash] the user properties + # @return [String] a hash string + # def secure_mode_hash(user) OpenSSL::HMAC.hexdigest("sha256", @sdk_key, user[:key].to_s) end @@ -78,13 +102,13 @@ def initialized? # # Determines the variation of a feature flag to present to a user. At a minimum, - # the user hash should contain a +:key+ . + # the user hash should contain a `:key`. # # @example Basic user hash # {key: "user@example.com"} # - # For authenticated users, the +:key+ should be the unique identifier for - # your user. For anonymous users, the +:key+ should be a session identifier + # For authenticated users, the `:key` should be the unique identifier for + # your user. For anonymous users, the `:key` should be a session identifier # or cookie. In either case, the only requirement is that the key # is unique to a user. # @@ -93,7 +117,7 @@ def initialized? # @example More complete user hash # {key: "user@example.com", ip: "127.0.0.1", country: "US"} # - # The user hash can contain arbitrary custom attributes stored in a +:custom+ sub-hash: + # The user hash can contain arbitrary custom attributes stored in a `:custom` sub-hash: # # @example A user hash with custom attributes # {key: "user@example.com", custom: {customer_rank: 1000, groups: ["google", "microsoft"]}} @@ -113,66 +137,61 @@ def variation(key, user, default) end # - # Determines the variation of a feature flag for a user, like `variation`, but also + # Determines the variation of a feature flag for a user, like {#variation}, but also # provides additional information about how this value was calculated. # - # The return value of `variation_detail` is an `EvaluationDetail` object, which has - # three properties: - # - # `value`: the value that was calculated for this user (same as the return value - # of `variation`) - # - # `variation_index`: the positional index of this value in the flag, e.g. 0 for the - # first variation - or `nil` if the default value was returned - # - # `reason`: a hash describing the main reason why this value was selected. 
Its `:kind` - # property will be one of the following: - # - # * `'OFF'`: the flag was off and therefore returned its configured off value - # * `'FALLTHROUGH'`: the flag was on but the user did not match any targets or rules - # * `'TARGET_MATCH'`: the user key was specifically targeted for this flag - # * `'RULE_MATCH'`: the user matched one of the flag's rules; the `:ruleIndex` and - # `:ruleId` properties indicate the positional index and unique identifier of the rule - # * `'PREREQUISITE_FAILED`': the flag was considered off because it had at least one - # prerequisite flag that either was off or did not return the desired variation; the - # `:prerequisiteKey` property indicates the key of the prerequisite that failed - # * `'ERROR'`: the flag could not be evaluated, e.g. because it does not exist or due - # to an unexpected error, and therefore returned the default value; the `:errorKind` - # property describes the nature of the error, such as `'FLAG_NOT_FOUND'` + # The return value of `variation_detail` is an {EvaluationDetail} object, which has + # three properties: the result value, the positional index of this value in the flag's + # list of variations, and an object describing the main reason why this value was + # selected. See {EvaluationDetail} for more on these properties. # - # The `reason` will also be included in analytics events, if you are capturing - # detailed event data for this flag. + # Calling `variation_detail` instead of `variation` also causes the "reason" data to + # be included in analytics events, if you are capturing detailed event data for this flag. # # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard # @param user [Hash] a hash containing parameters for the end user requesting the flag # @param default the default value of the flag # - # @return an `EvaluationDetail` object describing the result + # @return [EvaluationDetail] an object describing the result # def variation_detail(key, user, default) evaluate_internal(key, user, default, true) end # - # Registers the user + # Registers the user. This method simply creates an analytics event containing the user + # properties, so that LaunchDarkly will know about that user if it does not already. # - # @param [Hash] The user to register + # Calling {#variation} or {#variation_detail} also sends the user information to + # LaunchDarkly (if events are enabled), so you only need to use {#identify} if you + # want to identify the user without evaluating a flag. # + # Note that event delivery is asynchronous, so the event may not actually be sent + # until later; see {#flush}. + # + # @param user [Hash] The user to register; this can have all the same user properties + # described in {#variation} # @return [void] + # def identify(user) sanitize_user(user) @event_processor.add_event(kind: "identify", key: user[:key], user: user) end # - # Tracks that a user performed an event + # Tracks that a user performed an event. This method creates a "custom" analytics event + # containing the specified event name (key), user properties, and optional data. + # + # Note that event delivery is asynchronous, so the event may not actually be sent + # until later; see {#flush}. # # @param event_name [String] The name of the event - # @param user [Hash] The user that performed the event. 
This should be the same user hash used in calls to {#toggle?} + # @param user [Hash] The user to register; this can have all the same user properties + # described in {#variation} # @param data [Hash] A hash containing any additional data associated with the event - # # @return [void] + # def track(event_name, user, data) sanitize_user(user) @event_processor.add_event(kind: "custom", key: event_name, user: user, data: data) @@ -181,7 +200,7 @@ def track(event_name, user, data) # # Returns all feature flag values for the given user. This method is deprecated - please use # {#all_flags_state} instead. Current versions of the client-side SDK will not generate analytics - # events correctly if you pass the result of all_flags. + # events correctly if you pass the result of `all_flags`. # # @param user [Hash] The end user requesting the feature flags # @return [Hash] a hash of feature flag keys to values @@ -191,21 +210,21 @@ def all_flags(user) end # - # Returns a FeatureFlagsState object that encapsulates the state of all feature flags for a given user, + # Returns a {FeatureFlagsState} object that encapsulates the state of all feature flags for a given user, # including the flag values and also metadata that can be used on the front end. This method does not # send analytics events back to LaunchDarkly. # # @param user [Hash] The end user requesting the feature flags - # @param options={} [Hash] Optional parameters to control how the state is generated + # @param options [Hash] Optional parameters to control how the state is generated # @option options [Boolean] :client_side_only (false) True if only flags marked for use with the # client-side SDK should be included in the state. By default, all flags are included. # @option options [Boolean] :with_reasons (false) True if evaluation reasons should be included - # in the state (see `variation_detail`). By default, they are not included. + # in the state (see {#variation_detail}). By default, they are not included. # @option options [Boolean] :details_only_for_tracked_flags (false) True if any flag metadata that is - # normally only used for event generation - such as flag versions and evaluation reasons - should be - # omitted for any flag that does not have event tracking or debugging turned on. This reduces the size - # of the JSON data if you are passing the flag state to the front end. - # @return [FeatureFlagsState] a FeatureFlagsState object which can be serialized to JSON + # normally only used for event generation - such as flag versions and evaluation reasons - should be + # omitted for any flag that does not have event tracking or debugging turned on. This reduces the size + # of the JSON data if you are passing the flag state to the front end. + # @return [FeatureFlagsState] a {FeatureFlagsState} object which can be serialized to JSON # def all_flags_state(user, options={}) return FeatureFlagsState.new(false) if @config.offline? @@ -246,7 +265,7 @@ def all_flags_state(user, options={}) end # - # Releases all network connections and other resources held by the client, making it no longer usable + # Releases all network connections and other resources held by the client, making it no longer usable. # # @return [void] def close @@ -351,6 +370,7 @@ def make_feature_event(flag, user, detail, default, with_reasons) # # Used internally when the client is offline. 
+ # @private # class NullUpdateProcessor def start diff --git a/lib/ldclient-rb/memoized_value.rb b/lib/ldclient-rb/memoized_value.rb index 3ba766a6..ddddb7e0 100644 --- a/lib/ldclient-rb/memoized_value.rb +++ b/lib/ldclient-rb/memoized_value.rb @@ -2,6 +2,8 @@ module LaunchDarkly # Simple implementation of a thread-safe memoized value whose generator function will never be # run more than once, and whose value can be overridden by explicit assignment. + # Note that we no longer use this class and it will be removed in a future version. + # @private class MemoizedValue def initialize(&generator) @generator = generator diff --git a/lib/ldclient-rb/newrelic.rb b/lib/ldclient-rb/newrelic.rb index ed6eb4e4..5c9b7d48 100644 --- a/lib/ldclient-rb/newrelic.rb +++ b/lib/ldclient-rb/newrelic.rb @@ -1,4 +1,5 @@ module LaunchDarkly + # @private class LDNewRelic begin require "newrelic_rpm" diff --git a/lib/ldclient-rb/non_blocking_thread_pool.rb b/lib/ldclient-rb/non_blocking_thread_pool.rb index 81b7ea14..28ec42a9 100644 --- a/lib/ldclient-rb/non_blocking_thread_pool.rb +++ b/lib/ldclient-rb/non_blocking_thread_pool.rb @@ -3,10 +3,10 @@ require "concurrent/executors" require "thread" -# Simple wrapper for a FixedThreadPool that rejects new jobs if all the threads are busy, rather -# than blocking. Also provides a way to wait for all jobs to finish without shutting down. - module LaunchDarkly + # Simple wrapper for a FixedThreadPool that rejects new jobs if all the threads are busy, rather + # than blocking. Also provides a way to wait for all jobs to finish without shutting down. + # @private class NonBlockingThreadPool def initialize(capacity) @capacity = capacity diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 4ecd93f8..4c6769f3 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -2,6 +2,7 @@ require "thread" module LaunchDarkly + # @private class PollingProcessor def initialize(config, requestor) @config = config diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 25cce121..3e244fbe 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -3,7 +3,7 @@ require "faraday/http_cache" module LaunchDarkly - + # @private class UnexpectedResponseError < StandardError def initialize(status) @status = status @@ -14,6 +14,7 @@ def status end end + # @private class Requestor def initialize(sdk_key, config) @sdk_key = sdk_key diff --git a/lib/ldclient-rb/simple_lru_cache.rb b/lib/ldclient-rb/simple_lru_cache.rb index 64b1a709..4eda4e27 100644 --- a/lib/ldclient-rb/simple_lru_cache.rb +++ b/lib/ldclient-rb/simple_lru_cache.rb @@ -2,6 +2,7 @@ module LaunchDarkly # A non-thread-safe implementation of a LRU cache set with only add and reset methods. 
# Based on https://github.com/SamSaffron/lru_redux/blob/master/lib/lru_redux/cache.rb + # @private class SimpleLRUCacheSet def initialize(capacity) @values = {} diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 2151e945..660d7063 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -3,18 +3,26 @@ require "sse_client" module LaunchDarkly + # @private PUT = :put + # @private PATCH = :patch + # @private DELETE = :delete + # @private INDIRECT_PUT = :'indirect/put' + # @private INDIRECT_PATCH = :'indirect/patch' + # @private READ_TIMEOUT_SECONDS = 300 # 5 minutes; the stream should send a ping every 3 minutes + # @private KEY_PATHS = { FEATURES => "/flags/", SEGMENTS => "/segments/" } + # @private class StreamProcessor def initialize(sdk_key, config, requestor) @sdk_key = sdk_key diff --git a/lib/ldclient-rb/user_filter.rb b/lib/ldclient-rb/user_filter.rb index 449d8d2e..8cbf67ca 100644 --- a/lib/ldclient-rb/user_filter.rb +++ b/lib/ldclient-rb/user_filter.rb @@ -2,6 +2,7 @@ require "set" module LaunchDarkly + # @private class UserFilter def initialize(config) @all_attributes_private = config.all_attributes_private diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 707ba3ce..e303e18a 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -1,5 +1,6 @@ module LaunchDarkly + # @private module Util def self.log_exception(logger, message, exc) logger.error { "[LDClient] #{message}: #{exc.inspect}" } diff --git a/lib/sse_client/sse_client.rb b/lib/sse_client/sse_client.rb index 9f285360..5b7e0fd9 100644 --- a/lib/sse_client/sse_client.rb +++ b/lib/sse_client/sse_client.rb @@ -3,6 +3,13 @@ require "thread" require "uri" +# +# A lightweight Server-Sent Events implementation based on the `socketry` gem. +# +# This module will be moved to a separate gem in the future. +# +# @private +# module SSE # # A lightweight Server-Sent Events implementation, relying on two gems: socketry for sockets with diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh new file mode 100755 index 00000000..6280355e --- /dev/null +++ b/scripts/gendocs.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +gem install --conservative yard +gem install --conservative redcarpet # provides Markdown formatting + +# yard doesn't seem to do recursive directories, even though Ruby's Dir.glob supposedly recurses for "**" +PATHS="lib/*.rb lib/**/*.rb lib/**/**/*.rb lib/**/**/**/*.rb" + +yard doc --no-private --markup markdown --markup-provider redcarpet --embed-mixins $PATHS - README.md From 59759545c5e227f810655598f16e825b4903315e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 15:04:00 -0800 Subject: [PATCH 061/135] comment fixes --- lib/ldclient-rb/file_data_source.rb | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 120276fc..adc32ab6 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -25,8 +25,8 @@ def self.have_listen? # used in a test environment, to operate using a predetermined feature flag state without an # actual LaunchDarkly connection. # - # To use this component, call `FileDataSource.factory`, and store its return value in the - # `update_processor_factory` property of your LaunchDarkly client configuration. 
In the options + # To use this component, call {FileDataSource#factory}, and store its return value in the + # {Config#update_processor_factory} property of your LaunchDarkly client configuration. In the options # to `factory`, set `paths` to the file path(s) of your data file(s): # # factory = FileDataSource.factory(paths: [ myFilePath ]) @@ -34,21 +34,23 @@ def self.have_listen? # # This will cause the client not to connect to LaunchDarkly to get feature flags. The # client may still make network connections to send analytics events, unless you have disabled - # this with Config.send_events or Config.offline. + # this with {Config#send_events} or {Config#offline?}. # # Flag data files can be either JSON or YAML. They contain an object with three possible # properties: # - # - "flags": Feature flag definitions. - # - "flagValues": Simplified feature flags that contain only a value. - # - "segments": User segment definitions. + # - `flags`: Feature flag definitions. + # - `flagValues`: Simplified feature flags that contain only a value. + # - `segments`: User segment definitions. # - # The format of the data in "flags" and "segments" is defined by the LaunchDarkly application + # The format of the data in `flags` and `segments` is defined by the LaunchDarkly application # and is subject to change. Rather than trying to construct these objects yourself, it is simpler # to request existing flags directly from the LaunchDarkly server in JSON format, and use this # output as the starting point for your file. In Linux you would do this: # - # curl -H "Authorization: YOUR_SDK_KEY" https://app.launchdarkly.com/sdk/latest-all + # ``` + # curl -H "Authorization: YOUR_SDK_KEY" https://app.launchdarkly.com/sdk/latest-all + # ``` # # The output will look something like this (but with many more properties): # @@ -95,8 +97,6 @@ def self.have_listen? # duplicate key-- it will not load flags from any of the files. # class FileDataSource - include LaunchDarkly::Interfaces::UpdateProcessor - # # Returns a factory for the file data source component. # @@ -113,6 +113,7 @@ class FileDataSource # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true, and if the native file-watching # mechanism from 'listen' is not being used. The default value is 1 second. + # @return an object that can be stored in {Config#update_processor_factory} # def self.factory(options={}) return Proc.new do |sdk_key, config| From 414af9957bdf1897c399d8131bcfe04d027b0c89 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 15 Dec 2018 15:08:43 -0800 Subject: [PATCH 062/135] change name of "update processor" to "data source" --- lib/ldclient-rb/config.rb | 28 ++++++++++++++-------------- lib/ldclient-rb/file_data_source.rb | 12 +++++------- lib/ldclient-rb/interfaces.rb | 12 ++++++------ lib/ldclient-rb/ldclient.rb | 18 +++++++++--------- spec/file_data_source_spec.rb | 4 ++-- spec/ldclient_spec.rb | 8 ++++---- 6 files changed, 40 insertions(+), 42 deletions(-) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index e16e998a..64ad7378 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -34,8 +34,9 @@ class Config # @option opts [Integer] :user_keys_capacity (1000) See {#user_keys_capacity}. # @option opts [Float] :user_keys_flush_interval (300) See {#user_keys_flush_interval}. # @option opts [Boolean] :inline_users_in_events (false) See {#inline_users_in_events}. 
- # @option opts [Object] :update_processor See {#update_processor}. - # @option opts [Object] :update_processor_factory See {#update_processor_factory}. + # @option opts [Object] :data_source See {#data_source}. + # @option opts [Object] :update_processor Obsolete synonym for `data_source`. + # @option opts [Object] :update_processor_factory Obsolete synonym for `data_source`. # def initialize(opts = {}) @base_uri = (opts[:base_uri] || Config.default_base_uri).chomp("/") @@ -59,6 +60,7 @@ def initialize(opts = {}) @user_keys_capacity = opts[:user_keys_capacity] || Config.default_user_keys_capacity @user_keys_flush_interval = opts[:user_keys_flush_interval] || Config.default_user_keys_flush_interval @inline_users_in_events = opts[:inline_users_in_events] || false + @data_source = opts[:data_source] || opts[:update_processor] || opts[:update_processor_factory] @update_processor = opts[:update_processor] @update_processor_factory = opts[:update_processor_factory] end @@ -245,22 +247,20 @@ def offline? # An object that is responsible for receiving feature flag data from LaunchDarkly. By default, # the client uses its standard polling or streaming implementation; this is customizable for # testing purposes. - # @return [LaunchDarkly::Interfaces::UpdateProcessor] - # @deprecated The preferred way to set this is now with {#update_processor_factory}. # - attr_reader :update_processor - - # - # Factory for an object that is responsible for receiving feature flag data from LaunchDarkly - # By default, the client uses its standard polling or streaming implementation; this is - # customizable for testing purposes. - # - # The factory is a lambda or Proc that takes two parameters: the SDK key and the {Config}. It - # must return an object that conforms to {LaunchDarkly::Interfaces::UpdateProcessor}. + # This may be set to either an object that conforms to {LaunchDarkly::Interfaces::DataSource}, + # or a lambda (or Proc) that takes two parameters-- SDK key and {Config}-- and returns such an + # object. # - # @return [lambda] + # @return [LaunchDarkly::Interfaces::DataSource|lambda] # @see FileDataSource # + attr_reader :data_source + + # @deprecated This is replaced by {#data_source}. + attr_reader :update_processor + + # @deprecated This is replaced by {#data_source}. attr_reader :update_processor_factory # diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index adc32ab6..7606c1d3 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -26,11 +26,11 @@ def self.have_listen? # actual LaunchDarkly connection. # # To use this component, call {FileDataSource#factory}, and store its return value in the - # {Config#update_processor_factory} property of your LaunchDarkly client configuration. In the options + # {Config#data_source} property of your LaunchDarkly client configuration. In the options # to `factory`, set `paths` to the file path(s) of your data file(s): # - # factory = FileDataSource.factory(paths: [ myFilePath ]) - # config = LaunchDarkly::Config.new(update_processor_factory: factory) + # file_source = FileDataSource.factory(paths: [ myFilePath ]) + # config = LaunchDarkly::Config.new(data_source: file_source) # # This will cause the client not to connect to LaunchDarkly to get feature flags. 
The # client may still make network connections to send analytics events, unless you have disabled @@ -113,12 +113,10 @@ class FileDataSource # @option options [Float] :poll_interval The minimum interval, in seconds, between checks for # file modifications - used only if auto_update is true, and if the native file-watching # mechanism from 'listen' is not being used. The default value is 1 second. - # @return an object that can be stored in {Config#update_processor_factory} + # @return an object that can be stored in {Config#data_source} # def self.factory(options={}) - return Proc.new do |sdk_key, config| - FileDataSourceImpl.new(config.feature_store, config.logger, options) - end + return lambda { |sdk_key, config| FileDataSourceImpl.new(config.feature_store, config.logger, options) } end end diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index c9c38cfe..912472b5 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -108,17 +108,17 @@ def stop end # - # Mixin that defines the required methods of an update processor implementation. This is - # the component that delivers feature flag data from LaunchDarkly to the LDClient by putting + # Mixin that defines the required methods of a data source implementation. This is the + # component that delivers feature flag data from LaunchDarkly to the LDClient by putting # the data in the {FeatureStore}. It is expected to run concurrently on its own thread. # # The client has its own standard implementation, which uses either a streaming connection or # polling depending on your configuration. Normally you will not need to use another one # except for testing purposes. {FileDataSource} provides one such test fixture. # - module UpdateProcessor + module DataSource # - # Checks whether the processor has finished initializing. Initialization is considered done + # Checks whether the data source has finished initializing. Initialization is considered done # once it has received one complete data set from LaunchDarkly. # # @return [Boolean] true if initialization is complete @@ -127,7 +127,7 @@ def initialized? end # - # Puts the processor into an active state. Normally this means it will make its first + # Puts the data source into an active state. Normally this means it will make its first # connection attempt to LaunchDarkly. If `start` has already been called, calling it again # should simply return the same value as the first call. # @@ -137,7 +137,7 @@ def start end # - # Puts the processor into an inactive state and releases all of its resources. + # Puts the data source into an inactive state and releases all of its resources. # This state should be considered permanent (`start` does not have to work after `stop`). # def stop diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index ffd82084..868c65bd 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -37,19 +37,19 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) return # requestor and update processor are not used in this mode end - if @config.update_processor - @update_processor = @config.update_processor + data_source_or_factory = @config.data_source || self.method(:create_default_data_source) + if data_source_or_factory.respond_to? 
:call + @data_source = data_source_or_factory.call(sdk_key, config) else - factory = @config.update_processor_factory || self.method(:create_default_update_processor) - @update_processor = factory.call(sdk_key, config) + @data_source = data_source_or_factory end - ready = @update_processor.start + ready = @data_source.start if wait_for_sec > 0 ok = ready.wait(wait_for_sec) if !ok @config.logger.error { "[LDClient] Timeout encountered waiting for LaunchDarkly client initialization" } - elsif !@update_processor.initialized? + elsif !@data_source.initialized? @config.logger.error { "[LDClient] LaunchDarkly client initialization failed" } end end @@ -97,7 +97,7 @@ def secure_mode_hash(user) # Returns whether the client has been initialized and is ready to serve feature flag requests # @return [Boolean] true if the client has been initialized def initialized? - @config.offline? || @config.use_ldd? || @update_processor.initialized? + @config.offline? || @config.use_ldd? || @data_source.initialized? end # @@ -270,14 +270,14 @@ def all_flags_state(user, options={}) # @return [void] def close @config.logger.info { "[LDClient] Closing LaunchDarkly client..." } - @update_processor.stop + @data_source.stop @event_processor.stop @store.stop end private - def create_default_update_processor(sdk_key, config) + def create_default_data_source(sdk_key, config) if config.offline? return NullUpdateProcessor.new end diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 60107e26..28a0c06f 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -219,7 +219,7 @@ def test_auto_reload(options) it "evaluates simplified flag with client as expected" do file = make_temp_file(all_properties_json) factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) - config = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory) + config = LaunchDarkly::Config.new(send_events: false, data_source: factory) client = LaunchDarkly::LDClient.new('sdkKey', config) begin @@ -233,7 +233,7 @@ def test_auto_reload(options) it "evaluates full flag with client as expected" do file = make_temp_file(all_properties_json) factory = LaunchDarkly::FileDataSource.factory({ paths: file.path }) - config = LaunchDarkly::Config.new(send_events: false, update_processor_factory: factory) + config = LaunchDarkly::Config.new(send_events: false, data_source: factory) client = LaunchDarkly::LDClient.new('sdkKey', config) begin diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 262f53f9..b3a9592c 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -7,8 +7,8 @@ let(:offline_client) do subject.new("secret", offline_config) end - let(:update_processor) { LaunchDarkly::NullUpdateProcessor.new } - let(:config) { LaunchDarkly::Config.new({send_events: false, update_processor: update_processor}) } + let(:null_data) { LaunchDarkly::NullUpdateProcessor.new } + let(:config) { LaunchDarkly::Config.new({send_events: false, data_source: null_data}) } let(:client) do subject.new("secret", config) end @@ -357,7 +357,7 @@ def event_processor end describe 'with send_events: false' do - let(:config) { LaunchDarkly::Config.new({offline: true, send_events: false, update_processor: update_processor}) } + let(:config) { LaunchDarkly::Config.new({offline: true, send_events: false, data_source: null_data}) } let(:client) { subject.new("secret", config) } it "uses a NullEventProcessor" do @@ -367,7 +367,7 @@ def event_processor end describe 'with send_events: true' do 
- let(:config_with_events) { LaunchDarkly::Config.new({offline: false, send_events: true, update_processor: update_processor}) } + let(:config_with_events) { LaunchDarkly::Config.new({offline: false, send_events: true, data_source: null_data}) } let(:client_with_events) { subject.new("secret", config_with_events) } it "does not use a NullEventProcessor" do From fdb0291849c5faca7c4b8b5a644f342945b8fbb0 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 21 Dec 2018 11:37:38 -0800 Subject: [PATCH 063/135] default dynamodb_opts to {} --- lib/ldclient-rb/impl/integrations/dynamodb_impl.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index 8eb1dd2a..ebaa0445 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -41,7 +41,7 @@ def initialize(table_name, opts) if !opts[:existing_client].nil? @client = opts[:existing_client] else - @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts]) + @client = Aws::DynamoDB::Client.new(opts[:dynamodb_opts] || {}) end @logger.info("DynamoDBFeatureStore: using DynamoDB table \"#{table_name}\"") From 65ee009c9cef4ae3066b5faa41b67119a9c85ba5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 21 Dec 2018 12:47:15 -0800 Subject: [PATCH 064/135] fix Unicode handling in polling requests --- lib/ldclient-rb/requestor.rb | 2 +- .../sse_shared.rb => http_util.rb} | 44 ++++++---- spec/requestor_spec.rb | 82 ++++++++++--------- spec/sse_client/sse_client_spec.rb | 24 +++++- spec/sse_client/streaming_http_spec.rb | 3 +- 5 files changed, 99 insertions(+), 56 deletions(-) rename spec/{sse_client/sse_shared.rb => http_util.rb} (56%) diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 3e244fbe..8922e82c 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -20,7 +20,7 @@ def initialize(sdk_key, config) @sdk_key = sdk_key @config = config @client = Faraday.new do |builder| - builder.use :http_cache, store: @config.cache_store + builder.use :http_cache, store: @config.cache_store, serializer: Marshal builder.adapter :net_http_persistent end diff --git a/spec/sse_client/sse_shared.rb b/spec/http_util.rb similarity index 56% rename from spec/sse_client/sse_shared.rb rename to spec/http_util.rb index 3ecabb57..434cafc8 100644 --- a/spec/sse_client/sse_shared.rb +++ b/spec/http_util.rb @@ -4,23 +4,28 @@ require "webrick/https" class StubHTTPServer + attr_reader :requests + def initialize @port = 50000 begin - @server = create_server(@port) + base_opts = { + BindAddress: '127.0.0.1', + Port: @port, + AccessLog: [], + Logger: NullLogger.new, + RequestCallback: method(:record_request) + } + @server = create_server(@port, base_opts) rescue Errno::EADDRINUSE @port += 1 retry end + @requests = [] end - def create_server(port) - WEBrick::HTTPServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new - ) + def create_server(port, base_opts) + WEBrick::HTTPServer.new(base_opts) end def start @@ -38,6 +43,19 @@ def base_uri def setup_response(uri_path, &action) @server.mount_proc(uri_path, action) end + + def setup_ok_response(uri_path, body, content_type=nil, headers={}) + setup_response(uri_path) do |req, res| + res.status = 200 + res.content_type = content_type if !content_type.nil? 
+ res.body = body + headers.each { |n, v| res[n] = v } + end + end + + def record_request(req, res) + @requests.push(req) + end end class StubProxyServer < StubHTTPServer @@ -49,19 +67,15 @@ def initialize @request_count = 0 end - def create_server(port) - WEBrick::HTTPProxyServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new, + def create_server(port, base_opts) + WEBrick::HTTPProxyServer.new(base_opts.merge({ ProxyContentHandler: proc do |req,res| if !@connect_status.nil? res.status = @connect_status end @request_count += 1 end - ) + })) end end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index b7838200..7f2b8ad7 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -1,52 +1,58 @@ +require "http_util" require "spec_helper" -require "faraday" describe LaunchDarkly::Requestor do describe ".request_all_flags" do describe "with a proxy" do - let(:requestor) { - LaunchDarkly::Requestor.new( - "key", - LaunchDarkly::Config.new({ - :proxy => "http://proxy.com", - :base_uri => "http://ld.com" - }) - ) - } it "converts the proxy option" do - faraday = Faraday.new - requestor.instance_variable_set(:@client, faraday) - allow(faraday).to receive(:get) do |*args, &block| - req = double(Faraday::Request, :headers => {}, :options => Faraday::RequestOptions.new) - block.call(req) - expect(args).to eq ['http://ld.com/sdk/latest-all'] - expect(req.options.proxy[:uri]).to eq URI("http://proxy.com") - double(body: '{"foo": "bar"}', status: 200, headers: {}) + content = '{"flags": {"flagkey": {"key": "flagkey"}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + with_server(StubProxyServer.new) do |proxy| + config = LaunchDarkly::Config.new(base_uri: server.base_uri.to_s, proxy: proxy.base_uri.to_s) + r = LaunchDarkly::Requestor.new("sdk-key", config) + result = r.request_all_data + expect(result).to eq(JSON.parse(content, symbolize_names: true)) + end end - - requestor.request_all_data() end end describe "without a proxy" do - let(:requestor) { - LaunchDarkly::Requestor.new( - "key", - LaunchDarkly::Config.new({ - :base_uri => "http://ld.com" - }) - ) - } - it "converts the proxy option" do - faraday = Faraday.new - requestor.instance_variable_set(:@client, faraday) - allow(faraday).to receive(:get) do |*args, &block| - req = double(Faraday::Request, :headers => {}, :options => Faraday::RequestOptions.new) - block.call(req) - expect(args).to eq ['http://ld.com/sdk/latest-all'] - expect(req.options.proxy).to eq nil - double(body: '{"foo": "bar"}', status: 200, headers: {}) + it "sends headers" do + content = '{"flags": {}}' + sdk_key = 'sdk-key' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + r = LaunchDarkly::Requestor.new(sdk_key, LaunchDarkly::Config.new({ base_uri: server.base_uri.to_s })) + r.request_all_data + expect(server.requests.length).to eq 1 + req = server.requests[0] + expect(req.header['authorization']).to eq [sdk_key] + expect(req.header['user-agent']).to eq ["RubyClient/" + LaunchDarkly::VERSION] + end + end + + it "receives data" do + content = '{"flags": {"flagkey": {"key": "flagkey"}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + r = LaunchDarkly::Requestor.new("sdk-key", LaunchDarkly::Config.new({ base_uri: server.base_uri.to_s })) + result = r.request_all_data + expect(result).to 
eq(JSON.parse(content, symbolize_names: true)) + end + end + + it "handles Unicode content" do + content = '{"flags": {"flagkey": {"key": "flagkey", "variations": ["blue", "grėeń"]}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + # Note that the ETag header here is important because without it, the HTTP cache will not be used, + # and the cache is what required a fix to handle Unicode properly. See: + # https://github.com/launchdarkly/ruby-client/issues/90 + r = LaunchDarkly::Requestor.new("sdk-key", LaunchDarkly::Config.new({ base_uri: server.base_uri.to_s })) + result = r.request_all_data + expect(result).to eq(JSON.parse(content, symbolize_names: true)) end - requestor.request_all_data() end end end diff --git a/spec/sse_client/sse_client_spec.rb b/spec/sse_client/sse_client_spec.rb index 54f1f5c7..3adca889 100644 --- a/spec/sse_client/sse_client_spec.rb +++ b/spec/sse_client/sse_client_spec.rb @@ -1,6 +1,6 @@ require "spec_helper" require "socketry" -require "sse_client/sse_shared" +require "http_util" # # End-to-end tests of SSEClient against a real server @@ -70,6 +70,28 @@ def with_client(client) end end + it "handles Unicode correctly (assuming UTF-8)" do + please = "proszę" + thank_you = "dziękuję" + events_body = <<-EOT +event: #{please} +data: #{thank_you} + +EOT + with_server do |server| + server.setup_ok_response("/", events_body, "text/event-stream") + + event_sink = Queue.new + client = subject.new(server.base_uri) do |c| + c.on_event { |event| event_sink << event } + end + + with_client(client) do |client| + expect(event_sink.pop).to eq(SSE::SSEEvent.new(please.to_sym, thank_you, nil)) + end + end + end + it "reconnects after error response" do events_body = <<-EOT event: go diff --git a/spec/sse_client/streaming_http_spec.rb b/spec/sse_client/streaming_http_spec.rb index 7dfac9bd..fbe60b96 100644 --- a/spec/sse_client/streaming_http_spec.rb +++ b/spec/sse_client/streaming_http_spec.rb @@ -1,6 +1,6 @@ require "spec_helper" require "socketry" -require "sse_client/sse_shared" +require "http_util" # # End-to-end tests of HTTP requests against a real server @@ -119,6 +119,7 @@ def with_connection(cxn) end it "throws error if proxy responds with error status" do + body = "hi" with_server do |server| server.setup_response("/") do |req,res| res.body = body From 86820ea710d8698b21b78ac093487c918e26bcbe Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 15:31:44 -0800 Subject: [PATCH 065/135] initial Consul implementation --- .circleci/config.yml | 16 +++ ldclient-rb.gemspec | 1 + .../impl/integrations/consul_impl.rb | 132 ++++++++++++++++++ lib/ldclient-rb/integrations.rb | 12 ++ lib/ldclient-rb/integrations/consul.rb | 37 +++++ .../integrations/consul_feature_store_spec.rb | 37 +++++ 6 files changed, 235 insertions(+) create mode 100644 lib/ldclient-rb/impl/integrations/consul_impl.rb create mode 100644 lib/ldclient-rb/integrations/consul.rb create mode 100644 spec/integrations/consul_feature_store_spec.rb diff --git a/.circleci/config.yml b/.circleci/config.yml index f19ae7bc..45540d63 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -33,30 +33,35 @@ jobs: <<: *ruby-docker-template docker: - image: circleci/ruby:2.2.9-jessie + - image: consul - image: redis - image: amazon/dynamodb-local test-2.3: <<: *ruby-docker-template docker: - image: circleci/ruby:2.3.6-jessie + - image: consul - image: redis - image: amazon/dynamodb-local test-2.4: <<: *ruby-docker-template docker: - 
image: circleci/ruby:2.4.4-stretch + - image: consul - image: redis - image: amazon/dynamodb-local test-2.5: <<: *ruby-docker-template docker: - image: circleci/ruby:2.5.1-stretch + - image: consul - image: redis - image: amazon/dynamodb-local test-jruby-9.2: <<: *ruby-docker-template docker: - image: circleci/jruby:9-jdk + - image: consul - image: redis - image: amazon/dynamodb-local @@ -93,8 +98,19 @@ jobs: mv Gemfile.lock "Gemfile.lock.$i" done - run: + name: start DynamoDB command: docker run -p 8000:8000 amazon/dynamodb-local background: true + - run: + name: download Consul + command: wget https://releases.hashicorp.com/consul/0.8.0/consul_0.8.0_linux_amd64.zip + - run: + name: extract Consul + command: unzip consul_0.8.0_linux_amd64.zip + - run: + name: start Consul + command: ./consul agent -dev + background: true - run: name: run tests for all versions shell: /bin/bash -leo pipefail diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 8b1f4cc7..35fbf45c 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -25,6 +25,7 @@ Gem::Specification.new do |spec| spec.add_development_dependency "bundler", "~> 1.7" spec.add_development_dependency "rspec", "~> 3.2" spec.add_development_dependency "codeclimate-test-reporter", "~> 0" + spec.add_development_dependency "diplomat", ">= 2.0.2" spec.add_development_dependency "redis", "~> 3.3.5" spec.add_development_dependency "connection_pool", ">= 2.1.2" spec.add_development_dependency "rake", "~> 10.0" diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb new file mode 100644 index 00000000..48d308c2 --- /dev/null +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -0,0 +1,132 @@ +require "json" + +module LaunchDarkly + module Impl + module Integrations + module Consul + # + # Internal implementation of the Consul feature store, intended to be used with CachingStoreWrapper. + # + class ConsulFeatureStoreCore + begin + require "diplomat" + CONSUL_ENABLED = true + rescue ScriptError, StandardError + CONSUL_ENABLED = false + end + + def initialize(opts) + if !CONSUL_ENABLED + raise RuntimeError.new("can't use Consul feature store without the 'diplomat' gem") + end + + @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' + @logger = opts[:logger] || Config.default_logger + @client = Diplomat::Kv.new(configuration: opts[:consul_config]) + + @logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") + end + + def init_internal(all_data) + # Start by reading the existing keys; we will later delete any of these that weren't in all_data. + unused_old_keys = set() + unused_old_keys.merge(@client.get(@prefix, keys: true, recurse: true)) + + ops = [] + num_items = 0 + + # Insert or update every provided item + all_data.each do |kind, items| + items.values.each do |item| + value = item.to_json + key = item_key(kind, item[:key]) + ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => key, 'Value' => value } }) + unused_old_keys.delete(key) + num_items = num_items + 1 + end + end + + # Now delete any previously existing items whose keys were not in the current data + unused_old_keys.each do |tuple| + ops.push({ 'KV' => { 'Verb' => 'delete', 'Key' => key } }) + end + + # Now set the special key that we check in initialized_internal? 
+ ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => key, 'Value' => '' } }) + + ConsulUtil.batch_operations(ops) + + @logger.info { "Initialized database with #{num_items} items" } + end + + def get_internal(kind, key) + + resp = get_item_by_keys(namespace_for_kind(kind), key) + unmarshal_item(resp.item) + end + + def get_all_internal(kind) + items_out = {} + + items_out + end + + def upsert_internal(kind, new_item) + + end + + def initialized_internal? + + end + + def stop + # There's no way to close the Consul client + end + + private + + def item_key(kind, key) + kind_key(kind) + '/' + key + end + + def kind_key(kind) + @prefix + kind[:namespace] + end + + def inited_key + @prefix + '$inited' + end + + def marshal_item(kind, item) + make_keys_hash(namespace_for_kind(kind), item[:key]).merge({ + VERSION_ATTRIBUTE => item[:version], + ITEM_JSON_ATTRIBUTE => item.to_json + }) + end + + def unmarshal_item(item) + return nil if item.nil? || item.length == 0 + json_attr = item[ITEM_JSON_ATTRIBUTE] + raise RuntimeError.new("DynamoDB map did not contain expected item string") if json_attr.nil? + JSON.parse(json_attr, symbolize_names: true) + end + end + + class ConsulUtil + # + # Submits as many transactions as necessary to submit all of the given operations. + # The ops array is consumed. + # + def self.batch_write_requests(ops) + batch_size = 64 # Consul can only do this many at a time + while true + chunk = requests.shift(batch_size) + break if chunk.empty? + Diplomat::Kv.txn(chunk) + end + end + end + end + end + end +end diff --git a/lib/ldclient-rb/integrations.rb b/lib/ldclient-rb/integrations.rb index bfaed2eb..8c9f6249 100644 --- a/lib/ldclient-rb/integrations.rb +++ b/lib/ldclient-rb/integrations.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/integrations/consul" require "ldclient-rb/integrations/dynamodb" require "ldclient-rb/integrations/redis" require "ldclient-rb/integrations/util/store_wrapper" @@ -7,6 +8,17 @@ module LaunchDarkly # Tools for connecting the LaunchDarkly client to other software. # module Integrations + # + # Integration with [Consul](https://www.consul.io/). + # + # Note that in order to use this integration, you must first install the gem `diplomat`. + # + # @since 5.5.0 + # + module Consul + # code is in ldclient-rb/impl/integrations/consul_impl + end + # # Integration with [DynamoDB](https://aws.amazon.com/dynamodb/). # diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb new file mode 100644 index 00000000..7450d3b9 --- /dev/null +++ b/lib/ldclient-rb/integrations/consul.rb @@ -0,0 +1,37 @@ +require "ldclient-rb/impl/integrations/consul_impl" +require "ldclient-rb/integrations/util/store_wrapper" + +module LaunchDarkly + module Integrations + module Consul + # + # Default value for the `prefix` option for {new_feature_store}. + # + # @return [String] the default key prefix + # + def self.default_prefix + 'launchdarkly' + end + + # + # Creates a Consul-backed persistent feature store. + # + # To use this method, you must first install the gem `diplomat`. Then, put the object returned by + # this method into the `feature_store` property of your client configuration ({LaunchDarkly::Config}). 
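+      #
+      # A minimal configuration sketch (the prefix value shown is arbitrary; any of the options
+      # listed below may be included in the hash):
+      #
+      #     store = LaunchDarkly::Integrations::Consul.new_feature_store(prefix: "my-app")
+      #     config = LaunchDarkly::Config.new(feature_store: store)
+      #     client = LaunchDarkly::LDClient.new("YOUR_SDK_KEY", config)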
+ # + # @param opts [Hash] the configuration options + # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default + # Consul client configuration + # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly + # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` + # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :capacity (1000) maximum number of items in the cache + # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object + # + def self.new_feature_store(opts, &block) + core = LaunchDarkly::Impl::Integrations::Consul::ConsulFeatureStoreCore.new(opts) + return LaunchDarkly::Integrations::Util::CachingStoreWrapper.new(core, opts) + end + end + end +end diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb new file mode 100644 index 00000000..1aa6f919 --- /dev/null +++ b/spec/integrations/consul_feature_store_spec.rb @@ -0,0 +1,37 @@ +require "feature_store_spec_base" +#require "diplomat" +require "spec_helper" + + +$my_prefix = 'testprefix' +$null_log = ::Logger.new($stdout) +$null_log.level = ::Logger::FATAL + +$base_opts = { + prefix: $my_prefix, + logger: $null_log +} + +def create_consul_store(opts = {}) + LaunchDarkly::Integrations::Consul::new_feature_store( + opts.merge($base_opts).merge({ expiration: 60 })) +end + +def create_consul_store_uncached(opts = {}) + LaunchDarkly::Integrations::Consul::new_feature_store( + opts.merge($base_opts).merge({ expiration: 0 })) +end + + +describe "Consul feature store" do + + # These tests will all fail if there isn't a local Consul instance running. 
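+  # (e.g. one started locally with `consul agent -dev`, as the CI configuration added in this patch does)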
+ + context "with local cache" do + include_examples "feature_store", method(:create_consul_store) + end + + context "without local cache" do + include_examples "feature_store", method(:create_consul_store_uncached) + end +end From 485a73dfdc23c25ff56db7fcdaf5ccf417df1579 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 15:45:12 -0800 Subject: [PATCH 066/135] use new SSE gem --- ldclient-rb.gemspec | 5 +- lib/ldclient-rb/stream.rb | 24 ++- lib/sse_client.rb | 4 - lib/sse_client/backoff.rb | 38 ---- lib/sse_client/sse_client.rb | 178 ----------------- lib/sse_client/sse_events.rb | 67 ------- lib/sse_client/streaming_http.rb | 199 ------------------- spec/sse_client/sse_client_spec.rb | 177 ----------------- spec/sse_client/sse_events_spec.rb | 100 ---------- spec/sse_client/sse_shared.rb | 82 -------- spec/sse_client/streaming_http_spec.rb | 263 ------------------------- spec/stream_spec.rb | 36 ++-- 12 files changed, 34 insertions(+), 1139 deletions(-) delete mode 100644 lib/sse_client.rb delete mode 100644 lib/sse_client/backoff.rb delete mode 100644 lib/sse_client/sse_client.rb delete mode 100644 lib/sse_client/sse_events.rb delete mode 100644 lib/sse_client/streaming_http.rb delete mode 100644 spec/sse_client/sse_client_spec.rb delete mode 100644 spec/sse_client/sse_events_spec.rb delete mode 100644 spec/sse_client/sse_shared.rb delete mode 100644 spec/sse_client/streaming_http_spec.rb diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 8b1f4cc7..9f7c5089 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -13,7 +13,7 @@ Gem::Specification.new do |spec| spec.summary = "LaunchDarkly SDK for Ruby" spec.description = "Official LaunchDarkly SDK for Ruby" spec.homepage = "https://github.com/launchdarkly/ruby-client" - spec.license = "Apache 2.0" + spec.license = "Apache-2.0" spec.files = `git ls-files -z`.split("\x0") spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } @@ -40,6 +40,5 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "net-http-persistent", "~> 2.9" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0.4" spec.add_runtime_dependency "hashdiff", "~> 0.2" - spec.add_runtime_dependency "http_tools", '~> 0.4.5' - spec.add_runtime_dependency "socketry", "~> 0.5.1" + spec.add_runtime_dependency "ld-eventsource", '~> 1.0' end diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 660d7063..adc4bf59 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -1,6 +1,6 @@ require "concurrent/atomics" require "json" -require "sse_client" +require "ld-eventsource" module LaunchDarkly # @private @@ -54,15 +54,18 @@ def start read_timeout: READ_TIMEOUT_SECONDS, logger: @config.logger } - @es = SSE::SSEClient.new(@config.stream_uri + "/all", opts) do |conn| - conn.on_event { |event| process_message(event, event.type) } + @es = LaunchDarklySSE::SSEClient.new(@config.stream_uri + "/all", **opts) do |conn| + conn.on_event { |event| process_message(event) } conn.on_error { |err| - status = err[:status_code] - message = Util.http_error_message(status, "streaming connection", "will retry") - @config.logger.error { "[LDClient] #{message}" } - if !Util.http_error_recoverable?(status) - @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set - stop + case err + when LaunchDarklySSE::HTTPError + status = err.status + message = Util.http_error_message(status, "streaming connection", "will retry") + @config.logger.error { "[LDClient] #{message}" } + if 
!Util.http_error_recoverable?(status) + @ready.set # if client was waiting on us, make it stop waiting - has no effect if already set + stop + end end } end @@ -79,7 +82,8 @@ def stop private - def process_message(message, method) + def process_message(message) + method = message.type @config.logger.debug { "[LDClient] Stream received #{method} message: #{message.data}" } if method == PUT message = JSON.parse(message.data, symbolize_names: true) diff --git a/lib/sse_client.rb b/lib/sse_client.rb deleted file mode 100644 index dd24c3a6..00000000 --- a/lib/sse_client.rb +++ /dev/null @@ -1,4 +0,0 @@ -require "sse_client/streaming_http" -require "sse_client/sse_events" -require "sse_client/backoff" -require "sse_client/sse_client" diff --git a/lib/sse_client/backoff.rb b/lib/sse_client/backoff.rb deleted file mode 100644 index 73e0754f..00000000 --- a/lib/sse_client/backoff.rb +++ /dev/null @@ -1,38 +0,0 @@ - -module SSE - # - # A simple backoff algorithm that can be reset at any time, or reset itself after a given - # interval has passed without errors. - # - class Backoff - def initialize(base_interval, max_interval, auto_reset_interval = 60) - @base_interval = base_interval - @max_interval = max_interval - @auto_reset_interval = auto_reset_interval - @attempts = 0 - @last_good_time = nil - @jitter_rand = Random.new - end - - attr_accessor :base_interval - - def next_interval - if !@last_good_time.nil? && (Time.now.to_i - @last_good_time) >= @auto_reset_interval - @attempts = 0 - end - @last_good_time = nil - if @attempts == 0 - @attempts += 1 - return 0 - end - @last_good_time = nil - target = ([@base_interval * (2 ** @attempts), @max_interval].min).to_f - @attempts += 1 - (target / 2) + @jitter_rand.rand(target / 2) - end - - def mark_success - @last_good_time = Time.now.to_i if @last_good_time.nil? - end - end -end diff --git a/lib/sse_client/sse_client.rb b/lib/sse_client/sse_client.rb deleted file mode 100644 index 5b7e0fd9..00000000 --- a/lib/sse_client/sse_client.rb +++ /dev/null @@ -1,178 +0,0 @@ -require "concurrent/atomics" -require "logger" -require "thread" -require "uri" - -# -# A lightweight Server-Sent Events implementation based on the `socketry` gem. -# -# This module will be moved to a separate gem in the future. -# -# @private -# -module SSE - # - # A lightweight Server-Sent Events implementation, relying on two gems: socketry for sockets with - # read timeouts, and http_tools for HTTP response parsing. The overall logic is based on - # [https://github.com/Tonkpils/celluloid-eventsource]. - # - class SSEClient - DEFAULT_CONNECT_TIMEOUT = 10 - DEFAULT_READ_TIMEOUT = 300 - DEFAULT_RECONNECT_TIME = 1 - MAX_RECONNECT_TIME = 30 - - def initialize(uri, options = {}) - @uri = URI(uri) - @stopped = Concurrent::AtomicBoolean.new(false) - - @headers = options[:headers] ? options[:headers].clone : {} - @connect_timeout = options[:connect_timeout] || DEFAULT_CONNECT_TIMEOUT - @read_timeout = options[:read_timeout] || DEFAULT_READ_TIMEOUT - @logger = options[:logger] || default_logger - - if options[:proxy] - @proxy = options[:proxy] - else - proxyUri = @uri.find_proxy - if !proxyUri.nil? && (proxyUri.scheme == 'http' || proxyUri.scheme == 'https') - @proxy = proxyUri - end - end - - reconnect_time = options[:reconnect_time] || DEFAULT_RECONNECT_TIME - @backoff = Backoff.new(reconnect_time, MAX_RECONNECT_TIME) - - @on = { event: ->(_) {}, error: ->(_) {} } - @last_id = nil - - yield self if block_given? 
- - Thread.new do - run_stream - end - end - - def on(event_name, &action) - @on[event_name.to_sym] = action - end - - def on_event(&action) - @on[:event] = action - end - - def on_error(&action) - @on[:error] = action - end - - def close - if @stopped.make_true - @cxn.close if !@cxn.nil? - @cxn = nil - end - end - - private - - def default_logger - log = ::Logger.new($stdout) - log.level = ::Logger::WARN - log - end - - def run_stream - while !@stopped.value - @cxn = nil - begin - @cxn = connect - # There's a potential race if close was called in the middle of the previous line, i.e. after we - # connected but before @cxn was set. Checking the variable again is a bit clunky but avoids that. - return if @stopped.value - read_stream(@cxn) if !@cxn.nil? - rescue Errno::EBADF - # don't log this - it probably means we closed our own connection deliberately - rescue StandardError => e - @logger.error { "Unexpected error from event source: #{e.inspect}" } - @logger.debug { "Exception trace: #{e.backtrace}" } - end - begin - @cxn.close if !@cxn.nil? - rescue StandardError => e - @logger.error { "Unexpected error while closing stream: #{e.inspect}" } - @logger.debug { "Exception trace: #{e.backtrace}" } - end - end - end - - # Try to establish a streaming connection. Returns the StreamingHTTPConnection object if successful. - def connect - loop do - return if @stopped.value - interval = @backoff.next_interval - if interval > 0 - @logger.warn { "Will retry connection after #{'%.3f' % interval} seconds" } - sleep(interval) - end - begin - cxn = open_connection(build_headers) - if cxn.status != 200 - body = cxn.read_all # grab the whole response body in case it has error details - cxn.close - @on[:error].call({status_code: cxn.status, body: body}) - next - elsif cxn.headers["content-type"] && cxn.headers["content-type"].start_with?("text/event-stream") - return cxn # we're good to proceed - end - @logger.error { "Event source returned unexpected content type '#{cxn.headers["content-type"]}'" } - rescue Errno::EBADF - raise - rescue StandardError => e - @logger.error { "Unexpected error from event source: #{e.inspect}" } - @logger.debug { "Exception trace: #{e.backtrace}" } - cxn.close if !cxn.nil? - end - # if unsuccessful, continue the loop to connect again - end - end - - # Just calls the StreamingHTTPConnection constructor - factored out for test purposes - def open_connection(headers) - StreamingHTTPConnection.new(@uri, @proxy, headers, @connect_timeout, @read_timeout) - end - - # Pipe the output of the StreamingHTTPConnection into the EventParser, and dispatch events as - # they arrive. - def read_stream(cxn) - event_parser = EventParser.new(cxn.read_lines) - event_parser.items.each do |item| - return if @stopped.value - case item - when SSEEvent - dispatch_event(item) - when SSESetRetryInterval - @backoff.base_interval = event.milliseconds.t-Of / 1000 - end - end - end - - def dispatch_event(event) - @last_id = event.id - - # Tell the Backoff object that as of the current time, we have succeeded in getting some data. It - # uses that information so it can automatically reset itself if enough time passes between failures. - @backoff.mark_success - - # Pass the event to the caller - @on[:event].call(event) - end - - def build_headers - h = { - 'Accept' => 'text/event-stream', - 'Cache-Control' => 'no-cache' - } - h['Last-Event-Id'] = @last_id if !@last_id.nil? 
- h.merge(@headers) - end - end -end diff --git a/lib/sse_client/sse_events.rb b/lib/sse_client/sse_events.rb deleted file mode 100644 index 762cc2b0..00000000 --- a/lib/sse_client/sse_events.rb +++ /dev/null @@ -1,67 +0,0 @@ - -module SSE - # Server-Sent Event type used by SSEClient and EventParser. - SSEEvent = Struct.new(:type, :data, :id) - - SSESetRetryInterval = Struct.new(:milliseconds) - - # - # Accepts lines of text via an iterator, and parses them into SSE messages. - # - class EventParser - def initialize(lines) - @lines = lines - reset_buffers - end - - # Generator that parses the input interator and returns instances of SSEEvent or SSERetryInterval. - def items - Enumerator.new do |gen| - @lines.each do |line| - line.chomp! - if line.empty? - event = maybe_create_event - reset_buffers - gen.yield event if !event.nil? - else - case line - when /^(\w+): ?(.*)$/ - item = process_field($1, $2) - gen.yield item if !item.nil? - end - end - end - end - end - - private - - def reset_buffers - @id = nil - @type = nil - @data = "" - end - - def process_field(name, value) - case name - when "event" - @type = value.to_sym - when "data" - @data << "\n" if !@data.empty? - @data << value - when "id" - @id = value - when "retry" - if /^(?\d+)$/ =~ value - return SSESetRetryInterval.new(num.to_i) - end - end - nil - end - - def maybe_create_event - return nil if @data.empty? - SSEEvent.new(@type || :message, @data, @id) - end - end -end diff --git a/lib/sse_client/streaming_http.rb b/lib/sse_client/streaming_http.rb deleted file mode 100644 index eeb80e82..00000000 --- a/lib/sse_client/streaming_http.rb +++ /dev/null @@ -1,199 +0,0 @@ -require "concurrent/atomics" -require "http_tools" -require "socketry" - -module SSE - # - # Wrapper around a socket providing a simplified HTTP request-response cycle including streaming. - # The socket is created and managed by Socketry, which we use so that we can have a read timeout. - # - class StreamingHTTPConnection - attr_reader :status, :headers - - def initialize(uri, proxy, headers, connect_timeout, read_timeout) - @socket = HTTPConnectionFactory.connect(uri, proxy, connect_timeout, read_timeout) - @socket.write(build_request(uri, headers)) - @reader = HTTPResponseReader.new(@socket, read_timeout) - @status = @reader.status - @headers = @reader.headers - @closed = Concurrent::AtomicBoolean.new(false) - end - - def close - if @closed.make_true - @socket.close if @socket - @socket = nil - end - end - - # Generator that returns one line of the response body at a time (delimited by \r, \n, - # or \r\n) until the response is fully consumed or the socket is closed. - def read_lines - @reader.read_lines - end - - # Consumes the entire response body and returns it. - def read_all - @reader.read_all - end - - private - - # Build an HTTP request line and headers. - def build_request(uri, headers) - ret = "GET #{uri.request_uri} HTTP/1.1\r\n" - ret << "Host: #{uri.host}\r\n" - headers.each { |k, v| - ret << "#{k}: #{v}\r\n" - } - ret + "\r\n" - end - end - - # - # Used internally to send the HTTP request, including the proxy dialogue if necessary. 
- # - class HTTPConnectionFactory - def self.connect(uri, proxy, connect_timeout, read_timeout) - if !proxy - return open_socket(uri, connect_timeout) - end - - socket = open_socket(proxy, connect_timeout) - socket.write(build_proxy_request(uri, proxy)) - - # temporarily create a reader just for the proxy connect response - proxy_reader = HTTPResponseReader.new(socket, read_timeout) - if proxy_reader.status != 200 - raise ProxyError, "proxy connection refused, status #{proxy_reader.status}" - end - - # start using TLS at this point if appropriate - if uri.scheme.downcase == 'https' - wrap_socket_in_ssl_socket(socket) - else - socket - end - end - - private - - def self.open_socket(uri, connect_timeout) - if uri.scheme.downcase == 'https' - Socketry::SSL::Socket.connect(uri.host, uri.port, timeout: connect_timeout) - else - Socketry::TCP::Socket.connect(uri.host, uri.port, timeout: connect_timeout) - end - end - - # Build a proxy connection header. - def self.build_proxy_request(uri, proxy) - ret = "CONNECT #{uri.host}:#{uri.port} HTTP/1.1\r\n" - ret << "Host: #{uri.host}:#{uri.port}\r\n" - if proxy.user || proxy.password - encoded_credentials = Base64.strict_encode64([proxy.user || '', proxy.password || ''].join(":")) - ret << "Proxy-Authorization: Basic #{encoded_credentials}\r\n" - end - ret << "\r\n" - ret - end - - def self.wrap_socket_in_ssl_socket(socket) - io = IO.try_convert(socket) - ssl_sock = OpenSSL::SSL::SSLSocket.new(io, OpenSSL::SSL::SSLContext.new) - ssl_sock.connect - Socketry::SSL::Socket.new.from_socket(ssl_sock) - end - end - - class ProxyError < StandardError - def initialize(message) - super - end - end - - # - # Used internally to read the HTTP response, either all at once or as a stream of text lines. - # Incoming data is fed into an instance of HTTPTools::Parser, which gives us the header and - # chunks of the body via callbacks. - # - class HTTPResponseReader - DEFAULT_CHUNK_SIZE = 10000 - - attr_reader :status, :headers - - def initialize(socket, read_timeout) - @socket = socket - @read_timeout = read_timeout - @parser = HTTPTools::Parser.new - @buffer = "" - @done = false - @lock = Mutex.new - - # Provide callbacks for the Parser to give us the headers and body. This has to be done - # before we start piping any data into the parser. - have_headers = false - @parser.on(:header) do - have_headers = true - end - @parser.on(:stream) do |data| - @lock.synchronize { @buffer << data } # synchronize because we're called from another thread in Socketry - end - @parser.on(:finish) do - @lock.synchronize { @done = true } - end - - # Block until the status code and headers have been successfully read. - while !have_headers - raise EOFError if !read_chunk_into_buffer - end - @headers = Hash[@parser.header.map { |k,v| [k.downcase, v] }] - @status = @parser.status_code - end - - def read_lines - Enumerator.new do |gen| - loop do - line = read_line - break if line.nil? - gen.yield line - end - end - end - - def read_all - while read_chunk_into_buffer - end - @buffer - end - - private - - # Attempt to read some more data from the socket. Return true if successful, false if EOF. - # A read timeout will result in an exception from Socketry's readpartial method. 
- def read_chunk_into_buffer - # If @done is set, it means the Parser has signaled end of response body - @lock.synchronize { return false if @done } - data = @socket.readpartial(DEFAULT_CHUNK_SIZE, timeout: @read_timeout) - return false if data == :eof - @parser << data - # We are piping the content through the parser so that it can handle things like chunked - # encoding for us. The content ends up being appended to @buffer via our callback. - true - end - - # Extract the next line of text from the read buffer, refilling the buffer as needed. - def read_line - loop do - @lock.synchronize do - i = @buffer.index(/[\r\n]/) - if !i.nil? - i += 1 if (@buffer[i] == "\r" && i < @buffer.length - 1 && @buffer[i + 1] == "\n") - return @buffer.slice!(0, i + 1).force_encoding(Encoding::UTF_8) - end - end - return nil if !read_chunk_into_buffer - end - end - end -end diff --git a/spec/sse_client/sse_client_spec.rb b/spec/sse_client/sse_client_spec.rb deleted file mode 100644 index 54f1f5c7..00000000 --- a/spec/sse_client/sse_client_spec.rb +++ /dev/null @@ -1,177 +0,0 @@ -require "spec_helper" -require "socketry" -require "sse_client/sse_shared" - -# -# End-to-end tests of SSEClient against a real server -# -describe SSE::SSEClient do - subject { SSE::SSEClient } - - def with_client(client) - begin - yield client - ensure - client.close - end - end - - it "sends expected headers" do - with_server do |server| - requests = Queue.new - server.setup_response("/") do |req,res| - requests << req - res.content_type = "text/event-stream" - res.status = 200 - end - - headers = { - "Authorization" => "secret" - } - - with_client(subject.new(server.base_uri, headers: headers)) do |client| - received_req = requests.pop - expect(received_req.header).to eq({ - "accept" => ["text/event-stream"], - "cache-control" => ["no-cache"], - "host" => ["127.0.0.1"], - "authorization" => ["secret"] - }) - end - end - end - - it "receives messages" do - events_body = <<-EOT -event: go -data: foo -id: 1 - -event: stop -data: bar - -EOT - with_server do |server| - server.setup_response("/") do |req,res| - res.content_type = "text/event-stream" - res.status = 200 - res.body = events_body - end - - event_sink = Queue.new - client = subject.new(server.base_uri) do |c| - c.on_event { |event| event_sink << event } - end - - with_client(client) do |client| - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "foo", "1")) - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:stop, "bar", nil)) - end - end - end - - it "reconnects after error response" do - events_body = <<-EOT -event: go -data: foo - -EOT - with_server do |server| - attempt = 0 - server.setup_response("/") do |req,res| - attempt += 1 - if attempt == 1 - res.status = 500 - res.body = "sorry" - res.keep_alive = false - else - res.content_type = "text/event-stream" - res.status = 200 - res.body = events_body - end - end - - event_sink = Queue.new - error_sink = Queue.new - client = subject.new(server.base_uri, reconnect_time: 0.25) do |c| - c.on_event { |event| event_sink << event } - c.on_error { |error| error_sink << error } - end - - with_client(client) do |client| - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "foo", nil)) - expect(error_sink.pop).to eq({ status_code: 500, body: "sorry" }) - expect(attempt).to be >= 2 - end - end - end - - it "reconnects after read timeout" do - events_body = <<-EOT -event: go -data: foo - -EOT - with_server do |server| - attempt = 0 - server.setup_response("/") do |req,res| - attempt += 1 - if attempt == 1 - sleep(2) - end - 
res.content_type = "text/event-stream" - res.status = 200 - res.body = events_body - end - - event_sink = Queue.new - client = subject.new(server.base_uri, - reconnect_time: 0.25, read_timeout: 0.25) do |c| - c.on_event { |event| event_sink << event } - end - - with_client(client) do |client| - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "foo", nil)) - expect(attempt).to be >= 2 - end - end - end - - it "reconnects if stream returns EOF" do - events_body_1 = <<-EOT -event: go -data: foo - -EOT - events_body_2 = <<-EOT -event: go -data: bar - -EOT - with_server do |server| - attempt = 0 - server.setup_response("/") do |req,res| - attempt += 1 - if attempt == 1 - res.body = events_body_1 - else - res.body = events_body_2 - end - res.content_type = "text/event-stream" - res.status = 200 - end - - event_sink = Queue.new - client = subject.new(server.base_uri, - reconnect_time: 0.25, read_timeout: 0.25) do |c| - c.on_event { |event| event_sink << event } - end - - with_client(client) do |client| - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "foo", nil)) - expect(event_sink.pop).to eq(SSE::SSEEvent.new(:go, "bar", nil)) - expect(attempt).to be >= 2 - end - end - end -end diff --git a/spec/sse_client/sse_events_spec.rb b/spec/sse_client/sse_events_spec.rb deleted file mode 100644 index 438cfa7a..00000000 --- a/spec/sse_client/sse_events_spec.rb +++ /dev/null @@ -1,100 +0,0 @@ -require "spec_helper" - -describe SSE::EventParser do - subject { SSE::EventParser } - - it "parses an event with all fields" do - lines = [ - "event: abc\r\n", - "data: def\r\n", - "id: 1\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_event = SSE::SSEEvent.new(:abc, "def", "1") - output = ep.items.to_a - expect(output).to eq([ expected_event ]) - end - - it "parses an event with only data" do - lines = [ - "data: def\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_event = SSE::SSEEvent.new(:message, "def", nil) - output = ep.items.to_a - expect(output).to eq([ expected_event ]) - end - - it "parses an event with multi-line data" do - lines = [ - "data: def\r\n", - "data: ghi\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_event = SSE::SSEEvent.new(:message, "def\nghi", nil) - output = ep.items.to_a - expect(output).to eq([ expected_event ]) - end - - it "ignores comments" do - lines = [ - ":", - "data: def\r\n", - ":", - "\r\n" - ] - ep = subject.new(lines) - - expected_event = SSE::SSEEvent.new(:message, "def", nil) - output = ep.items.to_a - expect(output).to eq([ expected_event ]) - end - - it "parses reconnect interval" do - lines = [ - "retry: 2500\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_item = SSE::SSESetRetryInterval.new(2500) - output = ep.items.to_a - expect(output).to eq([ expected_item ]) - end - - it "parses multiple events" do - lines = [ - "event: abc\r\n", - "data: def\r\n", - "id: 1\r\n", - "\r\n", - "data: ghi\r\n", - "\r\n" - ] - ep = subject.new(lines) - - expected_event_1 = SSE::SSEEvent.new(:abc, "def", "1") - expected_event_2 = SSE::SSEEvent.new(:message, "ghi", nil) - output = ep.items.to_a - expect(output).to eq([ expected_event_1, expected_event_2 ]) - end - - it "ignores events with no data" do - lines = [ - "event: nothing\r\n", - "\r\n", - "event: nada\r\n", - "\r\n" - ] - ep = subject.new(lines) - - output = ep.items.to_a - expect(output).to eq([]) - end -end diff --git a/spec/sse_client/sse_shared.rb b/spec/sse_client/sse_shared.rb deleted file mode 100644 index 3ecabb57..00000000 --- a/spec/sse_client/sse_shared.rb +++ /dev/null 
@@ -1,82 +0,0 @@ -require "spec_helper" -require "webrick" -require "webrick/httpproxy" -require "webrick/https" - -class StubHTTPServer - def initialize - @port = 50000 - begin - @server = create_server(@port) - rescue Errno::EADDRINUSE - @port += 1 - retry - end - end - - def create_server(port) - WEBrick::HTTPServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new - ) - end - - def start - Thread.new { @server.start } - end - - def stop - @server.shutdown - end - - def base_uri - URI("http://127.0.0.1:#{@port}") - end - - def setup_response(uri_path, &action) - @server.mount_proc(uri_path, action) - end -end - -class StubProxyServer < StubHTTPServer - attr_reader :request_count - attr_accessor :connect_status - - def initialize - super - @request_count = 0 - end - - def create_server(port) - WEBrick::HTTPProxyServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new, - ProxyContentHandler: proc do |req,res| - if !@connect_status.nil? - res.status = @connect_status - end - @request_count += 1 - end - ) - end -end - -class NullLogger - def method_missing(*) - self - end -end - -def with_server(server = nil) - server = StubHTTPServer.new if server.nil? - begin - server.start - yield server - ensure - server.stop - end -end diff --git a/spec/sse_client/streaming_http_spec.rb b/spec/sse_client/streaming_http_spec.rb deleted file mode 100644 index 7dfac9bd..00000000 --- a/spec/sse_client/streaming_http_spec.rb +++ /dev/null @@ -1,263 +0,0 @@ -require "spec_helper" -require "socketry" -require "sse_client/sse_shared" - -# -# End-to-end tests of HTTP requests against a real server -# -describe SSE::StreamingHTTPConnection do - subject { SSE::StreamingHTTPConnection } - - def with_connection(cxn) - begin - yield cxn - ensure - cxn.close - end - end - - it "makes HTTP connection and sends request" do - with_server do |server| - requests = Queue.new - server.setup_response("/foo") do |req,res| - requests << req - res.status = 200 - end - headers = { - "Accept" => "text/plain" - } - with_connection(subject.new(server.base_uri.merge("/foo?bar"), nil, headers, 30, 30)) do - received_req = requests.pop - expect(received_req.unparsed_uri).to eq("/foo?bar") - expect(received_req.header).to eq({ - "accept" => ["text/plain"], - "host" => [server.base_uri.host] - }) - end - end - end - - it "receives response status" do - with_server do |server| - server.setup_response("/foo") do |req,res| - res.status = 204 - end - with_connection(subject.new(server.base_uri.merge("/foo"), nil, {}, 30, 30)) do |cxn| - expect(cxn.status).to eq(204) - end - end - end - - it "receives response headers" do - with_server do |server| - server.setup_response("/foo") do |req,res| - res["Content-Type"] = "application/json" - end - with_connection(subject.new(server.base_uri.merge("/foo"), nil, {}, 30, 30)) do |cxn| - expect(cxn.headers["content-type"]).to eq("application/json") - end - end - end - - it "can read response as lines" do - body = <<-EOT -This is -a response -EOT - with_server do |server| - server.setup_response("/foo") do |req,res| - res.body = body - end - with_connection(subject.new(server.base_uri.merge("/foo"), nil, {}, 30, 30)) do |cxn| - lines = cxn.read_lines - expect(lines.next).to eq("This is\n") - expect(lines.next).to eq("a response\n") - end - end - end - - it "can read entire response body" do - body = <<-EOT -This is -a response -EOT - with_server do |server| - server.setup_response("/foo") do |req,res| - res.body = body - end 
- with_connection(subject.new(server.base_uri.merge("/foo"), nil, {}, 30, 30)) do |cxn| - read_body = cxn.read_all - expect(read_body).to eq("This is\na response\n") - end - end - end - - it "enforces read timeout" do - with_server do |server| - server.setup_response("/") do |req,res| - sleep(2) - res.status = 200 - end - expect { subject.new(server.base_uri, nil, {}, 30, 0.25) }.to raise_error(Socketry::TimeoutError) - end - end - - it "connects to HTTP server through proxy" do - body = "hi" - with_server do |server| - server.setup_response("/") do |req,res| - res.body = body - end - with_server(StubProxyServer.new) do |proxy| - with_connection(subject.new(server.base_uri, proxy.base_uri, {}, 30, 30)) do |cxn| - read_body = cxn.read_all - expect(read_body).to eq("hi") - expect(proxy.request_count).to eq(1) - end - end - end - end - - it "throws error if proxy responds with error status" do - with_server do |server| - server.setup_response("/") do |req,res| - res.body = body - end - with_server(StubProxyServer.new) do |proxy| - proxy.connect_status = 403 - expect { subject.new(server.base_uri, proxy.base_uri, {}, 30, 30) }.to raise_error(SSE::ProxyError) - end - end - end - - # The following 2 tests were originally written to connect to an embedded HTTPS server made with - # WEBrick. Unfortunately, some unknown problem prevents WEBrick's self-signed certificate feature - # from working in JRuby 9.1 (but not in any other Ruby version). Therefore these tests currently - # hit an external URL. - - it "connects to HTTPS server" do - with_connection(subject.new(URI("https://app.launchdarkly.com"), nil, {}, 30, 30)) do |cxn| - expect(cxn.status).to eq 200 - end - end - - it "connects to HTTPS server through proxy" do - with_server(StubProxyServer.new) do |proxy| - with_connection(subject.new(URI("https://app.launchdarkly.com"), proxy.base_uri, {}, 30, 30)) do |cxn| - expect(cxn.status).to eq 200 - expect(proxy.request_count).to eq(1) - end - end - end -end - -# -# Tests of response parsing functionality without a real HTTP request -# -describe SSE::HTTPResponseReader do - subject { SSE::HTTPResponseReader } - - let(:simple_response) { <<-EOT -HTTP/1.1 200 OK -Cache-Control: no-cache -Content-Type: text/event-stream - -line1\r -line2 -\r -EOT - } - - def make_chunks(str) - # arbitrarily split content into 5-character blocks - str.scan(/.{1,5}/m).to_enum - end - - def mock_socket_without_timeout(chunks) - mock_socket(chunks) { :eof } - end - - def mock_socket_with_timeout(chunks) - mock_socket(chunks) { raise Socketry::TimeoutError } - end - - def mock_socket(chunks) - sock = double - allow(sock).to receive(:readpartial) do - begin - chunks.next - rescue StopIteration - yield - end - end - sock - end - - it "parses status code" do - socket = mock_socket_without_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - expect(reader.status).to eq(200) - end - - it "parses headers" do - socket = mock_socket_without_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - expect(reader.headers).to eq({ - 'cache-control' => 'no-cache', - 'content-type' => 'text/event-stream' - }) - end - - it "can read entire response body" do - socket = mock_socket_without_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - expect(reader.read_all).to eq("line1\r\nline2\n\r\n") - end - - it "can read response body as lines" do - socket = mock_socket_without_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - expect(reader.read_lines.to_a).to 
eq([ - "line1\r\n", - "line2\n", - "\r\n" - ]) - end - - it "handles chunked encoding" do - chunked_response = <<-EOT -HTTP/1.1 200 OK -Content-Type: text/plain -Transfer-Encoding: chunked - -6\r -things\r -A\r - and stuff\r -0\r -\r -EOT - socket = mock_socket_without_timeout(make_chunks(chunked_response)) - reader = subject.new(socket, 0) - expect(reader.read_all).to eq("things and stuff") - end - - it "raises error if response ends without complete headers" do - malformed_response = <<-EOT -HTTP/1.1 200 OK -Cache-Control: no-cache -EOT - socket = mock_socket_without_timeout(make_chunks(malformed_response)) - expect { subject.new(socket, 0) }.to raise_error(EOFError) - end - - it "throws timeout if thrown by socket read" do - socket = mock_socket_with_timeout(make_chunks(simple_response)) - reader = subject.new(socket, 0) - lines = reader.read_lines - lines.next - lines.next - lines.next - expect { lines.next }.to raise_error(Socketry::TimeoutError) - end -end diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index df27e173..0ab9d3ec 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -1,5 +1,5 @@ +require "ld-eventsource" require "spec_helper" -require 'ostruct' describe LaunchDarkly::StreamProcessor do subject { LaunchDarkly::StreamProcessor } @@ -8,52 +8,52 @@ let(:processor) { subject.new("sdk_key", config, requestor) } describe '#process_message' do - let(:put_message) { OpenStruct.new({data: '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}'}) } - let(:patch_flag_message) { OpenStruct.new({data: '{"path": "/flags/key", "data": {"key": "asdf", "version": 1}}'}) } - let(:patch_seg_message) { OpenStruct.new({data: '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}'}) } - let(:delete_flag_message) { OpenStruct.new({data: '{"path": "/flags/key", "version": 2}'}) } - let(:delete_seg_message) { OpenStruct.new({data: '{"path": "/segments/key", "version": 2}'}) } - let(:indirect_patch_flag_message) { OpenStruct.new({data: "/flags/key"}) } - let(:indirect_patch_segment_message) { OpenStruct.new({data: "/segments/key"}) } + let(:put_message) { LaunchDarklySSE::StreamEvent.new(type: :put, data: '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}') } + let(:patch_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :patch, data: '{"path": "/flags/key", "data": {"key": "asdf", "version": 1}}') } + let(:patch_seg_message) { LaunchDarklySSE::StreamEvent.new(type: :patch, data: '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}') } + let(:delete_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :delete, data: '{"path": "/flags/key", "version": 2}') } + let(:delete_seg_message) { LaunchDarklySSE::StreamEvent.new(type: :delete, data: '{"path": "/segments/key", "version": 2}') } + let(:indirect_patch_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :'indirect/put', data: "/flags/key") } + let(:indirect_patch_segment_message) { LaunchDarklySSE::StreamEvent.new(type: :'indirect/patch', data: "/segments/key") } it "will accept PUT methods" do - processor.send(:process_message, put_message, LaunchDarkly::PUT) + processor.send(:process_message, put_message) expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf") expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "segkey")).to eq(key: "segkey") end it "will accept PATCH methods for flags" do - processor.send(:process_message, patch_flag_message, LaunchDarkly::PATCH) + 
processor.send(:process_message, patch_flag_message) expect(config.feature_store.get(LaunchDarkly::FEATURES, "asdf")).to eq(key: "asdf", version: 1) end it "will accept PATCH methods for segments" do - processor.send(:process_message, patch_seg_message, LaunchDarkly::PATCH) + processor.send(:process_message, patch_seg_message) expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "asdf")).to eq(key: "asdf", version: 1) end it "will accept DELETE methods for flags" do - processor.send(:process_message, patch_flag_message, LaunchDarkly::PATCH) - processor.send(:process_message, delete_flag_message, LaunchDarkly::DELETE) + processor.send(:process_message, patch_flag_message) + processor.send(:process_message, delete_flag_message) expect(config.feature_store.get(LaunchDarkly::FEATURES, "key")).to eq(nil) end it "will accept DELETE methods for segments" do - processor.send(:process_message, patch_seg_message, LaunchDarkly::PATCH) - processor.send(:process_message, delete_seg_message, LaunchDarkly::DELETE) + processor.send(:process_message, patch_seg_message) + processor.send(:process_message, delete_seg_message) expect(config.feature_store.get(LaunchDarkly::SEGMENTS, "key")).to eq(nil) end it "will accept INDIRECT PATCH method for flags" do flag = { key: 'key', version: 1 } allow(requestor).to receive(:request_flag).with(flag[:key]).and_return(flag) - processor.send(:process_message, indirect_patch_flag_message, LaunchDarkly::INDIRECT_PATCH); + processor.send(:process_message, indirect_patch_flag_message); expect(config.feature_store.get(LaunchDarkly::FEATURES, flag[:key])).to eq(flag) end it "will accept INDIRECT PATCH method for segments" do segment = { key: 'key', version: 1 } allow(requestor).to receive(:request_segment).with(segment[:key]).and_return(segment) - processor.send(:process_message, indirect_patch_segment_message, LaunchDarkly::INDIRECT_PATCH); + processor.send(:process_message, indirect_patch_segment_message); expect(config.feature_store.get(LaunchDarkly::SEGMENTS, segment[:key])).to eq(segment) end it "will log a warning if the method is not recognized" do expect(processor.instance_variable_get(:@config).logger).to receive :warn - processor.send(:process_message, put_message, "get") + processor.send(:process_message, LaunchDarklySSE::StreamEvent.new(type: :get, data: "", id: nil)) end end end From 85674397211e249ffad3a9d8c2b9607aa32f180f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 18:32:39 -0800 Subject: [PATCH 067/135] numerous Ruby SDK documentation fixes --- CONTRIBUTING.md | 12 --- README.md | 75 ++++++++--------- lib/ldclient-rb/config.rb | 2 +- lib/ldclient-rb/impl.rb | 5 +- .../impl/integrations/dynamodb_impl.rb | 3 - lib/ldclient-rb/in_memory_store.rb | 7 +- lib/ldclient-rb/integrations/dynamodb.rb | 20 ++++- lib/ldclient-rb/integrations/redis.rb | 11 ++- lib/ldclient-rb/interfaces.rb | 7 +- lib/ldclient-rb/ldclient.rb | 82 +++++++++++++------ lib/ldclient-rb/redis_store.rb | 2 +- lib/ldclient-rb/version.rb | 1 + scripts/gendocs.sh | 3 + 13 files changed, 140 insertions(+), 90 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 96147068..c6b8dd20 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,15 +2,3 @@ Contributing to LaunchDarkly SDK for Ruby ========================================= We encourage pull-requests and other contributions from the community. We've also published an [SDK contributor's guide](http://docs.launchdarkly.com/docs/sdk-contributors-guide) that provides a detailed explanation of how our SDKs work. 
- -Dependencies ------------- -[ld-em-eventsource](https://github.com/launchdarkly/em-eventsource) - - -Style ------ - -Our pull requests have [Hound CI](https://houndci.com/) set up to do style checking. -We also run [Rubocop](https://github.com/bbatsov/rubocop). - diff --git a/README.md b/README.md index 43819554..4812690f 100644 --- a/README.md +++ b/README.md @@ -15,37 +15,37 @@ This version of the LaunchDarkly SDK has a minimum Ruby version of 2.2.6, or 9.1 Quick setup ----------- -0. Install the Ruby SDK with `gem` +1. Install the Ruby SDK with `gem` -```shell + ```shell gem install ldclient-rb ``` -1. Require the LaunchDarkly client: +2. Require the LaunchDarkly client: -```ruby + ```ruby require 'ldclient-rb' ``` -2. Create a new LDClient with your SDK key: +3. Create a new LDClient with your SDK key: -```ruby + ```ruby client = LaunchDarkly::LDClient.new("your_sdk_key") ``` ### Ruby on Rails -0. Add `gem 'ldclient-rb'` to your Gemfile and `bundle install` +1. Add `gem 'ldclient-rb'` to your Gemfile and `bundle install` -1. Initialize the launchdarkly client in `config/initializers/launchdarkly.rb`: +2. Initialize the launchdarkly client in `config/initializers/launchdarkly.rb`: -```ruby + ```ruby Rails.configuration.ld_client = LaunchDarkly::LDClient.new("your_sdk_key") ``` -2. You may want to include a function in your ApplicationController +3. You may want to include a function in your ApplicationController -```ruby + ```ruby def launchdarkly_settings if current_user.present? { @@ -72,31 +72,44 @@ Rails.configuration.ld_client = LaunchDarkly::LDClient.new("your_sdk_key") end ``` -3. In your controllers, access the client using +4. In your controllers, access the client using -```ruby + ```ruby Rails.application.config.ld_client.variation('your.flag.key', launchdarkly_settings, false) ``` Note that this gem will automatically switch to using the Rails logger it is detected. +Your first feature flag +----------------------- + +1. Create a new feature flag on your [dashboard](https://app.launchdarkly.com). +2. In your application code, use the feature's key to check whether the flag is on for each user: + +```ruby +if client.variation("your.flag.key", {key: "user@test.com"}, false) + # application code to show the feature +else + # the code to run if the feature is off +end +``` + HTTPS proxy ------------- -The Ruby SDK uses Faraday and Socketry to handle its network traffic. Both of these provide built-in support for the use of an HTTPS proxy. If the HTTPS_PROXY environment variable is present then the SDK will proxy all network requests through the URL provided. +----------- + +The Ruby SDK uses Faraday and Socketry to handle its network traffic. Both of these provide built-in support for the use of an HTTPS proxy. If the HTTPS_PROXY environment variable is present then the SDK will proxy all network requests through the URL provided. (HTTP_PROXY is not used because all LaunchDarkly services require HTTPS.) How to set the HTTPS_PROXY environment variable on Mac/Linux systems: ``` export HTTPS_PROXY=https://web-proxy.domain.com:8080 ``` - How to set the HTTPS_PROXY environment variable on Windows systems: ``` set HTTPS_PROXY=https://web-proxy.domain.com:8080 ``` - If your proxy requires authentication then you can prefix the URN with your login information: ``` export HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 @@ -106,34 +119,22 @@ or set HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 ``` - -Your first feature flag ------------------------ - -1. 
Create a new feature flag on your [dashboard](https://app.launchdarkly.com) -2. In your application code, use the feature's key to check whether the flag is on for each user: - -```ruby -if client.variation("your.flag.key", {key: "user@test.com"}, false) - # application code to show the feature -else - # the code to run if the feature is off -end -``` - Database integrations --------------------- -Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `LaunchDarkly::Integrations::Redis` and `LaunchDarkly::Integrations::DynamoDB` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [source code](https://github.com/launchdarkly/ruby-client-private/tree/master/lib/ldclient-rb/integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. +Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `LaunchDarkly::Integrations::Redis` and `LaunchDarkly::Integrations::DynamoDB` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the `LaunchDarkly::Integrations` module and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. Using flag data from a file --------------------------- -For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.rb`](https://github.com/launchdarkly/ruby-client/blob/master/lib/ldclient-rb/file_data_source.rb) for more details. + +For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See `LaunchDarkly::FileDataSource` or the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file) for more details. Learn more ----------- -Check out our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [complete reference guide for this SDK](http://docs.launchdarkly.com/docs/ruby-sdk-reference). +Check out our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [reference guide for this SDK](http://docs.launchdarkly.com/docs/ruby-sdk-reference). + +Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/ldclient-rb). Testing ------- @@ -143,10 +144,10 @@ We run integration tests for all our SDKs using a centralized test harness. This Contributing ------------ -See [Contributing](https://github.com/launchdarkly/ruby-client/blob/master/CONTRIBUTING.md) +See [Contributing](https://github.com/launchdarkly/ruby-client/blob/master/CONTRIBUTING.md). About LaunchDarkly ------------ +------------------ * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. 
With LaunchDarkly, you can: * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 64ad7378..34f4f67b 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -346,7 +346,7 @@ def self.default_proxy # # The default value for {#logger}. - # @return [::Logger] the Rails logger if in Rails, or a default [::Logger] at WARN level otherwise + # @return [Logger] the Rails logger if in Rails, or a default Logger at WARN level otherwise # def self.default_logger if defined?(Rails) && Rails.respond_to?(:logger) diff --git a/lib/ldclient-rb/impl.rb b/lib/ldclient-rb/impl.rb index 3df0d7e3..b0d63ebe 100644 --- a/lib/ldclient-rb/impl.rb +++ b/lib/ldclient-rb/impl.rb @@ -1,10 +1,11 @@ module LaunchDarkly # - # Low-level implementation classes. Everything in this module should be considered non-public - # and subject to change with any release. + # Internal implementation classes. Everything in this module should be considered unsupported + # and subject to change. # # @since 5.5.0 + # @private # module Impl # code is in ldclient-rb/impl/ diff --git a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb index ebaa0445..a76fae52 100644 --- a/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb +++ b/lib/ldclient-rb/impl/integrations/dynamodb_impl.rb @@ -1,4 +1,3 @@ -require "concurrent/atomics" require "json" module LaunchDarkly @@ -36,8 +35,6 @@ def initialize(table_name, opts) @prefix = opts[:prefix] @logger = opts[:logger] || Config.default_logger - @stopped = Concurrent::AtomicBoolean.new(false) - if !opts[:existing_client].nil? @client = opts[:existing_client] else diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index 4814c85d..f2843c1e 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -6,18 +6,21 @@ module LaunchDarkly # we add another storable data type in the future, as long as it follows the same pattern # (having "key", "version", and "deleted" properties), we only need to add a corresponding # constant here and the existing store should be able to handle it. + + # @private FEATURES = { namespace: "features" }.freeze + # @private SEGMENTS = { namespace: "segments" }.freeze # # Default implementation of the LaunchDarkly client's feature store, using an in-memory - # cache. This object holds feature flags and related data received from the - # streaming API. + # cache. This object holds feature flags and related data received from LaunchDarkly. + # Database-backed implementations are available in {LaunchDarkly::Integrations}. # class InMemoryFeatureStore include LaunchDarkly::Interfaces::FeatureStore diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index c9ded019..ecd87fce 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -5,12 +5,30 @@ module LaunchDarkly module Integrations module DynamoDB # - # Creates a DynamoDB-backed persistent feature store. + # Creates a DynamoDB-backed persistent feature store. For more details about how and why you can + # use a persistent feature store, see the + # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). 
# # To use this method, you must first install one of the AWS SDK gems: either `aws-sdk-dynamodb`, or # the full `aws-sdk`. Then, put the object returned by this method into the `feature_store` property # of your client configuration ({LaunchDarkly::Config}). # + # @example Configuring the feature store + # store = LaunchDarkly::Integrations::DynamoDB::new_feature_store("my-table-name") + # config = LaunchDarkly::Config.new(feature_store: store) + # client = LaunchDarkly::LDClient.new(my_sdk_key, config) + # + # Note that the specified table must already exist in DynamoDB. It must have a partition key called + # "namespace", and a sort key called "key" (both strings). The SDK does not create the table + # automatically because it has no way of knowing what additional properties (such as permissions + # and throughput) you would want it to have. + # + # By default, the DynamoDB client will try to get your AWS credentials and region name from + # environment variables and/or local configuration files, as described in the AWS SDK documentation. + # You can also specify any supported AWS SDK options in `dynamodb_opts`-- or, provide an + # already-configured DynamoDB client in `existing_client`. + # + # @param table_name [String] name of an existing DynamoDB table # @param opts [Hash] the configuration options # @option opts [Hash] :dynamodb_opts options to pass to the DynamoDB client constructor (ignored if you specify `:existing_client`) # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index b81097c6..34509181 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -23,11 +23,18 @@ def self.default_prefix end # - # Creates a Redis-backed persistent feature store. + # Creates a Redis-backed persistent feature store. For more details about how and why you can + # use a persistent feature store, see the + # [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store). # # To use this method, you must first have the `redis` and `connection-pool` gems installed. Then, # put the object returned by this method into the `feature_store` property of your - # client configuration ({LaunchDarkly::Config}). + # client configuration. + # + # @example Configuring the feature store + # store = LaunchDarkly::Integrations::Redis::new_feature_store(redis_url: "redis://my-server") + # config = LaunchDarkly::Config.new(feature_store: store) + # client = LaunchDarkly::LDClient.new(my_sdk_key, config) # # @param opts [Hash] the configuration options # @option opts [String] :redis_url (default_redis_url) URL of the Redis instance (shortcut for omitting `redis_opts`) diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 912472b5..094ce0dd 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -17,9 +17,10 @@ module Interfaces # `:deleted`, a boolean (optional, defaults to false) that if true means this is a # placeholder for a deleted entity. # - # Examples of a "kind" are feature flags and segments; each of these is associated with an - # object such as {LaunchDarkly::FEATURES} and {LaunchDarkly::SEGMENTS}. The "kind" objects are - # hashes with a single property, `:namespace`, which is a short string unique to that kind. 
+ # To represent the different kinds of objects that can be stored, such as feature flags and + # segments, the SDK will provide a "kind" object; this is a hash with a single property, + # `:namespace`, which is a short string unique to that kind. This string can be used as a + # collection name or a key prefix. # # The default implementation is {LaunchDarkly::InMemoryFeatureStore}. Several implementations # that use databases can be found in {LaunchDarkly::Integrations}. If you want to write a new diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 868c65bd..5788d276 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -17,10 +17,17 @@ class LDClient # configuration parameter can also supplied to specify advanced options, # but for most use cases, the default configuration is appropriate. # + # The client will immediately attempt to connect to LaunchDarkly and retrieve + # your feature flag data. If it cannot successfully do so within the time limit + # specified by `wait_for_sec`, the constructor will return a client that is in + # an uninitialized state. See {#initialized?} for more details. + # # @param sdk_key [String] the SDK key for your LaunchDarkly account # @param config [Config] an optional client configuration object + # @param wait_for_sec [Float] maximum time (in seconds) to wait for initialization # # @return [LDClient] The LaunchDarkly client instance + # def initialize(sdk_key, config = Config.default, wait_for_sec = 5) @sdk_key = sdk_key @config = config @@ -85,7 +92,7 @@ def toggle?(key, user, default = false) # # Creates a hash string that can be used by the JavaScript SDK to identify a user. - # For more information, see ["Secure mode"](https://docs.launchdarkly.com/docs/js-sdk-reference#section-secure-mode). + # For more information, see [Secure mode](https://docs.launchdarkly.com/docs/js-sdk-reference#section-secure-mode). # # @param user [Hash] the user properties # @return [String] a hash string @@ -94,44 +101,61 @@ def secure_mode_hash(user) OpenSSL::HMAC.hexdigest("sha256", @sdk_key, user[:key].to_s) end - # Returns whether the client has been initialized and is ready to serve feature flag requests + # + # Returns whether the client has been initialized and is ready to serve feature flag requests. + # + # If this returns false, it means that the client did not succeed in connecting to + # LaunchDarkly within the time limit that you specified in the constructor. It could + # still succeed in connecting at a later time (on another thread), or it could have + # given up permanently (for instance, if your SDK key is invalid). In the meantime, + # any call to {#variation} or {#variation_detail} will behave as follows: + # + # 1. It will check whether the feature store already contains data (that is, you + # are using a database-backed store and it was populated by a previous run of this + # application). If so, it will use the last known feature flag data. + # + # 2. Failing that, it will return the value that you specified for the `default` + # parameter of {#variation} or {#variation_detail}. + # # @return [Boolean] true if the client has been initialized + # def initialized? @config.offline? || @config.use_ldd? || @data_source.initialized? end # - # Determines the variation of a feature flag to present to a user. At a minimum, - # the user hash should contain a `:key`. + # Determines the variation of a feature flag to present to a user. 
# - # @example Basic user hash - # {key: "user@example.com"} + # At a minimum, the user hash should contain a `:key`, which should be the unique + # identifier for your user (or, for an anonymous user, a session identifier or + # cookie). # - # For authenticated users, the `:key` should be the unique identifier for - # your user. For anonymous users, the `:key` should be a session identifier - # or cookie. In either case, the only requirement is that the key - # is unique to a user. + # Other supported user attributes include IP address, country code, and an arbitrary hash of + # custom attributes. For more about the supported user properties and how they work in + # LaunchDarkly, see [Targeting users](https://docs.launchdarkly.com/docs/targeting-users). + # + # The optional `:privateAttributeNames` user property allows you to specify a list of + # attribute names that should not be sent back to LaunchDarkly. + # [Private attributes](https://docs.launchdarkly.com/docs/private-user-attributes) + # can also be configured globally in {Config}. # - # You can also pass IP addresses and country codes in the user hash. + # @example Basic user hash + # {key: "my-user-id"} # # @example More complete user hash - # {key: "user@example.com", ip: "127.0.0.1", country: "US"} - # - # The user hash can contain arbitrary custom attributes stored in a `:custom` sub-hash: - # - # @example A user hash with custom attributes - # {key: "user@example.com", custom: {customer_rank: 1000, groups: ["google", "microsoft"]}} + # {key: "my-user-id", ip: "127.0.0.1", country: "US", custom: {customer_rank: 1000}} # - # Attribute values in the custom hash can be integers, booleans, strings, or - # lists of integers, booleans, or strings. + # @example User with a private attribute + # {key: "my-user-id", email: "email@example.com", privateAttributeNames: ["email"]} # # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard # @param user [Hash] a hash containing parameters for the end user requesting the flag - # @param default the default value of the flag + # @param default the default value of the flag; this is used if there is an error + # condition making it impossible to find or evaluate the flag + # + # @return the variation to show the user, or the default value if there's an an error # - # @return the variation to show the user, or the - # default value if there's an an error def variation(key, user, default) evaluate_internal(key, user, default, false).value end @@ -148,10 +172,14 @@ def variation(key, user, default) # Calling `variation_detail` instead of `variation` also causes the "reason" data to # be included in analytics events, if you are capturing detailed event data for this flag. # + # For more information, see the reference guide on + # [Evaluation reasons](https://docs.launchdarkly.com/v2.0/docs/evaluation-reasons). + # # @param key [String] the unique feature key for the feature flag, as shown # on the LaunchDarkly dashboard # @param user [Hash] a hash containing parameters for the end user requesting the flag - # @param default the default value of the flag + # @param default the default value of the flag; this is used if there is an error + # condition making it impossible to find or evaluate the flag # # @return [EvaluationDetail] an object describing the result # @@ -198,9 +226,11 @@ def track(event_name, user, data) end # - # Returns all feature flag values for the given user. This method is deprecated - please use - # {#all_flags_state} instead. 
Current versions of the client-side SDK will not generate analytics - # events correctly if you pass the result of `all_flags`. + # Returns all feature flag values for the given user. + # + # @deprecated Please use {#all_flags_state} instead. Current versions of the + # client-side SDK will not generate analytics events correctly if you pass the + # result of `all_flags`. # # @param user [Hash] The end user requesting the feature flags # @return [Hash] a hash of feature flag keys to values diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 32a9507d..6ab7dd96 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -16,7 +16,7 @@ module LaunchDarkly # of your client configuration. # # @deprecated Use the factory method in {LaunchDarkly::Integrations::Redis} instead. This specific - # implementation class may change in the future. + # implementation class may be changed or removed in the future. # class RedisFeatureStore include LaunchDarkly::Interfaces::FeatureStore diff --git a/lib/ldclient-rb/version.rb b/lib/ldclient-rb/version.rb index a70241bf..b526a871 100644 --- a/lib/ldclient-rb/version.rb +++ b/lib/ldclient-rb/version.rb @@ -1,3 +1,4 @@ module LaunchDarkly + # The current gem version. VERSION = "5.4.1" end diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh index 6280355e..1e545955 100755 --- a/scripts/gendocs.sh +++ b/scripts/gendocs.sh @@ -1,5 +1,8 @@ #!/bin/bash +# Use this script to generate documentation locally in ./doc so it can be proofed before release. +# After release, documentation will be visible at https://www.rubydoc.info/gems/ldclient-rb + gem install --conservative yard gem install --conservative redcarpet # provides Markdown formatting From 63c3680ab438609184e1593309636314ded27141 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 18:36:03 -0800 Subject: [PATCH 068/135] fix bundler version --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f19ae7bc..4d0800f1 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -20,7 +20,7 @@ ruby-docker-template: &ruby-docker-template fi - run: ruby -v - run: gem install bundler - - run: bundle install + - run: bundle install -v 1.17.3 - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: @@ -88,7 +88,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem install bundler; + gem install bundler -v 1.17.3; bundle install; mv Gemfile.lock "Gemfile.lock.$i" done From 008331b1cff9cda159dfaf7f0cb65873afeaec5c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 3 Jan 2019 18:41:19 -0800 Subject: [PATCH 069/135] fix build --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 4d0800f1..a672a100 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,8 +19,8 @@ ruby-docker-template: &ruby-docker-template gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - run: ruby -v - - run: gem install bundler - - run: bundle install -v 1.17.3 + - run: gem install bundler -v 1.17.3 + - run: bundle install - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec - store_test_results: From c09c166ccd78055cc2dcb7778cc4779d97350796 Mon Sep 17 
00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 12:23:48 -0800 Subject: [PATCH 070/135] make some dependencies less strict and remove some unused ones --- ldclient-rb.gemspec | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 0b8f4f9d..46dac190 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -34,11 +34,9 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "faraday", [">= 0.9", "< 2"] spec.add_runtime_dependency "faraday-http-cache", [">= 1.3.0", "< 3"] - spec.add_runtime_dependency "semantic", "~> 1.6.0" - spec.add_runtime_dependency "thread_safe", "~> 0.3" + spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "net-http-persistent", "~> 2.9" - spec.add_runtime_dependency "concurrent-ruby", "~> 1.0.4" - spec.add_runtime_dependency "hashdiff", "~> 0.2" + spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" spec.add_runtime_dependency "http_tools", '~> 0.4.5' spec.add_runtime_dependency "socketry", "~> 0.5.1" end From a4ced95117f3b47b14d2048fa5e7deb1088becbd Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 12:32:25 -0800 Subject: [PATCH 071/135] not using thread_safe --- lib/ldclient-rb/cache_store.rb | 12 ++++++------ lib/ldclient-rb/redis_store.rb | 1 - 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/lib/ldclient-rb/cache_store.rb b/lib/ldclient-rb/cache_store.rb index 0677da65..164534fb 100644 --- a/lib/ldclient-rb/cache_store.rb +++ b/lib/ldclient-rb/cache_store.rb @@ -1,12 +1,12 @@ -require "thread_safe" +require "concurrent/map" module LaunchDarkly - # A thread-safe in-memory store suitable for use - # with the Faraday caching HTTP client. Uses the - # Threadsafe gem as the underlying cache. + # + # A thread-safe in-memory store suitable for use with the Faraday caching HTTP client. Uses the + # concurrent-ruby gem's Map as the underlying cache. 
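Editor's note on the thread_safe to concurrent-ruby switch above: `Concurrent::Map` can stand in for `ThreadSafe::Cache` here because the wrapper only needs plain hash-style, thread-safe access. A small sketch of that contract, with an invented key and value:

```ruby
require "concurrent/map"

cache = Concurrent::Map.new
cache["etag:/sdk/latest-all"] = "abc123"   # write
value = cache["etag:/sdk/latest-all"]      # read  => "abc123"
cache.delete("etag:/sdk/latest-all")       # delete
```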
# # @see https://github.com/plataformatec/faraday-http-cache - # @see https://github.com/ruby-concurrency/thread_safe + # @see https://github.com/ruby-concurrency # class ThreadSafeMemoryStore # @@ -14,7 +14,7 @@ class ThreadSafeMemoryStore # # @return [ThreadSafeMemoryStore] a new store def initialize - @cache = ThreadSafe::Cache.new + @cache = Concurrent::Map.new end # diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 3729ca6b..c9b1bc64 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -1,6 +1,5 @@ require "concurrent/atomics" require "json" -require "thread_safe" module LaunchDarkly # From 806bb8e8fb7b665eb2ac68df583fe186d9cf9ca7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 12:34:01 -0800 Subject: [PATCH 072/135] add bundler version (still not sure why we need to) --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index df9dac51..544bd9ae 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,7 +19,7 @@ ruby-docker-template: &ruby-docker-template gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - run: ruby -v - - run: gem install bundler + - run: gem install bundler -v 1.17.3 - run: bundle install - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec @@ -80,7 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem install bundler; + gem install bundler -v 1.17.3; bundle install; mv Gemfile.lock "Gemfile.lock.$i" done From 9d446c85cd15f7375886f922d455de6cef8c8062 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 13:02:03 -0800 Subject: [PATCH 073/135] don't need bundler version for all rubies --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 544bd9ae..d742e552 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,7 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem install bundler -v 1.17.3; + gem install bundler; bundle install; mv Gemfile.lock "Gemfile.lock.$i" done From 5516745a0c16d84d2b2420b3e7b84f37f1353f5e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 13:13:59 -0800 Subject: [PATCH 074/135] fix bundler version again --- .circleci/config.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index d742e552..85f6f7cf 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -19,7 +19,7 @@ ruby-docker-template: &ruby-docker-template gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - run: ruby -v - - run: gem install bundler -v 1.17.3 + - run: gem install bundler -v "~> 1.7" - run: bundle install - run: mkdir ./rspec - run: bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec @@ -80,7 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem install bundler; + gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" done From 54add1dcc64525b22a0e558eb3024e7b60adcf41 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 15:52:35 -0800 Subject: [PATCH 075/135] try to fix bundler version again --- 
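Editor's note: the CI changes above pin bundler with the pessimistic constraint `~> 1.7`, which is why later commits in this series also uninstall any preinstalled bundler 2.x. A quick sketch of what that constraint accepts, using RubyGems directly; the version numbers are just examples:

```ruby
require "rubygems"

req = Gem::Requirement.new("~> 1.7")           # means >= 1.7 and < 2.0
req.satisfied_by?(Gem::Version.new("1.17.3"))  # => true
req.satisfied_by?(Gem::Version.new("2.0.1"))   # => false
```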
.circleci/config.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 85f6f7cf..5a66f0ec 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,6 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi + gem uninstall bundler; # a later, incompatible version of bundler might be preinstalled gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" From 3d4b08067de23b9fa77d061f419b788eb7bd1405 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 16:01:27 -0800 Subject: [PATCH 076/135] yet another build fix --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 5a66f0ec..d08d8c0c 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,7 +80,7 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - gem uninstall bundler; # a later, incompatible version of bundler might be preinstalled + yes | gem uninstall bundler; # a later, incompatible version of bundler might be preinstalled gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" From 635adf44c4bc9635111535f49ce16a1dd079d059 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 16:35:25 -0800 Subject: [PATCH 077/135] commit lock file to get correct bundler --- .circleci/config.yml | 1 - .gitignore | 1 - Gemfile.lock | 111 +++++++++++++++++++++++++++++++++++++++++++ 3 files changed, 111 insertions(+), 2 deletions(-) create mode 100644 Gemfile.lock diff --git a/.circleci/config.yml b/.circleci/config.yml index d08d8c0c..85f6f7cf 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,7 +80,6 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi - yes | gem uninstall bundler; # a later, incompatible version of bundler might be preinstalled gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" diff --git a/.gitignore b/.gitignore index bb576123..3f9d02f2 100644 --- a/.gitignore +++ b/.gitignore @@ -12,5 +12,4 @@ *.a mkmf.log *.gem -Gemfile.lock .DS_Store diff --git a/Gemfile.lock b/Gemfile.lock new file mode 100644 index 00000000..17c5725e --- /dev/null +++ b/Gemfile.lock @@ -0,0 +1,111 @@ +PATH + remote: . 
+ specs: + ldclient-rb (5.4.1) + concurrent-ruby (~> 1.0.4) + faraday (>= 0.9, < 2) + faraday-http-cache (>= 1.3.0, < 3) + hashdiff (~> 0.2) + http_tools (~> 0.4.5) + json (>= 1.8, < 3) + net-http-persistent (~> 2.9) + semantic (~> 1.6.0) + socketry (~> 0.5.1) + thread_safe (~> 0.3) + +GEM + remote: https://rubygems.org/ + specs: + aws-eventstream (1.0.1) + aws-partitions (1.125.0) + aws-sdk-core (3.44.0) + aws-eventstream (~> 1.0) + aws-partitions (~> 1.0) + aws-sigv4 (~> 1.0) + jmespath (~> 1.0) + aws-sdk-dynamodb (1.18.0) + aws-sdk-core (~> 3, >= 3.39.0) + aws-sigv4 (~> 1.0) + aws-sigv4 (1.0.3) + codeclimate-test-reporter (0.6.0) + simplecov (>= 0.7.1, < 1.0.0) + concurrent-ruby (1.0.5) + concurrent-ruby (1.0.5-java) + connection_pool (2.2.1) + diff-lcs (1.3) + diplomat (2.0.2) + faraday (~> 0.9) + json + docile (1.1.5) + faraday (0.15.4) + multipart-post (>= 1.2, < 3) + faraday-http-cache (2.0.0) + faraday (~> 0.8) + ffi (1.9.25) + ffi (1.9.25-java) + hashdiff (0.3.7) + hitimes (1.3.0) + hitimes (1.3.0-java) + http_tools (0.4.5) + jmespath (1.4.0) + json (1.8.6) + json (1.8.6-java) + listen (3.1.5) + rb-fsevent (~> 0.9, >= 0.9.4) + rb-inotify (~> 0.9, >= 0.9.7) + ruby_dep (~> 1.2) + multipart-post (2.0.0) + net-http-persistent (2.9.4) + rake (10.5.0) + rb-fsevent (0.10.3) + rb-inotify (0.9.10) + ffi (>= 0.5.0, < 2) + redis (3.3.5) + rspec (3.7.0) + rspec-core (~> 3.7.0) + rspec-expectations (~> 3.7.0) + rspec-mocks (~> 3.7.0) + rspec-core (3.7.1) + rspec-support (~> 3.7.0) + rspec-expectations (3.7.0) + diff-lcs (>= 1.2.0, < 2.0) + rspec-support (~> 3.7.0) + rspec-mocks (3.7.0) + diff-lcs (>= 1.2.0, < 2.0) + rspec-support (~> 3.7.0) + rspec-support (3.7.0) + rspec_junit_formatter (0.3.0) + rspec-core (>= 2, < 4, != 2.12.0) + ruby_dep (1.5.0) + semantic (1.6.1) + simplecov (0.15.1) + docile (~> 1.1.0) + json (>= 1.8, < 3) + simplecov-html (~> 0.10.0) + simplecov-html (0.10.2) + socketry (0.5.1) + hitimes (~> 1.2) + thread_safe (0.3.6) + thread_safe (0.3.6-java) + timecop (0.9.1) + +PLATFORMS + java + ruby + +DEPENDENCIES + aws-sdk-dynamodb (~> 1.18) + bundler (~> 1.7) + codeclimate-test-reporter (~> 0) + connection_pool (>= 2.1.2) + diplomat (>= 2.0.2) + ldclient-rb! + listen (~> 3.0) + rake (~> 10.0) + redis (~> 3.3.5) + rspec (~> 3.2) + rspec_junit_formatter (~> 0.3.0) + timecop (~> 0.9.1) + +BUNDLED WITH + 1.17.1 From 3b5b08e2f61243f28748c59f6722ac1a914481c8 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 4 Jan 2019 16:42:24 -0800 Subject: [PATCH 078/135] update lockfile --- Gemfile.lock | 31 ++++--------------------------- 1 file changed, 4 insertions(+), 27 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 17c5725e..6c4673e4 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -2,40 +2,23 @@ PATH remote: . 
specs: ldclient-rb (5.4.1) - concurrent-ruby (~> 1.0.4) + concurrent-ruby (~> 1.0) faraday (>= 0.9, < 2) faraday-http-cache (>= 1.3.0, < 3) - hashdiff (~> 0.2) http_tools (~> 0.4.5) json (>= 1.8, < 3) net-http-persistent (~> 2.9) - semantic (~> 1.6.0) + semantic (~> 1.6) socketry (~> 0.5.1) - thread_safe (~> 0.3) GEM remote: https://rubygems.org/ specs: - aws-eventstream (1.0.1) - aws-partitions (1.125.0) - aws-sdk-core (3.44.0) - aws-eventstream (~> 1.0) - aws-partitions (~> 1.0) - aws-sigv4 (~> 1.0) - jmespath (~> 1.0) - aws-sdk-dynamodb (1.18.0) - aws-sdk-core (~> 3, >= 3.39.0) - aws-sigv4 (~> 1.0) - aws-sigv4 (1.0.3) codeclimate-test-reporter (0.6.0) simplecov (>= 0.7.1, < 1.0.0) - concurrent-ruby (1.0.5) - concurrent-ruby (1.0.5-java) + concurrent-ruby (1.1.4) connection_pool (2.2.1) diff-lcs (1.3) - diplomat (2.0.2) - faraday (~> 0.9) - json docile (1.1.5) faraday (0.15.4) multipart-post (>= 1.2, < 3) @@ -43,11 +26,9 @@ GEM faraday (~> 0.8) ffi (1.9.25) ffi (1.9.25-java) - hashdiff (0.3.7) hitimes (1.3.0) hitimes (1.3.0-java) http_tools (0.4.5) - jmespath (1.4.0) json (1.8.6) json (1.8.6-java) listen (3.1.5) @@ -85,8 +66,6 @@ GEM simplecov-html (0.10.2) socketry (0.5.1) hitimes (~> 1.2) - thread_safe (0.3.6) - thread_safe (0.3.6-java) timecop (0.9.1) PLATFORMS @@ -94,11 +73,9 @@ PLATFORMS ruby DEPENDENCIES - aws-sdk-dynamodb (~> 1.18) bundler (~> 1.7) codeclimate-test-reporter (~> 0) connection_pool (>= 2.1.2) - diplomat (>= 2.0.2) ldclient-rb! listen (~> 3.0) rake (~> 10.0) @@ -108,4 +85,4 @@ DEPENDENCIES timecop (~> 0.9.1) BUNDLED WITH - 1.17.1 + 1.17.3 From 8656f258d42eb16c612181eb01a6b5e6ebebf225 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 12:52:42 -0800 Subject: [PATCH 079/135] use ruby-eventsource --- Gemfile.lock | 7 +++++-- lib/ldclient-rb/stream.rb | 4 ++-- spec/stream_spec.rb | 16 ++++++++-------- 3 files changed, 15 insertions(+), 12 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 695aaadc..f376fb32 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -5,11 +5,10 @@ PATH concurrent-ruby (~> 1.0) faraday (>= 0.9, < 2) faraday-http-cache (>= 1.3.0, < 3) - http_tools (~> 0.4.5) json (>= 1.8, < 3) + ld-eventsource (~> 1.0) net-http-persistent (~> 2.9) semantic (~> 1.6) - socketry (~> 0.5.1) GEM remote: https://rubygems.org/ @@ -43,6 +42,10 @@ GEM jmespath (1.4.0) json (1.8.6) json (1.8.6-java) + ld-eventsource (1.0.0) + concurrent-ruby (~> 1.0) + http_tools (~> 0.4.5) + socketry (~> 0.5.1) listen (3.1.5) rb-fsevent (~> 0.9, >= 0.9.4) rb-inotify (~> 0.9, >= 0.9.7) diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index adc4bf59..e4f1b3bd 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -54,11 +54,11 @@ def start read_timeout: READ_TIMEOUT_SECONDS, logger: @config.logger } - @es = LaunchDarklySSE::SSEClient.new(@config.stream_uri + "/all", **opts) do |conn| + @es = SSE::Client.new(@config.stream_uri + "/all", **opts) do |conn| conn.on_event { |event| process_message(event) } conn.on_error { |err| case err - when LaunchDarklySSE::HTTPError + when SSE::Errors::HTTPError status = err.status message = Util.http_error_message(status, "streaming connection", "will retry") @config.logger.error { "[LDClient] #{message}" } diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index 0ab9d3ec..648833ff 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -8,13 +8,13 @@ let(:processor) { subject.new("sdk_key", config, requestor) } describe '#process_message' do - let(:put_message) { 
LaunchDarklySSE::StreamEvent.new(type: :put, data: '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}') } - let(:patch_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :patch, data: '{"path": "/flags/key", "data": {"key": "asdf", "version": 1}}') } - let(:patch_seg_message) { LaunchDarklySSE::StreamEvent.new(type: :patch, data: '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}') } - let(:delete_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :delete, data: '{"path": "/flags/key", "version": 2}') } - let(:delete_seg_message) { LaunchDarklySSE::StreamEvent.new(type: :delete, data: '{"path": "/segments/key", "version": 2}') } - let(:indirect_patch_flag_message) { LaunchDarklySSE::StreamEvent.new(type: :'indirect/put', data: "/flags/key") } - let(:indirect_patch_segment_message) { LaunchDarklySSE::StreamEvent.new(type: :'indirect/patch', data: "/segments/key") } + let(:put_message) { SSE::StreamEvent.new(:put, '{"data":{"flags":{"asdf": {"key": "asdf"}},"segments":{"segkey": {"key": "segkey"}}}}') } + let(:patch_flag_message) { SSE::StreamEvent.new(:patch, '{"path": "/flags/key", "data": {"key": "asdf", "version": 1}}') } + let(:patch_seg_message) { SSE::StreamEvent.new(:patch, '{"path": "/segments/key", "data": {"key": "asdf", "version": 1}}') } + let(:delete_flag_message) { SSE::StreamEvent.new(:delete, '{"path": "/flags/key", "version": 2}') } + let(:delete_seg_message) { SSE::StreamEvent.new(:delete, '{"path": "/segments/key", "version": 2}') } + let(:indirect_patch_flag_message) { SSE::StreamEvent.new(:'indirect/patch', "/flags/key") } + let(:indirect_patch_segment_message) { SSE::StreamEvent.new(:'indirect/patch', "/segments/key") } it "will accept PUT methods" do processor.send(:process_message, put_message) @@ -53,7 +53,7 @@ end it "will log a warning if the method is not recognized" do expect(processor.instance_variable_get(:@config).logger).to receive :warn - processor.send(:process_message, LaunchDarklySSE::StreamEvent.new(type: :get, data: "", id: nil)) + processor.send(:process_message, SSE::StreamEvent.new(type: :get, data: "", id: nil)) end end end From 1ced67ef78b84e0ff74bf0b8f791de45782f1d6e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 14:19:35 -0800 Subject: [PATCH 080/135] uninstall unwanted bundler upgrade in CI --- .circleci/config.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 0e285fcc..26dd2cb7 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -88,6 +88,8 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi + # bundler 2.0 may be preinstalled, we need to remove it if so + yes | gem uninstall bundler --version '>=2.0' || true; gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" From e11bf4b05cced0e29dbf24daabd08922f8b9ba84 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 14:30:19 -0800 Subject: [PATCH 081/135] allow net-http-persistent 3.x --- Gemfile.lock | 7 ++++--- ldclient-rb.gemspec | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 6c4673e4..28f15ccf 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,13 +1,13 @@ PATH remote: . 
specs: - ldclient-rb (5.4.1) + ldclient-rb (5.4.2) concurrent-ruby (~> 1.0) faraday (>= 0.9, < 2) faraday-http-cache (>= 1.3.0, < 3) http_tools (~> 0.4.5) json (>= 1.8, < 3) - net-http-persistent (~> 2.9) + net-http-persistent (>= 2.9, < 4.0) semantic (~> 1.6) socketry (~> 0.5.1) @@ -36,7 +36,8 @@ GEM rb-inotify (~> 0.9, >= 0.9.7) ruby_dep (~> 1.2) multipart-post (2.0.0) - net-http-persistent (2.9.4) + net-http-persistent (3.0.0) + connection_pool (~> 2.2) rake (10.5.0) rb-fsevent (0.10.3) rb-inotify (0.9.10) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 46dac190..4e96b6b4 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -35,7 +35,7 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "faraday", [">= 0.9", "< 2"] spec.add_runtime_dependency "faraday-http-cache", [">= 1.3.0", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" - spec.add_runtime_dependency "net-http-persistent", "~> 2.9" + spec.add_runtime_dependency "net-http-persistent", [">= 2.9", "< 4.0"] spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" spec.add_runtime_dependency "http_tools", '~> 0.4.5' spec.add_runtime_dependency "socketry", "~> 0.5.1" From cb2193e5c25a1c1c52fd426413c323914f873f15 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 14:19:35 -0800 Subject: [PATCH 082/135] uninstall unwanted bundler upgrade in CI --- .circleci/config.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.circleci/config.yml b/.circleci/config.yml index 85f6f7cf..6fb11b32 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -80,6 +80,8 @@ jobs: if [[ $i == jruby* ]]; then gem install jruby-openssl; # required by bundler, no effect on Ruby MRI fi + # bundler 2.0 may be preinstalled, we need to remove it if so + yes | gem uninstall bundler --version '>=2.0' || true; gem install bundler -v "~> 1.7"; bundle install; mv Gemfile.lock "Gemfile.lock.$i" From 3f4e432c3f892e980d300e1ea4fbedcc32ebcc80 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 18:19:39 -0800 Subject: [PATCH 083/135] rewrite requestor without Faraday - don't have proxy yet --- lib/ldclient-rb/polling.rb | 3 +- lib/ldclient-rb/requestor.rb | 53 +++++---- spec/http_util.rb | 103 +++++++++++++++++ spec/requestor_spec.rb | 208 +++++++++++++++++++++++++++-------- 4 files changed, 302 insertions(+), 65 deletions(-) create mode 100644 spec/http_util.rb diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 4c6769f3..17ff7c12 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -63,8 +63,7 @@ def create_worker stop end rescue StandardError => exn - @config.logger.error { "[LDClient] Exception while polling: #{exn.inspect}" } - # TODO: log_exception(__method__.to_s, exn) + Util.log_exception(@config.logger, "Exception while polling", exn) end delta = @config.poll_interval - (Time.now - started_at) if delta > 0 diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 3e244fbe..739ea277 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -1,6 +1,6 @@ +require "concurrent/atomics" require "json" require "net/http/persistent" -require "faraday/http_cache" module LaunchDarkly # @private @@ -16,14 +16,15 @@ def status # @private class Requestor + CacheEntry = Struct.new(:etag, :body) + def initialize(sdk_key, config) @sdk_key = sdk_key @config = config - @client = Faraday.new do |builder| - builder.use :http_cache, store: @config.cache_store - - builder.adapter :net_http_persistent - end + @client = 
Net::HTTP::Persistent.new + @client.open_timeout = @config.connect_timeout + @client.read_timeout = @config.read_timeout + @cache = @config.cache_store end def request_flag(key) @@ -39,24 +40,38 @@ def request_all_data() end def make_request(path) - uri = @config.base_uri + path - res = @client.get (uri) do |req| - req.headers["Authorization"] = @sdk_key - req.headers["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION - req.options.timeout = @config.read_timeout - req.options.open_timeout = @config.connect_timeout - if @config.proxy - req.options.proxy = Faraday::ProxyOptions.from @config.proxy - end + uri = URI(@config.base_uri + path) + req = Net::HTTP::Get.new(uri) + req["Authorization"] = @sdk_key + req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION + cached = @cache.read(uri) + if !cached.nil? + req["If-None-Match"] = cached.etag end + # if @config.proxy + # req.options.proxy = Faraday::ProxyOptions.from @config.proxy + # end - @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{res.status}\n\theaders: #{res.headers}\n\tbody: #{res.body}" } + res = @client.request(uri, req) + status = res.code.to_i + @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{res.to_hash}\n\tbody: #{res.body}" } - if res.status < 200 || res.status >= 300 - raise UnexpectedResponseError.new(res.status) + if status == 304 && !cached.nil? + body = cached.body + else + @cache.delete(uri) + if status < 200 || status >= 300 + raise UnexpectedResponseError.new(status) + end + body = res.body + etag = res["etag"] + @cache.write(uri, CacheEntry.new(etag, body)) if !etag.nil? end + JSON.parse(body, symbolize_names: true) + end - JSON.parse(res.body, symbolize_names: true) + def stop + @client.shutdown end private :make_request diff --git a/spec/http_util.rb b/spec/http_util.rb new file mode 100644 index 00000000..764f8e48 --- /dev/null +++ b/spec/http_util.rb @@ -0,0 +1,103 @@ +require "webrick" +require "webrick/httpproxy" +require "webrick/https" + +class StubHTTPServer + attr_reader :requests + + @@next_port = 50000 + + def initialize + @port = StubHTTPServer.next_port + begin + base_opts = { + BindAddress: '127.0.0.1', + Port: @port, + AccessLog: [], + Logger: NullLogger.new, + RequestCallback: method(:record_request) + } + @server = create_server(@port, base_opts) + rescue Errno::EADDRINUSE + @port = StubHTTPServer.next_port + retry + end + @requests = [] + end + + def self.next_port + p = @@next_port + @@next_port = (p + 1 < 60000) ? p + 1 : 50000 + p + end + + def create_server(port, base_opts) + WEBrick::HTTPServer.new(base_opts) + end + + def start + Thread.new { @server.start } + end + + def stop + @server.shutdown + end + + def base_uri + URI("http://127.0.0.1:#{@port}") + end + + def setup_response(uri_path, &action) + @server.mount_proc(uri_path, action) + end + + def setup_ok_response(uri_path, body, content_type=nil, headers={}) + setup_response(uri_path) do |req, res| + res.status = 200 + res.content_type = content_type if !content_type.nil? + res.body = body + headers.each { |n, v| res[n] = v } + end + end + + def record_request(req, res) + @requests.push(req) + end +end + +class StubProxyServer < StubHTTPServer + attr_reader :request_count + attr_accessor :connect_status + + def initialize + super + @request_count = 0 + end + + def create_server(port, base_opts) + WEBrick::HTTPProxyServer.new(base_opts.merge({ + ProxyContentHandler: proc do |req,res| + if !@connect_status.nil? 
+ res.status = @connect_status + end + @request_count += 1 + end + })) + end +end + +class NullLogger + def method_missing(*) + self + end +end + +def with_server(server = nil) + server = StubHTTPServer.new if server.nil? + begin + server.start + yield server + ensure + server.stop + end +end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index b7838200..3d4a666f 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -1,52 +1,172 @@ require "spec_helper" -require "faraday" +require "http_util" + +$sdk_key = "secret" describe LaunchDarkly::Requestor do - describe ".request_all_flags" do - describe "with a proxy" do - let(:requestor) { - LaunchDarkly::Requestor.new( - "key", - LaunchDarkly::Config.new({ - :proxy => "http://proxy.com", - :base_uri => "http://ld.com" + def with_requestor(base_uri) + r = LaunchDarkly::Requestor.new($sdk_key, LaunchDarkly::Config.new(base_uri: base_uri)) + yield r + r.stop + end + + describe "request_all_flags" do + it "uses expected URI and headers" do + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_ok_response("/", "{}") + requestor.request_all_data() + expect(server.requests.count).to eq 1 + expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-all" + expect(server.requests[0].header).to include({ + "authorization" => [ $sdk_key ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] + }) + end + end + end + + it "parses response" do + expected_data = { flags: { x: { key: "x" } } } + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_ok_response("/", expected_data.to_json) + data = requestor.request_all_data() + expect(data).to eq expected_data + end + end + end + + it "sends etag from previous response" do + etag = "xyz" + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_response("/") do |req, res| + res.status = 200 + res.body = "{}" + res["ETag"] = etag + end + requestor.request_all_data() + expect(server.requests.count).to eq 1 + + requestor.request_all_data() + expect(server.requests.count).to eq 2 + expect(server.requests[1].header).to include({ "if-none-match" => [ etag ] }) + end + end + end + + it "can reuse cached data" do + etag = "xyz" + expected_data = { flags: { x: { key: "x" } } } + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_response("/") do |req, res| + res.status = 200 + res.body = expected_data.to_json + res["ETag"] = etag + end + requestor.request_all_data() + expect(server.requests.count).to eq 1 + + server.setup_response("/") do |req, res| + res.status = 304 + end + data = requestor.request_all_data() + expect(server.requests.count).to eq 2 + expect(server.requests[1].header).to include({ "if-none-match" => [ etag ] }) + expect(data).to eq expected_data + end + end + end + + it "replaces cached data with new data" do + etag1 = "abc" + etag2 = "xyz" + expected_data1 = { flags: { x: { key: "x" } } } + expected_data2 = { flags: { y: { key: "y" } } } + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_response("/") do |req, res| + res.status = 200 + res.body = expected_data1.to_json + res["ETag"] = etag1 + end + data = requestor.request_all_data() + expect(data).to eq expected_data1 + expect(server.requests.count).to eq 1 + + server.setup_response("/") do |req, res| + res.status = 304 + end + data = requestor.request_all_data() + expect(data).to eq expected_data1 + 
expect(server.requests.count).to eq 2 + expect(server.requests[1].header).to include({ "if-none-match" => [ etag1 ] }) + + server.setup_response("/") do |req, res| + res.status = 200 + res.body = expected_data2.to_json + res["ETag"] = etag2 + end + data = requestor.request_all_data() + expect(data).to eq expected_data2 + expect(server.requests.count).to eq 3 + expect(server.requests[2].header).to include({ "if-none-match" => [ etag1 ] }) + + server.setup_response("/") do |req, res| + res.status = 304 + end + data = requestor.request_all_data() + expect(data).to eq expected_data2 + expect(server.requests.count).to eq 4 + expect(server.requests[3].header).to include({ "if-none-match" => [ etag2 ] }) + end + end + end + + it "throws exception for error status" do + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_response("/") do |req, res| + res.status = 400 + end + expect { requestor.request_all_data() }.to raise_error(LaunchDarkly::UnexpectedResponseError) + end + end + end + end + + describe "request_flag" do + it "uses expected URI and headers" do + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_ok_response("/", "{}") + requestor.request_flag("key") + expect(server.requests.count).to eq 1 + expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-flags/key" + expect(server.requests[0].header).to include({ + "authorization" => [ $sdk_key ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] }) - ) - } - it "converts the proxy option" do - faraday = Faraday.new - requestor.instance_variable_set(:@client, faraday) - allow(faraday).to receive(:get) do |*args, &block| - req = double(Faraday::Request, :headers => {}, :options => Faraday::RequestOptions.new) - block.call(req) - expect(args).to eq ['http://ld.com/sdk/latest-all'] - expect(req.options.proxy[:uri]).to eq URI("http://proxy.com") - double(body: '{"foo": "bar"}', status: 200, headers: {}) - end - - requestor.request_all_data() - end - end - describe "without a proxy" do - let(:requestor) { - LaunchDarkly::Requestor.new( - "key", - LaunchDarkly::Config.new({ - :base_uri => "http://ld.com" + end + end + end + end + + describe "request_segment" do + it "uses expected URI and headers" do + with_server do |server| + with_requestor(server.base_uri.to_s) do |requestor| + server.setup_ok_response("/", "{}") + requestor.request_segment("key") + expect(server.requests.count).to eq 1 + expect(server.requests[0].unparsed_uri).to eq "/sdk/latest-segments/key" + expect(server.requests[0].header).to include({ + "authorization" => [ $sdk_key ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ] }) - ) - } - it "converts the proxy option" do - faraday = Faraday.new - requestor.instance_variable_set(:@client, faraday) - allow(faraday).to receive(:get) do |*args, &block| - req = double(Faraday::Request, :headers => {}, :options => Faraday::RequestOptions.new) - block.call(req) - expect(args).to eq ['http://ld.com/sdk/latest-all'] - expect(req.options.proxy).to eq nil - double(body: '{"foo": "bar"}', status: 200, headers: {}) - end - requestor.request_all_data() + end end end end From b250437df78c99c6c0774e72a5f75ca06c5adf4e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 18:21:32 -0800 Subject: [PATCH 084/135] reduce intermittent HTTP errors on stub server by not reusing ports --- spec/http_util.rb | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/spec/http_util.rb b/spec/http_util.rb index 
434cafc8..764f8e48 100644 --- a/spec/http_util.rb +++ b/spec/http_util.rb @@ -1,4 +1,3 @@ -require "spec_helper" require "webrick" require "webrick/httpproxy" require "webrick/https" @@ -6,8 +5,10 @@ class StubHTTPServer attr_reader :requests + @@next_port = 50000 + def initialize - @port = 50000 + @port = StubHTTPServer.next_port begin base_opts = { BindAddress: '127.0.0.1', @@ -18,12 +19,18 @@ def initialize } @server = create_server(@port, base_opts) rescue Errno::EADDRINUSE - @port += 1 + @port = StubHTTPServer.next_port retry end @requests = [] end + def self.next_port + p = @@next_port + @@next_port = (p + 1 < 60000) ? p + 1 : 50000 + p + end + def create_server(port, base_opts) WEBrick::HTTPServer.new(base_opts) end From 5b4b8555073bcc8c684e0317c830aee9c8fd543f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 18:43:46 -0800 Subject: [PATCH 085/135] fix charset handling --- lib/ldclient-rb/requestor.rb | 30 ++++++++++++++++++++++++++---- spec/requestor_spec.rb | 23 +++++++++++++++++++++++ 2 files changed, 49 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 739ea277..94683bcb 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -39,6 +39,12 @@ def request_all_data() make_request("/sdk/latest-all") end + def stop + @client.shutdown + end + + private + def make_request(path) uri = URI(@config.base_uri + path) req = Net::HTTP::Get.new(uri) @@ -63,17 +69,33 @@ def make_request(path) if status < 200 || status >= 300 raise UnexpectedResponseError.new(status) end - body = res.body + body = fix_encoding(res.body, res["content-type"]) etag = res["etag"] @cache.write(uri, CacheEntry.new(etag, body)) if !etag.nil? end JSON.parse(body, symbolize_names: true) end - def stop - @client.shutdown + def fix_encoding(body, content_type) + return body if content_type.nil? + media_type, charset = parse_content_type(content_type) + return body if charset.nil? + body.force_encoding(Encoding::find(charset)).encode(Encoding::UTF_8) end - private :make_request + def parse_content_type(value) + return [nil, nil] if value.nil? 
|| value == '' + parts = value.split(/; */) + return [value, nil] if parts.count < 2 + charset = nil + parts.each do |part| + fields = part.split('=') + if fields.count >= 2 && fields[0] == 'charset' + charset = fields[1] + break + end + end + return [parts[0], charset] + end end end diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 3d4a666f..3cc20991 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -125,6 +125,29 @@ def with_requestor(base_uri) end end + it "uses UTF-8 encoding by default" do + content = '{"flags": {"flagkey": {"key": "flagkey", "variations": ["blue", "grėeń"]}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json") + with_requestor(server.base_uri.to_s) do |requestor| + data = requestor.request_all_data + expect(data).to eq(JSON.parse(content, symbolize_names: true)) + end + end + end + + it "detects other encodings from Content-Type" do + content = '{"flags": {"flagkey": {"key": "flagkey", "variations": ["proszę", "dziękuję"]}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content.encode(Encoding::ISO_8859_2), + "text/plain; charset=ISO-8859-2") + with_requestor(server.base_uri.to_s) do |requestor| + data = requestor.request_all_data + expect(data).to eq(JSON.parse(content, symbolize_names: true)) + end + end + end + it "throws exception for error status" do with_server do |server| with_requestor(server.base_uri.to_s) do |requestor| From 7a7c273764921f9f33ed3fd3953b1d4a2aacb8cc Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 18:59:32 -0800 Subject: [PATCH 086/135] add test for proxy support, remove obsolete property --- lib/ldclient-rb/config.rb | 15 --------------- lib/ldclient-rb/requestor.rb | 5 +---- spec/requestor_spec.rb | 18 ++++++++++++++++++ 3 files changed, 19 insertions(+), 19 deletions(-) diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index 64ad7378..c14f59c8 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -53,7 +53,6 @@ def initialize(opts = {}) @use_ldd = opts.has_key?(:use_ldd) ? opts[:use_ldd] : Config.default_use_ldd @offline = opts.has_key?(:offline) ? opts[:offline] : Config.default_offline @poll_interval = opts.has_key?(:poll_interval) && opts[:poll_interval] > Config.default_poll_interval ? opts[:poll_interval] : Config.default_poll_interval - @proxy = opts[:proxy] || Config.default_proxy @all_attributes_private = opts[:all_attributes_private] || false @private_attribute_names = opts[:private_attribute_names] || [] @send_events = opts.has_key?(:send_events) ? opts[:send_events] : Config.default_send_events @@ -184,12 +183,6 @@ def offline? # attr_reader :feature_store - # - # The proxy configuration string. - # @return [String] - # - attr_reader :proxy - # # True if all user attributes (other than the key) should be considered private. This means # that the attribute values will not be sent to LaunchDarkly in analytics events and will not @@ -336,14 +329,6 @@ def self.default_connect_timeout 2 end - # - # The default value for {#proxy}. - # @return [String] nil - # - def self.default_proxy - nil - end - # # The default value for {#logger}. 
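Editor's note: the charset handling added to the requestor in the commit just above works roughly as sketched below: pull the charset out of the Content-Type header, then re-encode the body to UTF-8 before JSON parsing. The header and body values here are invented test data, not SDK code.

```ruby
body = '{"flags": {}}'.encode(Encoding::ISO_8859_2)
content_type = "text/plain; charset=ISO-8859-2"

charset = nil
content_type.split(/; */).drop(1).each do |part|
  key, value = part.split("=")
  charset = value if key == "charset"
end

utf8_body = charset.nil? ? body : body.force_encoding(Encoding.find(charset)).encode(Encoding::UTF_8)
puts utf8_body.encoding   # => UTF-8
```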
# @return [::Logger] the Rails logger if in Rails, or a default [::Logger] at WARN level otherwise diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 94683bcb..5f48d7ff 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -7,6 +7,7 @@ module LaunchDarkly class UnexpectedResponseError < StandardError def initialize(status) @status = status + super("HTTP error #{status}") end def status @@ -54,10 +55,6 @@ def make_request(path) if !cached.nil? req["If-None-Match"] = cached.etag end - # if @config.proxy - # req.options.proxy = Faraday::ProxyOptions.from @config.proxy - # end - res = @client.request(uri, req) status = res.code.to_i @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{res.to_hash}\n\tbody: #{res.body}" } diff --git a/spec/requestor_spec.rb b/spec/requestor_spec.rb index 3cc20991..eb6c2b62 100644 --- a/spec/requestor_spec.rb +++ b/spec/requestor_spec.rb @@ -158,6 +158,24 @@ def with_requestor(base_uri) end end end + + it "can use a proxy server" do + content = '{"flags": {"flagkey": {"key": "flagkey"}}}' + with_server do |server| + server.setup_ok_response("/sdk/latest-all", content, "application/json", { "etag" => "x" }) + with_server(StubProxyServer.new) do |proxy| + begin + ENV["http_proxy"] = proxy.base_uri.to_s + with_requestor(server.base_uri.to_s) do |requestor| + data = requestor.request_all_data + expect(data).to eq(JSON.parse(content, symbolize_names: true)) + end + ensure + ENV["http_proxy"] = nil + end + end + end + end end describe "request_flag" do From 1502e61d05406ee51e2007e9a429bdbd67126f64 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 19:16:49 -0800 Subject: [PATCH 087/135] rm duplicate test code --- spec/sse_client/sse_client_spec.rb | 2 +- spec/sse_client/sse_shared.rb | 82 -------------------------- spec/sse_client/streaming_http_spec.rb | 2 +- 3 files changed, 2 insertions(+), 84 deletions(-) delete mode 100644 spec/sse_client/sse_shared.rb diff --git a/spec/sse_client/sse_client_spec.rb b/spec/sse_client/sse_client_spec.rb index 54f1f5c7..71e96112 100644 --- a/spec/sse_client/sse_client_spec.rb +++ b/spec/sse_client/sse_client_spec.rb @@ -1,6 +1,6 @@ require "spec_helper" require "socketry" -require "sse_client/sse_shared" +require "http_util" # # End-to-end tests of SSEClient against a real server diff --git a/spec/sse_client/sse_shared.rb b/spec/sse_client/sse_shared.rb deleted file mode 100644 index 3ecabb57..00000000 --- a/spec/sse_client/sse_shared.rb +++ /dev/null @@ -1,82 +0,0 @@ -require "spec_helper" -require "webrick" -require "webrick/httpproxy" -require "webrick/https" - -class StubHTTPServer - def initialize - @port = 50000 - begin - @server = create_server(@port) - rescue Errno::EADDRINUSE - @port += 1 - retry - end - end - - def create_server(port) - WEBrick::HTTPServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new - ) - end - - def start - Thread.new { @server.start } - end - - def stop - @server.shutdown - end - - def base_uri - URI("http://127.0.0.1:#{@port}") - end - - def setup_response(uri_path, &action) - @server.mount_proc(uri_path, action) - end -end - -class StubProxyServer < StubHTTPServer - attr_reader :request_count - attr_accessor :connect_status - - def initialize - super - @request_count = 0 - end - - def create_server(port) - WEBrick::HTTPProxyServer.new( - BindAddress: '127.0.0.1', - Port: port, - AccessLog: [], - Logger: NullLogger.new, - 
ProxyContentHandler: proc do |req,res| - if !@connect_status.nil? - res.status = @connect_status - end - @request_count += 1 - end - ) - end -end - -class NullLogger - def method_missing(*) - self - end -end - -def with_server(server = nil) - server = StubHTTPServer.new if server.nil? - begin - server.start - yield server - ensure - server.stop - end -end diff --git a/spec/sse_client/streaming_http_spec.rb b/spec/sse_client/streaming_http_spec.rb index 7dfac9bd..136a727a 100644 --- a/spec/sse_client/streaming_http_spec.rb +++ b/spec/sse_client/streaming_http_spec.rb @@ -1,6 +1,6 @@ require "spec_helper" require "socketry" -require "sse_client/sse_shared" +require "http_util" # # End-to-end tests of HTTP requests against a real server From 485784240ffe4c747b1e60eb93ee70dbaa8b0055 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sun, 6 Jan 2019 19:58:28 -0800 Subject: [PATCH 088/135] change event sending to use Net::HTTP; completely remove Faraday --- ldclient-rb.gemspec | 2 - lib/ldclient-rb/cache_store.rb | 6 +-- lib/ldclient-rb/config.rb | 7 ++-- lib/ldclient-rb/events.rb | 44 +++++++++++--------- spec/events_spec.rb | 73 +++++++++++++++++++++++++--------- spec/http_util.rb | 6 +++ 6 files changed, 92 insertions(+), 46 deletions(-) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 9d541c18..15c20739 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -33,8 +33,6 @@ Gem::Specification.new do |spec| spec.add_development_dependency "listen", "~> 3.0" # see file_data_source.rb spec.add_runtime_dependency "json", [">= 1.8", "< 3"] - spec.add_runtime_dependency "faraday", [">= 0.9", "< 2"] - spec.add_runtime_dependency "faraday-http-cache", [">= 1.3.0", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "net-http-persistent", "~> 2.9" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" diff --git a/lib/ldclient-rb/cache_store.rb b/lib/ldclient-rb/cache_store.rb index 8451bb5f..b91b363d 100644 --- a/lib/ldclient-rb/cache_store.rb +++ b/lib/ldclient-rb/cache_store.rb @@ -2,11 +2,9 @@ module LaunchDarkly # - # A thread-safe in-memory store suitable for use with the Faraday caching HTTP client. Uses the - # concurrent-ruby gem's Map as the underlying cache. + # A thread-safe in-memory store that uses the same semantics that Faraday would expect, although we + # no longer use Faraday. This is used by Requestor, when we are not in a Rails environment. # - # @see https://github.com/plataformatec/faraday-http-cache - # @see https://github.com/ruby-concurrency # @private # class ThreadSafeMemoryStore diff --git a/lib/ldclient-rb/config.rb b/lib/ldclient-rb/config.rb index c14f59c8..e5217f45 100644 --- a/lib/ldclient-rb/config.rb +++ b/lib/ldclient-rb/config.rb @@ -152,9 +152,10 @@ def offline? attr_reader :capacity # - # A store for HTTP caching. This must support the semantics used by the - # [`faraday-http-cache`](https://github.com/plataformatec/faraday-http-cache) gem. Defaults - # to the Rails cache in a Rails environment, or a thread-safe in-memory store otherwise. + # A store for HTTP caching (used only in polling mode). This must support the semantics used by + # the [`faraday-http-cache`](https://github.com/plataformatec/faraday-http-cache) gem, although + # the SDK no longer uses Faraday. Defaults to the Rails cache in a Rails environment, or a + # thread-safe in-memory store otherwise. 
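Editor's note: to illustrate the cache-store contract described above, the object supplied as `cache_store` only needs `read`, `write`, and `delete`, which is how the rewritten requestor uses it for ETag caching. A minimal sketch, assuming the default in-memory store; the URL key and value are invented:

```ruby
store = LaunchDarkly::ThreadSafeMemoryStore.new

store.write("https://example.com/sdk/latest-all", "etag-and-body")  # cache an entry
store.read("https://example.com/sdk/latest-all")                    # => "etag-and-body"
store.delete("https://example.com/sdk/latest-all")                  # drop it
```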
# @return [Object] # attr_reader :cache_store diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index cbae5ac5..02885904 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -1,9 +1,9 @@ require "concurrent" require "concurrent/atomics" require "concurrent/executors" +require "net/http/persistent" require "thread" require "time" -require "faraday" module LaunchDarkly MAX_FLUSH_WORKERS = 5 @@ -115,7 +115,12 @@ class EventDispatcher def initialize(queue, sdk_key, config, client) @sdk_key = sdk_key @config = config - @client = client ? client : Faraday.new + + @client = client ? client : Net::HTTP::Persistent.new do |c| + c.open_timeout = @config.connect_timeout + c.read_timeout = @config.read_timeout + end + @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) @formatter = EventOutputFormatter.new(config) @disabled = Concurrent::AtomicBoolean.new(false) @@ -162,7 +167,7 @@ def main_loop(queue, buffer, flush_workers) def do_shutdown(flush_workers) flush_workers.shutdown flush_workers.wait_for_termination - # There seems to be no such thing as "close" in Faraday: https://github.com/lostisland/faraday/issues/241 + @client.shutdown end def synchronize_for_testing(flush_workers) @@ -246,16 +251,17 @@ def trigger_flush(buffer, flush_workers) end def handle_response(res) - if res.status >= 400 - message = Util.http_error_message(res.status, "event delivery", "some events were dropped") + status = res.code.to_i + if status >= 400 + message = Util.http_error_message(status, "event delivery", "some events were dropped") @config.logger.error { "[LDClient] #{message}" } - if !Util.http_error_recoverable?(res.status) + if !Util.http_error_recoverable?(status) @disabled.value = true end else - if !res.headers.nil? && res.headers.has_key?("Date") + if !res["date"].nil? begin - res_time = (Time.httpdate(res.headers["Date"]).to_f * 1000).to_i + res_time = (Time.httpdate(res["date"]).to_f * 1000).to_i @last_known_past_time.value = res_time rescue ArgumentError end @@ -317,21 +323,21 @@ def run(sdk_key, config, client, payload, formatter) end begin config.logger.debug { "[LDClient] sending #{events_out.length} events: #{body}" } - res = client.post (config.events_uri + "/bulk") do |req| - req.headers["Authorization"] = sdk_key - req.headers["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION - req.headers["Content-Type"] = "application/json" - req.headers["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s - req.body = body - req.options.timeout = config.read_timeout - req.options.open_timeout = config.connect_timeout - end + uri = URI(config.events_uri + "/bulk") + req = Net::HTTP::Post.new(uri) + req.content_type = "application/json" + req.body = body + req["Authorization"] = sdk_key + req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION + req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s + res = client.request(uri, req) rescue StandardError => exn config.logger.warn { "[LDClient] Error flushing events: #{exn.inspect}." 
} next end - if res.status < 200 || res.status >= 300 - if Util.http_error_recoverable?(res.status) + status = res.code.to_i + if status < 200 || status >= 300 + if Util.http_error_recoverable?(status) next end end diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 56bd14a2..86cc67b6 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -1,5 +1,5 @@ +require "http_util" require "spec_helper" -require "faraday" require "time" describe LaunchDarkly::EventProcessor do @@ -348,7 +348,7 @@ @ep.flush @ep.wait_until_inactive - expect(hc.get_request.headers["Authorization"]).to eq "sdk_key" + expect(hc.get_request["authorization"]).to eq "sdk_key" end def verify_unrecoverable_http_error(status) @@ -414,7 +414,7 @@ def verify_recoverable_http_error(status) e = { kind: "identify", user: user } @ep.add_event(e) - hc.set_exception(Faraday::Error::ConnectionFailed.new("fail")) + hc.set_exception(IOError.new("deliberate error")) @ep.flush @ep.wait_until_inactive @@ -423,6 +423,46 @@ def verify_recoverable_http_error(status) expect(hc.get_request).to be_nil # no 3rd request end + it "makes actual HTTP request with correct headers" do + e = { kind: "identify", key: user[:key], user: user } + with_server do |server| + server.setup_ok_response("/bulk", "") + + @ep = subject.new("sdk_key", LaunchDarkly::Config.new(events_uri: server.base_uri.to_s)) + @ep.add_event(e) + @ep.flush + + req = server.await_request + expect(req.header).to include({ + "authorization" => [ "sdk_key" ], + "content-type" => [ "application/json" ], + "user-agent" => [ "RubyClient/" + LaunchDarkly::VERSION ], + "x-launchdarkly-event-schema" => [ "3" ] + }) + end + end + + it "can use a proxy server" do + e = { kind: "identify", key: user[:key], user: user } + with_server do |server| + server.setup_ok_response("/bulk", "") + + with_server(StubProxyServer.new) do |proxy| + begin + ENV["http_proxy"] = proxy.base_uri.to_s + @ep = subject.new("sdk_key", LaunchDarkly::Config.new(events_uri: server.base_uri.to_s)) + @ep.add_event(e) + @ep.flush + + req = server.await_request + expect(req["content-type"]).to eq("application/json") + ensure + ENV["http_proxy"] = nil + end + end + end + end + def index_event(e, user) { kind: "index", @@ -496,38 +536,35 @@ def reset @status = 200 end - def post(uri) - req = Faraday::Request.create("POST") - req.headers = {} - req.options = Faraday::RequestOptions.new - yield req + def request(uri, req) @requests.push(req) if @exception raise @exception else - resp = Faraday::Response.new headers = {} if @server_time headers["Date"] = @server_time.httpdate end - resp.finish({ - status: @status ? @status : 200, - response_headers: headers - }) - resp + FakeResponse.new(@status ? 
@status : 200, headers) end end def get_request @requests.shift end + + def shutdown + end end class FakeResponse - def initialize(status) - @status = status - end + include Net::HTTPHeader - attr_reader :status + attr_reader :code + + def initialize(status, headers) + @code = status.to_s + initialize_http_header(headers) + end end end diff --git a/spec/http_util.rb b/spec/http_util.rb index 764f8e48..e43e2ded 100644 --- a/spec/http_util.rb +++ b/spec/http_util.rb @@ -23,6 +23,7 @@ def initialize retry end @requests = [] + @requests_queue = Queue.new end def self.next_port @@ -62,6 +63,11 @@ def setup_ok_response(uri_path, body, content_type=nil, headers={}) def record_request(req, res) @requests.push(req) + @requests_queue << req + end + + def await_request + @requests_queue.pop end end From d658715b420ee029d85b442f643785a759aa4d5c Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 8 Jan 2019 20:42:57 -0800 Subject: [PATCH 089/135] implement dependency ordering for feature store data --- lib/ldclient-rb/impl/store_client_wrapper.rb | 47 ++++++++++++++ lib/ldclient-rb/impl/store_data_set_sorter.rb | 57 +++++++++++++++++ lib/ldclient-rb/in_memory_store.rb | 13 +++- .../integrations/util/store_wrapper.rb | 5 ++ lib/ldclient-rb/interfaces.rb | 5 ++ lib/ldclient-rb/ldclient.rb | 14 +++- spec/ldclient_spec.rb | 64 +++++++++++++++++++ 7 files changed, 200 insertions(+), 5 deletions(-) create mode 100644 lib/ldclient-rb/impl/store_client_wrapper.rb create mode 100644 lib/ldclient-rb/impl/store_data_set_sorter.rb diff --git a/lib/ldclient-rb/impl/store_client_wrapper.rb b/lib/ldclient-rb/impl/store_client_wrapper.rb new file mode 100644 index 00000000..f0948251 --- /dev/null +++ b/lib/ldclient-rb/impl/store_client_wrapper.rb @@ -0,0 +1,47 @@ +require "ldclient-rb/interfaces" +require "ldclient-rb/impl/store_data_set_sorter" + +module LaunchDarkly + module Impl + # + # Provides additional behavior that the client requires before or after feature store operations. + # Currently this just means sorting the data set for init(). In the future we may also use this + # to provide an update listener capability. + # + class FeatureStoreClientWrapper + include Interfaces::FeatureStore + + def initialize(store) + @store = store + end + + def init(all_data) + @store.init(FeatureStoreDataSetSorter.sort_all_collections(all_data)) + end + + def get(kind, key) + @store.get(kind, key) + end + + def all(kind) + @store.all(kind) + end + + def upsert(kind, item) + @store.upsert(kind, item) + end + + def delete(kind, key, version) + @store.delete(kind, key, version) + end + + def initialized? + @store.initialized? + end + + def stop + @store.stop + end + end + end +end diff --git a/lib/ldclient-rb/impl/store_data_set_sorter.rb b/lib/ldclient-rb/impl/store_data_set_sorter.rb new file mode 100644 index 00000000..4f3635cd --- /dev/null +++ b/lib/ldclient-rb/impl/store_data_set_sorter.rb @@ -0,0 +1,57 @@ + +module LaunchDarkly + module Impl + # + # Implements a dependency graph ordering for data to be stored in a feature store. We must use this + # on every data set that will be passed to the feature store's init() method. + # + class FeatureStoreDataSetSorter + # + # Returns a copy of the input hash that has the following guarantees: the iteration order of the outer + # hash will be in ascending order by the VersionDataKind's :priority property (if any), and for each + # data kind that has a :get_dependency_keys function, the inner hash will have an iteration order + # where B is before A if A has a dependency on B. 
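Editor's note: a hypothetical walk-through of the ordering guarantee described above, using the same flag and segment shapes exercised by the tests later in this commit (keys invented):

```ruby
all_data = {
  LaunchDarkly::FEATURES => {
    a: { key: "a", prerequisites: [ { key: "b" } ] },
    b: { key: "b" }
  },
  LaunchDarkly::SEGMENTS => {
    s: { key: "s" }
  }
}

sorted = LaunchDarkly::Impl::FeatureStoreDataSetSorter.sort_all_collections(all_data)

sorted.keys.first                     # => LaunchDarkly::SEGMENTS (lower :priority comes first)
sorted[LaunchDarkly::FEATURES].keys   # => [:b, :a] -- the prerequisite "b" is inserted before "a"
```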
+ # + # This implementation relies on the fact that hashes in Ruby have an iteration order that is the same + # as the insertion order. Also, due to the way we deserialize JSON received from LaunchDarkly, the + # keys in the inner hash will always be symbols. + # + def self.sort_all_collections(all_data) + outer_hash = {} + kinds = all_data.keys.sort_by { |k| + k[:priority].nil? ? k[:namespace].length : k[:priority] # arbitrary order if priority is unknown + } + kinds.each do |kind| + items = all_data[kind] + outer_hash[kind] = self.sort_collection(kind, items) + end + outer_hash + end + + def self.sort_collection(kind, input) + dependency_fn = kind[:get_dependency_keys] + return input if dependency_fn.nil? || input.empty? + remaining_items = input.clone + items_out = {} + while !remaining_items.empty? + # pick a random item that hasn't been updated yet + remaining_items.each do |key, item| + self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) + break + end + end + items_out + end + + def self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) + item_key = item[:key].to_sym + remaining_items.delete(item_key) # we won't need to visit this item again + dependency_fn.call(item).each do |dep_key| + dep_item = remaining_items[dep_key.to_sym] + self.add_with_dependencies_first(dep_item, dependency_fn, remaining_items, items_out) if !dep_item.nil? + end + items_out[item_key] = item + end + end + end +end diff --git a/lib/ldclient-rb/in_memory_store.rb b/lib/ldclient-rb/in_memory_store.rb index 4814c85d..c959f399 100644 --- a/lib/ldclient-rb/in_memory_store.rb +++ b/lib/ldclient-rb/in_memory_store.rb @@ -6,12 +6,21 @@ module LaunchDarkly # we add another storable data type in the future, as long as it follows the same pattern # (having "key", "version", and "deleted" properties), we only need to add a corresponding # constant here and the existing store should be able to handle it. + # + # The :priority and :get_dependency_keys properties are used by FeatureStoreDataSetSorter + # to ensure data consistency during non-atomic updates. + + # @private FEATURES = { - namespace: "features" + namespace: "features", + priority: 1, # that is, features should be stored after segments + get_dependency_keys: lambda { |flag| (flag[:prerequisites] || []).map { |p| p[:key] } } }.freeze + # @private SEGMENTS = { - namespace: "segments" + namespace: "segments", + priority: 0 }.freeze # diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index 46a648c1..eef22d5e 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -151,6 +151,11 @@ module FeatureStoreCore # Initializes the store. This is the same as {LaunchDarkly::Interfaces::FeatureStore#init}, # but the wrapper will take care of updating the cache if caching is enabled. # + # If possible, the store should update the entire data set atomically. If that is not possible, + # it should iterate through the outer hash and then the inner hash using the existing iteration + # order of those hashes (the SDK will ensure that the items were inserted into the hashes in + # the correct order), storing each item, and then delete any leftover items at the very end. 
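Editor's note: as a sketch only (not SDK code), the non-atomic init strategy described above could look like the following in a custom store; `existing_keys`, `write_item`, and `remove_item` are hypothetical helpers that a concrete store would provide.

```ruby
def init_internal(all_data)
  stale = existing_keys                 # hypothetical: keys currently in the store
  all_data.each do |kind, items|        # outer hash, in the order the SDK provides
    items.each do |key, item|           # inner hash: prerequisites come before dependents
      write_item(kind, item)            # hypothetical single-item write
      stale.delete([kind[:namespace], key.to_s])
    end
  end
  # anything left over was not in the new data set; remove it last
  stale.each { |namespace, key| remove_item(namespace, key) }
end
```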
+ # # @param all_data [Hash] a hash where each key is one of the data kind objects, and each # value is in turn a hash of string keys to entities # @return [void] diff --git a/lib/ldclient-rb/interfaces.rb b/lib/ldclient-rb/interfaces.rb index 912472b5..b6920fb5 100644 --- a/lib/ldclient-rb/interfaces.rb +++ b/lib/ldclient-rb/interfaces.rb @@ -33,6 +33,11 @@ module FeatureStore # date-- there is no need to perform individual version comparisons between the existing # objects and the supplied features. # + # If possible, the store should update the entire data set atomically. If that is not possible, + # it should iterate through the outer hash and then the inner hash using the existing iteration + # order of those hashes (the SDK will ensure that the items were inserted into the hashes in + # the correct order), storing each item, and then delete any leftover items at the very end. + # # @param all_data [Hash] a hash where each key is one of the data kind objects, and each # value is in turn a hash of string keys to entities # @return [void] diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 868c65bd..d9a09c65 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/store_client_wrapper" require "concurrent/atomics" require "digest/sha1" require "logger" @@ -23,8 +24,15 @@ class LDClient # @return [LDClient] The LaunchDarkly client instance def initialize(sdk_key, config = Config.default, wait_for_sec = 5) @sdk_key = sdk_key - @config = config - @store = config.feature_store + + # We need to wrap the feature store object with a FeatureStoreClientWrapper in order to add + # some necessary logic around updates. Unfortunately, we have code elsewhere that accesses + # the feature store through the Config object, so we need to make a new Config that uses + # the wrapped store. + @store = Impl::FeatureStoreClientWrapper.new(config.feature_store) + updated_config = config.clone + updated_config.instance_variable_set(:@feature_store, @store) + @config = updated_config if @config.offline? || !@config.send_events @event_processor = NullEventProcessor.new @@ -39,7 +47,7 @@ def initialize(sdk_key, config = Config.default, wait_for_sec = 5) data_source_or_factory = @config.data_source || self.method(:create_default_data_source) if data_source_or_factory.respond_to? :call - @data_source = data_source_or_factory.call(sdk_key, config) + @data_source = data_source_or_factory.call(sdk_key, @config) else @data_source = data_source_or_factory end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index b3a9592c..453f4b53 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -375,4 +375,68 @@ def event_processor expect(ep).not_to be_a(LaunchDarkly::NullEventProcessor) end end + + describe "feature store data ordering" do + let(:dependency_ordering_test_data) { + { + LaunchDarkly::FEATURES => { + a: { key: "a", prerequisites: [ { key: "b" }, { key: "c" } ] }, + b: { key: "b", prerequisites: [ { key: "c" }, { key: "e" } ] }, + c: { key: "c" }, + d: { key: "d" }, + e: { key: "e" }, + f: { key: "f" } + }, + LaunchDarkly::SEGMENTS => { + o: { key: "o" } + } + } + } + + class FakeFeatureStore + attr_reader :received_data + + def init(all_data) + @received_data = all_data + end + end + + class FakeUpdateProcessor + def initialize(store, data) + @store = store + @data = data + end + + def start + @store.init(@data) + ev = Concurrent::Event.new + ev.set + ev + end + + def stop + end + + def initialized? 
+ true + end + end + + it "passes data set to feature store in correct order on init" do + store = FakeFeatureStore.new + data_source_factory = lambda { |sdk_key, config| FakeUpdateProcessor.new(config.feature_store, + dependency_ordering_test_data) } + config = LaunchDarkly::Config.new(send_events: false, feature_store: store, data_source: data_source_factory) + client = subject.new("secret", config) + + data = store.received_data + expect(data).not_to be_nil + expect(data.count).to eq(2) + + puts(data) + + # Segments should always come first + expect(data.keys[0]).to be(LaunchDarkly::SEGMENTS) + end + end end \ No newline at end of file From ed302ad79bd63c2197eadff8c2e9ce410eed54c1 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 8 Jan 2019 22:17:51 -0800 Subject: [PATCH 090/135] fix incomplete test --- spec/ldclient_spec.rb | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 453f4b53..fca81ab0 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -432,11 +432,26 @@ def initialized? data = store.received_data expect(data).not_to be_nil expect(data.count).to eq(2) - - puts(data) # Segments should always come first expect(data.keys[0]).to be(LaunchDarkly::SEGMENTS) + expect(data.values[0].count).to eq(dependency_ordering_test_data[LaunchDarkly::SEGMENTS].count) + + # Features should be ordered so that a flag always appears after its prerequisites, if any + expect(data.keys[1]).to be(LaunchDarkly::FEATURES) + flags_map = data.values[1] + flags_list = flags_map.values + expect(flags_list.count).to eq(dependency_ordering_test_data[LaunchDarkly::FEATURES].count) + flags_list.each_with_index do |item, item_index| + (item[:prerequisites] || []).each do |prereq| + prereq = flags_map[prereq[:key].to_sym] + prereq_index = flags_list.index(prereq) + if prereq_index > item_index + all_keys = (flags_list.map { |f| f[:key] }).join(", ") + raise "#{item[:key]} depends on #{prereq[:key]}, but #{item[:key]} was listed first; keys in order are [#{all_keys}]" + end + end + end end end end \ No newline at end of file From 8436be4f1e6ad7f1ff48208969fb4d6e8e73ff61 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 12:00:51 -0800 Subject: [PATCH 091/135] use Hash.first --- lib/ldclient-rb/impl/store_data_set_sorter.rb | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/lib/ldclient-rb/impl/store_data_set_sorter.rb b/lib/ldclient-rb/impl/store_data_set_sorter.rb index 4f3635cd..4454fe75 100644 --- a/lib/ldclient-rb/impl/store_data_set_sorter.rb +++ b/lib/ldclient-rb/impl/store_data_set_sorter.rb @@ -35,10 +35,8 @@ def self.sort_collection(kind, input) items_out = {} while !remaining_items.empty? 
# pick a random item that hasn't been updated yet - remaining_items.each do |key, item| - self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) - break - end + key, item = remaining_items.first + self.add_with_dependencies_first(item, dependency_fn, remaining_items, items_out) end items_out end From e84fbe7ab74894cc4ba600b05d043cc949eae21b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 12:58:47 -0800 Subject: [PATCH 092/135] add test for Unicode in feature store serialization --- spec/feature_store_spec_base.rb | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index 0e0f1ca9..3580a67f 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -109,4 +109,14 @@ def new_version_plus(f, deltaVersion, attrs = {}) store.delete(LaunchDarkly::FEATURES, key0, feature0[:version] - 1) expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 end + + it "stores Unicode data correctly" do + flag = { + key: "tęst-feåtūre-flæg😺", + version: 1, + deleted: false + } + store.upsert(LaunchDarkly::FEATURES, flag) + expect(store.get(LaunchDarkly::FEATURES, flag[:key])).to eq flag + end end From 89209237faa8f29b2b063839a38c491a7bafda40 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 13:38:43 -0800 Subject: [PATCH 093/135] fill in the rest of the Consul implementation --- Gemfile.lock | 6 +- .../impl/integrations/consul_impl.rb | 84 ++++++++++++------- 2 files changed, 57 insertions(+), 33 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 72158223..2e96a86a 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -6,6 +6,7 @@ PATH faraday (>= 0.9, < 2) faraday-http-cache (>= 1.3.0, < 3) json (>= 1.8, < 3) + ld-eventsource (~> 1.0) net-http-persistent (>= 2.9, < 4.0) semantic (~> 1.6) @@ -28,6 +29,9 @@ GEM concurrent-ruby (1.1.4) connection_pool (2.2.1) diff-lcs (1.3) + diplomat (2.0.2) + faraday (~> 0.9) + json docile (1.1.5) faraday (0.15.4) multipart-post (>= 1.2, < 3) @@ -36,7 +40,6 @@ GEM ffi (1.9.25) ffi (1.9.25-java) hitimes (1.3.0) - hitimes (1.3.0-java) http_tools (0.4.5) jmespath (1.4.0) json (1.8.6) @@ -92,6 +95,7 @@ DEPENDENCIES bundler (~> 1.7) codeclimate-test-reporter (~> 0) connection_pool (>= 2.1.2) + diplomat (>= 2.0.2) ldclient-rb! listen (~> 3.0) rake (~> 10.0) diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 48d308c2..5044f33c 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -22,15 +22,15 @@ def initialize(opts) @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' @logger = opts[:logger] || Config.default_logger - @client = Diplomat::Kv.new(configuration: opts[:consul_config]) - + Diplomat.configuration = opts[:consul_config] if !opts[:consul_config].nil? @logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") end def init_internal(all_data) # Start by reading the existing keys; we will later delete any of these that weren't in all_data. 
- unused_old_keys = set() - unused_old_keys.merge(@client.get(@prefix, keys: true, recurse: true)) + unused_old_keys = Set.new + keys = Diplomat::Kv.get(@prefix, { keys: true, recurse: true }, :return) + unused_old_keys.merge(keys) if keys != "" ops = [] num_items = 0 @@ -47,12 +47,12 @@ def init_internal(all_data) end # Now delete any previously existing items whose keys were not in the current data - unused_old_keys.each do |tuple| + unused_old_keys.each do |key| ops.push({ 'KV' => { 'Verb' => 'delete', 'Key' => key } }) end # Now set the special key that we check in initialized_internal? - ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => key, 'Value' => '' } }) + ops.push({ 'KV' => { 'Verb' => 'set', 'Key' => inited_key, 'Value' => '' } }) ConsulUtil.batch_operations(ops) @@ -60,56 +60,76 @@ def init_internal(all_data) end def get_internal(kind, key) - - resp = get_item_by_keys(namespace_for_kind(kind), key) - unmarshal_item(resp.item) + value = Diplomat::Kv.get(item_key(kind, key), {}, :return) # :return means "don't throw an error if not found" + (value.nil? || value == "") ? nil : JSON.parse(value, symbolize_names: true) end def get_all_internal(kind) items_out = {} - + results = Diplomat::Kv.get(kind_key(kind), { recurse: true }, :return) + (results == "" ? [] : results).each do |result| + value = result[:value] + if !value.nil? + item = JSON.parse(value, symbolize_names: true) + items_out[item[:key].to_sym] = item + end + end items_out end def upsert_internal(kind, new_item) - + key = item_key(kind, new_item[:key]) + json = new_item.to_json + + # We will potentially keep retrying indefinitely until someone's write succeeds + while true + old_value = Diplomat::Kv.get(key, { decode_values: true }, :return) + if old_value.nil? || old_value == "" + mod_index = 0 + else + puts("old_value = #{old_value}") + old_item = JSON.parse(old_value[0]["Value"], symbolize_names: true) + # Check whether the item is stale. If so, don't do the update (and return the existing item to + # FeatureStoreWrapper so it can be cached) + if old_item[:version] >= new_item[:version] + return old_item + end + mod_index = old_value[0]["ModifyIndex"] + end + + # Otherwise, try to write. We will do a compare-and-set operation, so the write will only succeed if + # the key's ModifyIndex is still equal to the previous value. If the previous ModifyIndex was zero, + # it means the key did not previously exist and the write will only succeed if it still doesn't exist. + success = Diplomat::Kv.put(key, json, cas: mod_index) + return new_item if success + + # If we failed, retry the whole shebang + @logger.debug { "Concurrent modification detected, retrying" } + end end def initialized_internal? - + value = Diplomat::Kv.get(inited_key, {}, :return) + !value.nil? && value != "" end def stop - # There's no way to close the Consul client + # There's no Consul client instance to dispose of end private def item_key(kind, key) - kind_key(kind) + '/' + key + kind_key(kind) + key.to_s end def kind_key(kind) - @prefix + kind[:namespace] + @prefix + kind[:namespace] + '/' end def inited_key @prefix + '$inited' end - - def marshal_item(kind, item) - make_keys_hash(namespace_for_kind(kind), item[:key]).merge({ - VERSION_ATTRIBUTE => item[:version], - ITEM_JSON_ATTRIBUTE => item.to_json - }) - end - - def unmarshal_item(item) - return nil if item.nil? || item.length == 0 - json_attr = item[ITEM_JSON_ATTRIBUTE] - raise RuntimeError.new("DynamoDB map did not contain expected item string") if json_attr.nil? 
- JSON.parse(json_attr, symbolize_names: true) - end end class ConsulUtil @@ -117,10 +137,10 @@ class ConsulUtil # Submits as many transactions as necessary to submit all of the given operations. # The ops array is consumed. # - def self.batch_write_requests(ops) - batch_size = 64 # Consul can only do this many at a time + def self.batch_operations(ops) + batch_size = 64 # Consul can only do this many at a time while true - chunk = requests.shift(batch_size) + chunk = ops.shift(batch_size) break if chunk.empty? Diplomat::Kv.txn(chunk) end From 21c79fe007ca47b2ff6b5fcc1bf1068a4baa1517 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 13:47:20 -0800 Subject: [PATCH 094/135] minor doc fixes --- README.md | 8 +++++--- lib/ldclient-rb/integrations/consul.rb | 2 +- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 43819554..a194443f 100644 --- a/README.md +++ b/README.md @@ -82,7 +82,8 @@ Note that this gem will automatically switch to using the Rails logger it is det HTTPS proxy ------------- +----------- + The Ruby SDK uses Faraday and Socketry to handle its network traffic. Both of these provide built-in support for the use of an HTTPS proxy. If the HTTPS_PROXY environment variable is present then the SDK will proxy all network requests through the URL provided. How to set the HTTPS_PROXY environment variable on Mac/Linux systems: @@ -124,10 +125,11 @@ end Database integrations --------------------- -Feature flag data can be kept in a persistent store using Redis or DynamoDB. These adapters are implemented in the `LaunchDarkly::Integrations::Redis` and `LaunchDarkly::Integrations::DynamoDB` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [source code](https://github.com/launchdarkly/ruby-client-private/tree/master/lib/ldclient-rb/integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. +Feature flag data can be kept in a persistent store using Redis, DynamoDB, or Consul. These adapters are implemented in the `LaunchDarkly::Integrations::Redis`, `LaunchDarkly::Integrations::DynamoDB`, and `LaunchDarkly::Integrations::Consul` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [API documentation](https://www.rubydoc.info/gems/ldclient-rb/LaunchDarkly/Integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. Using flag data from a file --------------------------- + For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.rb`](https://github.com/launchdarkly/ruby-client/blob/master/lib/ldclient-rb/file_data_source.rb) for more details. Learn more @@ -146,7 +148,7 @@ Contributing See [Contributing](https://github.com/launchdarkly/ruby-client/blob/master/CONTRIBUTING.md) About LaunchDarkly ------------ +------------------ * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. 
With LaunchDarkly, you can: * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 7450d3b9..8f5d1f09 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -21,7 +21,7 @@ def self.default_prefix # # @param opts [Hash] the configuration options # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default - # Consul client configuration + # Consul client configuration (note that this is exactly the same as modifying `Diplomat.configuration`) # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching From 7ea110fd0662cc835fb3fd007591806765763740 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 13:53:03 -0800 Subject: [PATCH 095/135] rm debugging --- lib/ldclient-rb/impl/integrations/consul_impl.rb | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 5044f33c..82a4fec9 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -87,7 +87,6 @@ def upsert_internal(kind, new_item) if old_value.nil? || old_value == "" mod_index = 0 else - puts("old_value = #{old_value}") old_item = JSON.parse(old_value[0]["Value"], symbolize_names: true) # Check whether the item is stale. If so, don't do the update (and return the existing item to # FeatureStoreWrapper so it can be cached) From 513618735575da8e177ccb871d835f9adaadefce Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 14:47:12 -0800 Subject: [PATCH 096/135] fix initialized check --- lib/ldclient-rb/impl/integrations/consul_impl.rb | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 82a4fec9..4082378f 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -108,8 +108,14 @@ def upsert_internal(kind, new_item) end def initialized_internal? - value = Diplomat::Kv.get(inited_key, {}, :return) - !value.nil? && value != "" + # Unfortunately we need to use exceptions here, instead of the :return parameter, because with + # :return there's no way to distinguish between a missing value and an empty string. 
+ begin + Diplomat::Kv.get(inited_key, {}) + true + rescue Diplomat::KeyNotFound + false + end end def stop From 46ebc1f4a826a1dc13a8140c8b6cd3eceb09db6f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 15 Jan 2019 15:38:19 -0800 Subject: [PATCH 097/135] improve feature store tests + minor Redis fixes --- .../impl/integrations/redis_impl.rb | 17 +- spec/feature_store_spec_base.rb | 232 ++++++++++++------ spec/in_memory_feature_store_spec.rb | 2 +- .../integrations/consul_feature_store_spec.rb | 16 +- .../dynamodb_feature_store_spec.rb | 33 ++- spec/redis_feature_store_spec.rb | 17 +- 6 files changed, 223 insertions(+), 94 deletions(-) diff --git a/lib/ldclient-rb/impl/integrations/redis_impl.rb b/lib/ldclient-rb/impl/integrations/redis_impl.rb index 497b01c5..107340f8 100644 --- a/lib/ldclient-rb/impl/integrations/redis_impl.rb +++ b/lib/ldclient-rb/impl/integrations/redis_impl.rb @@ -48,14 +48,15 @@ def initialize(opts) def init_internal(all_data) count = 0 with_connection do |redis| - all_data.each do |kind, items| - redis.multi do |multi| + redis.multi do |multi| + all_data.each do |kind, items| multi.del(items_key(kind)) count = count + items.count - items.each { |key, item| - redis.hset(items_key(kind), key, item.to_json) - } + items.each do |key, item| + multi.hset(items_key(kind), key, item.to_json) + end end + multi.set(inited_key, inited_key) end end @logger.info { "RedisFeatureStore: initialized with #{count} items" } @@ -112,7 +113,7 @@ def upsert_internal(kind, new_item) end def initialized_internal? - with_connection { |redis| redis.exists(items_key(FEATURES)) } + with_connection { |redis| redis.exists(inited_key) } end def stop @@ -135,6 +136,10 @@ def cache_key(kind, key) kind[:namespace] + ":" + key.to_s end + def inited_key + @prefix + ":$inited" + end + def with_connection @pool.with { |redis| yield(redis) } end diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index 0e0f1ca9..8689577f 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -1,112 +1,200 @@ require "spec_helper" -shared_examples "feature_store" do |create_store_method| +shared_examples "feature_store" do |create_store_method, clear_data_method| - let(:feature0) { + # Rather than testing with feature flag or segment data, we'll use this fake data kind + # to make it clear that feature stores need to be able to handle arbitrary data. 
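
The "arbitrary data kind" idea can be shown outside the spec as well: a kind is just a hash of metadata, and a store only cares that each item has `key`, `version`, and `deleted` properties. A rough sketch using the in-memory store follows; the `widgets` kind and its items are made up for illustration.

```ruby
require "ldclient-rb"

widgets = { namespace: "widgets" }   # a custom data kind, not something the SDK itself uses
store = LaunchDarkly::InMemoryFeatureStore.new

store.init(widgets => { w1: { key: "w1", version: 1, deleted: false } })
store.get(widgets, "w1")   # => { key: "w1", version: 1, deleted: false }
store.upsert(widgets, { key: "w2", version: 1, deleted: false })
store.all(widgets)         # => both items, keyed by symbol
```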
+ let(:things_kind) { { namespace: "things" } } + + let(:key1) { "thing1" } + let(:thing1) { { - key: "test-feature-flag", + key: key1, + name: "Thing 1", version: 11, - on: true, - prerequisites: [], - salt: "718ea30a918a4eba8734b57ab1a93227", - sel: "fe1244e5378c4f99976c9634e33667c6", - targets: [ - { - values: [ "alice" ], - variation: 0 - }, - { - values: [ "bob" ], - variation: 1 - } - ], - rules: [], - fallthrough: { variation: 0 }, - offVariation: 1, - variations: [ true, false ], deleted: false } } - let(:key0) { feature0[:key].to_sym } + let(:unused_key) { "no" } + + let(:create_store) { create_store_method } # just to avoid a scope issue + let(:clear_data) { clear_data_method } + + def with_store(opts = {}) + s = create_store.call(opts) + begin + yield s + ensure + s.stop + end + end - let!(:store) do - s = create_store_method.call() - s.init(LaunchDarkly::FEATURES => { key0 => feature0 }) - s + def with_inited_store(things) + things_hash = {} + things.each { |thing| things_hash[thing[:key].to_sym] = thing } + + with_store do |s| + s.init({ things_kind => things_hash }) + yield s + end end def new_version_plus(f, deltaVersion, attrs = {}) - f1 = f.clone - f1[:version] = f[:version] + deltaVersion - f1.update(attrs) - f1 + f.clone.merge({ version: f[:version] + deltaVersion }).merge(attrs) end + before(:each) do + clear_data.call if !clear_data.nil? + end - it "is initialized" do - expect(store.initialized?).to eq true + # This block of tests is only run if the clear_data method is defined, meaning that this is a persistent store + # that operates on a database that can be shared with other store instances (as opposed to the in-memory store, + # which has its own private storage). + if !clear_data_method.nil? + it "is not initialized by default" do + with_store do |store| + expect(store.initialized?).to eq false + end + end + + it "can detect if another instance has initialized the store" do + with_store do |store1| + store1.init({}) + with_store do |store2| + expect(store2.initialized?).to eq true + end + end + end + + it "can read data written by another instance" do + with_store do |store1| + store1.init({ things_kind => { key1.to_sym => thing1 } }) + with_store do |store2| + expect(store2.get(things_kind, key1)).to eq thing1 + end + end + end + + it "is independent from other stores with different prefixes" do + with_store({ prefix: "a" }) do |store_a| + store_a.init({ things_kind => { key1.to_sym => thing1 } }) + with_store({ prefix: "b" }) do |store_b| + store_b.init({ things_kind => {} }) + end + with_store({ prefix: "b" }) do |store_b1| # this ensures we're not just reading cached data + expect(store_b1.get(things_kind, key1)).to be_nil + expect(store_a.get(things_kind, key1)).to eq thing1 + end + end + end end - it "can get existing feature with symbol key" do - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + it "is initialized after calling init" do + with_inited_store([]) do |store| + expect(store.initialized?).to eq true + end end - it "can get existing feature with string key" do - expect(store.get(LaunchDarkly::FEATURES, key0.to_s)).to eq feature0 + it "can get existing item with symbol key" do + with_inited_store([ thing1 ]) do |store| + expect(store.get(things_kind, key1.to_sym)).to eq thing1 + end end - it "gets nil for nonexisting feature" do - expect(store.get(LaunchDarkly::FEATURES, 'nope')).to be_nil + it "can get existing item with string key" do + with_inited_store([ thing1 ]) do |store| + expect(store.get(things_kind, key1.to_s)).to eq 
thing1 + end end - it "can get all features" do - feature1 = feature0.clone - feature1[:key] = "test-feature-flag1" - feature1[:version] = 5 - feature1[:on] = false - store.upsert(LaunchDarkly::FEATURES, feature1) - expect(store.all(LaunchDarkly::FEATURES)).to eq ({ key0 => feature0, :"test-feature-flag1" => feature1 }) + it "gets nil for nonexisting item" do + with_inited_store([ thing1 ]) do |store| + expect(store.get(things_kind, unused_key)).to be_nil + end + end + + it "returns nil for deleted item" do + deleted_thing = thing1.clone.merge({ deleted: true }) + with_inited_store([ deleted_thing ]) do |store| + expect(store.get(things_kind, key1)).to be_nil + end + end + + it "can get all items" do + key2 = "thing2" + thing2 = { + key: key2, + name: "Thing 2", + version: 22, + deleted: false + } + with_inited_store([ thing1, thing2 ]) do |store| + expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1, key2.to_sym => thing2 }) + end + end + + it "filters out deleted items when getting all" do + key2 = "thing2" + thing2 = { + key: key2, + name: "Thing 2", + version: 22, + deleted: true + } + with_inited_store([ thing1, thing2 ]) do |store| + expect(store.all(things_kind)).to eq ({ key1.to_sym => thing1 }) + end end - it "can add new feature" do - feature1 = feature0.clone - feature1[:key] = "test-feature-flag1" - feature1[:version] = 5 - feature1[:on] = false - store.upsert(LaunchDarkly::FEATURES, feature1) - expect(store.get(LaunchDarkly::FEATURES, :"test-feature-flag1")).to eq feature1 + it "can add new item" do + with_inited_store([]) do |store| + store.upsert(things_kind, thing1) + expect(store.get(things_kind, key1)).to eq thing1 + end end - it "can update feature with newer version" do - f1 = new_version_plus(feature0, 1, { on: !feature0[:on] }) - store.upsert(LaunchDarkly::FEATURES, f1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq f1 + it "can update item with newer version" do + with_inited_store([ thing1 ]) do |store| + thing1_mod = new_version_plus(thing1, 1, { name: thing1[:name] + ' updated' }) + store.upsert(things_kind, thing1_mod) + expect(store.get(things_kind, key1)).to eq thing1_mod + end end - it "cannot update feature with same version" do - f1 = new_version_plus(feature0, 0, { on: !feature0[:on] }) - store.upsert(LaunchDarkly::FEATURES, f1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + it "cannot update item with same version" do + with_inited_store([ thing1 ]) do |store| + thing1_mod = thing1.clone.merge({ name: thing1[:name] + ' updated' }) + store.upsert(things_kind, thing1_mod) + expect(store.get(things_kind, key1)).to eq thing1 + end end it "cannot update feature with older version" do - f1 = new_version_plus(feature0, -1, { on: !feature0[:on] }) - store.upsert(LaunchDarkly::FEATURES, f1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + with_inited_store([ thing1 ]) do |store| + thing1_mod = new_version_plus(thing1, -1, { name: thing1[:name] + ' updated' }) + store.upsert(things_kind, thing1_mod) + expect(store.get(things_kind, key1)).to eq thing1 + end end - it "can delete feature with newer version" do - store.delete(LaunchDarkly::FEATURES, key0, feature0[:version] + 1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to be_nil + it "can delete item with newer version" do + with_inited_store([ thing1 ]) do |store| + store.delete(things_kind, key1, thing1[:version] + 1) + expect(store.get(things_kind, key1)).to be_nil + end end - it "cannot delete feature with same version" do - 
store.delete(LaunchDarkly::FEATURES, key0, feature0[:version]) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + it "cannot delete item with same version" do + with_inited_store([ thing1 ]) do |store| + store.delete(things_kind, key1, thing1[:version]) + expect(store.get(things_kind, key1)).to eq thing1 + end end - it "cannot delete feature with older version" do - store.delete(LaunchDarkly::FEATURES, key0, feature0[:version] - 1) - expect(store.get(LaunchDarkly::FEATURES, key0)).to eq feature0 + it "cannot delete item with older version" do + with_inited_store([ thing1 ]) do |store| + store.delete(things_kind, key1, thing1[:version] - 1) + expect(store.get(things_kind, key1)).to eq thing1 + end end end diff --git a/spec/in_memory_feature_store_spec.rb b/spec/in_memory_feature_store_spec.rb index a1673bbc..c403fc69 100644 --- a/spec/in_memory_feature_store_spec.rb +++ b/spec/in_memory_feature_store_spec.rb @@ -1,7 +1,7 @@ require "feature_store_spec_base" require "spec_helper" -def create_in_memory_store() +def create_in_memory_store(opts = {}) LaunchDarkly::InMemoryFeatureStore.new end diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb index 1aa6f919..13767686 100644 --- a/spec/integrations/consul_feature_store_spec.rb +++ b/spec/integrations/consul_feature_store_spec.rb @@ -1,5 +1,5 @@ require "feature_store_spec_base" -#require "diplomat" +require "diplomat" require "spec_helper" @@ -7,19 +7,23 @@ $null_log = ::Logger.new($stdout) $null_log.level = ::Logger::FATAL -$base_opts = { +$consul_base_opts = { prefix: $my_prefix, logger: $null_log } def create_consul_store(opts = {}) LaunchDarkly::Integrations::Consul::new_feature_store( - opts.merge($base_opts).merge({ expiration: 60 })) + $consul_base_opts.merge(opts).merge({ expiration: 60 })) end def create_consul_store_uncached(opts = {}) LaunchDarkly::Integrations::Consul::new_feature_store( - opts.merge($base_opts).merge({ expiration: 0 })) + $consul_base_opts.merge(opts).merge({ expiration: 0 })) +end + +def clear_all_data + Diplomat::Kv.delete($my_prefix + '/', recurse: true) end @@ -28,10 +32,10 @@ def create_consul_store_uncached(opts = {}) # These tests will all fail if there isn't a local Consul instance running. 
context "with local cache" do - include_examples "feature_store", method(:create_consul_store) + include_examples "feature_store", method(:create_consul_store), method(:clear_all_data) end context "without local cache" do - include_examples "feature_store", method(:create_consul_store_uncached) + include_examples "feature_store", method(:create_consul_store_uncached), method(:clear_all_data) end end diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 38104fb3..4add3d53 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -15,7 +15,7 @@ endpoint: $endpoint } -$base_opts = { +$ddb_base_opts = { dynamodb_opts: $dynamodb_opts, prefix: $my_prefix, logger: $null_log @@ -23,12 +23,35 @@ def create_dynamodb_store(opts = {}) LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, - opts.merge($base_opts).merge({ expiration: 60 })) + $ddb_base_opts.merge(opts).merge({ expiration: 60 })) end def create_dynamodb_store_uncached(opts = {}) LaunchDarkly::Integrations::DynamoDB::new_feature_store($table_name, - opts.merge($base_opts).merge({ expiration: 0 })) + $ddb_base_opts.merge(opts).merge({ expiration: 0 })) +end + +def clear_all_data + client = create_test_client + items_to_delete = [] + req = { + table_name: $table_name, + projection_expression: '#namespace, #key', + expression_attribute_names: { + '#namespace' => 'namespace', + '#key' => 'key' + } + } + while true + resp = client.scan(req) + items_to_delete = items_to_delete + resp.items + break if resp.last_evaluated_key.nil? || resp.last_evaluated_key.length == 0 + req.exclusive_start_key = resp.last_evaluated_key + end + requests = items_to_delete.map do |item| + { delete_request: { key: item } } + end + LaunchDarkly::Impl::Integrations::DynamoDB::DynamoDBUtil.batch_write_requests(client, $table_name, requests) end def create_table_if_necessary @@ -72,10 +95,10 @@ def create_test_client create_table_if_necessary context "with local cache" do - include_examples "feature_store", method(:create_dynamodb_store) + include_examples "feature_store", method(:create_dynamodb_store), method(:clear_all_data) end context "without local cache" do - include_examples "feature_store", method(:create_dynamodb_store_uncached) + include_examples "feature_store", method(:create_dynamodb_store_uncached), method(:clear_all_data) end end diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index d5ccfb65..3da25f4f 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -9,13 +9,22 @@ $null_log = ::Logger.new($stdout) $null_log.level = ::Logger::FATAL +$base_opts = { + prefix: $my_prefix, + logger: $null_log +} def create_redis_store(opts = {}) - LaunchDarkly::RedisFeatureStore.new(opts.merge({ prefix: $my_prefix, logger: $null_log, expiration: 60 })) + LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 60 })) end def create_redis_store_uncached(opts = {}) - LaunchDarkly::RedisFeatureStore.new(opts.merge({ prefix: $my_prefix, logger: $null_log, expiration: 0 })) + LaunchDarkly::RedisFeatureStore.new($base_opts.merge(opts).merge({ expiration: 0 })) +end + +def clear_all_data + client = Redis.new + client.flushdb end @@ -25,11 +34,11 @@ def create_redis_store_uncached(opts = {}) # These tests will all fail if there isn't a Redis instance running on the default port. 
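
These specs exercise the store directly; in an application the same store object is normally handed to the client through the configuration. A rough sketch, assuming a Redis instance on its default local port; the prefix and expiration values are arbitrary.

```ruby
require "ldclient-rb"

store = LaunchDarkly::Integrations::Redis.new_feature_store(
  prefix: "my-app",   # namespace for the keys this store writes
  expiration: 30      # seconds to keep items in the local cache; 0 disables caching
)

config = LaunchDarkly::Config.new(feature_store: store)
client = LaunchDarkly::LDClient.new("your_sdk_key", config)
```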
context "real Redis with local cache" do - include_examples "feature_store", method(:create_redis_store) + include_examples "feature_store", method(:create_redis_store), method(:clear_all_data) end context "real Redis without local cache" do - include_examples "feature_store", method(:create_redis_store_uncached) + include_examples "feature_store", method(:create_redis_store_uncached), method(:clear_all_data) end def make_concurrent_modifier_test_hook(other_client, flag, start_version, end_version) From 97ee2daaa6e6cedb1e74adeab7785dd2759eb68e Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 16 Jan 2019 12:21:59 -0800 Subject: [PATCH 098/135] test fix: we can't use Unicode in flag keys anyway --- spec/feature_store_spec_base.rb | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index 3580a67f..d004cb54 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -112,7 +112,8 @@ def new_version_plus(f, deltaVersion, attrs = {}) it "stores Unicode data correctly" do flag = { - key: "tęst-feåtūre-flæg😺", + key: "my-fancy-flag", + name: "Tęst Feåtūre Flæg😺", version: 1, deleted: false } From 21a505e366505365458b98c1bd8fd8875183bf80 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 16 Jan 2019 13:56:58 -0800 Subject: [PATCH 099/135] test fix --- spec/feature_store_spec_base.rb | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/spec/feature_store_spec_base.rb b/spec/feature_store_spec_base.rb index b7d15cc5..2d06f0ff 100644 --- a/spec/feature_store_spec_base.rb +++ b/spec/feature_store_spec_base.rb @@ -205,7 +205,9 @@ def new_version_plus(f, deltaVersion, attrs = {}) version: 1, deleted: false } - store.upsert(LaunchDarkly::FEATURES, flag) - expect(store.get(LaunchDarkly::FEATURES, flag[:key])).to eq flag + with_inited_store([]) do |store| + store.upsert(LaunchDarkly::FEATURES, flag) + expect(store.get(LaunchDarkly::FEATURES, flag[:key])).to eq flag + end end end From 784eb07fcea16cf79e36def97ebf4967926b0f05 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 16 Jan 2019 18:37:31 -0800 Subject: [PATCH 100/135] misc prerelease fixes --- lib/ldclient-rb/impl/integrations/consul_impl.rb | 1 + lib/ldclient-rb/integrations/consul.rb | 1 + lib/ldclient-rb/stream.rb | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/impl/integrations/consul_impl.rb b/lib/ldclient-rb/impl/integrations/consul_impl.rb index 4082378f..10c16dbc 100644 --- a/lib/ldclient-rb/impl/integrations/consul_impl.rb +++ b/lib/ldclient-rb/impl/integrations/consul_impl.rb @@ -23,6 +23,7 @@ def initialize(opts) @prefix = (opts[:prefix] || LaunchDarkly::Integrations::Consul.default_prefix) + '/' @logger = opts[:logger] || Config.default_logger Diplomat.configuration = opts[:consul_config] if !opts[:consul_config].nil? + Diplomat.configuration.url = opts[:url] if !opts[:url].nil? 
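
The `:url` option added here is only a shortcut for setting `Diplomat.configuration.url`. A brief sketch of how a caller might use it, assuming a local Consul agent on its default address; the prefix is illustrative, and the store then plugs into `Config#feature_store` exactly like the Redis example earlier.

```ruby
require "ldclient-rb"   # the Consul integration also needs the "diplomat" gem

store = LaunchDarkly::Integrations::Consul.new_feature_store(
  url: "http://localhost:8500",   # equivalent to setting Diplomat.configuration.url
  prefix: "my-app"
)
```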
@logger.info("ConsulFeatureStore: using Consul host at #{Diplomat.configuration.url}") end diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 8f5d1f09..2d46d813 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -22,6 +22,7 @@ def self.default_prefix # @param opts [Hash] the configuration options # @option opts [Hash] :consul_config an instance of `Diplomat::Configuration` to replace the default # Consul client configuration (note that this is exactly the same as modifying `Diplomat.configuration`) + # @option opts [String] :url shortcut for setting the `url` property of the Consul client configuration # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index e4f1b3bd..094a37b2 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -58,7 +58,7 @@ def start conn.on_event { |event| process_message(event) } conn.on_error { |err| case err - when SSE::Errors::HTTPError + when SSE::Errors::HTTPStatusError status = err.status message = Util.http_error_message(status, "streaming connection", "will retry") @config.logger.error { "[LDClient] #{message}" } From e9b06c60c7e46d45487d823325e86804eb4c32fe Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 17 Jan 2019 17:15:54 -0800 Subject: [PATCH 101/135] fix doc comments --- lib/ldclient-rb/integrations/consul.rb | 2 +- lib/ldclient-rb/integrations/dynamodb.rb | 2 +- lib/ldclient-rb/integrations/redis.rb | 2 +- lib/ldclient-rb/integrations/util/store_wrapper.rb | 2 +- lib/ldclient-rb/redis_store.rb | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/integrations/consul.rb b/lib/ldclient-rb/integrations/consul.rb index 2d46d813..4f32d5fd 100644 --- a/lib/ldclient-rb/integrations/consul.rb +++ b/lib/ldclient-rb/integrations/consul.rb @@ -25,7 +25,7 @@ def self.default_prefix # @option opts [String] :url shortcut for setting the `url` property of the Consul client configuration # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` - # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # diff --git a/lib/ldclient-rb/integrations/dynamodb.rb b/lib/ldclient-rb/integrations/dynamodb.rb index ecd87fce..189e118f 100644 --- a/lib/ldclient-rb/integrations/dynamodb.rb +++ b/lib/ldclient-rb/integrations/dynamodb.rb @@ -34,7 +34,7 @@ module DynamoDB # @option opts [Object] :existing_client an already-constructed DynamoDB client for the feature store to use # @option opts [String] :prefix namespace prefix to add to all keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` - # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no 
local caching + # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object # diff --git a/lib/ldclient-rb/integrations/redis.rb b/lib/ldclient-rb/integrations/redis.rb index 34509181..7e447657 100644 --- a/lib/ldclient-rb/integrations/redis.rb +++ b/lib/ldclient-rb/integrations/redis.rb @@ -42,7 +42,7 @@ def self.default_prefix # @option opts [String] :prefix (default_prefix) namespace prefix to add to all hash keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :max_connections size of the Redis connection pool - # @option opts [Integer] :expiration_seconds (15) expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :expiration (15) expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # @option opts [Object] :pool custom connection pool, if desired # @return [LaunchDarkly::Interfaces::FeatureStore] a feature store object diff --git a/lib/ldclient-rb/integrations/util/store_wrapper.rb b/lib/ldclient-rb/integrations/util/store_wrapper.rb index eef22d5e..26318d67 100644 --- a/lib/ldclient-rb/integrations/util/store_wrapper.rb +++ b/lib/ldclient-rb/integrations/util/store_wrapper.rb @@ -23,7 +23,7 @@ class CachingStoreWrapper # # @param core [Object] an object that implements the {FeatureStoreCore} methods # @param opts [Hash] a hash that may include cache-related options; all others will be ignored - # @option opts [Float] :expiration_seconds (15) cache TTL; zero means no caching + # @option opts [Float] :expiration (15) cache TTL; zero means no caching # @option opts [Integer] :capacity (1000) maximum number of items in the cache # def initialize(core, opts) diff --git a/lib/ldclient-rb/redis_store.rb b/lib/ldclient-rb/redis_store.rb index 392f5d2e..48632411 100644 --- a/lib/ldclient-rb/redis_store.rb +++ b/lib/ldclient-rb/redis_store.rb @@ -32,7 +32,7 @@ class RedisFeatureStore # @option opts [String] :prefix namespace prefix to add to all hash keys used by LaunchDarkly # @option opts [Logger] :logger a `Logger` instance; defaults to `Config.default_logger` # @option opts [Integer] :max_connections size of the Redis connection pool - # @option opts [Integer] :expiration_seconds expiration time for the in-memory cache, in seconds; 0 for no local caching + # @option opts [Integer] :expiration expiration time for the in-memory cache, in seconds; 0 for no local caching # @option opts [Integer] :capacity maximum number of feature flags (or related objects) to cache locally # @option opts [Object] :pool custom connection pool, if desired # From 6389a2663bf7221ca0948261dadd2c00a72fc8df Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 11:38:23 -0800 Subject: [PATCH 102/135] add YARD config so our docs show up correctly everywhere --- .yardopts | 10 ++++++++++ scripts/gendocs.sh | 5 ++--- 2 files changed, 12 insertions(+), 3 deletions(-) create mode 100644 .yardopts diff --git a/.yardopts b/.yardopts new file mode 100644 index 00000000..559b7ab5 --- /dev/null +++ b/.yardopts @@ -0,0 +1,10 @@ +--no-private +--markup markdown +--markup-provider redcarpet +--embed-mixins +lib/*.rb +lib/**/*.rb +lib/**/**/*.rb +lib/**/**/**/*.rb +- +README.md 
diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh index 1e545955..96df177f 100755 --- a/scripts/gendocs.sh +++ b/scripts/gendocs.sh @@ -6,7 +6,6 @@ gem install --conservative yard gem install --conservative redcarpet # provides Markdown formatting -# yard doesn't seem to do recursive directories, even though Ruby's Dir.glob supposedly recurses for "**" -PATHS="lib/*.rb lib/**/*.rb lib/**/**/*.rb lib/**/**/**/*.rb" +rm -rf doc/* -yard doc --no-private --markup markdown --markup-provider redcarpet --embed-mixins $PATHS - README.md +yard doc From 3406a03430efbd839659aee23d334d48b126da03 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 18 Jan 2019 11:53:38 -0800 Subject: [PATCH 103/135] don't need markup-provider option --- .yardopts | 1 - 1 file changed, 1 deletion(-) diff --git a/.yardopts b/.yardopts index 559b7ab5..5388ac50 100644 --- a/.yardopts +++ b/.yardopts @@ -1,6 +1,5 @@ --no-private --markup markdown ---markup-provider redcarpet --embed-mixins lib/*.rb lib/**/*.rb From d38973acf1dbdda8da4ac529e472ec434a14742f Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 29 Jan 2019 10:57:43 -0800 Subject: [PATCH 104/135] rm obsolete proxy param --- lib/ldclient-rb/stream.rb | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/ldclient-rb/stream.rb b/lib/ldclient-rb/stream.rb index 094a37b2..ddb7f669 100644 --- a/lib/ldclient-rb/stream.rb +++ b/lib/ldclient-rb/stream.rb @@ -50,7 +50,6 @@ def start } opts = { headers: headers, - proxy: @config.proxy, read_timeout: READ_TIMEOUT_SECONDS, logger: @config.logger } From def22fc67e8b918cd7cef3006f0d896a7ed4bc68 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 30 Jan 2019 21:38:49 -0800 Subject: [PATCH 105/135] remove net-http-persistent --- Gemfile.lock | 12 +++--------- ldclient-rb.gemspec | 1 - lib/ldclient-rb/events.rb | 17 +++++++++++------ lib/ldclient-rb/polling.rb | 2 +- lib/ldclient-rb/requestor.rb | 15 +++++++++------ lib/ldclient-rb/util.rb | 10 ++++++++++ spec/events_spec.rb | 15 +++++++++++---- 7 files changed, 45 insertions(+), 27 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index 2e96a86a..21a65cc1 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,13 +1,10 @@ PATH remote: . 
specs: - ldclient-rb (5.4.3) + ldclient-rb (5.5.2) concurrent-ruby (~> 1.0) - faraday (>= 0.9, < 2) - faraday-http-cache (>= 1.3.0, < 3) json (>= 1.8, < 3) ld-eventsource (~> 1.0) - net-http-persistent (>= 2.9, < 4.0) semantic (~> 1.6) GEM @@ -35,11 +32,10 @@ GEM docile (1.1.5) faraday (0.15.4) multipart-post (>= 1.2, < 3) - faraday-http-cache (2.0.0) - faraday (~> 0.8) ffi (1.9.25) ffi (1.9.25-java) - hitimes (1.3.0) + hitimes (1.3.1) + hitimes (1.3.1-java) http_tools (0.4.5) jmespath (1.4.0) json (1.8.6) @@ -53,8 +49,6 @@ GEM rb-inotify (~> 0.9, >= 0.9.7) ruby_dep (~> 1.2) multipart-post (2.0.0) - net-http-persistent (3.0.0) - connection_pool (~> 2.2) rake (10.5.0) rb-fsevent (0.10.3) rb-inotify (0.9.10) diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 810987a4..9fb4daa0 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -35,7 +35,6 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" - spec.add_runtime_dependency "net-http-persistent", [">= 2.9", "< 4.0"] spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" spec.add_runtime_dependency "ld-eventsource", '~> 1.0' end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 02885904..72c82a90 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -1,7 +1,6 @@ require "concurrent" require "concurrent/atomics" require "concurrent/executors" -require "net/http/persistent" require "thread" require "time" @@ -116,9 +115,10 @@ def initialize(queue, sdk_key, config, client) @sdk_key = sdk_key @config = config - @client = client ? client : Net::HTTP::Persistent.new do |c| - c.open_timeout = @config.connect_timeout - c.read_timeout = @config.read_timeout + if client + @client = client + else + @client = Util.new_http_client(@config.events_uri, @config) end @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) @@ -167,7 +167,10 @@ def main_loop(queue, buffer, flush_workers) def do_shutdown(flush_workers) flush_workers.shutdown flush_workers.wait_for_termination - @client.shutdown + begin + @client.finish + rescue + end end def synchronize_for_testing(flush_workers) @@ -322,6 +325,7 @@ def run(sdk_key, config, client, payload, formatter) sleep(1) end begin + client.start if !client.started? config.logger.debug { "[LDClient] sending #{events_out.length} events: #{body}" } uri = URI(config.events_uri + "/bulk") req = Net::HTTP::Post.new(uri) @@ -330,7 +334,8 @@ def run(sdk_key, config, client, payload, formatter) req["Authorization"] = sdk_key req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s - res = client.request(uri, req) + req["Connection"] = "keep-alive" + res = client.request(req) rescue StandardError => exn config.logger.warn { "[LDClient] Error flushing events: #{exn.inspect}." } next diff --git a/lib/ldclient-rb/polling.rb b/lib/ldclient-rb/polling.rb index 17ff7c12..da0427dc 100644 --- a/lib/ldclient-rb/polling.rb +++ b/lib/ldclient-rb/polling.rb @@ -26,7 +26,7 @@ def start def stop if @stopped.make_true - if @worker && @worker.alive? + if @worker && @worker.alive? 
&& @worker != Thread.current @worker.run # causes the thread to wake up if it's currently in a sleep @worker.join end diff --git a/lib/ldclient-rb/requestor.rb b/lib/ldclient-rb/requestor.rb index 5f48d7ff..f7174787 100644 --- a/lib/ldclient-rb/requestor.rb +++ b/lib/ldclient-rb/requestor.rb @@ -1,6 +1,6 @@ require "concurrent/atomics" require "json" -require "net/http/persistent" +require "uri" module LaunchDarkly # @private @@ -22,9 +22,7 @@ class Requestor def initialize(sdk_key, config) @sdk_key = sdk_key @config = config - @client = Net::HTTP::Persistent.new - @client.open_timeout = @config.connect_timeout - @client.read_timeout = @config.read_timeout + @client = Util.new_http_client(@config.base_uri, @config) @cache = @config.cache_store end @@ -41,21 +39,26 @@ def request_all_data() end def stop - @client.shutdown + begin + @client.finish + rescue + end end private def make_request(path) + @client.start if !@client.started? uri = URI(@config.base_uri + path) req = Net::HTTP::Get.new(uri) req["Authorization"] = @sdk_key req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION + req["Connection"] = "keep-alive" cached = @cache.read(uri) if !cached.nil? req["If-None-Match"] = cached.etag end - res = @client.request(uri, req) + res = @client.request(req) status = res.code.to_i @config.logger.debug { "[LDClient] Got response from uri: #{uri}\n\tstatus code: #{status}\n\theaders: #{res.to_hash}\n\tbody: #{res.body}" } diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index e303e18a..03849957 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -1,7 +1,17 @@ +require "uri" module LaunchDarkly # @private module Util + def self.new_http_client(uri_s, config) + uri = URI(uri_s) + client = Net::HTTP.new(uri.hostname, uri.port) + client.use_ssl = true if uri.scheme == "https" + client.open_timeout = config.connect_timeout + client.read_timeout = config.read_timeout + client + end + def self.log_exception(logger, message, exc) logger.error { "[LDClient] #{message}: #{exc.inspect}" } logger.debug { "[LDClient] Exception trace: #{exc.backtrace}" } diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 86cc67b6..90b91ec9 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -536,7 +536,7 @@ def reset @status = 200 end - def request(uri, req) + def request(req) @requests.push(req) if @exception raise @exception @@ -549,11 +549,18 @@ def request(uri, req) end end - def get_request - @requests.shift + def start + end + + def started? 
+ false end - def shutdown + def finish + end + + def get_request + @requests.shift end end From f5ef9a4630df1444c6744bc33f0f07dbbdddb7cc Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 30 Jan 2019 22:14:48 -0800 Subject: [PATCH 106/135] fix concurrent-ruby usage that breaks on Windows --- lib/ldclient-rb/events.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 72c82a90..c45a9da2 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -124,7 +124,7 @@ def initialize(queue, sdk_key, config, client) @user_keys = SimpleLRUCacheSet.new(config.user_keys_capacity) @formatter = EventOutputFormatter.new(config) @disabled = Concurrent::AtomicBoolean.new(false) - @last_known_past_time = Concurrent::AtomicFixnum.new(0) + @last_known_past_time = Concurrent::AtomicReference.new(0) buffer = EventBuffer.new(config.capacity, config.logger) flush_workers = NonBlockingThreadPool.new(MAX_FLUSH_WORKERS) From 9b4d75b99549393f8e12d3f0a498870b1bf52b28 Mon Sep 17 00:00:00 2001 From: Harpo roeder Date: Mon, 4 Feb 2019 12:39:30 -0800 Subject: [PATCH 107/135] add pipeline and clean up with with rm_rf instead of rm --- azure-pipelines.yml | 14 ++++++++++++++ spec/file_data_source_spec.rb | 10 +++++----- 2 files changed, 19 insertions(+), 5 deletions(-) create mode 100644 azure-pipelines.yml diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 00000000..1b9cae48 --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,14 @@ +jobs: + - job: build + pool: + vmImage: 'vs2017-win2016' + steps: + - task: PowerShell@2 + inputs: + targetType: inline + script: | + ruby -v + gem install bundler -v 1.17.3 + bundle install + mkdir rspec + bundle exec rspec --exclude-pattern "spec/integrations/*,spec/redis_*" --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index 28a0c06f..c827222d 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -74,7 +74,7 @@ segments: seg1: key: seg1 - include: ["user1"] + include: ["user1"] EOF } @@ -87,7 +87,7 @@ end after do - FileUtils.remove_dir(@tmp_dir) + FileUtils.rm_rf(@tmp_dir) end def make_temp_file(content) @@ -198,10 +198,10 @@ def test_auto_reload(options) event = ds.start expect(event.set?).to eq(true) expect(@store.all(LaunchDarkly::SEGMENTS).keys).to eq([]) - + sleep(1) IO.write(file, all_properties_json) - + max_time = 10 ok = wait_for_condition(10) { @store.all(LaunchDarkly::SEGMENTS).keys == all_segment_keys } expect(ok).to eq(true), "Waited #{max_time}s after modifying file and it did not reload" @@ -243,7 +243,7 @@ def test_auto_reload(options) client.close end end - + def wait_for_condition(max_time) deadline = Time.now + max_time while Time.now < deadline From eb4ad9703f5c230375b75dd01c3756ce0b8afcb3 Mon Sep 17 00:00:00 2001 From: Harpo roeder Date: Mon, 4 Feb 2019 14:52:52 -0800 Subject: [PATCH 108/135] fix highlight blocks --- README.md | 58 +++++++++++++++++++++++++++---------------------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/README.md b/README.md index f4dc72b7..df406928 100644 --- a/README.md +++ b/README.md @@ -17,19 +17,19 @@ Quick setup 1. Install the Ruby SDK with `gem` - ```shell +```shell gem install ldclient-rb ``` 2. Require the LaunchDarkly client: - ```ruby +```ruby require 'ldclient-rb' ``` 3. 
Create a new LDClient with your SDK key: - ```ruby +```ruby client = LaunchDarkly::LDClient.new("your_sdk_key") ``` @@ -39,42 +39,42 @@ client = LaunchDarkly::LDClient.new("your_sdk_key") 2. Initialize the launchdarkly client in `config/initializers/launchdarkly.rb`: - ```ruby +```ruby Rails.configuration.ld_client = LaunchDarkly::LDClient.new("your_sdk_key") ``` 3. You may want to include a function in your ApplicationController - ```ruby - def launchdarkly_settings - if current_user.present? - { - key: current_user.id, - anonymous: false, - email: current_user.email, - custom: { groups: current_user.groups.pluck(:name) }, - # Any other fields you may have - # e.g. lastName: current_user.last_name, - } - else - if Rails::VERSION::MAJOR <= 3 - hash_key = request.session_options[:id] - else - hash_key = session.id - end - # session ids should be private to prevent session hijacking - hash_key = Digest::SHA256.base64digest hash_key - { - key: hash_key, - anonymous: true, - } - end +```ruby +def launchdarkly_settings + if current_user.present? + { + key: current_user.id, + anonymous: false, + email: current_user.email, + custom: { groups: current_user.groups.pluck(:name) }, + # Any other fields you may have + # e.g. lastName: current_user.last_name, + } + else + if Rails::VERSION::MAJOR <= 3 + hash_key = request.session_options[:id] + else + hash_key = session.id end + # session ids should be private to prevent session hijacking + hash_key = Digest::SHA256.base64digest hash_key + { + key: hash_key, + anonymous: true, + } + end +end ``` 4. In your controllers, access the client using - ```ruby +```ruby Rails.application.config.ld_client.variation('your.flag.key', launchdarkly_settings, false) ``` From 4aa6272748587ac362bbe098cb1233acce43148b Mon Sep 17 00:00:00 2001 From: hroederld <46500128+hroederld@users.noreply.github.com> Date: Tue, 5 Feb 2019 00:12:38 +0000 Subject: [PATCH 109/135] Hr/azure3 (#103) * Add Consul and Redis services to Windows. 
* Enable Consul and Redis testing --- azure-pipelines.yml | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 1b9cae48..40d39abe 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -4,11 +4,37 @@ jobs: vmImage: 'vs2017-win2016' steps: - task: PowerShell@2 + displayName: 'Setup Consul' inputs: targetType: inline + workingDirectory: $(System.DefaultWorkingDirectory) + script: | + iwr -outf consul.zip https://releases.hashicorp.com/consul/1.4.2/consul_1.4.2_windows_amd64.zip + mkdir consul + Expand-Archive -Path consul.zip -DestinationPath consul + cd consul + sc.exe create "Consul" binPath="$(System.DefaultWorkingDirectory)/consul/consul.exe agent -dev" + sc.exe start "Consul" + - task: PowerShell@2 + displayName: 'Setup Redis' + inputs: + targetType: inline + workingDirectory: $(System.DefaultWorkingDirectory) + script: | + iwr -outf redis.zip https://github.com/MicrosoftArchive/redis/releases/download/win-3.0.504/Redis-x64-3.0.504.zip + mkdir redis + Expand-Archive -Path redis.zip -DestinationPath redis + cd redis + ./redis-server --service-install + ./redis-server --service-start + - task: PowerShell@2 + displayName: 'Setup SDK and Test' + inputs: + targetType: inline + workingDirectory: $(System.DefaultWorkingDirectory) script: | ruby -v gem install bundler -v 1.17.3 bundle install mkdir rspec - bundle exec rspec --exclude-pattern "spec/integrations/*,spec/redis_*" --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec + bundle exec rspec --exclude-pattern "spec/integrations/dynamodb_feature_store_spec.rb," --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec From fd143d7150d908e1734faa3eccbb3390075a2d38 Mon Sep 17 00:00:00 2001 From: hroederld <46500128+hroederld@users.noreply.github.com> Date: Tue, 5 Feb 2019 10:40:07 -0800 Subject: [PATCH 110/135] add dynamo (#104) --- azure-pipelines.yml | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 40d39abe..3d3fd98a 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -3,6 +3,17 @@ jobs: pool: vmImage: 'vs2017-win2016' steps: + - task: PowerShell@2 + displayName: 'Setup Dynamo' + inputs: + targetType: inline + workingDirectory: $(System.DefaultWorkingDirectory) + script: | + iwr -outf dynamo.zip https://s3-us-west-2.amazonaws.com/dynamodb-local/dynamodb_local_latest.zip + mkdir dynamo + Expand-Archive -Path dynamo.zip -DestinationPath dynamo + cd dynamo + javaw -D"java.library.path=./DynamoDBLocal_lib" -jar DynamoDBLocal.jar - task: PowerShell@2 displayName: 'Setup Consul' inputs: @@ -37,4 +48,4 @@ jobs: gem install bundler -v 1.17.3 bundle install mkdir rspec - bundle exec rspec --exclude-pattern "spec/integrations/dynamodb_feature_store_spec.rb," --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec + bundle exec rspec --format progress --format RspecJunitFormatter -o ./rspec/rspec.xml spec From 6a20ff1c1946992210fd33e1a1f7e997e29e43f5 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 22 Feb 2019 11:10:22 -0800 Subject: [PATCH 111/135] add experimentation event overrides for rules and fallthrough --- lib/ldclient-rb/evaluation.rb | 25 +++------ lib/ldclient-rb/impl/event_factory.rb | 77 +++++++++++++++++++++++++++ lib/ldclient-rb/ldclient.rb | 44 ++++++--------- spec/evaluation_spec.rb | 74 +++++++++++++------------ spec/ldclient_spec.rb | 57 +++++++++++++++++++- 5 files changed, 193 insertions(+), 
84 deletions(-) create mode 100644 lib/ldclient-rb/impl/event_factory.rb diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index f873a6e3..14a7ea55 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -195,22 +195,22 @@ def error_result(errorKind, value = nil) # Evaluates a feature flag and returns an EvalResult. The result.value will be nil if the flag returns # the default value. Error conditions produce a result with an error reason, not an exception. - def evaluate(flag, user, store, logger) + def evaluate(flag, user, store, logger, event_factory) if user.nil? || user[:key].nil? return EvalResult.new(error_result('USER_NOT_SPECIFIED'), []) end events = [] - detail = eval_internal(flag, user, store, events, logger) + detail = eval_internal(flag, user, store, events, logger, event_factory) return EvalResult.new(detail, events) end - def eval_internal(flag, user, store, events, logger) + def eval_internal(flag, user, store, events, logger, event_factory) if !flag[:on] return get_off_value(flag, { kind: 'OFF' }, logger) end - prereq_failure_reason = check_prerequisites(flag, user, store, events, logger) + prereq_failure_reason = check_prerequisites(flag, user, store, events, logger, event_factory) if !prereq_failure_reason.nil? return get_off_value(flag, prereq_failure_reason, logger) end @@ -243,7 +243,7 @@ def eval_internal(flag, user, store, events, logger) return EvaluationDetail.new(nil, nil, { kind: 'FALLTHROUGH' }) end - def check_prerequisites(flag, user, store, events, logger) + def check_prerequisites(flag, user, store, events, logger, event_factory) (flag[:prerequisites] || []).each do |prerequisite| prereq_ok = true prereq_key = prerequisite[:key] @@ -254,25 +254,16 @@ def check_prerequisites(flag, user, store, events, logger) prereq_ok = false else begin - prereq_res = eval_internal(prereq_flag, user, store, events, logger) + prereq_res = eval_internal(prereq_flag, user, store, events, logger, event_factory) # Note that if the prerequisite flag is off, we don't consider it a match no matter what its # off variation was. But we still need to evaluate it in order to generate an event. if !prereq_flag[:on] || prereq_res.variation_index != prerequisite[:variation] prereq_ok = false end - event = { - kind: "feature", - key: prereq_key, - variation: prereq_res.variation_index, - value: prereq_res.value, - version: prereq_flag[:version], - prereqOf: flag[:key], - trackEvents: prereq_flag[:trackEvents], - debugEventsUntilDate: prereq_flag[:debugEventsUntilDate] - } + event = event_factory.new_eval_event(prereq_flag, user, prereq_res, nil, flag) events.push(event) rescue => exn - Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"{flag[:key]}\"", exn) + Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn) prereq_ok = false end end diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb new file mode 100644 index 00000000..6af4c5f8 --- /dev/null +++ b/lib/ldclient-rb/impl/event_factory.rb @@ -0,0 +1,77 @@ + +module LaunchDarkly + module Impl + # Event constructors are centralized here to avoid mistakes and repetitive logic. + # The LDClient owns two instances of EventFactory: one that always embeds evaluation reasons + # in the events (for when variation_detail is called) and one that doesn't. 
+ class EventFactory + def initialize(with_reasons) + @with_reasons = with_reasons + end + + def new_eval_event(flag, user, detail, default_value, prereq_of_flag = nil) + add_experiment_data = is_experiment(flag, detail.reason) + e = { + kind: 'feature', + key: flag[:key], + user: user, + variation: detail.variation_index, + value: detail.value, + default: default_value, + version: flag[:version] + } + # the following properties are handled separately so we don't waste bandwidth on unused keys + e[:trackEvents] = true if add_experiment_data || flag[:trackEvents] + e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] + e[:prereqOf] = prereq_of_flag[:key] if !prereq_of_flag.nil? + e[:reason] = detail.reason if add_experiment_data || @with_reasons + e + end + + def new_default_event(flag, user, default_value, reason) + add_experiment_data = is_experiment(flag, reason) + e = { + kind: 'feature', + key: flag[:key], + user: user, + value: default_value, + default: default_value, + version: flag[:version] + } + e[:trackEvents] = true if add_experiment_data || flag[:trackEvents] + e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] + e[:reason] = reason if add_experiment_data || @with_reasons + e + end + + def new_unknown_flag_event(key, user, default_value, reason) + e = { + kind: 'feature', + key: key, + user: user, + value: default_value, + default: default_value + } + e[:reason] = reason if @with_reasons + e + end + + private + + def is_experiment(flag, reason) + return false if !reason + case reason[:kind] + when 'RULE_MATCH' + index = reason[:ruleIndex] + if !index.nil? + rules = flag[:rules] || [] + return index >= 0 && index < rules.length && rules[index][:trackEvents] + end + when 'FALLTHROUGH' + return !!flag[:trackEventsFallthrough] + end + false + end + end + end +end diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index a5799700..0c113d0d 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -1,3 +1,4 @@ +require "ldclient-rb/impl/event_factory" require "ldclient-rb/impl/store_client_wrapper" require "concurrent/atomics" require "digest/sha1" @@ -13,6 +14,7 @@ module LaunchDarkly # class LDClient include Evaluation + include Impl # # Creates a new client instance that connects to LaunchDarkly. A custom # configuration parameter can also supplied to specify advanced options, @@ -32,6 +34,9 @@ class LDClient def initialize(sdk_key, config = Config.default, wait_for_sec = 5) @sdk_key = sdk_key + @event_factory_default = EventFactory.new(false) + @event_factory_with_reasons = EventFactory.new(true) + # We need to wrap the feature store object with a FeatureStoreClientWrapper in order to add # some necessary logic around updates. Unfortunately, we have code elsewhere that accesses # the feature store through the Config object, so we need to make a new Config that uses @@ -165,7 +170,7 @@ def initialized? 
# @return the variation to show the user, or the default value if there's an an error # def variation(key, user, default) - evaluate_internal(key, user, default, false).value + evaluate_internal(key, user, default, @event_factory_default).value end # @@ -192,7 +197,7 @@ def variation(key, user, default) # @return [EvaluationDetail] an object describing the result # def variation_detail(key, user, default) - evaluate_internal(key, user, default, true) + evaluate_internal(key, user, default, @event_factory_with_reasons) end # @@ -290,7 +295,7 @@ def all_flags_state(user, options={}) next end begin - result = evaluate(f, user, @store, @config.logger) + result = evaluate(f, user, @store, @config.logger, @event_factory_default) state.add_flag(f, result.detail.value, result.detail.variation_index, with_reasons ? result.detail.reason : nil, details_only_if_tracked) rescue => exn @@ -330,7 +335,7 @@ def create_default_data_source(sdk_key, config) end # @return [EvaluationDetail] - def evaluate_internal(key, user, default, include_reasons_in_events) + def evaluate_internal(key, user, default, event_factory) if @config.offline? return error_result('CLIENT_NOT_READY', default) end @@ -340,8 +345,9 @@ def evaluate_internal(key, user, default, include_reasons_in_events) @config.logger.warn { "[LDClient] Client has not finished initializing; using last known values from feature store" } else @config.logger.error { "[LDClient] Client has not finished initializing; feature store unavailable, returning default value" } - @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user) - return error_result('CLIENT_NOT_READY', default) + detail = error_result('CLIENT_NOT_READY', default) + @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) + return detail end end @@ -351,20 +357,19 @@ def evaluate_internal(key, user, default, include_reasons_in_events) if feature.nil? @config.logger.info { "[LDClient] Unknown feature flag \"#{key}\". Returning default value" } detail = error_result('FLAG_NOT_FOUND', default) - @event_processor.add_event(kind: "feature", key: key, value: default, default: default, user: user, - reason: include_reasons_in_events ? detail.reason : nil) + @event_processor.add_event(event_factory.new_unknown_flag_event(key, user, default, detail.reason)) return detail end unless user @config.logger.error { "[LDClient] Must specify user" } detail = error_result('USER_NOT_SPECIFIED', default) - @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) return detail end begin - res = evaluate(feature, user, @store, @config.logger) + res = evaluate(feature, user, @store, @config.logger, event_factory) if !res.events.nil? res.events.each do |event| @event_processor.add_event(event) @@ -374,12 +379,12 @@ def evaluate_internal(key, user, default, include_reasons_in_events) if detail.default_value? 
detail = EvaluationDetail.new(default, nil, detail.reason) end - @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + @event_processor.add_event(event_factory.new_eval_event(feature, user, detail, default)) return detail rescue => exn Util.log_exception(@config.logger, "Error evaluating feature flag \"#{key}\"", exn) detail = error_result('EXCEPTION', default) - @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + @event_processor.add_event(event_factory.new_default_event(feature, user, default, detail.reason)) return detail end end @@ -389,21 +394,6 @@ def sanitize_user(user) user[:key] = user[:key].to_s end end - - def make_feature_event(flag, user, detail, default, with_reasons) - { - kind: "feature", - key: flag[:key], - user: user, - variation: detail.variation_index, - value: detail.value, - default: default, - version: flag[:version], - trackEvents: flag[:trackEvents], - debugEventsUntilDate: flag[:debugEventsUntilDate], - reason: with_reasons ? detail.reason : nil - } - end end # diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 3af960c6..c8949b3a 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -7,6 +7,8 @@ let(:features) { LaunchDarkly::InMemoryFeatureStore.new } + let(:factory) { LaunchDarkly::Impl::EventFactory.new(false) } + let(:user) { { key: "userkey", @@ -36,7 +38,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'OFF' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -50,7 +52,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'OFF' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -66,7 +68,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -82,7 +84,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -99,7 +101,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'badfeature' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -127,10 +129,9 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: nil, value: nil, version: 2, prereqOf: 'feature0', - trackEvents: nil, debugEventsUntilDate: nil + kind: 'feature', 
key: 'feature1', user: user, value: nil, default: nil, variation: nil, version: 2, prereqOf: 'feature0' }] - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) end @@ -159,10 +160,9 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', - trackEvents: nil, debugEventsUntilDate: nil + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' }] - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) end @@ -189,10 +189,9 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 0, value: 'd', version: 2, prereqOf: 'feature0', - trackEvents: nil, debugEventsUntilDate: nil + kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', default: nil, version: 2, prereqOf: 'feature0' }] - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) end @@ -218,10 +217,9 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', - trackEvents: nil, debugEventsUntilDate: nil + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', default: nil, version: 2, prereqOf: 'feature0' }] - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq(events_should_be) end @@ -236,7 +234,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -251,7 +249,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -266,7 +264,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -281,7 +279,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) 
- result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -299,7 +297,7 @@ def boolean_flag_with_clauses(clauses) } user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new('c', 2, { kind: 'TARGET_MATCH' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -310,7 +308,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(true, 1, { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -321,7 +319,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -332,7 +330,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -343,7 +341,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -355,7 +353,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'userkey' } detail = LaunchDarkly::EvaluationDetail.new(nil, nil, { kind: 'ERROR', errorKind: 'MALFORMED_FLAG' }) - result = evaluate(flag, user, features, logger) + result = evaluate(flag, user, features, logger, factory) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end @@ -366,28 +364,28 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'] } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be true + expect(evaluate(flag, user, features, logger, factory).detail.value).to be true end it "can match custom attribute" do user = { key: 'x', name: 'Bob', custom: { legs: 4 } } clause = { attribute: 'legs', op: 'in', values: [4] } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be true + expect(evaluate(flag, user, features, logger, factory).detail.value).to be true end it "returns false for missing attribute" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'legs', op: 'in', values: [4] } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be false + expect(evaluate(flag, user, features, logger, factory).detail.value).to be false end it "returns false for unknown operator" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'unknown', values: [4] } flag = 
boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be false + expect(evaluate(flag, user, features, logger, factory).detail.value).to be false end it "does not stop evaluating rules after clause with unknown operator" do @@ -397,14 +395,14 @@ def boolean_flag_with_clauses(clauses) clause1 = { attribute: 'name', op: 'in', values: ['Bob'] } rule1 = { clauses: [ clause1 ], variation: 1 } flag = boolean_flag_with_rules([rule0, rule1]) - expect(evaluate(flag, user, features, logger).detail.value).to be true + expect(evaluate(flag, user, features, logger, factory).detail.value).to be true end it "can be negated" do user = { key: 'x', name: 'Bob' } clause = { attribute: 'name', op: 'in', values: ['Bob'], negate: true } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be false + expect(evaluate(flag, user, features, logger, factory).detail.value).to be false end it "retrieves segment from segment store for segmentMatch operator" do @@ -419,14 +417,14 @@ def boolean_flag_with_clauses(clauses) user = { key: 'userkey' } clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be true + expect(evaluate(flag, user, features, logger, factory).detail.value).to be true end it "falls through with no errors if referenced segment is not found" do user = { key: 'userkey' } clause = { attribute: '', op: 'segmentMatch', values: ['segkey'] } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be false + expect(evaluate(flag, user, features, logger, factory).detail.value).to be false end it "can be negated" do @@ -435,7 +433,7 @@ def boolean_flag_with_clauses(clauses) flag = boolean_flag_with_clauses([clause]) expect { clause[:negate] = true - }.to change {evaluate(flag, user, features, logger).detail.value}.from(true).to(false) + }.to change {evaluate(flag, user, features, logger, factory).detail.value}.from(true).to(false) end end @@ -538,7 +536,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x', custom: { foo: value1 } } clause = { attribute: 'foo', op: op, values: [value2] } flag = boolean_flag_with_clauses([clause]) - expect(evaluate(flag, user, features, logger).detail.value).to be shouldBe + expect(evaluate(flag, user, features, logger, factory).detail.value).to be shouldBe end end end @@ -629,7 +627,7 @@ def test_segment_match(segment) features.upsert(LaunchDarkly::SEGMENTS, segment) clause = make_segment_match_clause(segment) flag = boolean_flag_with_clauses([clause]) - evaluate(flag, user, features, logger).detail.value + evaluate(flag, user, features, logger, factory).detail.value end it 'explicitly includes user' do diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index fca81ab0..2916861e 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -91,7 +91,6 @@ def event_processor key: "key", version: 100, user: nil, - variation: nil, value: "default", default: "default", trackEvents: true, @@ -109,7 +108,6 @@ def event_processor key: "key", version: 100, user: bad_user, - variation: nil, value: "default", default: "default", trackEvents: true, @@ -117,6 +115,61 @@ def event_processor )) client.variation("key", bad_user, "default") end + + it "sets trackEvents and reason if trackEvents is set for matched rule" do + flag = { + key: 'flag', + on: true, + variations: [ 'value' ], + version: 100, + 
rules: [ + clauses: [ + { attribute: 'key', op: 'in', values: [ user[:key] ] } + ], + variation: 0, + id: 'id', + trackEvents: true + ] + } + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, flag) + expect(event_processor).to receive(:add_event).with(hash_including( + kind: 'feature', + key: 'flag', + version: 100, + user: user, + value: 'value', + default: 'default', + trackEvents: true, + reason: { kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'id' } + )) + client.variation('flag', user, 'default') + end + + it "sets trackEvents and reason if trackEventsFallthrough is set and we fell through" do + flag = { + key: 'flag', + on: true, + variations: [ 'value' ], + fallthrough: { variation: 0 }, + version: 100, + rules: [], + trackEventsFallthrough: true + } + config.feature_store.init({ LaunchDarkly::FEATURES => {} }) + config.feature_store.upsert(LaunchDarkly::FEATURES, flag) + expect(event_processor).to receive(:add_event).with(hash_including( + kind: 'feature', + key: 'flag', + version: 100, + user: user, + value: 'value', + default: 'default', + trackEvents: true, + reason: { kind: 'FALLTHROUGH' } + )) + client.variation('flag', user, 'default') + end end describe '#variation_detail' do From 12f541a4a3be42d7ea70d3cb2e6f2571958636f6 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 22 Feb 2019 12:04:13 -0800 Subject: [PATCH 112/135] warn & don't send event if identify or track has no valid user --- lib/ldclient-rb/ldclient.rb | 8 +++ spec/fixtures/numeric_key_user.json | 9 ---- spec/fixtures/sanitized_numeric_key_user.json | 9 ---- spec/ldclient_spec.rb | 54 ++++++++++++++++--- 4 files changed, 55 insertions(+), 25 deletions(-) delete mode 100644 spec/fixtures/numeric_key_user.json delete mode 100644 spec/fixtures/sanitized_numeric_key_user.json diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index a5799700..28c21869 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -211,6 +211,10 @@ def variation_detail(key, user, default) # @return [void] # def identify(user) + if !user || user[:key].nil? + @config.logger.warn("Identify called with nil user or nil user key!") + return + end sanitize_user(user) @event_processor.add_event(kind: "identify", key: user[:key], user: user) end @@ -229,6 +233,10 @@ def identify(user) # @return [void] # def track(event_name, user, data) + if !user || user[:key].nil? 
+ @config.logger.warn("Track called with nil user or nil user key!") + return + end sanitize_user(user) @event_processor.add_event(kind: "custom", key: event_name, user: user, data: data) end diff --git a/spec/fixtures/numeric_key_user.json b/spec/fixtures/numeric_key_user.json deleted file mode 100644 index 2a7ec475..00000000 --- a/spec/fixtures/numeric_key_user.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "key": 33, - "custom":{ - "groups":[ - "microsoft", - "google" - ] - } -} diff --git a/spec/fixtures/sanitized_numeric_key_user.json b/spec/fixtures/sanitized_numeric_key_user.json deleted file mode 100644 index 874e0067..00000000 --- a/spec/fixtures/sanitized_numeric_key_user.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "key": "33", - "custom":{ - "groups":[ - "microsoft", - "google" - ] - } -} diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index fca81ab0..6f530610 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -8,7 +8,8 @@ subject.new("secret", offline_config) end let(:null_data) { LaunchDarkly::NullUpdateProcessor.new } - let(:config) { LaunchDarkly::Config.new({send_events: false, data_source: null_data}) } + let(:logger) { double().as_null_object } + let(:config) { LaunchDarkly::Config.new({ send_events: false, data_source: null_data, logger: logger }) } let(:client) do subject.new("secret", config) end @@ -17,16 +18,31 @@ JSON.parse(data, symbolize_names: true) end let(:user) do - data = File.read(File.join("spec", "fixtures", "user.json")) - JSON.parse(data, symbolize_names: true) + { + key: "user@test.com", + custom: { + groups: [ "microsoft", "google" ] + } + } end let(:numeric_key_user) do - data = File.read(File.join("spec", "fixtures", "numeric_key_user.json")) - JSON.parse(data, symbolize_names: true) + { + key: 33, + custom: { + groups: [ "microsoft", "google" ] + } + } end let(:sanitized_numeric_key_user) do - data = File.read(File.join("spec", "fixtures", "sanitized_numeric_key_user.json")) - JSON.parse(data, symbolize_names: true) + { + key: "33", + custom: { + groups: [ "microsoft", "google" ] + } + } + end + let(:user_without_key) do + { name: "Keyless Joe" } end def event_processor @@ -342,6 +358,18 @@ def event_processor expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) client.track("custom_event_name", numeric_key_user, nil) end + + it "does not send an event, and logs a warning, if user is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.track("custom_event_name", nil, nil) + end + + it "does not send an event, and logs a warning, if user key is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.track("custom_event_name", user_without_key, nil) + end end describe '#identify' do @@ -354,6 +382,18 @@ def event_processor expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) client.identify(numeric_key_user) end + + it "does not send an event, and logs a warning, if user is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.identify(nil) + end + + it "does not send an event, and logs a warning, if user key is nil" do + expect(event_processor).not_to receive(:add_event) + expect(logger).to receive(:warn) + client.identify(user_without_key) + end end describe 'with send_events: false' do From 2800db88876e85dc9bd918b01978f69f135d2207 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Fri, 22 
Feb 2019 12:18:05 -0800 Subject: [PATCH 113/135] include user in prereq flag events --- lib/ldclient-rb/evaluation.rb | 3 ++- spec/evaluation_spec.rb | 8 ++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index f873a6e3..1b5bbdca 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -263,6 +263,7 @@ def check_prerequisites(flag, user, store, events, logger) event = { kind: "feature", key: prereq_key, + user: user, variation: prereq_res.variation_index, value: prereq_res.value, version: prereq_flag[:version], @@ -272,7 +273,7 @@ def check_prerequisites(flag, user, store, events, logger) } events.push(event) rescue => exn - Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"{flag[:key]}\"", exn) + Util.log_exception(logger, "Error evaluating prerequisite flag \"#{prereq_key}\" for flag \"#{flag[:key]}\"", exn) prereq_ok = false end end diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 3af960c6..68824ebd 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -127,7 +127,7 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: nil, value: nil, version: 2, prereqOf: 'feature0', + kind: 'feature', key: 'feature1', user: user, variation: nil, value: nil, version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] result = evaluate(flag, user, features, logger) @@ -159,7 +159,7 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] result = evaluate(flag, user, features, logger) @@ -189,7 +189,7 @@ def boolean_flag_with_clauses(clauses) detail = LaunchDarkly::EvaluationDetail.new('b', 1, { kind: 'PREREQUISITE_FAILED', prerequisiteKey: 'feature1' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 0, value: 'd', version: 2, prereqOf: 'feature0', + kind: 'feature', key: 'feature1', user: user, variation: 0, value: 'd', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] result = evaluate(flag, user, features, logger) @@ -218,7 +218,7 @@ def boolean_flag_with_clauses(clauses) user = { key: 'x' } detail = LaunchDarkly::EvaluationDetail.new('a', 0, { kind: 'FALLTHROUGH' }) events_should_be = [{ - kind: 'feature', key: 'feature1', variation: 1, value: 'e', version: 2, prereqOf: 'feature0', + kind: 'feature', key: 'feature1', user: user, variation: 1, value: 'e', version: 2, prereqOf: 'feature0', trackEvents: nil, debugEventsUntilDate: nil }] result = evaluate(flag, user, features, logger) From 47106d9da24380ec3b7ee630a674a15dfef21dac Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 25 Feb 2019 16:12:29 -0800 Subject: [PATCH 114/135] rm unnecessary logic --- lib/ldclient-rb/impl/event_factory.rb | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb index 6af4c5f8..83dc76d9 100644 --- a/lib/ldclient-rb/impl/event_factory.rb 
+++ b/lib/ldclient-rb/impl/event_factory.rb @@ -29,7 +29,6 @@ def new_eval_event(flag, user, detail, default_value, prereq_of_flag = nil) end def new_default_event(flag, user, default_value, reason) - add_experiment_data = is_experiment(flag, reason) e = { kind: 'feature', key: flag[:key], @@ -38,9 +37,9 @@ def new_default_event(flag, user, default_value, reason) default: default_value, version: flag[:version] } - e[:trackEvents] = true if add_experiment_data || flag[:trackEvents] + e[:trackEvents] = true if flag[:trackEvents] e[:debugEventsUntilDate] = flag[:debugEventsUntilDate] if flag[:debugEventsUntilDate] - e[:reason] = reason if add_experiment_data || @with_reasons + e[:reason] = reason if @with_reasons e end From 763a222eece4e9eec4d8b7e441af62f8c2f4f607 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 26 Feb 2019 15:25:43 -0800 Subject: [PATCH 115/135] more factory methods --- lib/ldclient-rb/impl/event_factory.rb | 20 ++++++++++++++++++++ lib/ldclient-rb/ldclient.rb | 4 ++-- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb index 83dc76d9..a43f6a33 100644 --- a/lib/ldclient-rb/impl/event_factory.rb +++ b/lib/ldclient-rb/impl/event_factory.rb @@ -4,6 +4,9 @@ module Impl # Event constructors are centralized here to avoid mistakes and repetitive logic. # The LDClient owns two instances of EventFactory: one that always embeds evaluation reasons # in the events (for when variation_detail is called) and one that doesn't. + # + # Note that these methods do not set the "creationDate" property, because in the Ruby client, + # that is done by EventProcessor.add_event(). class EventFactory def initialize(with_reasons) @with_reasons = with_reasons @@ -55,6 +58,23 @@ def new_unknown_flag_event(key, user, default_value, reason) e end + def new_identify_event(user) + { + kind: 'identify', + key: user[:key], + user: user + } + end + + def new_custom_event(event_name, user, data) + { + kind: 'custom', + key: event_name, + user: user, + data: data + } + end + private def is_experiment(flag, reason) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 0c113d0d..bf396827 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -217,7 +217,7 @@ def variation_detail(key, user, default) # def identify(user) sanitize_user(user) - @event_processor.add_event(kind: "identify", key: user[:key], user: user) + @event_processor.add_event(@event_factory_default.new_identify_event(user)) end # @@ -235,7 +235,7 @@ def identify(user) # def track(event_name, user, data) sanitize_user(user) - @event_processor.add_event(kind: "custom", key: event_name, user: user, data: data) + @event_processor.add_event(@event_factory_default.new_custom_event(event_name, user, data)) end # From 4c234619ce652efcb8658ba5f9c85c728db138b7 Mon Sep 17 00:00:00 2001 From: Ben Woskow Date: Wed, 6 Mar 2019 12:48:21 -0800 Subject: [PATCH 116/135] update readme to refer to docs --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index df406928..1c3eaa8a 100644 --- a/README.md +++ b/README.md @@ -33,6 +33,8 @@ require 'ldclient-rb' client = LaunchDarkly::LDClient.new("your_sdk_key") ``` +*NOTE: Please refer to [our documentation](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-initializing-ldclient-using-spring-unicorn-or-puma) for additional instructions on how to use LaunchDarkly with [Spring](https://github.com/rails/spring), 
[Unicorn](https://bogomips.org/unicorn/), or [Puma](https://github.com/puma/puma).*
+
 ### Ruby on Rails
 1. Add `gem 'ldclient-rb'` to your Gemfile and `bundle install`

From 232f419e2bd69d5a6e46ca2e32b58aed42a4ceb2 Mon Sep 17 00:00:00 2001
From: Eli Bishop
Date: Wed, 20 Mar 2019 12:03:59 -0700
Subject: [PATCH 117/135] add Ruby 2.6.2 to CI

---
 .circleci/config.yml | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 8201b95d..c6ff6938 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -9,6 +9,7 @@ workflows:
       - test-2.3
       - test-2.4
       - test-2.5
+      - test-2.6
       - test-jruby-9.2
 
 ruby-docker-template: &ruby-docker-template
@@ -57,6 +58,13 @@ jobs:
       - image: consul
       - image: redis
       - image: amazon/dynamodb-local
+  test-2.6:
+    <<: *ruby-docker-template
+    docker:
+      - image: circleci/ruby:2.6.2-stretch
+      - image: consul
+      - image: redis
+      - image: amazon/dynamodb-local
   test-jruby-9.2:
     <<: *ruby-docker-template
     docker:

From 20d109b8a1561ed5a57b4c3fa1836cbbb30852d7 Mon Sep 17 00:00:00 2001
From: Eli Bishop
Date: Wed, 20 Mar 2019 16:08:50 -0700
Subject: [PATCH 118/135] fix missing require for net/http

---
 lib/ldclient-rb/util.rb | 1 +
 1 file changed, 1 insertion(+)

diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb
index 03849957..396a5171 100644
--- a/lib/ldclient-rb/util.rb
+++ b/lib/ldclient-rb/util.rb
@@ -1,3 +1,4 @@
+require "net/http"
 require "uri"
 
 module LaunchDarkly

From 651dc37b8d13d75b8cba51d5069fe6af944d776d Mon Sep 17 00:00:00 2001
From: Eli Bishop
Date: Thu, 28 Mar 2019 16:12:52 -0700
Subject: [PATCH 119/135] stringify built-in user attributes in events, and secondary key for evals

---
 lib/ldclient-rb/evaluation.rb |  8 ++++-
 lib/ldclient-rb/events.rb     | 20 +++++++----
 lib/ldclient-rb/ldclient.rb   | 15 ++------
 lib/ldclient-rb/util.rb       | 15 ++++++++
 spec/evaluation_spec.rb       | 19 ++++++++++
 spec/events_spec.rb           | 65 +++++++++++++++++++++++++++++++++++
 spec/ldclient_spec.rb         | 26 --------------
 7 files changed, 122 insertions(+), 46 deletions(-)

diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb
index 1b5bbdca..112aa975 100644
--- a/lib/ldclient-rb/evaluation.rb
+++ b/lib/ldclient-rb/evaluation.rb
@@ -189,6 +189,10 @@ def self.comparator(converter)
   # Used internally to hold an evaluation result and the events that were generated from prerequisites.
   EvalResult = Struct.new(:detail, :events)
 
+  ATTRS_TO_SANITIZE_FOR_EVALUATION = [ :key, :secondary ]
+  # Currently we are not stringifying the rest of the built-in attributes prior to evaluation, only for events.
+  # This is because it could affect evaluation results for existing users (ch35206).
+ def error_result(errorKind, value = nil) EvaluationDetail.new(value, nil, { kind: 'ERROR', errorKind: errorKind }) end @@ -200,8 +204,10 @@ def evaluate(flag, user, store, logger) return EvalResult.new(error_result('USER_NOT_SPECIFIED'), []) end + sanitized_user = Util.stringify_attrs(user, ATTRS_TO_SANITIZE_FOR_EVALUATION) + events = [] - detail = eval_internal(flag, user, store, events, logger) + detail = eval_internal(flag, sanitized_user, store, events, logger) return EvalResult.new(detail, events) end diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index c45a9da2..69563572 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -7,9 +7,12 @@ module LaunchDarkly MAX_FLUSH_WORKERS = 5 CURRENT_SCHEMA_VERSION = 3 + USER_ATTRS_TO_STRINGIFY_FOR_EVENTS = [ :key, :secondary, :ip, :country, :email, :firstName, :lastName, + :avatar, :name ] private_constant :MAX_FLUSH_WORKERS private_constant :CURRENT_SCHEMA_VERSION + private_constant :USER_ATTRS_TO_STRINGIFY_FOR_EVENTS # @private class NullEventProcessor @@ -219,7 +222,7 @@ def notice_user(user) if user.nil? || !user.has_key?(:key) true else - @user_keys.add(user[:key]) + @user_keys.add(user[:key].to_s) end end @@ -371,6 +374,11 @@ def make_output_events(events, summary) private + def process_user(event) + filtered = @user_filter.transform_user_props(event[:user]) + Util.stringify_attrs(filtered, USER_ATTRS_TO_STRINGIFY_FOR_EVENTS) + end + def make_output_event(event) case event[:kind] when "feature" @@ -386,7 +394,7 @@ def make_output_event(event) out[:version] = event[:version] if event.has_key?(:version) out[:prereqOf] = event[:prereqOf] if event.has_key?(:prereqOf) if @inline_users || is_debug - out[:user] = @user_filter.transform_user_props(event[:user]) + out[:user] = process_user(event) else out[:userKey] = event[:user].nil? ? nil : event[:user][:key] end @@ -396,8 +404,8 @@ def make_output_event(event) { kind: "identify", creationDate: event[:creationDate], - key: event[:user].nil? ? nil : event[:user][:key], - user: @user_filter.transform_user_props(event[:user]) + key: event[:user].nil? ? nil : event[:user][:key].to_s, + user: process_user(event) } when "custom" out = { @@ -407,7 +415,7 @@ def make_output_event(event) } out[:data] = event[:data] if event.has_key?(:data) if @inline_users - out[:user] = @user_filter.transform_user_props(event[:user]) + out[:user] = process_user(event) else out[:userKey] = event[:user].nil? ? 
nil : event[:user][:key] end @@ -416,7 +424,7 @@ def make_output_event(event) { kind: "index", creationDate: event[:creationDate], - user: @user_filter.transform_user_props(event[:user]) + user: process_user(event) } else event diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index 28c21869..3680619a 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -215,7 +215,6 @@ def identify(user) @config.logger.warn("Identify called with nil user or nil user key!") return end - sanitize_user(user) @event_processor.add_event(kind: "identify", key: user[:key], user: user) end @@ -237,7 +236,6 @@ def track(event_name, user, data) @config.logger.warn("Track called with nil user or nil user key!") return end - sanitize_user(user) @event_processor.add_event(kind: "custom", key: event_name, user: user, data: data) end @@ -280,8 +278,6 @@ def all_flags_state(user, options={}) return FeatureFlagsState.new(false) end - sanitize_user(user) - begin features = @store.all(FEATURES) rescue => exn @@ -353,7 +349,6 @@ def evaluate_internal(key, user, default, include_reasons_in_events) end end - sanitize_user(user) if !user.nil? feature = @store.get(FEATURES, key) if feature.nil? @@ -367,12 +362,12 @@ def evaluate_internal(key, user, default, include_reasons_in_events) unless user @config.logger.error { "[LDClient] Must specify user" } detail = error_result('USER_NOT_SPECIFIED', default) - @event_processor.add_event(make_feature_event(feature, user, detail, default, include_reasons_in_events)) + @event_processor.add_event(make_feature_event(feature, nil, detail, default, include_reasons_in_events)) return detail end begin - res = evaluate(feature, user, @store, @config.logger) + res = evaluate(feature, user, @store, @config.logger) # note, evaluate will do its own sanitization if !res.events.nil? res.events.each do |event| @event_processor.add_event(event) @@ -392,12 +387,6 @@ def evaluate_internal(key, user, default, include_reasons_in_events) end end - def sanitize_user(user) - if user[:key] - user[:key] = user[:key].to_s - end - end - def make_feature_event(flag, user, detail, default, with_reasons) { kind: "feature", diff --git a/lib/ldclient-rb/util.rb b/lib/ldclient-rb/util.rb index 396a5171..e129c279 100644 --- a/lib/ldclient-rb/util.rb +++ b/lib/ldclient-rb/util.rb @@ -4,6 +4,21 @@ module LaunchDarkly # @private module Util + def self.stringify_attrs(hash, attrs) + return hash if hash.nil? + ret = hash + changed = false + attrs.each do |attr| + value = hash[attr] + if !value.nil? 
&& !value.is_a?(String) + ret = hash.clone if !changed + ret[attr] = value.to_s + changed = true + end + end + ret + end + def self.new_http_client(uri_s, config) uri = URI(uri_s) client = Net::HTTP.new(uri.hostname, uri.port) diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index 68824ebd..52a617b6 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -359,6 +359,25 @@ def boolean_flag_with_clauses(clauses) expect(result.detail).to eq(detail) expect(result.events).to eq([]) end + + it "coerces user key to a string for evaluation" do + clause = { attribute: 'key', op: 'in', values: ['999'] } + flag = boolean_flag_with_clauses([clause]) + user = { key: 999 } + result = evaluate(flag, user, features, logger) + expect(result.detail.value).to eq(true) + end + + it "coerces secondary key to a string for evaluation" do + # We can't really verify that the rollout calculation works correctly, but we can at least + # make sure it doesn't error out if there's a non-string secondary value (ch35189) + rule = { id: 'ruleid', clauses: [{ attribute: 'key', op: 'in', values: ['userkey'] }], + rollout: { salt: '', variations: [ { weight: 100000, variation: 1 } ] } } + flag = boolean_flag_with_rules([rule]) + user = { key: "userkey", secondary: 999 } + result = evaluate(flag, user, features, logger) + expect(result.detail.reason).to eq({ kind: 'RULE_MATCH', ruleIndex: 0, ruleId: 'ruleid'}) + end end describe "clause" do diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 90b91ec9..557c3594 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -9,6 +9,10 @@ let(:hc) { FakeHttpClient.new } let(:user) { { key: "userkey", name: "Red" } } let(:filtered_user) { { key: "userkey", privateAttrs: [ "name" ] } } + let(:numeric_user) { { key: 1, secondary: 2, ip: 3, country: 4, email: 5, firstName: 6, lastName: 7, + avatar: 8, name: 9, anonymous: false, custom: { age: 99 } } } + let(:stringified_numeric_user) { { key: '1', secondary: '2', ip: '3', country: '4', email: '5', firstName: '6', + lastName: '7', avatar: '8', name: '9', anonymous: false, custom: { age: 99 } } } after(:each) do if !@ep.nil? 
@@ -40,6 +44,21 @@ }) end + it "stringifies built-in user attributes in identify event" do + @ep = subject.new("sdk_key", default_config, hc) + flag = { key: "flagkey", version: 11 } + e = { kind: "identify", key: numeric_user[:key], user: numeric_user } + @ep.add_event(e) + + output = flush_and_get_events + expect(output).to contain_exactly( + kind: "identify", + key: numeric_user[:key].to_s, + creationDate: e[:creationDate], + user: stringified_numeric_user + ) + end + it "queues individual feature event with index event" do @ep = subject.new("sdk_key", default_config, hc) flag = { key: "flagkey", version: 11 } @@ -75,6 +94,23 @@ ) end + it "stringifies built-in user attributes in index event" do + @ep = subject.new("sdk_key", default_config, hc) + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: numeric_user, + variation: 1, value: "value", trackEvents: true + } + @ep.add_event(fe) + + output = flush_and_get_events + expect(output).to contain_exactly( + eq(index_event(fe, stringified_numeric_user)), + eq(feature_event(fe, flag, false, nil)), + include(:kind => "summary") + ) + end + it "can include inline user in feature event" do config = LaunchDarkly::Config.new(inline_users_in_events: true) @ep = subject.new("sdk_key", config, hc) @@ -92,6 +128,23 @@ ) end + it "stringifies built-in user attributes in feature event" do + config = LaunchDarkly::Config.new(inline_users_in_events: true) + @ep = subject.new("sdk_key", config, hc) + flag = { key: "flagkey", version: 11 } + fe = { + kind: "feature", key: "flagkey", version: 11, user: numeric_user, + variation: 1, value: "value", trackEvents: true + } + @ep.add_event(fe) + + output = flush_and_get_events + expect(output).to contain_exactly( + eq(feature_event(fe, flag, false, stringified_numeric_user)), + include(:kind => "summary") + ) + end + it "filters user in feature event" do config = LaunchDarkly::Config.new(all_attributes_private: true, inline_users_in_events: true) @ep = subject.new("sdk_key", config, hc) @@ -323,6 +376,18 @@ ) end + it "stringifies built-in user attributes in custom event" do + config = LaunchDarkly::Config.new(inline_users_in_events: true) + @ep = subject.new("sdk_key", config, hc) + e = { kind: "custom", key: "eventkey", user: numeric_user } + @ep.add_event(e) + + output = flush_and_get_events + expect(output).to contain_exactly( + eq(custom_event(e, stringified_numeric_user)) + ) + end + it "does a final flush when shutting down" do @ep = subject.new("sdk_key", default_config, hc) e = { kind: "identify", key: user[:key], user: user } diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 6f530610..86cb5be5 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -25,22 +25,6 @@ } } end - let(:numeric_key_user) do - { - key: 33, - custom: { - groups: [ "microsoft", "google" ] - } - } - end - let(:sanitized_numeric_key_user) do - { - key: "33", - custom: { - groups: [ "microsoft", "google" ] - } - } - end let(:user_without_key) do { name: "Keyless Joe" } end @@ -354,11 +338,6 @@ def event_processor client.track("custom_event_name", user, 42) end - it "sanitizes the user in the event" do - expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) - client.track("custom_event_name", numeric_key_user, nil) - end - it "does not send an event, and logs a warning, if user is nil" do expect(event_processor).not_to receive(:add_event) expect(logger).to receive(:warn) @@ -378,11 +357,6 @@ def event_processor 
client.identify(user) end - it "sanitizes the user in the event" do - expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) - client.identify(numeric_key_user) - end - it "does not send an event, and logs a warning, if user is nil" do expect(event_processor).not_to receive(:add_event) expect(logger).to receive(:warn) From da15bdd62515fb5ae47c23fd67c7956073465e23 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Thu, 28 Mar 2019 16:15:56 -0700 Subject: [PATCH 120/135] make const names consistent --- lib/ldclient-rb/evaluation.rb | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 112aa975..7edef6b2 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -189,7 +189,7 @@ def self.comparator(converter) # Used internally to hold an evaluation result and the events that were generated from prerequisites. EvalResult = Struct.new(:detail, :events) - ATTRS_TO_SANITIZE_FOR_EVALUATION = [ :key, :secondary ] + USER_ATTRS_TO_STRINGIFY_FOR_EVALUATION = [ :key, :secondary ] # Currently we are not stringifying the rest of the built-in attributes prior to evaluation, only for events. # This is because it could affect evaluation results for existing users (ch35206). @@ -204,7 +204,7 @@ def evaluate(flag, user, store, logger) return EvalResult.new(error_result('USER_NOT_SPECIFIED'), []) end - sanitized_user = Util.stringify_attrs(user, ATTRS_TO_SANITIZE_FOR_EVALUATION) + sanitized_user = Util.stringify_attrs(user, USER_ATTRS_TO_STRINGIFY_FOR_EVALUATION) events = [] detail = eval_internal(flag, sanitized_user, store, events, logger) From 260bd1b8cd75c110dd3f719fb907d255d90a0747 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Sat, 13 Apr 2019 16:16:43 -0700 Subject: [PATCH 121/135] support metric value with track() --- lib/ldclient-rb/events.rb | 1 + lib/ldclient-rb/impl/event_factory.rb | 10 ++++++---- lib/ldclient-rb/ldclient.rb | 7 ++++--- spec/events_spec.rb | 3 ++- spec/ldclient_spec.rb | 6 ++++++ 5 files changed, 19 insertions(+), 8 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index c45a9da2..22fdd38c 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -411,6 +411,7 @@ def make_output_event(event) else out[:userKey] = event[:user].nil? ? nil : event[:user][:key] end + out[:metricValue] = event[:metricValue] if event.has_key?(:metricValue) out when "index" { diff --git a/lib/ldclient-rb/impl/event_factory.rb b/lib/ldclient-rb/impl/event_factory.rb index a43f6a33..2e7d2697 100644 --- a/lib/ldclient-rb/impl/event_factory.rb +++ b/lib/ldclient-rb/impl/event_factory.rb @@ -66,13 +66,15 @@ def new_identify_event(user) } end - def new_custom_event(event_name, user, data) - { + def new_custom_event(event_name, user, data, metric_value) + e = { kind: 'custom', key: event_name, - user: user, - data: data + user: user } + e[:data] = data if !data.nil? + e[:metricValue] = metric_value if !metric_value.nil? 
+ e end private diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index bf396827..dc40602c 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -230,12 +230,13 @@ def identify(user) # @param event_name [String] The name of the event # @param user [Hash] The user to register; this can have all the same user properties # described in {#variation} - # @param data [Hash] A hash containing any additional data associated with the event + # @param data [Hash] An optional hash containing any additional data associated with the event + # @param metric_value [Number] An optional numeric value that can be used for analytics purposes # @return [void] # - def track(event_name, user, data) + def track(event_name, user, data = nil, metric_value = nil) sanitize_user(user) - @event_processor.add_event(@event_factory_default.new_custom_event(event_name, user, data)) + @event_processor.add_event(@event_factory_default.new_custom_event(event_name, user, data, metric_value)) end # diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 90b91ec9..31b74b08 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -289,7 +289,7 @@ it "queues custom event with user" do @ep = subject.new("sdk_key", default_config, hc) - e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" } } + e = { kind: "custom", key: "eventkey", user: user, data: { thing: "stuff" }, metricValue: 1.5 } @ep.add_event(e) output = flush_and_get_events @@ -500,6 +500,7 @@ def custom_event(e, inline_user) else out[:user] = inline_user end + out[:metricValue] = e[:metricValue] if e.has_key?(:metricValue) out end diff --git a/spec/ldclient_spec.rb b/spec/ldclient_spec.rb index 2916861e..c07d4023 100644 --- a/spec/ldclient_spec.rb +++ b/spec/ldclient_spec.rb @@ -391,6 +391,12 @@ def event_processor client.track("custom_event_name", user, 42) end + it "can include a metric value" do + expect(event_processor).to receive(:add_event).with(hash_including( + kind: "custom", key: "custom_event_name", user: user, metricValue: 1.5)) + client.track("custom_event_name", user, nil, 1.5) + end + it "sanitizes the user in the event" do expect(event_processor).to receive(:add_event).with(hash_including(user: sanitized_numeric_key_user)) client.track("custom_event_name", numeric_key_user, nil) From 703ffe5ca65e3299a2c7ffbef4baac42ddd08beb Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 16 Apr 2019 18:41:36 -0700 Subject: [PATCH 122/135] update method description --- lib/ldclient-rb/ldclient.rb | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index dc40602c..bd2e1225 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -231,7 +231,10 @@ def identify(user) # @param user [Hash] The user to register; this can have all the same user properties # described in {#variation} # @param data [Hash] An optional hash containing any additional data associated with the event - # @param metric_value [Number] An optional numeric value that can be used for analytics purposes + # @param metric_value [Number] A numeric value used by the LaunchDarkly experimentation + # feature in numeric custom metrics. Can be omitted if this event is used by only + # non-numeric metrics. This field will also be returned as part of the custom event + # for Data Export. 
# @return [void] # def track(event_name, user, data = nil, metric_value = nil) From 4bc671bd7dbaf400441b6a8a7852f8e45f437c42 Mon Sep 17 00:00:00 2001 From: Ben Woskow Date: Mon, 6 May 2019 16:11:43 -0700 Subject: [PATCH 123/135] applying markdown templates and updating repository url references --- CHANGELOG.md | 16 ++--- CONTRIBUTING.md | 39 +++++++++- Gemfile.lock | 4 +- README.md | 169 +++++++------------------------------------- ldclient-rb.gemspec | 2 +- scripts/release.sh | 4 +- 6 files changed, 74 insertions(+), 160 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index af4ffb62..2a4c2269 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,12 +4,12 @@ All notable changes to the LaunchDarkly Ruby SDK will be documented in this file ## [5.5.5] - 2019-03-28 ### Fixed: -- Setting user attributes to non-string values when a string was expected would cause analytics events not to be processed. Also, in the case of the `secondary` attribute, this could cause evaluations to fail for a flag with a percentage rollout. The SDK will now convert attribute values to strings as needed. ([#131](https://github.com/launchdarkly/ruby-client/issues/131)) +- Setting user attributes to non-string values when a string was expected would cause analytics events not to be processed. Also, in the case of the `secondary` attribute, this could cause evaluations to fail for a flag with a percentage rollout. The SDK will now convert attribute values to strings as needed. ([#131](https://github.com/launchdarkly/ruby-server-sdk/issues/131)) ## [5.5.4] - 2019-03-29 ### Fixed: -- Fixed a missing `require` that could sometimes cause a `NameError` to be thrown when starting the client, depending on what other gems were installed. This bug was introduced in version 5.5.3. ([#129](https://github.com/launchdarkly/ruby-client/issues/129)) -- When an analytics event was generated for a feature flag because it is a prerequisite for another flag that was evaluated, the user data was being omitted from the event. ([#128](https://github.com/launchdarkly/ruby-client/issues/128)) +- Fixed a missing `require` that could sometimes cause a `NameError` to be thrown when starting the client, depending on what other gems were installed. This bug was introduced in version 5.5.3. ([#129](https://github.com/launchdarkly/ruby-server-sdk/issues/129)) +- When an analytics event was generated for a feature flag because it is a prerequisite for another flag that was evaluated, the user data was being omitted from the event. ([#128](https://github.com/launchdarkly/ruby-server-sdk/issues/128)) - If `track` or `identify` is called without a user, the SDK now logs a warning, and does not send an analytics event to LaunchDarkly (since it would not be processed without a user). - Added a link from the SDK readme to the guide regarding the client initialization. @@ -44,7 +44,7 @@ All notable changes to the LaunchDarkly Ruby SDK will be documented in this file ### Fixed: - Added or corrected a large number of documentation comments. All API classes and methods are now documented, and internal implementation details have been hidden from the documentation. You can view the latest documentation on [RubyDoc](https://www.rubydoc.info/gems/ldclient-rb). 
- Fixed a problem in the Redis feature store that would only happen under unlikely circumstances: trying to evaluate a flag when the LaunchDarkly client had not yet been fully initialized and the store did not yet have data in it, and then trying again when the client was still not ready but the store _did_ have data (presumably put there by another process). Previously, the second attempt would fail. -- In polling mode, the SDK did not correctly handle non-ASCII Unicode characters in feature flag data. ([#90](https://github.com/launchdarkly/ruby-client/issues/90)) +- In polling mode, the SDK did not correctly handle non-ASCII Unicode characters in feature flag data. ([#90](https://github.com/launchdarkly/ruby-server-sdk/issues/90)) ### Deprecated: - `RedisFeatureStore.new`. This implementation class may be changed or moved in the future; use `LaunchDarkly::Integrations::Redis::new_feature_store`. @@ -52,16 +52,16 @@ All notable changes to the LaunchDarkly Ruby SDK will be documented in this file ## [5.4.3] - 2019-01-11 ### Changed: -- The SDK is now compatible with `net-http-persistent` 3.x. (Thanks, [CodingAnarchy](https://github.com/launchdarkly/ruby-client/pull/113)!) +- The SDK is now compatible with `net-http-persistent` 3.x. (Thanks, [CodingAnarchy](https://github.com/launchdarkly/ruby-server-sdk/pull/113)!) ## [5.4.2] - 2019-01-04 ### Fixed: -- Fixed overly specific dependency versions of `concurrent-ruby` and `semantic`. ([#115](https://github.com/launchdarkly/ruby-client/issues/115)) +- Fixed overly specific dependency versions of `concurrent-ruby` and `semantic`. ([#115](https://github.com/launchdarkly/ruby-server-sdk/issues/115)) - Removed obsolete dependencies on `hashdiff` and `thread_safe`. ## [5.4.1] - 2018-11-05 ### Fixed: -- Fixed a `LoadError` in `file_data_source.rb`, which was added in 5.4.0. (Thanks, [kbarrette](https://github.com/launchdarkly/ruby-client/pull/110)!) +- Fixed a `LoadError` in `file_data_source.rb`, which was added in 5.4.0. (Thanks, [kbarrette](https://github.com/launchdarkly/ruby-server-sdk/pull/110)!) ## [5.4.0] - 2018-11-02 @@ -128,7 +128,7 @@ Fixed a regression in version 5.0.0 that could prevent the client from reconnect ## [3.0.2] - 2018-03-06 ## Fixed -- Improved efficiency of logging by not constructing messages that won't be visible at the current log level. (Thanks, [julik](https://github.com/launchdarkly/ruby-client/pull/98)!) +- Improved efficiency of logging by not constructing messages that won't be visible at the current log level. (Thanks, [julik](https://github.com/launchdarkly/ruby-server-sdk/pull/98)!) ## [3.0.1] - 2018-02-26 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c6b8dd20..618877f8 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,4 +1,37 @@ -Contributing to LaunchDarkly SDK for Ruby -========================================= +Contributing to the LaunchDarkly Server-side SDK for Ruby +================================================ -We encourage pull-requests and other contributions from the community. We've also published an [SDK contributor's guide](http://docs.launchdarkly.com/docs/sdk-contributors-guide) that provides a detailed explanation of how our SDKs work. +LaunchDarkly has published an [SDK contributor's guide](https://docs.launchdarkly.com/docs/sdk-contributors-guide) that provides a detailed explanation of how our SDKs work. See below for additional information on how to contribute to this SDK. 
+ +Submitting bug reports and feature requests +------------------ + +The LaunchDarkly SDK team monitors the [issue tracker](https://github.com/launchdarkly/ruby-server-sdk/issues) in the SDK repository. Bug reports and feature requests specific to this SDK should be filed in this issue tracker. The SDK team will respond to all newly filed issues within two business days. + +Submitting pull requests +------------------ + +We encourage pull requests and other contributions from the community. Before submitting pull requests, ensure that all temporary or unintended code is removed. Don't worry about adding reviewers to the pull request; the LaunchDarkly SDK team will add themselves. The SDK team will acknowledge all pull requests within two business days. + +Build instructions +------------------ + +### Prerequisites + +This SDK is built with [Bundler](https://bundler.io/). To install Bundler, run `gem install bundler -v 1.17.3`. You might need `sudo` to execute the command successfully. As of this writing, the SDK does not support being built with Bundler 2.0. + +### Building + +To build the SDK without running any tests: + +``` +bundle install +``` + +### Testing + +To run all unit tests: + +``` +bundle exec rspec spec +``` \ No newline at end of file diff --git a/Gemfile.lock b/Gemfile.lock index 21a65cc1..aa131e55 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,7 +1,7 @@ PATH remote: . specs: - ldclient-rb (5.5.2) + ldclient-rb (5.5.5) concurrent-ruby (~> 1.0) json (>= 1.8, < 3) ld-eventsource (~> 1.0) @@ -23,7 +23,7 @@ GEM aws-sigv4 (1.0.3) codeclimate-test-reporter (0.6.0) simplecov (>= 0.7.1, < 1.0.0) - concurrent-ruby (1.1.4) + concurrent-ruby (1.1.5) connection_pool (2.2.1) diff-lcs (1.3) diplomat (2.0.2) diff --git a/README.md b/README.md index 1c3eaa8a..0fc5a9d2 100644 --- a/README.md +++ b/README.md @@ -1,135 +1,27 @@ -LaunchDarkly SDK for Ruby +LaunchDarkly Server-side SDK for Ruby =========================== [![Gem Version](https://badge.fury.io/rb/ldclient-rb.svg)](http://badge.fury.io/rb/ldclient-rb) -[![Circle CI](https://circleci.com/gh/launchdarkly/ruby-client/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-client/tree/master) -[![Test Coverage](https://codeclimate.com/github/launchdarkly/ruby-client/badges/coverage.svg)](https://codeclimate.com/github/launchdarkly/ruby-client/coverage) -[![security](https://hakiri.io/github/launchdarkly/ruby-client/master.svg)](https://hakiri.io/github/launchdarkly/ruby-client/master) +[![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) +[![Test Coverage](https://codeclimate.com/github/launchdarkly/ruby-server-sdk/badges/coverage.svg)](https://codeclimate.com/github/launchdarkly/ruby-server-sdk/coverage) +[![security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) + +LaunchDarkly overview +------------------------- +[LaunchDarkly](https://www.launchdarkly.com) is a feature management platform that serves over 100 billion feature flags daily to help teams build better software, faster. [Get started](https://docs.launchdarkly.com/docs/getting-started) using LaunchDarkly today! 
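For orientation while reading these README changes, here is a minimal sketch of basic SDK usage, assembled from the quick-setup snippets removed elsewhere in this patch; the SDK key, flag key, and user key are placeholders, not working values.

```ruby
require "ldclient-rb"

# Placeholder SDK key -- substitute a real server-side SDK key from your LaunchDarkly account.
client = LaunchDarkly::LDClient.new("your_sdk_key")

# Placeholder flag key and user; the third argument is the default value returned on error.
if client.variation("your.flag.key", { key: "user@test.com" }, false)
  # application code to show the feature
else
  # the code to run if the feature is off
end
```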
+ +[![Twitter Follow](https://img.shields.io/twitter/follow/launchdarkly.svg?style=social&label=Follow&maxAge=2592000)](https://twitter.com/intent/follow?screen_name=launchdarkly) Supported Ruby versions ----------------------- This version of the LaunchDarkly SDK has a minimum Ruby version of 2.2.6, or 9.1.6 for JRuby. -Quick setup +Getting started ----------- -1. Install the Ruby SDK with `gem` - -```shell -gem install ldclient-rb -``` - -2. Require the LaunchDarkly client: - -```ruby -require 'ldclient-rb' -``` - -3. Create a new LDClient with your SDK key: - -```ruby -client = LaunchDarkly::LDClient.new("your_sdk_key") -``` - -*NOTE: Please refer to [our documentation](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-initializing-ldclient-using-spring-unicorn-or-puma) for additional instructions on how to use LaunchDarkly with [Spring](https://github.com/rails/spring), [Unicorn](https://bogomips.org/unicorn/), or [Puma](https://github.com/puma/puma).* - -### Ruby on Rails - -1. Add `gem 'ldclient-rb'` to your Gemfile and `bundle install` - -2. Initialize the launchdarkly client in `config/initializers/launchdarkly.rb`: - -```ruby -Rails.configuration.ld_client = LaunchDarkly::LDClient.new("your_sdk_key") -``` - -3. You may want to include a function in your ApplicationController - -```ruby -def launchdarkly_settings - if current_user.present? - { - key: current_user.id, - anonymous: false, - email: current_user.email, - custom: { groups: current_user.groups.pluck(:name) }, - # Any other fields you may have - # e.g. lastName: current_user.last_name, - } - else - if Rails::VERSION::MAJOR <= 3 - hash_key = request.session_options[:id] - else - hash_key = session.id - end - # session ids should be private to prevent session hijacking - hash_key = Digest::SHA256.base64digest hash_key - { - key: hash_key, - anonymous: true, - } - end -end -``` - -4. In your controllers, access the client using - -```ruby -Rails.application.config.ld_client.variation('your.flag.key', launchdarkly_settings, false) -``` - -Note that this gem will automatically switch to using the Rails logger it is detected. - - -Your first feature flag ------------------------ - -1. Create a new feature flag on your [dashboard](https://app.launchdarkly.com). -2. In your application code, use the feature's key to check whether the flag is on for each user: - -```ruby -if client.variation("your.flag.key", {key: "user@test.com"}, false) - # application code to show the feature -else - # the code to run if the feature is off -end -``` - -HTTPS proxy ------------ - -The Ruby SDK uses Faraday and Socketry to handle its network traffic. Both of these provide built-in support for the use of an HTTPS proxy. If the HTTPS_PROXY environment variable is present then the SDK will proxy all network requests through the URL provided. (HTTP_PROXY is not used because all LaunchDarkly services require HTTPS.) 
- -How to set the HTTPS_PROXY environment variable on Mac/Linux systems: -``` -export HTTPS_PROXY=https://web-proxy.domain.com:8080 -``` - -How to set the HTTPS_PROXY environment variable on Windows systems: -``` -set HTTPS_PROXY=https://web-proxy.domain.com:8080 -``` - -If your proxy requires authentication then you can prefix the URN with your login information: -``` -export HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 -``` -or -``` -set HTTPS_PROXY=http://user:pass@web-proxy.domain.com:8080 -``` - -Database integrations ---------------------- - -Feature flag data can be kept in a persistent store using Redis, DynamoDB, or Consul. These adapters are implemented in the `LaunchDarkly::Integrations::Redis`, `LaunchDarkly::Integrations::DynamoDB`, and `LaunchDarkly::Integrations::Consul` modules; to use them, call the `new_feature_store` method in the module, and put the returned object in the `feature_store` property of your client configuration. See the [API documentation](https://www.rubydoc.info/gems/ldclient-rb/LaunchDarkly/Integrations) and the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store) for more information. - -Using flag data from a file ---------------------------- - -For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See `LaunchDarkly::FileDataSource` or the [SDK reference guide](https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file) for more details. +Refer to the [SDK documentation](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-getting-started) for instructions on getting started with using the SDK. Learn more ----------- @@ -140,37 +32,26 @@ Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/l Testing ------- - + We run integration tests for all our SDKs using a centralized test harness. This approach gives us the ability to test for consistency across SDKs, as well as test networking behavior in a long-running application. These tests cover each method in the SDK, and verify that event sending, flag evaluation, stream reconnection, and other aspects of the SDK all behave correctly. - + Contributing ------------ - -See [Contributing](https://github.com/launchdarkly/ruby-client/blob/master/CONTRIBUTING.md). - + +We encourage pull requests and other contributions from the community. Check out our [contributing guidelines](CONTRIBUTING.md) for instructions on how to contribute to this SDK. + About LaunchDarkly ------------------- - +----------- + * LaunchDarkly is a continuous delivery platform that provides feature flags as a service and allows developers to iterate quickly and safely. We allow you to easily flag your features and manage them from the LaunchDarkly dashboard. With LaunchDarkly, you can: * Roll out a new feature to a subset of your users (like a group of users who opt-in to a beta tester group), gathering feedback and bug reports from real-world use cases. * Gradually roll out a feature to an increasing percentage of users, and track the effect that the feature has on key metrics (for instance, how likely is a user to complete a purchase if they have feature A versus feature B?). * Turn off a feature that you realize is causing performance problems in production, without needing to re-deploy, or even restart the application with a changed configuration file. 
* Grant access to certain features based on user attributes, like payment plan (eg: users on the ‘gold’ plan get access to more features than users in the ‘silver’ plan). Disable parts of your application to facilitate maintenance, without taking everything offline. -* LaunchDarkly provides feature flag SDKs for - * [Java](http://docs.launchdarkly.com/docs/java-sdk-reference "Java SDK") - * [JavaScript](http://docs.launchdarkly.com/docs/js-sdk-reference "LaunchDarkly JavaScript SDK") - * [PHP](http://docs.launchdarkly.com/docs/php-sdk-reference "LaunchDarkly PHP SDK") - * [Python](http://docs.launchdarkly.com/docs/python-sdk-reference "LaunchDarkly Python SDK") - * [Go](http://docs.launchdarkly.com/docs/go-sdk-reference "LaunchDarkly Go SDK") - * [Node.JS](http://docs.launchdarkly.com/docs/node-sdk-reference "LaunchDarkly Node SDK") - * [Electron](http://docs.launchdarkly.com/docs/electron-sdk-reference "LaunchDarkly Electron SDK") - * [.NET](http://docs.launchdarkly.com/docs/dotnet-sdk-reference "LaunchDarkly .Net SDK") - * [Ruby](http://docs.launchdarkly.com/docs/ruby-sdk-reference "LaunchDarkly Ruby SDK") - * [iOS](http://docs.launchdarkly.com/docs/ios-sdk-reference "LaunchDarkly iOS SDK") - * [Android](http://docs.launchdarkly.com/docs/android-sdk-reference "LaunchDarkly Android SDK") +* LaunchDarkly provides feature flag SDKs for a wide variety of languages and technologies. Check out [our documentation](https://docs.launchdarkly.com/docs) for a complete list. * Explore LaunchDarkly - * [launchdarkly.com](http://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information - * [docs.launchdarkly.com](http://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDKs - * [apidocs.launchdarkly.com](http://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation - * [blog.launchdarkly.com](http://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates - * [Feature Flagging Guide](https://github.com/launchdarkly/featureflags/ "Feature Flagging Guide") for best practices and strategies + * [launchdarkly.com](https://www.launchdarkly.com/ "LaunchDarkly Main Website") for more information + * [docs.launchdarkly.com](https://docs.launchdarkly.com/ "LaunchDarkly Documentation") for our documentation and SDK reference guides + * [apidocs.launchdarkly.com](https://apidocs.launchdarkly.com/ "LaunchDarkly API Documentation") for our API documentation + * [blog.launchdarkly.com](https://blog.launchdarkly.com/ "LaunchDarkly Blog Documentation") for the latest product updates + * [Feature Flagging Guide](https://github.com/launchdarkly/featureflags/ "Feature Flagging Guide") for best practices and strategies \ No newline at end of file diff --git a/ldclient-rb.gemspec b/ldclient-rb.gemspec index 9fb4daa0..d1a19483 100644 --- a/ldclient-rb.gemspec +++ b/ldclient-rb.gemspec @@ -12,7 +12,7 @@ Gem::Specification.new do |spec| spec.email = ["team@launchdarkly.com"] spec.summary = "LaunchDarkly SDK for Ruby" spec.description = "Official LaunchDarkly SDK for Ruby" - spec.homepage = "https://github.com/launchdarkly/ruby-client" + spec.homepage = "https://github.com/launchdarkly/ruby-server-sdk" spec.license = "Apache-2.0" spec.files = `git ls-files -z`.split("\x0") diff --git a/scripts/release.sh b/scripts/release.sh index 18537846..314fe8b9 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -9,7 +9,7 @@ # When done you should commit and push the changes made. 
set -uxe -echo "Starting ruby-client release." +echo "Starting ruby-server-sdk release." VERSION=$1 @@ -24,4 +24,4 @@ gem build ldclient-rb.gemspec # Publish Ruby Gem gem push ldclient-rb-${VERSION}.gem -echo "Done with ruby-client release" \ No newline at end of file +echo "Done with ruby-server-sdk release" \ No newline at end of file From a56ee201360abd3a9c3ad9cb55cc6a7c9d493fef Mon Sep 17 00:00:00 2001 From: Ben Woskow Date: Tue, 7 May 2019 12:55:19 -0700 Subject: [PATCH 124/135] Cleaning up markdown files --- CONTRIBUTING.md | 4 +--- README.md | 3 +-- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 618877f8..6ed90ddb 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -20,9 +20,7 @@ Build instructions This SDK is built with [Bundler](https://bundler.io/). To install Bundler, run `gem install bundler -v 1.17.3`. You might need `sudo` to execute the command successfully. As of this writing, the SDK does not support being built with Bundler 2.0. -### Building - -To build the SDK without running any tests: +To install the runtime dependencies: ``` bundle install diff --git a/README.md b/README.md index 0fc5a9d2..7795ddb9 100644 --- a/README.md +++ b/README.md @@ -4,8 +4,7 @@ LaunchDarkly Server-side SDK for Ruby [![Gem Version](https://badge.fury.io/rb/ldclient-rb.svg)](http://badge.fury.io/rb/ldclient-rb) [![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) -[![Test Coverage](https://codeclimate.com/github/launchdarkly/ruby-server-sdk/badges/coverage.svg)](https://codeclimate.com/github/launchdarkly/ruby-server-sdk/coverage) -[![security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) +[![Security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) LaunchDarkly overview ------------------------- From 962e729b88f81efdf61d3905aa2a888bfed6e8e7 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 8 May 2019 18:01:29 -0700 Subject: [PATCH 125/135] allow skipping database tests --- CONTRIBUTING.md | 4 +++- spec/integrations/consul_feature_store_spec.rb | 1 + spec/integrations/dynamodb_feature_store_spec.rb | 3 ++- spec/redis_feature_store_spec.rb | 2 ++ 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 6ed90ddb..ac126eec 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -32,4 +32,6 @@ To run all unit tests: ``` bundle exec rspec spec -``` \ No newline at end of file +``` + +By default, the full unit test suite includes live tests of the integrations for Consul, DynamoDB, and Redis. Those tests expect you to have instances of all of those databases running locally. To skip them, set the environment variable `LD_SKIP_DATABASE_TESTS=1` before running the tests. diff --git a/spec/integrations/consul_feature_store_spec.rb b/spec/integrations/consul_feature_store_spec.rb index 13767686..45f87097 100644 --- a/spec/integrations/consul_feature_store_spec.rb +++ b/spec/integrations/consul_feature_store_spec.rb @@ -28,6 +28,7 @@ def clear_all_data describe "Consul feature store" do + return if ENV['LD_SKIP_DATABASE_TESTS'] == '1' # These tests will all fail if there isn't a local Consul instance running. 
diff --git a/spec/integrations/dynamodb_feature_store_spec.rb b/spec/integrations/dynamodb_feature_store_spec.rb index 4add3d53..d924b30a 100644 --- a/spec/integrations/dynamodb_feature_store_spec.rb +++ b/spec/integrations/dynamodb_feature_store_spec.rb @@ -89,7 +89,8 @@ def create_test_client describe "DynamoDB feature store" do - + return if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + # These tests will all fail if there isn't a local DynamoDB instance running. create_table_if_necessary diff --git a/spec/redis_feature_store_spec.rb b/spec/redis_feature_store_spec.rb index 3da25f4f..0f372184 100644 --- a/spec/redis_feature_store_spec.rb +++ b/spec/redis_feature_store_spec.rb @@ -31,6 +31,8 @@ def clear_all_data describe LaunchDarkly::RedisFeatureStore do subject { LaunchDarkly::RedisFeatureStore } + return if ENV['LD_SKIP_DATABASE_TESTS'] == '1' + # These tests will all fail if there isn't a Redis instance running on the default port. context "real Redis with local cache" do From f32b9c694b8ceb6f761fd068d085b2cce05200fc Mon Sep 17 00:00:00 2001 From: Ben Woskow <48036130+bwoskow-ld@users.noreply.github.com> Date: Mon, 13 May 2019 13:28:36 -0700 Subject: [PATCH 126/135] Updating the package name (#115) * update package name * missed one * revert module entry point name change --- Gemfile.lock | 4 ++-- README.md | 4 ++-- ldclient-rb.gemspec => launchdarkly-server-sdk.gemspec | 2 +- scripts/gendocs.sh | 2 +- scripts/release.sh | 8 ++++---- 5 files changed, 10 insertions(+), 10 deletions(-) rename ldclient-rb.gemspec => launchdarkly-server-sdk.gemspec (97%) diff --git a/Gemfile.lock b/Gemfile.lock index aa131e55..a076f848 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,7 +1,7 @@ PATH remote: . specs: - ldclient-rb (5.5.5) + launchdarkly-server-sdk (5.5.6) concurrent-ruby (~> 1.0) json (>= 1.8, < 3) ld-eventsource (~> 1.0) @@ -90,7 +90,7 @@ DEPENDENCIES codeclimate-test-reporter (~> 0) connection_pool (>= 2.1.2) diplomat (>= 2.0.2) - ldclient-rb! + launchdarkly-server-sdk! listen (~> 3.0) rake (~> 10.0) redis (~> 3.3.5) diff --git a/README.md b/README.md index 7795ddb9..d3f99b69 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ LaunchDarkly Server-side SDK for Ruby =========================== -[![Gem Version](https://badge.fury.io/rb/ldclient-rb.svg)](http://badge.fury.io/rb/ldclient-rb) +[![Gem Version](https://badge.fury.io/rb/launchdarkly-server-sdk.svg)](http://badge.fury.io/rb/launchdarkly-server-sdk) [![Circle CI](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master.svg?style=svg)](https://circleci.com/gh/launchdarkly/ruby-server-sdk/tree/master) [![Security](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master.svg)](https://hakiri.io/github/launchdarkly/ruby-server-sdk/master) @@ -27,7 +27,7 @@ Learn more Check out our [documentation](http://docs.launchdarkly.com) for in-depth instructions on configuring and using LaunchDarkly. You can also head straight to the [reference guide for this SDK](http://docs.launchdarkly.com/docs/ruby-sdk-reference). -Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/ldclient-rb). +Generated API documentation is on [RubyDoc.info](https://www.rubydoc.info/gems/launchdarkly-server-sdk). 
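Because this patch renames the published gem to `launchdarkly-server-sdk` while, per the commit message, reverting the module entry-point rename, a brief sketch of what a consuming application would presumably change (and not change) after upgrading:

```ruby
# In a Gemfile, the dependency would now be declared under the new gem name:
#   gem "launchdarkly-server-sdk"
#
# The Ruby entry point is unchanged, so application code still loads the library as before:
require "ldclient-rb"
```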
Testing ------- diff --git a/ldclient-rb.gemspec b/launchdarkly-server-sdk.gemspec similarity index 97% rename from ldclient-rb.gemspec rename to launchdarkly-server-sdk.gemspec index d1a19483..911e438b 100644 --- a/ldclient-rb.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -6,7 +6,7 @@ require "ldclient-rb/version" # rubocop:disable Metrics/BlockLength Gem::Specification.new do |spec| - spec.name = "ldclient-rb" + spec.name = "launchdarkly-server-sdk" spec.version = LaunchDarkly::VERSION spec.authors = ["LaunchDarkly"] spec.email = ["team@launchdarkly.com"] diff --git a/scripts/gendocs.sh b/scripts/gendocs.sh index 96df177f..c5ec7dcf 100755 --- a/scripts/gendocs.sh +++ b/scripts/gendocs.sh @@ -1,7 +1,7 @@ #!/bin/bash # Use this script to generate documentation locally in ./doc so it can be proofed before release. -# After release, documentation will be visible at https://www.rubydoc.info/gems/ldclient-rb +# After release, documentation will be visible at https://www.rubydoc.info/gems/launchdarkly-server-sdk gem install --conservative yard gem install --conservative redcarpet # provides Markdown formatting diff --git a/scripts/release.sh b/scripts/release.sh index 314fe8b9..9813240c 100755 --- a/scripts/release.sh +++ b/scripts/release.sh @@ -1,5 +1,5 @@ #!/usr/bin/env bash -# This script updates the version for the ldclient library and releases it to RubyGems +# This script updates the version for the launchdarkly-server-sdk library and releases it to RubyGems # It will only work if you have the proper credentials set up in ~/.gem/credentials # It takes exactly one argument: the new version. @@ -13,15 +13,15 @@ echo "Starting ruby-server-sdk release." VERSION=$1 -#Update version in ldclient/version.py +#Update version in lib/ldclient-rb/version.rb VERSION_RB_TEMP=./version.rb.tmp sed "s/VERSION =.*/VERSION = \"${VERSION}\"/g" lib/ldclient-rb/version.rb > ${VERSION_RB_TEMP} mv ${VERSION_RB_TEMP} lib/ldclient-rb/version.rb # Build Ruby Gem -gem build ldclient-rb.gemspec +gem build launchdarkly-server-sdk.gemspec # Publish Ruby Gem -gem push ldclient-rb-${VERSION}.gem +gem push launchdarkly-server-sdk-${VERSION}.gem echo "Done with ruby-server-sdk release" \ No newline at end of file From 8defb308a44a25f056fcd7260393e25e8a277dbe Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 10 Jul 2019 17:49:38 -0700 Subject: [PATCH 127/135] bump ld-eventsource version for stream logging fix --- Gemfile.lock | 6 +++--- launchdarkly-server-sdk.gemspec | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Gemfile.lock b/Gemfile.lock index a076f848..155eccf2 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,10 +1,10 @@ PATH remote: . 
specs: - launchdarkly-server-sdk (5.5.6) + launchdarkly-server-sdk (5.5.7) concurrent-ruby (~> 1.0) json (>= 1.8, < 3) - ld-eventsource (~> 1.0) + ld-eventsource (= 1.0.1) semantic (~> 1.6) GEM @@ -40,7 +40,7 @@ GEM jmespath (1.4.0) json (1.8.6) json (1.8.6-java) - ld-eventsource (1.0.0) + ld-eventsource (1.0.1) concurrent-ruby (~> 1.0) http_tools (~> 0.4.5) socketry (~> 0.5.1) diff --git a/launchdarkly-server-sdk.gemspec b/launchdarkly-server-sdk.gemspec index 911e438b..2e95cd41 100644 --- a/launchdarkly-server-sdk.gemspec +++ b/launchdarkly-server-sdk.gemspec @@ -36,5 +36,5 @@ Gem::Specification.new do |spec| spec.add_runtime_dependency "json", [">= 1.8", "< 3"] spec.add_runtime_dependency "semantic", "~> 1.6" spec.add_runtime_dependency "concurrent-ruby", "~> 1.0" - spec.add_runtime_dependency "ld-eventsource", '~> 1.0' + spec.add_runtime_dependency "ld-eventsource", "1.0.1" end From c1aeaa3a8f35c7fb375507c4886b6e874e36ef69 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 11:49:41 -0700 Subject: [PATCH 128/135] use YAML.safe_load --- lib/ldclient-rb/file_data_source.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 7606c1d3..d5e05ae0 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -206,7 +206,7 @@ def parse_content(content) # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least # for all the samples of actual flag data that we've tested). - symbolize_all_keys(YAML.load(content)) + symbolize_all_keys(YAML.safe_load(content)) end def symbolize_all_keys(value) From f8aac44e2b4b0b2022ec845cd8eef68b57e3b8cd Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 12:25:34 -0700 Subject: [PATCH 129/135] add unit test and temporarily revert fix to demonstrate failure --- lib/ldclient-rb/file_data_source.rb | 2 +- spec/file_data_source_spec.rb | 28 ++++++++++++++++++++++++++++ 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index d5e05ae0..7606c1d3 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -206,7 +206,7 @@ def parse_content(content) # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least # for all the samples of actual flag data that we've tested). 
- symbolize_all_keys(YAML.safe_load(content)) + symbolize_all_keys(YAML.load(content)) end def symbolize_all_keys(value) diff --git a/spec/file_data_source_spec.rb b/spec/file_data_source_spec.rb index c827222d..837b775d 100644 --- a/spec/file_data_source_spec.rb +++ b/spec/file_data_source_spec.rb @@ -1,6 +1,14 @@ require "spec_helper" require "tempfile" +# see does not allow Ruby objects in YAML" for the purpose of the following two things +$created_bad_class = false +class BadClassWeShouldNotInstantiate < Hash + def []=(key, value) + $created_bad_class = true + end +end + describe LaunchDarkly::FileDataSource do let(:full_flag_1_key) { "flag1" } let(:full_flag_1_value) { "on" } @@ -78,6 +86,12 @@ EOF } + let(:unsafe_yaml) { <<-EOF +--- !ruby/hash:BadClassWeShouldNotInstantiate +foo: bar +EOF + } + let(:bad_file_path) { "no-such-file" } before do @@ -138,6 +152,20 @@ def with_data_source(options) end end + it "does not allow Ruby objects in YAML" do + # This tests for the vulnerability described here: https://trailofbits.github.io/rubysec/yaml/index.html + # The file we're loading contains a hash with a custom Ruby class, BadClassWeShouldNotInstantiate (see top + # of file). If we're not loading in safe mode, it will create an instance of that class and call its []= + # method, which we've defined to set $created_bad_class to true. In safe mode, it refuses to parse this file. + file = make_temp_file(unsafe_yaml) + with_data_source({ paths: [file.path ] }) do |ds| + event = ds.start + expect(event.set?).to eq(true) + expect(ds.initialized?).to eq(false) + expect($created_bad_class).to eq(false) + end + end + it "sets start event and initialized on successful load" do file = make_temp_file(all_properties_json) with_data_source({ paths: [ file.path ] }) do |ds| From 8f480604b3b96f7bc3070bf49996f23078a871f2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 12:27:52 -0700 Subject: [PATCH 130/135] restore fix --- lib/ldclient-rb/file_data_source.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index 7606c1d3..d5e05ae0 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -206,7 +206,7 @@ def parse_content(content) # We can use the Ruby YAML parser for both YAML and JSON (JSON is a subset of YAML and while # not all YAML parsers handle it correctly, we have verified that the Ruby one does, at least # for all the samples of actual flag data that we've tested). - symbolize_all_keys(YAML.load(content)) + symbolize_all_keys(YAML.safe_load(content)) end def symbolize_all_keys(value) From 8fa005fee2d0800c5da76e745d82e0476cadd6af Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 13:08:50 -0700 Subject: [PATCH 131/135] add comment about not using FileDataSource in production --- lib/ldclient-rb/file_data_source.rb | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/ldclient-rb/file_data_source.rb b/lib/ldclient-rb/file_data_source.rb index d5e05ae0..cfea75f7 100644 --- a/lib/ldclient-rb/file_data_source.rb +++ b/lib/ldclient-rb/file_data_source.rb @@ -21,9 +21,11 @@ def self.have_listen? end # - # Provides a way to use local files as a source of feature flag state. This would typically be - # used in a test environment, to operate using a predetermined feature flag state without an - # actual LaunchDarkly connection. + # Provides a way to use local files as a source of feature flag state. 
This allows using a + # predetermined feature flag state without an actual LaunchDarkly connection. + # + # Reading flags from a file is only intended for pre-production environments. Production + # environments should always be configured to receive flag updates from LaunchDarkly. # # To use this component, call {FileDataSource#factory}, and store its return value in the # {Config#data_source} property of your LaunchDarkly client configuration. In the options From 963e4ebadf716b5014dc9f60e8fa0c445bd07a9b Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 24 Jul 2019 20:31:58 -0700 Subject: [PATCH 132/135] drop events if inbox is full --- lib/ldclient-rb/events.rb | 87 ++++++++++++++++++++++++++++----------- 1 file changed, 62 insertions(+), 25 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 69563572..f57287a4 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -4,6 +4,23 @@ require "thread" require "time" +# +# Analytics event processing in the SDK involves several components. The purpose of this design is to +# minimize overhead on the application threads that are generating analytics events. +# +# EventProcessor receives an analytics event from the SDK client, on an application thread. It places +# the event in a bounded queue, the "inbox", and immediately returns. +# +# On a separate worker thread, EventDispatcher consumes events from the inbox. These are considered +# "input events" because they may or may not actually be sent to LaunchDarkly; most flag evaluation +# events are not sent, but are counted and the counters become part of a single summary event. +# EventDispatcher updates those counters, creates "index" events for any users that have not been seen +# recently, and places any events that will be sent to LaunchDarkly into the "outbox" queue. +# +# When it is time to flush events to LaunchDarkly, the contents of the outbox are handed off to +# another worker thread which sends the HTTP request. +# + module LaunchDarkly MAX_FLUSH_WORKERS = 5 CURRENT_SCHEMA_VERSION = 3 @@ -68,28 +85,30 @@ class StopMessage < SynchronousMessage # @private class EventProcessor def initialize(sdk_key, config, client = nil) - @queue = Queue.new + @logger = config.logger + @inbox = SizedQueue.new(config.capacity) @flush_task = Concurrent::TimerTask.new(execution_interval: config.flush_interval) do - @queue << FlushMessage.new + post_to_inbox(FlushMessage.new) end @flush_task.execute @users_flush_task = Concurrent::TimerTask.new(execution_interval: config.user_keys_flush_interval) do - @queue << FlushUsersMessage.new + post_to_inbox(FlushUsersMessage.new) end @users_flush_task.execute @stopped = Concurrent::AtomicBoolean.new(false) - - EventDispatcher.new(@queue, sdk_key, config, client) + @inbox_full = Concurrent::AtomicBoolean.new(false) + + EventDispatcher.new(@inbox, sdk_key, config, client) end def add_event(event) event[:creationDate] = (Time.now.to_f * 1000).to_i - @queue << EventMessage.new(event) + post_to_inbox(EventMessage.new(event)) end def flush # flush is done asynchronously - @queue << FlushMessage.new + post_to_inbox(FlushMessage.new) end def stop @@ -97,9 +116,11 @@ def stop if @stopped.make_true @flush_task.shutdown @users_flush_task.shutdown - @queue << FlushMessage.new + # Note that here we are not calling post_to_inbox, because we *do* want to wait if the inbox + # is full; an orderly shutdown can't happen unless these messages are received. 
+ @inbox << FlushMessage.new stop_msg = StopMessage.new - @queue << stop_msg + @inbox << stop_msg stop_msg.wait_for_completion end end @@ -107,14 +128,30 @@ def stop # exposed only for testing def wait_until_inactive sync_msg = TestSyncMessage.new - @queue << sync_msg + @inbox << sync_msg sync_msg.wait_for_completion end + + private + + def post_to_inbox(message) + begin + @inbox.push(message, non_block=true) + rescue ThreadError + # If the inbox is full, it means the EventDispatcher thread is seriously backed up with not-yet-processed + # events. This is unlikely, but if it happens, it means the application is probably doing a ton of flag + # evaluations across many threads-- so if we wait for a space in the inbox, we risk a very serious slowdown + # of the app. To avoid that, we'll just drop the event. The log warning about this will only be shown once. + if @inbox_full.make_true + @logger.warn { "[LDClient] Events are being produced faster than they can be processed; some events will be dropped" } + end + end + end end # @private class EventDispatcher - def initialize(queue, sdk_key, config, client) + def initialize(inbox, sdk_key, config, client) @sdk_key = sdk_key @config = config @@ -129,10 +166,10 @@ def initialize(queue, sdk_key, config, client) @disabled = Concurrent::AtomicBoolean.new(false) @last_known_past_time = Concurrent::AtomicReference.new(0) - buffer = EventBuffer.new(config.capacity, config.logger) + outbox = EventBuffer.new(config.capacity, config.logger) flush_workers = NonBlockingThreadPool.new(MAX_FLUSH_WORKERS) - Thread.new { main_loop(queue, buffer, flush_workers) } + Thread.new { main_loop(inbox, outbox, flush_workers) } end private @@ -141,16 +178,16 @@ def now_millis() (Time.now.to_f * 1000).to_i end - def main_loop(queue, buffer, flush_workers) + def main_loop(inbox, outbox, flush_workers) running = true while running do begin - message = queue.pop + message = inbox.pop case message when EventMessage - dispatch_event(message.event, buffer) + dispatch_event(message.event, outbox) when FlushMessage - trigger_flush(buffer, flush_workers) + trigger_flush(outbox, flush_workers) when FlushUsersMessage @user_keys.clear when TestSyncMessage @@ -181,11 +218,11 @@ def synchronize_for_testing(flush_workers) flush_workers.wait_all end - def dispatch_event(event, buffer) + def dispatch_event(event, outbox) return if @disabled.value # Always record the event in the summary. - buffer.add_to_summary(event) + outbox.add_to_summary(event) # Decide whether to add the event to the payload. Feature events may be added twice, once for # the event (if tracked) and once for debugging. @@ -205,7 +242,7 @@ def dispatch_event(event, buffer) # an identify event for that user. if !(will_add_full_event && @config.inline_users_in_events) if event.has_key?(:user) && !notice_user(event[:user]) && event[:kind] != "identify" - buffer.add_event({ + outbox.add_event({ kind: "index", creationDate: event[:creationDate], user: event[:user] @@ -213,8 +250,8 @@ def dispatch_event(event, buffer) end end - buffer.add_event(event) if will_add_full_event - buffer.add_event(debug_event) if !debug_event.nil? + outbox.add_event(event) if will_add_full_event + outbox.add_event(debug_event) if !debug_event.nil? end # Add to the set of users we've noticed, and return true if the user was already known to us. 
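As background for the `post_to_inbox` change above, a standalone sketch (not SDK code) of the `SizedQueue` behavior it relies on: a non-blocking push raises `ThreadError` when the bounded queue is full, which is what lets the event processor drop the event instead of blocking the application thread.

```ruby
inbox = SizedQueue.new(2)   # bounded queue, analogous to the event processor's inbox

3.times do |i|
  begin
    inbox.push("event #{i}", true)   # second argument true = non-blocking push
  rescue ThreadError
    # The queue is full; drop the item rather than blocking the calling thread.
    puts "dropped event #{i}"
  end
end
```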
@@ -236,12 +273,12 @@ def should_debug_event(event) end end - def trigger_flush(buffer, flush_workers) + def trigger_flush(outbox, flush_workers) if @disabled.value return end - payload = buffer.get_payload + payload = outbox.get_payload if !payload.events.empty? || !payload.summary.counters.empty? # If all available worker threads are busy, success will be false and no job will be queued. success = flush_workers.post do @@ -252,7 +289,7 @@ def trigger_flush(buffer, flush_workers) Util.log_exception(@config.logger, "Unexpected error in event processor", e) end end - buffer.clear if success # Reset our internal state, these events now belong to the flush worker + outbox.clear if success # Reset our internal state, these events now belong to the flush worker end end From f0581a0120c987f9af5b1e42c09cffe2fb486ac8 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 19 Aug 2019 15:39:48 -0700 Subject: [PATCH 133/135] update doc comment for track with metric_value --- lib/ldclient-rb/ldclient.rb | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lib/ldclient-rb/ldclient.rb b/lib/ldclient-rb/ldclient.rb index f75c8930..b7c2ee85 100644 --- a/lib/ldclient-rb/ldclient.rb +++ b/lib/ldclient-rb/ldclient.rb @@ -231,6 +231,11 @@ def identify(user) # Note that event delivery is asynchronous, so the event may not actually be sent # until later; see {#flush}. # + # As of this version’s release date, the LaunchDarkly service does not support the `metricValue` + # parameter. As a result, specifying `metricValue` will not yet produce any different behavior + # from omitting it. Refer to the [SDK reference guide](https://docs.launchdarkly.com/docs/ruby-sdk-reference#section-track) + # for the latest status. + # # @param event_name [String] The name of the event # @param user [Hash] The user to register; this can have all the same user properties # described in {#variation} From 7620721cdee390659cd86bd679c47b3d9781f9e9 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Mon, 30 Dec 2019 13:59:17 -0800 Subject: [PATCH 134/135] don't let user fall outside of last bucket in rollout --- lib/ldclient-rb/evaluation.rb | 18 ++++++++---- spec/evaluation_spec.rb | 52 +++++++++++++++++++++++++++++++++++ 2 files changed, 65 insertions(+), 5 deletions(-) diff --git a/lib/ldclient-rb/evaluation.rb b/lib/ldclient-rb/evaluation.rb index 43a03c23..d0d2aa38 100644 --- a/lib/ldclient-rb/evaluation.rb +++ b/lib/ldclient-rb/evaluation.rb @@ -323,20 +323,28 @@ def clause_match_user_no_segments(clause, user) end def variation_index_for_user(flag, rule, user) - if !rule[:variation].nil? # fixed variation - return rule[:variation] - elsif !rule[:rollout].nil? # percentage rollout + variation = rule[:variation] + return variation if !variation.nil? # fixed variation + rollout = rule[:rollout] + return nil if rollout.nil? + variations = rollout[:variations] + if !variations.nil? && variations.length > 0 # percentage rollout rollout = rule[:rollout] bucket_by = rollout[:bucketBy].nil? ? "key" : rollout[:bucketBy] bucket = bucket_user(user, flag[:key], bucket_by, flag[:salt]) sum = 0; - rollout[:variations].each do |variate| + variations.each do |variate| sum += variate[:weight].to_f / 100000.0 if bucket < sum return variate[:variation] end end - nil + # The user's bucket value was greater than or equal to the end of the last bucket. This could happen due + # to a rounding error, or due to the fact that we are scaling to 100000 rather than 99999, or the flag + # data could contain buckets that don't actually add up to 100000. 
Rather than returning an error in + # this case (or changing the scaling, which would potentially change the results for *all* users), we + # will simply put the user in the last bucket. + variations[-1][:variation] else # the rule isn't well-formed nil end diff --git a/spec/evaluation_spec.rb b/spec/evaluation_spec.rb index ff4b63f6..2efbd745 100644 --- a/spec/evaluation_spec.rb +++ b/spec/evaluation_spec.rb @@ -560,6 +560,58 @@ def boolean_flag_with_clauses(clauses) end end + describe "variation_index_for_user" do + it "matches bucket" do + user = { key: "userkey" } + flag_key = "flagkey" + salt = "salt" + + # First verify that with our test inputs, the bucket value will be greater than zero and less than 100000, + # so we can construct a rollout whose second bucket just barely contains that value + bucket_value = (bucket_user(user, flag_key, "key", salt) * 100000).truncate() + expect(bucket_value).to be > 0 + expect(bucket_value).to be < 100000 + + bad_variation_a = 0 + matched_variation = 1 + bad_variation_b = 2 + rule = { + rollout: { + variations: [ + { variation: bad_variation_a, weight: bucket_value }, # end of bucket range is not inclusive, so it will *not* match the target value + { variation: matched_variation, weight: 1 }, # size of this bucket is 1, so it only matches that specific value + { variation: bad_variation_b, weight: 100000 - (bucket_value + 1) } + ] + } + } + flag = { key: flag_key, salt: salt } + + result_variation = variation_index_for_user(flag, rule, user) + expect(result_variation).to be matched_variation + end + + it "uses last bucket if bucket value is equal to total weight" do + user = { key: "userkey" } + flag_key = "flagkey" + salt = "salt" + + bucket_value = (bucket_user(user, flag_key, "key", salt) * 100000).truncate() + + # We'll construct a list of variations that stops right at the target bucket value + rule = { + rollout: { + variations: [ + { variation: 0, weight: bucket_value } + ] + } + } + flag = { key: flag_key, salt: salt } + + result_variation = variation_index_for_user(flag, rule, user) + expect(result_variation).to be 0 + end + end + describe "bucket_user" do it "gets expected bucket values for specific keys" do user = { key: "userKeyA" } From 1cfcd527c38b7eca57fe9f52b88e41316efd2836 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Tue, 14 Jan 2020 16:32:01 -0800 Subject: [PATCH 135/135] add event payload ID header --- lib/ldclient-rb/events.rb | 3 +++ spec/events_spec.rb | 34 ++++++++++++++++++++++++++++++++-- 2 files changed, 35 insertions(+), 2 deletions(-) diff --git a/lib/ldclient-rb/events.rb b/lib/ldclient-rb/events.rb index 184facc4..bb12f6ec 100644 --- a/lib/ldclient-rb/events.rb +++ b/lib/ldclient-rb/events.rb @@ -1,6 +1,7 @@ require "concurrent" require "concurrent/atomics" require "concurrent/executors" +require "securerandom" require "thread" require "time" @@ -359,6 +360,7 @@ def run(sdk_key, config, client, payload, formatter) events_out = formatter.make_output_events(payload.events, payload.summary) res = nil body = events_out.to_json + payload_id = SecureRandom.uuid (0..1).each do |attempt| if attempt > 0 config.logger.warn { "[LDClient] Will retry posting events after 1 second" } @@ -374,6 +376,7 @@ def run(sdk_key, config, client, payload, formatter) req["Authorization"] = sdk_key req["User-Agent"] = "RubyClient/" + LaunchDarkly::VERSION req["X-LaunchDarkly-Event-Schema"] = CURRENT_SCHEMA_VERSION.to_s + req["X-LaunchDarkly-Payload-ID"] = payload_id req["Connection"] = "keep-alive" res = client.request(req) rescue 
StandardError => exn diff --git a/spec/events_spec.rb b/spec/events_spec.rb index 16bee286..1108a3ac 100644 --- a/spec/events_spec.rb +++ b/spec/events_spec.rb @@ -416,6 +416,29 @@ expect(hc.get_request["authorization"]).to eq "sdk_key" end + it "sends unique payload IDs" do + @ep = subject.new("sdk_key", default_config, hc) + e = { kind: "identify", user: user } + + @ep.add_event(e) + @ep.flush + @ep.wait_until_inactive + req0 = hc.get_request + + @ep.add_event(e) + @ep.flush + @ep.wait_until_inactive + req1 = hc.get_request + + id0 = req0["x-launchdarkly-payload-id"] + id1 = req1["x-launchdarkly-payload-id"] + expect(id0).not_to be_nil + expect(id0).not_to eq "" + expect(id1).not_to be nil + expect(id1).not_to eq "" + expect(id1).not_to eq id0 + end + def verify_unrecoverable_http_error(status) @ep = subject.new("sdk_key", default_config, hc) e = { kind: "identify", user: user } @@ -442,8 +465,15 @@ def verify_recoverable_http_error(status) @ep.flush @ep.wait_until_inactive - expect(hc.get_request).not_to be_nil - expect(hc.get_request).not_to be_nil + req0 = hc.get_request + expect(req0).not_to be_nil + req1 = hc.get_request + expect(req1).not_to be_nil + id0 = req0["x-launchdarkly-payload-id"] + expect(id0).not_to be_nil + expect(id0).not_to eq "" + expect(req1["x-launchdarkly-payload-id"]).to eq id0 + expect(hc.get_request).to be_nil # no 3rd request # now verify that a subsequent flush still generates a request