From 9ac456251834e41b1aa15282aa9c22e1e3c43d2a Mon Sep 17 00:00:00 2001 From: Erdi Rowlands Date: Tue, 12 Nov 2024 12:10:45 +0000 Subject: [PATCH 1/4] FFM-12192 Add guards for metric events and metric processing --- lib/ff/ruby/server/sdk/api/metrics_event.rb | 5 +++++ lib/ff/ruby/server/sdk/api/metrics_processor.rb | 16 ++++++++++++++++ 2 files changed, 21 insertions(+) diff --git a/lib/ff/ruby/server/sdk/api/metrics_event.rb b/lib/ff/ruby/server/sdk/api/metrics_event.rb index 1ae368e..1355fa7 100644 --- a/lib/ff/ruby/server/sdk/api/metrics_event.rb +++ b/lib/ff/ruby/server/sdk/api/metrics_event.rb @@ -15,6 +15,11 @@ def ==(other) end def eql?(other) + unless other.is_a?(MetricsEvent) + @logger.warn("Warning: Attempted to compare MetricsEvent with #{other.class.name}" ) + return false + end + feature_config.feature == other.feature_config.feature and variation.identifier == other.variation.identifier and target.identifier == other.target.identifier diff --git a/lib/ff/ruby/server/sdk/api/metrics_processor.rb b/lib/ff/ruby/server/sdk/api/metrics_processor.rb index 84d7ec8..4d4e17c 100644 --- a/lib/ff/ruby/server/sdk/api/metrics_processor.rb +++ b/lib/ff/ruby/server/sdk/api/metrics_processor.rb @@ -188,6 +188,22 @@ def prepare_summary_metrics_body(evaluation_metrics_map, target_metrics_map) total_count = 0 evaluation_metrics_map.each do |key, value| + # Components should not be missing, but as we transition to Ruby 3 support, let's + # add validation. 
+ # Initialize an array to collect missing components + missing_components = [] + + # Check each required component and add to missing_components if absent + missing_components << 'feature_config' unless key.respond_to?(:feature_config) && key.feature_config + missing_components << 'variation' unless key.respond_to?(:variation) && key.variation + missing_components << 'target' unless key.respond_to?(:target) && key.target + + # If any components are missing, log a detailed warning and skip processing + unless missing_components.empty? + @config.logger.warn "Skipping invalid metrics event: missing #{missing_components.join(', ')} in key: #{key.inspect}, full details: #{key.inspect}" + next + end + total_count += value metrics_data = OpenapiClient::MetricsData.new({ :attributes => [] }) metrics_data.timestamp = (Time.now.to_f * 1000).to_i From a840ce3e6582aaf8dd4c944527b2a1ed128ef0a3 Mon Sep 17 00:00:00 2001 From: Erdi Rowlands Date: Tue, 12 Nov 2024 12:12:37 +0000 Subject: [PATCH 2/4] FFM-12192 Simplify evaluation metrics map clearance --- lib/ff/ruby/server/sdk/api/metrics_processor.rb | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/lib/ff/ruby/server/sdk/api/metrics_processor.rb b/lib/ff/ruby/server/sdk/api/metrics_processor.rb index 4d4e17c..0be5c88 100644 --- a/lib/ff/ruby/server/sdk/api/metrics_processor.rb +++ b/lib/ff/ruby/server/sdk/api/metrics_processor.rb @@ -29,6 +29,7 @@ def get(key) self[key] end + # TODO Will be removed in V2 in favour of simplified clearing. Currently not used outside of tests. 
def drain_to_map result = {} each_key do |key| @@ -158,8 +159,14 @@ def run_one_iteration end def send_data_and_reset_cache(evaluation_metrics_map, target_metrics_map) - evaluation_metrics_map_clone = evaluation_metrics_map.drain_to_map + # Clone and clear evaluation metrics map + evaluation_metrics_map_clone = Concurrent::Map.new + evaluation_metrics_map.each_pair do |key, value| + evaluation_metrics_map_clone[key] = value + end + + evaluation_metrics_map.clear target_metrics_map_clone = Concurrent::Map.new target_metrics_map.each_pair do |key, value| From 16e70234d2673e70406c974e7de5b659f6c49a52 Mon Sep 17 00:00:00 2001 From: Erdi Rowlands Date: Tue, 12 Nov 2024 12:39:16 +0000 Subject: [PATCH 3/4] FFM-12192 Add test for metrics event comparison --- lib/ff/ruby/server/sdk/api/metrics_event.rb | 3 +- .../ruby/server/sdk/api/metrics_processor.rb | 28 +++++++++++++++++-- .../ruby/server/sdk/metrics_processor_test.rb | 15 +++++++--- 3 files changed, 38 insertions(+), 8 deletions(-) diff --git a/lib/ff/ruby/server/sdk/api/metrics_event.rb b/lib/ff/ruby/server/sdk/api/metrics_event.rb index 1355fa7..2aff6d3 100644 --- a/lib/ff/ruby/server/sdk/api/metrics_event.rb +++ b/lib/ff/ruby/server/sdk/api/metrics_event.rb @@ -2,11 +2,12 @@ class MetricsEvent attr_accessor :feature_config, :target, :variation - def initialize(feature_config, target, variation) + def initialize(feature_config, target, variation, logger) @target = target @variation = variation @feature_config = feature_config + @logger = logger freeze end diff --git a/lib/ff/ruby/server/sdk/api/metrics_processor.rb b/lib/ff/ruby/server/sdk/api/metrics_processor.rb index 0be5c88..7f708c4 100644 --- a/lib/ff/ruby/server/sdk/api/metrics_processor.rb +++ b/lib/ff/ruby/server/sdk/api/metrics_processor.rb @@ -118,6 +118,26 @@ def register_evaluation(target, feature_config, variation) private def register_evaluation_metric(feature_config, variation) + # Guard clause to ensure feature_config, @global_target, and variation 
are valid. + # While they should be, this adds protection for an edge case we are seeing with very large + # project sizes. Issue being tracked in FFM-12192, and once resolved, can feasibly remove + # these checks in a future release. + if feature_config.nil? || !feature_config.respond_to?(:feature) || feature_config.feature.nil? + @config.logger.warn("Invalid MetricsEvent: feature_config is missing or incomplete. feature_config=#{feature_config.inspect}") + return + end + + if @global_target.nil? || !@global_target.respond_to?(:identifier) || @global_target.identifier.nil? + @config.logger.warn("Invalid MetricsEvent: global_target is missing or incomplete. global_target=#{@global_target.inspect}") + return + end + + if variation.nil? || !variation.respond_to?(:identifier) || variation.identifier.nil? + @config.logger.warn("Invalid MetricsEvent: variation is missing or incomplete. variation=#{variation.inspect}") + return + end + + if @evaluation_metrics.size > @max_buffer_size unless @evaluation_warning_issued.true? SdkCodes.warn_metrics_evaluations_max_size_exceeded(@config.logger) @@ -126,7 +146,7 @@ def register_evaluation_metric(feature_config, variation) return end - event = MetricsEvent.new(feature_config, @global_target, variation) + event = MetricsEvent.new(feature_config, @global_target, variation, @config.logger) @evaluation_metrics.increment event end @@ -195,8 +215,9 @@ def prepare_summary_metrics_body(evaluation_metrics_map, target_metrics_map) total_count = 0 evaluation_metrics_map.each do |key, value| - # Components should not be missing, but as we transition to Ruby 3 support, let's - # add validation. + # While Components should not be missing, this adds protection for an edge case we are seeing with very large + # project sizes. Issue being tracked in FFM-12192, and once resolved, can feasibly remove + # these checks in a future release. 
# Initialize an array to collect missing components missing_components = [] @@ -204,6 +225,7 @@ def prepare_summary_metrics_body(evaluation_metrics_map, target_metrics_map) missing_components << 'feature_config' unless key.respond_to?(:feature_config) && key.feature_config missing_components << 'variation' unless key.respond_to?(:variation) && key.variation missing_components << 'target' unless key.respond_to?(:target) && key.target + missing_components << 'count' if value.nil? # If any components are missing, log a detailed warning and skip processing unless missing_components.empty? diff --git a/test/ff/ruby/server/sdk/metrics_processor_test.rb b/test/ff/ruby/server/sdk/metrics_processor_test.rb index ee65cd9..c2ece2f 100644 --- a/test/ff/ruby/server/sdk/metrics_processor_test.rb +++ b/test/ff/ruby/server/sdk/metrics_processor_test.rb @@ -198,17 +198,24 @@ def test_frequency_map_drain_to_map def test_comparable_metrics_event_equals_and_hash - event1 = MetricsEvent.new(@feature1, @target, @variation1) - event2 = MetricsEvent.new(@feature1, @target, @variation1) + event1 = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT)) + event2 = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT)) assert(event1 == event2) - event1 = MetricsEvent.new(@feature1, @target, @variation1) - event2 = MetricsEvent.new(@feature2, @target, @variation2) + event1 = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT)) + event2 = MetricsEvent.new(@feature2, @target, @variation2, Logger.new(STDOUT)) assert(event1 != event2) end + def test_metrics_event_eql_with_invalid_object + event = MetricsEvent.new(@feature1, @target, @variation1,Logger.new(STDOUT)) + non_event = "Not a MetricsEvent" + + refute_equal(event, non_event, "MetricsEvent should not be equal to a non-MetricsEvent object") + end + def test_flush_map_when_buffer_fills logger = Logger.new(STDOUT) From 6ca9329dd7944c1828bf0690a7cdfa221ff6fcb5 Mon Sep 17 00:00:00 2001 From: Erdi 
Rowlands Date: Tue, 12 Nov 2024 13:02:59 +0000 Subject: [PATCH 4/4] FFM-12192 Only create global target once / Add guard for nil target, and add test for invalid metric / Fix tests --- .../inner_client_flag_evaluate_callback.rb | 2 +- lib/ff/ruby/server/sdk/api/metrics_event.rb | 4 + .../ruby/server/sdk/api/metrics_processor.rb | 16 ++-- lib/ff/ruby/server/sdk/version.rb | 2 +- scripts/sdk_specs.sh | 2 +- test/ff/ruby/server/sdk/cf_client_test.rb | 2 +- .../ruby/server/sdk/metrics_processor_test.rb | 78 +++++++++++++------ 7 files changed, 71 insertions(+), 35 deletions(-) diff --git a/lib/ff/ruby/server/sdk/api/inner_client_flag_evaluate_callback.rb b/lib/ff/ruby/server/sdk/api/inner_client_flag_evaluate_callback.rb index ba28f42..820ed96 100644 --- a/lib/ff/ruby/server/sdk/api/inner_client_flag_evaluate_callback.rb +++ b/lib/ff/ruby/server/sdk/api/inner_client_flag_evaluate_callback.rb @@ -23,7 +23,7 @@ def initialize(metrics_processor, logger = nil) def process_evaluation(feature_config, target, variation) - @logger.debug "Processing evaluation: " + feature_config.feature.to_s + ", " + target.identifier.to_s + @logger.debug "Processing evaluation: #{feature_config&.feature || 'nil feature'}, #{target&.identifier || 'nil target'}" @metrics_processor.register_evaluation(target, feature_config, variation) end diff --git a/lib/ff/ruby/server/sdk/api/metrics_event.rb b/lib/ff/ruby/server/sdk/api/metrics_event.rb index 2aff6d3..9373421 100644 --- a/lib/ff/ruby/server/sdk/api/metrics_event.rb +++ b/lib/ff/ruby/server/sdk/api/metrics_event.rb @@ -16,6 +16,10 @@ def ==(other) end def eql?(other) + # Guard clause other is the same type. + # While it should be, this adds protection for an edge case we are seeing with very large + # project sizes. Issue being tracked in FFM-12192, and once resolved, can feasibly remove + # these checks in a future release. 
unless other.is_a?(MetricsEvent) @logger.warn("Warning: Attempted to compare MetricsEvent with #{other.class.name}" ) return false diff --git a/lib/ff/ruby/server/sdk/api/metrics_processor.rb b/lib/ff/ruby/server/sdk/api/metrics_processor.rb index 7f708c4..6a93a1b 100644 --- a/lib/ff/ruby/server/sdk/api/metrics_processor.rb +++ b/lib/ff/ruby/server/sdk/api/metrics_processor.rb @@ -9,6 +9,7 @@ require_relative "../api/summary_metrics" class MetricsProcessor < Closeable + GLOBAL_TARGET = Target.new(identifier: "__global__cf_target", name: "Global Target").freeze class FrequencyMap < Concurrent::Map def initialize(options = nil, &block) @@ -66,7 +67,6 @@ def init(connector, config, callback) @target_attribute = "target" @global_target_identifier = "__global__cf_target" # <--- This target identifier is used to aggregate and send data for all # targets as a summary - @global_target = Target.new("RubySDK1", identifier = @global_target_identifier, name = @global_target_name) @ready = false @jar_version = Ff::Ruby::Server::Sdk::VERSION @server = "server" @@ -112,7 +112,9 @@ def close def register_evaluation(target, feature_config, variation) register_evaluation_metric(feature_config, variation) - register_target_metric(target) + if target + register_target_metric(target) + end end private @@ -123,17 +125,17 @@ def register_evaluation_metric(feature_config, variation) # project sizes. Issue being tracked in FFM-12192, and once resolved, can feasibly remove # these checks in a future release. if feature_config.nil? || !feature_config.respond_to?(:feature) || feature_config.feature.nil? - @config.logger.warn("Invalid MetricsEvent: feature_config is missing or incomplete. feature_config=#{feature_config.inspect}") + @config.logger.warn("Skipping invalid MetricsEvent: feature_config is missing or incomplete. feature_config=#{feature_config.inspect}") return end - if @global_target.nil? || !@global_target.respond_to?(:identifier) || @global_target.identifier.nil? 
- @config.logger.warn("Invalid MetricsEvent: global_target is missing or incomplete. global_target=#{@global_target.inspect}") + if GLOBAL_TARGET.nil? || !GLOBAL_TARGET.respond_to?(:identifier) || GLOBAL_TARGET.identifier.nil? + @config.logger.warn("Skipping invalid MetricsEvent: global_target is missing or incomplete. global_target=#{GLOBAL_TARGET.inspect}") + return end if variation.nil? || !variation.respond_to?(:identifier) || variation.identifier.nil? - @config.logger.warn("Invalid MetricsEvent: variation is missing or incomplete. variation=#{variation.inspect}") + @config.logger.warn("Skipping invalid MetricsEvent: variation is missing or incomplete. variation=#{variation.inspect}") return end @@ -146,7 +148,7 @@ def register_evaluation_metric(feature_config, variation) return end - event = MetricsEvent.new(feature_config, @global_target, variation, @config.logger) + event = MetricsEvent.new(feature_config, GLOBAL_TARGET, variation, @config.logger) @evaluation_metrics.increment event end diff --git a/lib/ff/ruby/server/sdk/version.rb b/lib/ff/ruby/server/sdk/version.rb index 2cdbba2..6521c8d 100644 --- a/lib/ff/ruby/server/sdk/version.rb +++ b/lib/ff/ruby/server/sdk/version.rb @@ -5,7 +5,7 @@ module Ruby module Server module Sdk - VERSION = "1.4.0" + VERSION = "1.4.1" end end end diff --git a/scripts/sdk_specs.sh b/scripts/sdk_specs.sh index a687c96..8d9699a 100755 --- a/scripts/sdk_specs.sh +++ b/scripts/sdk_specs.sh @@ -1,4 +1,4 @@ #!/bin/bash export ff_ruby_sdk="ff-ruby-server-sdk" -export ff_ruby_sdk_version="1.4.0" +export ff_ruby_sdk_version="1.4.1" diff --git a/test/ff/ruby/server/sdk/cf_client_test.rb b/test/ff/ruby/server/sdk/cf_client_test.rb index 871886e..d6b9446 100644 --- a/test/ff/ruby/server/sdk/cf_client_test.rb +++ b/test/ff/ruby/server/sdk/cf_client_test.rb @@ -37,7 +37,7 @@ class CfClientTest < Minitest::Test .event_url("http://localhost:#{port}/api/1.0") .config_url("http://localhost:#{port}/api/1.0").build) - 
client.wait_for_initialization + client.wait_for_initialization(timeout_ms: 5000) target = Target.new("RubyTestSDK", identifier="rubytestsdk", attributes={"location": "emea"}) diff --git a/test/ff/ruby/server/sdk/metrics_processor_test.rb b/test/ff/ruby/server/sdk/metrics_processor_test.rb index c2ece2f..a172ab0 100644 --- a/test/ff/ruby/server/sdk/metrics_processor_test.rb +++ b/test/ff/ruby/server/sdk/metrics_processor_test.rb @@ -132,7 +132,7 @@ def test_metrics assert_equal 0, freq_map.size, "not all pending metrics were flushed" assert_equal 1, connector.get_captured_metrics.size - assert_equal 2, connector.get_captured_metrics[0].target_data.size, "there should only be 2 targets" + assert_equal 1, connector.get_captured_metrics[0].target_data.size, "there should only be 1 target" connector.get_captured_metrics.each do |metric| metric.metrics_data.each do |metric_data| @@ -156,7 +156,6 @@ def assert_target_data(target_data) end end - assert targets.key?("__global__cf_target") assert targets.key?("target-id") end @@ -164,8 +163,8 @@ def test_frequency_map_increment map = MetricsProcessor::FrequencyMap.new - event1 = MetricsEvent.new(@feature1, @target, @variation1) - event2 = MetricsEvent.new(@feature2, @target, @variation2) + event1 = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT)) + event2 = MetricsEvent.new(@feature2, @target, @variation2, Logger.new(STDOUT)) map.increment event1 map.increment event2 @@ -180,8 +179,8 @@ def test_frequency_map_drain_to_map map = MetricsProcessor::FrequencyMap.new - event1 = MetricsEvent.new(@feature1, @target, @variation1) - event2 = MetricsEvent.new(@feature2, @target, @variation2) + event1 = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT)) + event2 = MetricsEvent.new(@feature2, @target, @variation2, Logger.new(STDOUT)) map.increment event1 map.increment event2 @@ -209,15 +208,7 @@ def test_comparable_metrics_event_equals_and_hash assert(event1 != 
event2) end - def test_metrics_event_eql_with_invalid_object - event = MetricsEvent.new(@feature1, @target, @variation1,Logger.new(STDOUT)) - non_event = "Not a MetricsEvent" - - refute_equal(event, non_event, "MetricsEvent should not be equal to a non-MetricsEvent object") - end - - def test_flush_map_when_buffer_fills - + def test_metrics_processor_prevents_invalid_metrics_event logger = Logger.new(STDOUT) callback = TestCallback.new connector = TestConnector.new @@ -225,23 +216,62 @@ def test_flush_map_when_buffer_fills config.expect :kind_of?, true, [Config.class] config.expect :metrics_service_acceptable_duration, 10000 - (1..30).each { |_| config.expect :buffer_size, 2 } - (1..30).each { |_| config.expect :logger, logger } + (1..20).each { |_| config.expect :buffer_size, 10 } + (1..20).each { |_| config.expect :logger, logger } metrics_processor = MetricsProcessor.new - metrics_processor.init connector, config, callback + metrics_processor.init(connector, config, callback) callback.wait_until_ready - # several evaluations with a buffer size of 2 - metrics_processor.register_evaluation @target, @feature1, @variation1 - metrics_processor.register_evaluation @target, @feature1, @variation2 - metrics_processor.register_evaluation @target, @feature2, @variation1 - metrics_processor.register_evaluation @target, @feature2, @variation2 + # Attempt to register invalid evaluations + metrics_processor.register_evaluation(@target,nil, @variation1) + metrics_processor.register_evaluation(@target,nil, nil) + metrics_processor.register_evaluation(nil, @feature1, nil) + - assert connector.wait_for_metrics, "no metrics were posted" + # Register some valid evaluations + metrics_processor.register_evaluation(@target, @feature1, @variation1) + metrics_processor.register_evaluation(@target, @feature2, @variation2) + # Nil target, which is a valid input to variation methods + metrics_processor.register_evaluation(nil, @feature2, @variation2) + # Run iteration + 
metrics_processor.send(:run_one_iteration) + + # Wait for metrics to be posted + connector.wait_for_metrics + + # Check that only valid metrics are sent + captured_metrics = connector.get_captured_metrics + assert_equal 1, captured_metrics.size, "Only one metrics batch should be sent" + + metrics_data = captured_metrics[0].metrics_data + + # Since we have two valid events, two metrics_data should be present + assert_equal 2, metrics_data.size, "Invalid metrics should be ignored" + + # Verify that only valid features are present in sent metrics + sent_features = metrics_data.map { |md| md.attributes.find { |kv| kv.key == "featureName" }.value } + assert_includes(sent_features, "feature-name") + assert_includes(sent_features, "feature-name2") # Assuming @feature2 has "feature-name2" + + # Invalid event is not among the sent features + refute_includes(sent_features, nil, "Invalid MetricsEvent should not be included in metrics_data") + + # Valid events were processed correctly + assert_equal 2, metrics_data.size, "There should be two metrics_data entries for valid events" + + # Target data is still correctly sent + assert_equal 1, captured_metrics[0].target_data.size, "There should only be a single target" end + def test_metrics_event_eql_with_invalid_object + event = MetricsEvent.new(@feature1, @target, @variation1,Logger.new(STDOUT)) + non_event = "Not a MetricsEvent" + + refute_equal(event, non_event, "MetricsEvent should not be equal to a non-MetricsEvent object") + end + end \ No newline at end of file