Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@ def initialize(metrics_processor, logger = nil)

def process_evaluation(feature_config, target, variation)

@logger.debug "Processing evaluation: " + feature_config.feature.to_s + ", " + target.identifier.to_s
@logger.debug "Processing evaluation: #{feature_config&.feature || 'nil feature'}, #{target&.identifier || 'nil target'}"

@metrics_processor.register_evaluation(target, feature_config, variation)
end
Expand Down
12 changes: 11 additions & 1 deletion lib/ff/ruby/server/sdk/api/metrics_event.rb
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,12 @@ class MetricsEvent

attr_accessor :feature_config, :target, :variation

def initialize(feature_config, target, variation)
def initialize(feature_config, target, variation, logger)

@target = target
@variation = variation
@feature_config = feature_config
@logger = logger
freeze
end

Expand All @@ -15,6 +16,15 @@ def ==(other)
end

def eql?(other)
# Guard clause to ensure other is the same type.
# While it should be, this adds protection for an edge case we are seeing with very large
# project sizes. Issue being tracked in FFM-12192, and once resolved, can feasibly remove
# these checks in a future release.
unless other.is_a?(MetricsEvent)
@logger.warn("Warning: Attempted to compare MetricsEvent with #{other.class.name}")
return false
end

feature_config.feature == other.feature_config.feature and
variation.identifier == other.variation.identifier and
target.identifier == other.target.identifier
Expand Down
55 changes: 51 additions & 4 deletions lib/ff/ruby/server/sdk/api/metrics_processor.rb
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
require_relative "../api/summary_metrics"

class MetricsProcessor < Closeable
GLOBAL_TARGET = Target.new(identifier: "__global__cf_target", name: "Global Target").freeze

class FrequencyMap < Concurrent::Map
def initialize(options = nil, &block)
Expand All @@ -29,6 +30,7 @@ def get(key)
self[key]
end

# TODO Will be removed in V2 in favour of simplified clearing. Currently not used outside of tests.
def drain_to_map
result = {}
each_key do |key|
Expand Down Expand Up @@ -65,7 +67,6 @@ def init(connector, config, callback)
@target_attribute = "target"
@global_target_identifier = "__global__cf_target" # <--- This target identifier is used to aggregate and send data for all
# targets as a summary
@global_target = Target.new("RubySDK1", identifier = @global_target_identifier, name = @global_target_name)
@ready = false
@jar_version = Ff::Ruby::Server::Sdk::VERSION
@server = "server"
Expand Down Expand Up @@ -111,12 +112,34 @@ def close

def register_evaluation(target, feature_config, variation)
register_evaluation_metric(feature_config, variation)
register_target_metric(target)
if target
register_target_metric(target)
end
end

private

def register_evaluation_metric(feature_config, variation)
# Guard clause to ensure feature_config, @global_target, and variation are valid.
# While they should be, this adds protection for an edge case we are seeing with very large
# project sizes. Issue being tracked in FFM-12192, and once resolved, can feasibly remove
# these checks in a future release.
if feature_config.nil? || !feature_config.respond_to?(:feature) || feature_config.feature.nil?
@config.logger.warn("Skipping invalid MetricsEvent: feature_config is missing or incomplete. feature_config=#{feature_config.inspect}")
return
end

if GLOBAL_TARGET.nil? || !GLOBAL_TARGET.respond_to?(:identifier) || GLOBAL_TARGET.identifier.nil?
@config.logger.warn("Skipping invalid MetricsEvent: global_target is missing or incomplete. global_target=#{GLOBAL_TARGET.inspect}")
return
end

if variation.nil? || !variation.respond_to?(:identifier) || variation.identifier.nil?
@config.logger.warn("Skipping invalid MetricsEvent: variation is missing or incomplete. variation=#{variation.inspect}")
return
end


if @evaluation_metrics.size > @max_buffer_size
unless @evaluation_warning_issued.true?
SdkCodes.warn_metrics_evaluations_max_size_exceeded(@config.logger)
Expand All @@ -125,7 +148,7 @@ def register_evaluation_metric(feature_config, variation)
return
end

event = MetricsEvent.new(feature_config, @global_target, variation)
event = MetricsEvent.new(feature_config, GLOBAL_TARGET, variation, @config.logger)
@evaluation_metrics.increment event
end

Expand Down Expand Up @@ -158,8 +181,14 @@ def run_one_iteration
end

def send_data_and_reset_cache(evaluation_metrics_map, target_metrics_map)
evaluation_metrics_map_clone = evaluation_metrics_map.drain_to_map
# Clone and clear evaluation metrics map
evaluation_metrics_map_clone = Concurrent::Map.new

evaluation_metrics_map.each_pair do |key, value|
evaluation_metrics_map_clone[key] = value
end

evaluation_metrics_map.clear
target_metrics_map_clone = Concurrent::Map.new

target_metrics_map.each_pair do |key, value|
Expand Down Expand Up @@ -188,6 +217,24 @@ def prepare_summary_metrics_body(evaluation_metrics_map, target_metrics_map)

total_count = 0
evaluation_metrics_map.each do |key, value|
# While Components should not be missing, this adds protection for an edge case we are seeing with very large
# project sizes. Issue being tracked in FFM-12192, and once resolved, can feasibly remove
# these checks in a future release.
# Initialize an array to collect missing components
missing_components = []

# Check each required component and add to missing_components if absent
missing_components << 'feature_config' unless key.respond_to?(:feature_config) && key.feature_config
missing_components << 'variation' unless key.respond_to?(:variation) && key.variation
missing_components << 'target' unless key.respond_to?(:target) && key.target
missing_components << 'count' if value.nil?

# If any components are missing, log a detailed warning and skip processing
unless missing_components.empty?
@config.logger.warn "Skipping invalid metrics event: missing #{missing_components.join(', ')} in key: #{key.inspect}"
next
end

total_count += value
metrics_data = OpenapiClient::MetricsData.new({ :attributes => [] })
metrics_data.timestamp = (Time.now.to_f * 1000).to_i
Expand Down
2 changes: 1 addition & 1 deletion lib/ff/ruby/server/sdk/version.rb
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ module Ruby
module Server
module Sdk

VERSION = "1.4.0"
VERSION = "1.4.1"
end
end
end
Expand Down
2 changes: 1 addition & 1 deletion scripts/sdk_specs.sh
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#!/bin/bash

export ff_ruby_sdk="ff-ruby-server-sdk"
export ff_ruby_sdk_version="1.4.0"
export ff_ruby_sdk_version="1.4.1"
2 changes: 1 addition & 1 deletion test/ff/ruby/server/sdk/cf_client_test.rb
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ class CfClientTest < Minitest::Test
.event_url("http://localhost:#{port}/api/1.0")
.config_url("http://localhost:#{port}/api/1.0").build)

client.wait_for_initialization
client.wait_for_initialization(timeout_ms: 5000)

target = Target.new("RubyTestSDK", identifier="rubytestsdk", attributes={"location": "emea"})

Expand Down
79 changes: 58 additions & 21 deletions test/ff/ruby/server/sdk/metrics_processor_test.rb
Original file line number Diff line number Diff line change
Expand Up @@ -132,7 +132,7 @@ def test_metrics

assert_equal 0, freq_map.size, "not all pending metrics were flushed"
assert_equal 1, connector.get_captured_metrics.size
assert_equal 2, connector.get_captured_metrics[0].target_data.size, "there should only be 2 targets"
assert_equal 1, connector.get_captured_metrics[0].target_data.size, "there should only be 1 target"

connector.get_captured_metrics.each do |metric|
metric.metrics_data.each do |metric_data|
Expand All @@ -156,16 +156,15 @@ def assert_target_data(target_data)
end
end

assert targets.key?("__global__cf_target")
assert targets.key?("target-id")
end

def test_frequency_map_increment

map = MetricsProcessor::FrequencyMap.new

event1 = MetricsEvent.new(@feature1, @target, @variation1)
event2 = MetricsEvent.new(@feature2, @target, @variation2)
event1 = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT))
event2 = MetricsEvent.new(@feature2, @target, @variation2, Logger.new(STDOUT))

map.increment event1
map.increment event2
Expand All @@ -180,8 +179,8 @@ def test_frequency_map_increment
def test_frequency_map_drain_to_map
map = MetricsProcessor::FrequencyMap.new

event1 = MetricsEvent.new(@feature1, @target, @variation1)
event2 = MetricsEvent.new(@feature2, @target, @variation2)
event1 = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT))
event2 = MetricsEvent.new(@feature2, @target, @variation2, Logger.new(STDOUT))

map.increment event1
map.increment event2
Expand All @@ -198,43 +197,81 @@ def test_frequency_map_drain_to_map

def test_comparable_metrics_event_equals_and_hash

event1 = MetricsEvent.new(@feature1, @target, @variation1)
event2 = MetricsEvent.new(@feature1, @target, @variation1)
event1 = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT))
event2 = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT))

assert(event1 == event2)

event1 = MetricsEvent.new(@feature1, @target, @variation1)
event2 = MetricsEvent.new(@feature2, @target, @variation2)
event1 = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT))
event2 = MetricsEvent.new(@feature2, @target, @variation2, Logger.new(STDOUT))

assert(event1 != event2)
end

def test_flush_map_when_buffer_fills

def test_metrics_processor_prevents_invalid_metrics_event
logger = Logger.new(STDOUT)
callback = TestCallback.new
connector = TestConnector.new
config = Minitest::Mock.new
config.expect :kind_of?, true, [Config.class]
config.expect :metrics_service_acceptable_duration, 10000

(1..30).each { |_| config.expect :buffer_size, 2 }
(1..30).each { |_| config.expect :logger, logger }
(1..20).each { |_| config.expect :buffer_size, 10 }
(1..20).each { |_| config.expect :logger, logger }

metrics_processor = MetricsProcessor.new
metrics_processor.init connector, config, callback
metrics_processor.init(connector, config, callback)

callback.wait_until_ready

# several evaluations with a buffer size of 2
metrics_processor.register_evaluation @target, @feature1, @variation1
metrics_processor.register_evaluation @target, @feature1, @variation2
metrics_processor.register_evaluation @target, @feature2, @variation1
metrics_processor.register_evaluation @target, @feature2, @variation2
# Attempt to register invalid evaluations
metrics_processor.register_evaluation(@target,nil, @variation1)
metrics_processor.register_evaluation(@target,nil, nil)
metrics_processor.register_evaluation(nil, @feature1, nil)


# Register some valid evaluations
metrics_processor.register_evaluation(@target, @feature1, @variation1)
metrics_processor.register_evaluation(@target, @feature2, @variation2)
# Nil target, which is a valid input to variation methods
metrics_processor.register_evaluation(nil, @feature2, @variation2)

# Run iteration
metrics_processor.send(:run_one_iteration)

# Wait for metrics to be posted
connector.wait_for_metrics

assert connector.wait_for_metrics, "no metrics were posted"
# Check that only valid metrics are sent
captured_metrics = connector.get_captured_metrics
assert_equal 1, captured_metrics.size, "Only one metrics batch should be sent"

metrics_data = captured_metrics[0].metrics_data

# Since we have two valid events, two metrics_data should be present
assert_equal 2, metrics_data.size, "Invalid metrics should be ignored"

# Verify that only valid features are present in sent metrics
sent_features = metrics_data.map { |md| md.attributes.find { |kv| kv.key == "featureName" }.value }
assert_includes(sent_features, "feature-name")
assert_includes(sent_features, "feature-name2") # Assuming @feature2 has "feature-name2"

# Invalid event is not among the sent features
refute_includes(sent_features, nil, "Invalid MetricsEvent should not be included in metrics_data")

# Valid events were processed correctly
assert_equal 2, metrics_data.size, "There should be two metrics_data entries for valid events"

# Target data is still correctly sent
assert_equal 1, captured_metrics[0].target_data.size, "There should only be a single target"
end


# Comparing a MetricsEvent against an object of an unrelated type must not
# raise; the eql? guard clause should simply report inequality (FFM-12192).
def test_metrics_event_eql_with_invalid_object
  metrics_event = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT))
  other_object = "Not a MetricsEvent"

  refute_equal(metrics_event, other_object, "MetricsEvent should not be equal to a non-MetricsEvent object")
end

end