Commit 766909d

FFM-12192 patch 5 (#52)
* CI-15294 Use string as key for metrics
* CI-15294 delete metrics event
* CI-15294 delete metrics event
* CI-15294 Refactor tests to account for simple string keys
* CI-15294 Bump version and fix comment
1 parent c5053ce commit 766909d
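
At a glance, this patch stops keying the evaluation-metrics frequency map on MetricsEvent objects and keys it on a plain string instead. Below is a minimal before/after sketch of the keying change, using simplified stand-in structs rather than the SDK's real classes (names here are illustrative only):

# Illustrative stand-ins only -- not the SDK's real OpenapiClient classes.
FeatureConfig   = Struct.new(:feature)
Variation       = Struct.new(:identifier)
MetricsEventKey = Struct.new(:feature_config, :target, :variation) # stand-in for the now-deleted MetricsEvent

feature_config = FeatureConfig.new("bool-flag")
variation      = Variation.new("true")

# Before (1.4.4 and earlier): the frequency map was keyed on an event object that
# held references to the feature config, target and variation.
old_map = Hash.new(0)
old_key = MetricsEventKey.new(feature_config, "__global__cf_target", variation)
old_map[old_key] += 1

# After (1.4.5): the key is a NUL-delimited string, so map keys reference no other SDK objects.
new_map = Hash.new(0)
new_key = "#{feature_config.feature}\0#{variation.identifier}"
new_map[new_key] += 1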

File tree: 8 files changed (+55 / -177 lines)

lib/ff/ruby/server/sdk.rb

Lines changed: 0 additions & 1 deletion
@@ -17,7 +17,6 @@
 require_relative "sdk/api/evaluation"
 require_relative "sdk/api/inner_client"
 require_relative "sdk/api/auth_service"
-require_relative "sdk/api/metrics_event"
 require_relative "sdk/api/default_cache"
 require_relative "sdk/api/config_builder"
 require_relative "sdk/api/file_map_store"

lib/ff/ruby/server/sdk/api/evaluator.rb

Lines changed: 3 additions & 1 deletion
@@ -117,7 +117,9 @@ def evaluate(identifier, target, expected, callback)
 
     if variation != nil
       if callback != nil
-        callback.process_evaluation(flag, target, variation)
+        feature_name = flag.feature
+        variation_identifier = variation.identifier
+        callback.process_evaluation(feature_name, target, variation_identifier)
       end
       return variation
     end

lib/ff/ruby/server/sdk/api/inner_client_flag_evaluate_callback.rb

Lines changed: 3 additions & 3 deletions
@@ -21,10 +21,10 @@ def initialize(metrics_processor, logger = nil)
     @metrics_processor = metrics_processor
   end
 
-  def process_evaluation(feature_config, target, variation)
+  def process_evaluation(feature_name, target, variation_identifier)
 
-    @logger.debug "Processing evaluation: #{feature_config&.feature || 'nil feature'}, #{target&.identifier || 'nil target'}"
+    @logger.debug "Processing evaluation: #{feature_name || 'nil feature'}, #{variation_identifier || 'nil variation'}, #{target&.identifier || 'nil target'}"
 
-    @metrics_processor.register_evaluation(target, feature_config, variation)
+    @metrics_processor.register_evaluation(target, feature_name, variation_identifier)
   end
 end
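
Together with the evaluator change above, the evaluation path is now string-based end to end. The following is a minimal, self-contained sketch of that call chain; TinyMetricsProcessor and TinyCallback are illustrative stand-ins for the SDK's MetricsProcessor and InnerClientFlagEvaluateCallback, not the real classes:

require "logger"

# Illustrative stand-ins only.
class TinyMetricsProcessor
  attr_reader :evaluation_metrics

  def initialize
    @evaluation_metrics = Hash.new(0)
  end

  # Same shape as the new register_evaluation: strings in, no objects retained.
  def register_evaluation(target, feature_name, variation_identifier)
    @evaluation_metrics["#{feature_name}\0#{variation_identifier}"] += 1
  end
end

class TinyCallback
  def initialize(metrics_processor, logger)
    @metrics_processor = metrics_processor
    @logger = logger
  end

  # Mirrors the new signature: feature name and variation identifier as plain strings.
  def process_evaluation(feature_name, target, variation_identifier)
    @logger.debug "Processing evaluation: #{feature_name}, #{variation_identifier}"
    @metrics_processor.register_evaluation(target, feature_name, variation_identifier)
  end
end

processor = TinyMetricsProcessor.new
callback  = TinyCallback.new(processor, Logger.new(STDOUT))
callback.process_evaluation("bool-flag", nil, "true")
p processor.evaluation_metrics # => {"bool-flag\u0000true"=>1}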

lib/ff/ruby/server/sdk/api/metrics_event.rb

Lines changed: 0 additions & 53 deletions
This file was deleted.

lib/ff/ruby/server/sdk/api/metrics_processor.rb

Lines changed: 23 additions & 34 deletions
@@ -6,12 +6,9 @@
 require_relative "../../sdk/version"
 require_relative "../common/closeable"
 require_relative "../common/sdk_codes"
-require_relative "../api/metrics_event"
 require_relative "../api/summary_metrics"
 
 class MetricsProcessor < Closeable
-  GLOBAL_TARGET = Target.new(identifier: "__global__cf_target", name: "Global Target").freeze
-
   def init(connector, config, callback)
 
     unless connector.kind_of?(Connector)
@@ -79,38 +76,33 @@ def close
     @config.logger.debug "Closing metrics processor"
   end
 
-  def register_evaluation(target, feature_config, variation)
-    register_evaluation_metric(feature_config, variation)
+  def register_evaluation(target, feature_name, variation_identifier)
+    register_evaluation_metric(feature_name, variation_identifier)
     if target
       register_target_metric(target)
     end
   end
 
   private
 
-  def register_evaluation_metric(feature_config, variation)
+  def register_evaluation_metric(feature_name, variation_identifier)
     # Guard clause to ensure feature_config, @global_target, and variation are valid.
     # While they should be, this adds protection for an edge case we are seeing where the the ConcurrentMap (now replaced with our own thread safe hash)
     # seemed to be accessing invalid areas of memory and seg faulting.
     # Issue being tracked in FFM-12192, and once resolved, can remove these checks in a future release && once the issue is resolved.
-    if feature_config.nil? || !feature_config.respond_to?(:feature) || feature_config.feature.nil?
-      @config.logger.warn("Skipping invalid MetricsEvent: feature_config is missing or incomplete. feature_config=#{feature_config.inspect}")
+    unless feature_name && !feature_name.empty?
+      @config.logger.warn("Skipping invalid MetricsEvent: feature_config is missing or incomplete. feature_config=#{feature_name.inspect}")
       return
     end
 
-    if GLOBAL_TARGET.nil? || !GLOBAL_TARGET.respond_to?(:identifier) || GLOBAL_TARGET.identifier.nil?
-      @config.logger.warn("Skipping invalid MetricsEvent: global_target is missing or incomplete. global_target=#{GLOBAL_TARGET.inspect}")
+    unless variation_identifier && !variation_identifier.empty?
+      @config.logger.warn("Skipping iInvalid MetricsEvent: variation is missing or incomplete. variation=#{variation_identifier.inspect}")
       return
     end
 
-    if variation.nil? || !variation.respond_to?(:identifier) || variation.identifier.nil?
-      @config.logger.warn("Skipping iInvalid MetricsEvent: variation is missing or incomplete. variation=#{variation.inspect}")
-      return
-    end
-
-    event = MetricsEvent.new(feature_config, GLOBAL_TARGET, variation, @config.logger)
     @metric_maps_mutex.synchronize do
-      @evaluation_metrics[event] = (@evaluation_metrics[event] || 0) + 1
+      key = "#{feature_name}\0#{variation_identifier}"
+      @evaluation_metrics[key] = (@evaluation_metrics[key] || 0) + 1
     end
   end
 
@@ -185,31 +177,28 @@ def prepare_summary_metrics_body(evaluation_metrics_clone, target_metrics_clone)
 
     total_count = 0
     evaluation_metrics_clone.each do |key, value|
-      # While Components should not be missing, this adds protection for an edge case we are seeing with very large
-      # project sizes. Issue being tracked in FFM-12192, and once resolved, can feasibly remove
-      # these checks in a future release.
-      # Initialize an array to collect missing components
-      missing_components = []
-
-      # Check each required component and add to missing_components if absent
-      missing_components << 'feature_config' unless key.respond_to?(:feature_config) && key.feature_config
-      missing_components << 'variation' unless key.respond_to?(:variation) && key.variation
-      missing_components << 'target' unless key.respond_to?(:target) && key.target
-      missing_components << 'count' if value.nil?
-
-      # If any components are missing, log a detailed warning and skip processing
-      unless missing_components.empty?
-        @config.logger.warn "Skipping invalid metrics event: missing #{missing_components.join(', ')} in key: #{key.inspect}, full details: #{key.inspect}"
+      feature_name, variation_identifier = key.split("\0", 2)
+
+      # Although feature_name and variation_identifier should always be present,
+      # this guard provides protection against an edge case where keys reference
+      # other objects in memory. In versions <= 1.4.4, we were keying on the MetricsEvent
+      # class (now deleted). To remediate this, we have transitioned to using strings as keys.
+      # This issue is being tracked in FFM-12192. Once resolved, these checks can be safely
+      # removed in a future release.
+      # If any required data is missing, log a detailed warning and skip processing.
+      unless feature_name && variation_identifier && value.is_a?(Integer) && value > 0
+        @config.logger.warn "Skipping invalid metrics event: missing or invalid feature_name, variation_identifier, or count. Key: #{key.inspect}, Count: #{value.inspect}"
         next
       end
 
       total_count += value
+
       metrics_data = OpenapiClient::MetricsData.new({ :attributes => [] })
       metrics_data.timestamp = (Time.now.to_f * 1000).to_i
       metrics_data.count = value
       metrics_data.metrics_type = "FFMETRICS"
-      metrics_data.attributes.push(OpenapiClient::KeyValue.new({ :key => @feature_name_attribute, :value => key.feature_config.feature }))
-      metrics_data.attributes.push(OpenapiClient::KeyValue.new({ :key => @variation_identifier_attribute, :value => key.variation.identifier }))
+      metrics_data.attributes.push(OpenapiClient::KeyValue.new({ :key => @feature_name_attribute, :value => feature_name }))
+      metrics_data.attributes.push(OpenapiClient::KeyValue.new({ :key => @variation_identifier_attribute, :value => variation_identifier }))
       metrics_data.attributes.push(OpenapiClient::KeyValue.new({ :key => @target_attribute, :value => @global_target_identifier }))
       metrics_data.attributes.push(OpenapiClient::KeyValue.new({ :key => @sdk_type, :value => @server }))
       metrics_data.attributes.push(OpenapiClient::KeyValue.new({ :key => @sdk_language, :value => "ruby" }))
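
Because each frequency-map key is just the feature name, a NUL byte, and the variation identifier, prepare_summary_metrics_body can recover both parts with split("\0", 2). A small, self-contained round-trip sketch of that encoding (it assumes only that feature names and variation identifiers contain no NUL byte):

evaluation_metrics = Hash.new(0)

# Register a few evaluations (same shape as register_evaluation_metric above).
[["bool-flag", "true"], ["bool-flag", "false"], ["bool-flag", "true"]].each do |feature_name, variation_identifier|
  key = "#{feature_name}\0#{variation_identifier}"
  evaluation_metrics[key] += 1
end

# Decode the keys when building the summary payload (same shape as prepare_summary_metrics_body above).
evaluation_metrics.each do |key, count|
  feature_name, variation_identifier = key.split("\0", 2)
  next unless feature_name && variation_identifier && count.is_a?(Integer) && count > 0
  puts "#{feature_name} / #{variation_identifier}: #{count}"
end
# Prints:
#   bool-flag / true: 2
#   bool-flag / false: 1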

lib/ff/ruby/server/sdk/version.rb

Lines changed: 1 addition & 1 deletion
@@ -5,7 +5,7 @@ module Ruby
     module Server
       module Sdk
 
-        VERSION = "1.4.4"
+        VERSION = "1.4.5"
       end
     end
   end

scripts/sdk_specs.sh

Lines changed: 1 addition & 1 deletion
@@ -1,4 +1,4 @@
 #!/bin/bash
 
 export ff_ruby_sdk="ff-ruby-server-sdk"
-export ff_ruby_sdk_version="1.4.4"
+export ff_ruby_sdk_version="1.4.5"

test/ff/ruby/server/sdk/metrics_processor_test.rb

Lines changed: 24 additions & 83 deletions
@@ -118,10 +118,10 @@ def test_metrics
     callback.wait_until_ready
 
     # Here we test register_evaluation then run_one_iteration
-    (1..3).each { metrics_processor.register_evaluation @target, @feature1, @variation1 }
-    (1..3).each { metrics_processor.register_evaluation @target, @feature1, @variation2 }
-    (1..3).each { metrics_processor.register_evaluation @target, @feature2, @variation1 }
-    (1..3).each { metrics_processor.register_evaluation @target, @feature2, @variation2 }
+    (1..3).each { metrics_processor.register_evaluation @target, @feature1.feature, @variation1.identifier }
+    (1..3).each { metrics_processor.register_evaluation @target, @feature1.feature, @variation2.identifier }
+    (1..3).each { metrics_processor.register_evaluation @target, @feature2.feature, @variation1.identifier }
+    (1..3).each { metrics_processor.register_evaluation @target, @feature2.feature, @variation2.identifier }
 
     freq_map = metrics_processor.send :get_frequency_map
     assert_equal 4, freq_map.size, "not enough pending metrics"
@@ -162,18 +162,8 @@ def config.logger
 
     # Register evaluations
     (1..10).each do |i|
-      feature = OpenapiClient::FeatureConfig.new
-      feature.feature = "feature-#{i}"
-      variation = OpenapiClient::Variation.new
-      variation.identifier = "variation-#{i}"
-      variation.value = "value-#{i}"
-      variation.name = "Test-#{i}"
-
-      variation2 = OpenapiClient::Variation.new
-      variation2.identifier = "variation2-#{i}"
-      variation2.value = "value2-#{i}"
-      variation2.name = "Test2-#{i}"
-      feature.variations = [variation, @variation2]
+      feature = "feature-#{i}"
+      variation = "variation-#{i}"
       metrics_processor.register_evaluation(@target, feature, variation)
     end
 
@@ -258,19 +248,9 @@ def config.logger
 
     # Define a method to register evaluations
     register_evals = Proc.new do |feature_id|
-      feature = OpenapiClient::FeatureConfig.new
-      feature.feature = "feature-#{feature_id}"
-      variation = OpenapiClient::Variation.new
-      variation.identifier = "variation-#{feature_id}"
-      variation.value = "value-#{feature_id}"
-      variation.name = "Test-#{feature_id}"
-
-      variation2 = OpenapiClient::Variation.new
-      variation2.identifier = "variation2-#{feature_id}"
-      variation2.value = "value2-#{feature_id}"
-      variation2.name = "Test2-#{feature_id}"
-
-      feature.variations = [variation, variation2]
+      feature = "feature-#{feature_id}"
+      variation = "variation-#{feature_id}"
+
       metrics_processor.register_evaluation(@target, feature, variation)
     end
 
@@ -316,13 +296,11 @@ def config.logger
     metric_found = {}
     expected_metrics_set.each { |metric| metric_found[metric] = false }
 
-    freq_map.each_key do |metrics_event|
-      # Extract feature name and variation identifier from the metrics_event
-      actual_feature_name = metrics_event.feature_config.feature
-      actual_variation_id = metrics_event.variation.identifier
+    freq_map.each_key do |key|
+      feature_name, variation_identifier = key.split("\0", 2)
 
       # Create the key for the current metric
-      metric_key = [actual_feature_name, actual_variation_id]
+      metric_key = [feature_name, variation_identifier]
 
       if metric_found.key?(metric_key)
         metric_found[metric_key] = true
@@ -353,19 +331,9 @@ def config.logger
     callback.wait_until_ready
 
     # Register some evaluations
-    feature = OpenapiClient::FeatureConfig.new
-    feature.feature = "feature-error-test"
-    variation = OpenapiClient::Variation.new
-    variation.identifier = "variation-error-test"
-    variation.value = "value-error-test"
-    variation.name = "Test-Error"
-
-    variation2 = OpenapiClient::Variation.new
-    variation2.identifier = "variation2-error-test"
-    variation2.value = "value2-error-test"
-    variation2.name = "Test2-Error"
-
-    feature.variations = [variation, variation2]
+    feature = "feature-error-test"
+    variation = "variation-error-test"
+
     metrics_processor.register_evaluation(@target, feature, variation)
 
     # Attempt to send data, which should raise an exception
@@ -379,18 +347,10 @@ def config.logger
     assert_empty target_metrics_map, "Target metrics map should be cleared even if an error occurs"
 
     # Verify that the MetricsProcessor remains operational by registering another evaluation
-    feature_new = OpenapiClient::FeatureConfig.new
-    feature_new.feature = "feature-new"
-    variation_new = OpenapiClient::Variation.new
-    variation_new.identifier = "variation-new"
-    variation_new.value = "value-new"
-    variation_new.name = "Test-New"
-
-    variation2_new = OpenapiClient::Variation.new
-    variation2_new.identifier = "variation2-new"
-    variation2_new.value = "value2-new"
-    variation2_new.name = "Test2-New"
-    metrics_processor.register_evaluation(@target, feature, variation)
+    feature_new = "feature-new"
+    variation_new = "variation-new"
+
+    metrics_processor.register_evaluation(@target, feature_new, variation_new)
 
     # Ensure that the new evaluation is registered correctly
     freq_map = metrics_processor.send(:get_frequency_map)
@@ -465,18 +425,6 @@ def assert_target_data(target_data)
     assert targets.key?("target-id")
   end
 
-  def test_comparable_metrics_event_equals_and_hash
-
-    event1 = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT))
-    event2 = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT))
-
-    assert(event1 == event2)
-
-    event1 = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT))
-    event2 = MetricsEvent.new(@feature2, @target, @variation2, Logger.new(STDOUT))
-
-    assert(event1 != event2)
-  end
 
   def test_metrics_processor_prevents_invalid_metrics_event
     logger = Logger.new(STDOUT)
@@ -495,15 +443,15 @@ def test_metrics_processor_prevents_invalid_metrics_event
     callback.wait_until_ready
 
     # Attempt to register invalid evaluations
-    metrics_processor.register_evaluation(@target, nil, @variation1)
+    metrics_processor.register_evaluation(@target, nil, @variation1.identifier)
     metrics_processor.register_evaluation(@target, nil, nil)
-    metrics_processor.register_evaluation(nil, @feature1, nil)
+    metrics_processor.register_evaluation(nil, @feature1.feature, nil)
 
     # Register some valid evaluations
-    metrics_processor.register_evaluation(@target, @feature1, @variation1)
-    metrics_processor.register_evaluation(@target, @feature2, @variation2)
+    metrics_processor.register_evaluation(@target, @feature1.feature, @variation1.identifier)
+    metrics_processor.register_evaluation(@target, @feature2.feature, @variation2.identifier)
     # Nil target, which is a valid input to variation methods
-    metrics_processor.register_evaluation(nil, @feature2, @variation2)
+    metrics_processor.register_evaluation(nil, @feature2.feature, @variation2.identifier)
 
     # Run iteration
     metrics_processor.send(:run_one_iteration)
@@ -535,11 +483,4 @@ def test_metrics_processor_prevents_invalid_metrics_event
     assert_equal 1, captured_metrics[0].target_data.size, "There should only be a single target"
   end
 
-  def test_metrics_event_eql_with_invalid_object
-    event = MetricsEvent.new(@feature1, @target, @variation1, Logger.new(STDOUT))
-    non_event = "Not a MetricsEvent"
-
-    refute_equal(event, non_event, "MetricsEvent should not be equal to a non-MetricsEvent object")
-  end
-
 end
