
Commit

Merge pull request #3 from test-IO/tracer
added tracer class
aleksei-okatiev authored Dec 5, 2024
2 parents f8def0d + f5e08e8 commit a431021
Showing 16 changed files with 532 additions and 6 deletions.
8 changes: 7 additions & 1 deletion .rubocop.yml
@@ -11,4 +11,10 @@ Style/Documentation:
   Enabled: false
 
 Metrics/MethodLength:
-  Max: 20
+  Max: 25
+
+Metrics/PerceivedComplexity:
+  Max: 10
+
+Metrics/CyclomaticComplexity:
+  Max: 10
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -7,3 +7,7 @@
 ## [0.2.0] - 2024-11-27
 
 - Added text and chat prompts
+
+## [0.2.2] - 2024-12-03
+
+- Introduced Tracer
2 changes: 1 addition & 1 deletion Gemfile.lock
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    llm_eval_ruby (0.2.1)
+    llm_eval_ruby (0.2.2)
       httparty (~> 0.22.0)
       liquid (~> 5.5.0)

2 changes: 2 additions & 0 deletions lib/llm_eval_ruby.rb
@@ -3,6 +3,8 @@
 require_relative "llm_eval_ruby/version"
 require_relative "llm_eval_ruby/prompt_repository"
 require_relative "llm_eval_ruby/configuration"
+require_relative "llm_eval_ruby/tracer"
+require_relative "llm_eval_ruby/observable"
 
 module LlmEvalRuby
   class Error < StandardError; end
86 changes: 84 additions & 2 deletions lib/llm_eval_ruby/api_clients/langfuse.rb
@@ -14,14 +14,96 @@ class Langfuse
       raise_on [400, 401, 406, 422, 500]
 
       def initialize(host:, username:, password:)
-        self.class.base_uri "#{host}/api/public/v2"
+        self.class.base_uri "#{host}/api/public/"
         self.class.basic_auth username, password
       end
 
       def fetch_prompt(name:, version:)
-        response = self.class.get("/prompts/#{name}", { query: { version: } })
+        response = self.class.get("/v2/prompts/#{name}", { query: { version: } })
         response["prompt"]
       end
 
+      def create_trace(params = {})
+        body = {
+          id: params[:id],
+          name: params[:name],
+          input: params[:input],
+          sessionId: params[:session_id],
+          userId: params[:user_id]
+        }
+        create_event(type: "trace-create", body:)
+      end
+
+      def create_span(params = {})
+        body = {
+          id: params[:id],
+          name: params[:name],
+          input: params[:input],
+          traceId: params[:trace_id]
+        }
+        create_event(type: "span-create", body:)
+      end
+
+      def update_span(params = {})
+        body = {
+          id: params[:id],
+          output: params[:output],
+          endTime: params[:end_time]
+        }
+        create_event(type: "span-update", body:)
+      end
+
+      def create_generation(params = {})
+        body = {
+          id: params[:id],
+          timestamp: params[:timestamp],
+          name: params[:name],
+          input: params[:input],
+          output: params[:output] || "UNKNOWN",
+          traceId: params[:trace_id],
+          release: params[:release] || "UNKNOWN",
+          version: params[:version] || "UNKNOWN",
+          metadata: params[:metadata] || {},
+          promptName: params[:prompt_name],
+          promptVersion: params[:prompt_version]
+        }
+        create_event(type: "generation-create", body:)
+      end
+
+      def update_generation(params = {})
+        body = {
+          id: params[:id],
+          output: params[:output],
+          endTime: params[:end_time],
+          usage: convert_keys_to_camel_case(params[:usage])
+        }
+        create_event(type: "generation-update", body:)
+      end
+
+      def create_event(type:, body:)
+        payload = {
+          batch: [
+            {
+              id: SecureRandom.uuid,
+              type:,
+              body:,
+              timestamp: Time.now.utc.iso8601,
+              metadata: {}
+            }
+          ]
+        }
+
+        self.class.post("/ingestion", body: payload.to_json)
+      end
+
+      private
+
+      def convert_keys_to_camel_case(hash)
+        hash.each_with_object({}) do |(key, value), new_hash|
+          camel_case_key = key.gsub(/_([a-z])/) { ::Regexp.last_match(1).upcase }
+          new_hash[camel_case_key] = value
+        end
+      end
     end
   end
 end
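For context, a minimal sketch of calling the new client methods directly (not part of this commit; the host and credential values are placeholders, and SecureRandom is assumed to be required elsewhere in the gem):

client = LlmEvalRuby::ApiClients::Langfuse.new(
  host: "https://cloud.langfuse.com",     # placeholder host
  username: ENV["LANGFUSE_PUBLIC_KEY"],   # placeholder credentials
  password: ENV["LANGFUSE_SECRET_KEY"]
)

trace_id = SecureRandom.uuid
# Each helper wraps its body in a single-item batch event and posts it to /ingestion.
client.create_trace(id: trace_id, name: "checkout", input: { question: "Where is my order?" })
client.create_span(id: SecureRandom.uuid, name: "retrieval", trace_id: trace_id)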
112 changes: 112 additions & 0 deletions lib/llm_eval_ruby/observable.rb
@@ -0,0 +1,112 @@
# frozen_string_literal: true

module LlmEvalRuby
  module Observable
    def self.included(base)
      base.extend(ClassMethods)
    end

    module ClassMethods
      def observed_methods
        @observed_methods ||= {}
      end

      def observe(method_name, options = {})
        observed_methods[method_name] = options
      end

      def method_added(method_name)
        super
        return unless observed_methods.key?(method_name)

        wrap_observed_method(method_name)
      end

      private

      def wrap_observed_method(method_name)
        options = observed_methods[method_name]
        original_method = instance_method(method_name)
        observed_methods.delete(method_name)
        wrap_method(method_name, original_method, options)
      end

      def wrap_method(method_name, original_method, options)
        define_method(method_name) do |*args, **kwargs, &block|
          result = nil
          input = prepare_input(args, kwargs)
          case options[:type]
          when :span
            LlmEvalRuby::Tracer.span(name: method_name, input: input, trace_id: @trace_id) do
              result = original_method.bind(self).call(*args, **kwargs, &block)
            end
          when :generation
            LlmEvalRuby::Tracer.generation(name: method_name, input: input, trace_id: @trace_id) do
              result = original_method.bind(self).call(*args, **kwargs, &block)
            end
          else
            LlmEvalRuby::Tracer.trace(name: method_name, input: input, trace_id: @trace_id) do
              result = original_method.bind(self).call(*args, **kwargs, &block)
            end
          end

          result
        end
      end
    end

    def prepare_input(*args, **kwargs)
      return nil if args.empty? && kwargs.empty?

      inputs = deep_copy(Array[*args, **kwargs].flatten)
      inputs.each do |item|
        trim_base64_images(item) if item.is_a?(Hash)
      end

      inputs
    end

    def trim_base64_images(hash, max_length = 30)
      # Iterate through each key-value pair in the hash
      hash.each do |key, value|
        if value.is_a?(Hash)
          # Recursively process nested hashes
          trim_base64_images(value, max_length)
        elsif value.is_a?(String) && value.start_with?("data:image/jpeg;base64,")
          # Trim the byte string while keeping the prefix; set max length limit
          prefix = "data:image/jpeg;base64,"
          byte_string = value[prefix.length..]
          trimmed_byte_string = byte_string[0, max_length] # Trim to max_length characters
          hash[key] = "#{prefix}#{trimmed_byte_string}... (truncated)"
        elsif value.is_a?(Array)
          # Recursively process arrays
          value.each do |element|
            trim_base64_images(element, max_length) if element.is_a?(Hash)
          end
        end
      end
      hash
    end

    def deep_copy(obj)
      case obj
      when Numeric, Symbol, NilClass, TrueClass, FalseClass
        obj
      when String
        obj.dup
      when Array
        obj.map { |e| deep_copy(e) }
      when Hash
        obj.each_with_object({}) do |(key, value), result|
          result[deep_copy(key)] = deep_copy(value)
        end
      else
        begin
          Marshal.load(Marshal.dump(obj))
        rescue TypeError
          nil # or handle as needed, perhaps log or raise a specific error
        end
      end
    end
  end
end
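A minimal sketch of how a class might use this module (not part of this commit; the class, method, and trace_id wiring are hypothetical). Note that observe has to be declared before the method is defined so that method_added can wrap it:

class AnswerService
  include LlmEvalRuby::Observable

  # Register the wrapper first; method_added fires when #answer is defined below.
  observe :answer, type: :generation

  def initialize(trace_id:)
    @trace_id = trace_id # read by the generated wrapper and passed to the Tracer
  end

  def answer(question:)
    # ... call the LLM and return its raw response ...
  end
end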
1 change: 1 addition & 0 deletions lib/llm_eval_ruby/prompt_types/base.rb
@@ -7,6 +7,7 @@ class Base
 
       def initialize(adapter:, content:, role:)
         @adapter = adapter
+        @adapter = adapter.safe_constantize if adapter.is_a?(String)
         @role = role
         @content = content
       end
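The added line suggests the adapter can now also be passed as a class-name string (for example from configuration) rather than a class object; safe_constantize is an ActiveSupport string extension, so the host application would need that available. A hypothetical call, with the adapter class name as a placeholder:

LlmEvalRuby::PromptTypes::Base.new(adapter: "MyApp::OpenAiAdapter", content: "Hello {{ name }}", role: :system)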
8 changes: 8 additions & 0 deletions lib/llm_eval_ruby/trace_adapters/base.rb
@@ -0,0 +1,8 @@
# frozen_string_literal: true

module LlmEvalRuby
  module TraceAdapters
    class Base
    end
  end
end
85 changes: 85 additions & 0 deletions lib/llm_eval_ruby/trace_adapters/langfuse.rb
@@ -0,0 +1,85 @@
# frozen_string_literal: true

require_relative "base"
require_relative "../api_clients/langfuse"
require_relative "../trace_types"

module LlmEvalRuby
  module TraceAdapters
    class Langfuse < Base
      class << self
        def trace(**kwargs)
          trace = TraceTypes::Trace.new(id: SecureRandom.uuid, **kwargs)
          response = client.create_trace(trace.to_h)

          logger.warn "Failed to create trace" if response["successes"].blank?

          trace
        end

        def span(**kwargs)
          span = TraceTypes::Span.new(id: SecureRandom.uuid, **kwargs)
          response = client.create_span(span.to_h)

          logger.warn "Failed to create span" if response["successes"].blank?

          return span unless block_given?

          result = yield

          end_span(span, result)

          result
        end

        def update_generation(**kwargs)
          generation = TraceTypes::Generation.new(**kwargs)
          response = client.update_generation(generation.to_h)

          logger.warn "Failed to update generation" if response["successes"].blank?

          generation
        end

        def generation(**kwargs)
          generation = TraceTypes::Generation.new(id: SecureRandom.uuid, tracer: self, **kwargs)
          response = client.create_generation(generation.to_h)
          logger.warn "Failed to create generation" if response["successes"].blank?

          return generation unless block_given?

          result = yield generation

          end_generation(generation, result)

          result
        end

        private

        def logger
          @logger ||= Logger.new($stdout)
        end

        def client
          @client ||= ApiClients::Langfuse.new(**LlmEvalRuby.config.langfuse_options)
        end

        def end_span(span, result)
          span.end_time = Time.now.utc.iso8601
          span.output = result

          client.update_span(span.to_h)
        end

        def end_generation(generation, result)
          generation.output = result.dig("choices", 0, "message", "content")
          generation.usage = result["usage"]
          generation.end_time = Time.now.utc.iso8601

          client.update_generation(generation.to_h)
        end
      end
    end
  end
end
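A rough sketch of exercising the adapter's block forms (not part of this commit; it assumes LlmEvalRuby.config.langfuse_options is already set, that TraceTypes::Trace and TraceTypes::Generation expose the attributes used below, and that the block returns an OpenAI-style response hash, which end_generation digs into for output and usage):

adapter = LlmEvalRuby::TraceAdapters::Langfuse

trace = adapter.trace(name: "support_ticket", input: { question: "Where is my order?" })

reply = adapter.generation(name: "draft_reply", trace_id: trace.id, input: "Where is my order?") do |_generation|
  # Placeholder response standing in for a real chat-completion call.
  { "choices" => [{ "message" => { "content" => "It ships tomorrow." } }], "usage" => { "total_tokens" => 42 } }
end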