Merge pull request #287 from reidmorrison/refactor_tests
Fixes #216
  • Loading branch information
reidmorrison authored Jul 1, 2024
2 parents ee7d2b5 + f7d54bf commit 54692db
Showing 42 changed files with 1,512 additions and 887 deletions.
11 changes: 7 additions & 4 deletions CHANGELOG.md
@@ -5,10 +5,13 @@ This project adheres to [Semantic Versioning](http://semver.org/).

## [unreleased]

- Add support for Ruby 3.3
- Allow SyncProcessor to be called from appenders
- Fix incorrect metrics usage examples in documentation
- Add `:duration_ms` to Logfmt fomatter
## [4.16.0]

- Add support for Ruby 3.3.
- Allow SyncProcessor to be called from appenders.
- Fix incorrect metrics usage examples in documentation.
- Add `:duration_ms` to Logfmt formatter.
- Fixes #216 Log message from supplied message argument when payload contains payload key.

## [4.15.0]

10 changes: 5 additions & 5 deletions Gemfile
@@ -1,4 +1,4 @@
source "http://rubygems.org"
source "https://rubygems.org"

gemspec

@@ -23,23 +23,23 @@ gem "honeybadger"
# [optional] Kafka appender
gem "ruby-kafka"
# [optional] MongoDB appender
gem "base64"
gem "mongo"
# [optional] NewRelic appender ( Tests use a mock class )
# gem 'newrelic_rpm'
# [optional] Net::TCP appender
gem "net_tcp_client"
# [optional] Splunk appender
gem "splunk-sdk-ruby"
gem "nokogiri"
gem "splunk-sdk-ruby"
# [optional] Statsd metrics
gem "statsd-ruby"
# [optional] legacy Sentry appender
gem "sentry-raven"
# [optional] new Sentry appender
gem "sentry-ruby"
# [optional] Syslog appender when communicating with a remote syslogd over TCP
gem "syslog"
gem "syslog_protocol"

group :development do
gem "rubocop", "~> 1.28", "< 1.29"
end
gem "rubocop", "~> 1.28.1", require: false
34 changes: 13 additions & 21 deletions lib/semantic_logger/appender/elasticsearch.rb
@@ -147,7 +147,7 @@ def initialize(url: "http://localhost:9200",
@elasticsearch_args = elasticsearch_args.dup
@elasticsearch_args[:url] = url if url && !elasticsearch_args[:hosts]
@elasticsearch_args[:logger] = logger
@data_stream = data_stream
@data_stream = data_stream

super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, metrics: false, &block)
reopen
@@ -177,11 +177,12 @@ def batch(logs)
private

def write_to_elasticsearch(messages)
bulk_result = if @data_stream
@client.bulk(index: index, body: messages)
else
@client.bulk(body: messages)
end
bulk_result =
if @data_stream
@client.bulk(index: index, body: messages)
else
@client.bulk(body: messages)
end

return unless bulk_result["errors"]

@@ -191,27 +192,18 @@ def write_to_elasticsearch(messages)

def bulk_index(log)
expanded_index_name = log.time.strftime("#{index}-#{date_pattern}")
if @data_stream
{"create" => {}}
else
bulk_index = {"index" => {"_index" => expanded_index_name}}
bulk_index["index"].merge!({ "_type" => type }) if version_supports_type?
bulk_index
end
return {"create" => {}} if @data_stream

bulk_index = {"index" => {"_index" => expanded_index_name}}
bulk_index["index"].merge!({"_type" => type}) if version_supports_type?
bulk_index
end

def default_formatter
time_key = if @data_stream
"@timestamp"
else
:timestamp
end

time_key = @data_stream ? "@timestamp" : :timestamp
SemanticLogger::Formatters::Raw.new(time_format: :iso_8601, time_key: time_key)
end

private

def version_supports_type?
Gem::Version.new(::Elasticsearch::VERSION) < Gem::Version.new(7)
end
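The main change in the Elasticsearch appender is structural: `bulk_index` now uses an early return for data streams instead of nesting both cases in an if/else, and `default_formatter` collapses the time-key selection into a ternary. A minimal standalone sketch of the resulting bulk-metadata logic (the method name, default values, and the omission of the legacy `_type` handling are illustrative only, not the gem's API):

```ruby
# Illustrative sketch only, not part of the diff.
def bulk_index_metadata(log_time, data_stream:, index: "semantic_logger", date_pattern: "%Y.%m.%d")
  # Data streams always get a plain "create" action; the target is supplied on
  # the bulk call itself: client.bulk(index: index, body: messages).
  return {"create" => {}} if data_stream

  # Classic indices expand the index name with the log event's date,
  # e.g. "semantic_logger-2024.07.01".
  {"index" => {"_index" => log_time.strftime("#{index}-#{date_pattern}")}}
end

p bulk_index_metadata(Time.now, data_stream: true)   # => {"create"=>{}}
p bulk_index_metadata(Time.now, data_stream: false)  # => {"index"=>{"_index"=>"semantic_logger-..."}}
```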
10 changes: 8 additions & 2 deletions lib/semantic_logger/appender/rabbitmq.rb
@@ -60,6 +60,10 @@ class Rabbitmq < SemanticLogger::Subscriber
# Name of this application to appear in log messages.
# Default: SemanticLogger.application
#
# metrics: [Boolean]
# Also send metrics only events to rabbitmq.
# Default: true
#
# RabbitMQ Parameters:
#
# rabbitmq_host: [String]
@@ -76,13 +80,15 @@ class Rabbitmq < SemanticLogger::Subscriber
# Default: nil
#
# more parameters supported by Bunny: http://rubybunny.info/articles/connecting.html
def initialize(queue_name: "semantic_logger", rabbitmq_host: nil, metrics: false, **args, &block)
def initialize(queue_name: "semantic_logger", rabbitmq_host: nil,
level: nil, formatter: nil, filter: nil, application: nil, environment: nil, host: nil, metrics: true,
**args, &block)
@queue_name = queue_name
@rabbitmq_args = args.dup
@rabbitmq_args[:host] = rabbitmq_host
@rabbitmq_args[:logger] = logger

super(level: level, formatter: formatter, filter: filter, application: application, host: host, metrics: metrics, &block)
super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, metrics: metrics, &block)
reopen
end

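The RabbitMQ constructor now accepts the standard subscriber options explicitly, forwards `environment` to `super`, and defaults `metrics` to `true` as documented above. A hypothetical configuration using these options (host, queue name, and level are example values; the `bunny` gem must be available at runtime):

```ruby
require "semantic_logger"

SemanticLogger.add_appender(
  appender:      :rabbitmq,
  queue_name:    "semantic_logger", # queue the JSON log events are published to
  rabbitmq_host: "127.0.0.1",       # forwarded to Bunny as :host
  level:         :info,             # subscriber options are now passed through to super
  metrics:       false              # metrics-only events now default to true; opt out here if unwanted
)

SemanticLogger["MyApp"].info("Order placed", order_id: 123)
```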
4 changes: 2 additions & 2 deletions lib/semantic_logger/appender/splunk.rb
@@ -101,10 +101,10 @@ def initialize(index: "main", source_type: nil, **args, &block)
# open the handles to resources
def reopen
# Connect to splunk. Connect is a synonym for creating a Service by hand and calling login.
self.service = Splunk.connect(config)
@service = ::Splunk.connect(config)

# The index we are logging to
self.service_index = service.indexes[index]
@service_index = service.indexes[index]
end

# Log the message to Splunk
2 changes: 1 addition & 1 deletion lib/semantic_logger/appender/syslog.rb
@@ -208,7 +208,7 @@ def log(log)

# Flush is called by the semantic_logger during shutdown.
def flush
@remote_syslog.flush if @remote_syslog&.respond_to?(:flush)
@remote_syslog.flush if @remote_syslog.respond_to?(:flush)
end

# Returns [SemanticLogger::Formatters::Base] default formatter for this Appender depending on the protocal selected
2 changes: 1 addition & 1 deletion lib/semantic_logger/appender/tcp.rb
@@ -191,7 +191,7 @@ def initialize(separator: "\n",
Net::TCPClient.logger = logger
Net::TCPClient.logger.name = "Net::TCPClient"

super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, &block)
super(level: level, formatter: formatter, filter: filter, application: application, environment: environment, host: host, metrics: metrics, &block)
reopen
end

12 changes: 9 additions & 3 deletions lib/semantic_logger/appender/wrapper.rb
@@ -1,7 +1,6 @@
# Send log messages to any standard Ruby logging class.
#
# Forwards logging call to loggers such as Logger, log4r, etc.
#
module SemanticLogger
module Appender
class Wrapper < SemanticLogger::Subscriber
@@ -56,15 +55,22 @@ def initialize(logger:, **args, &block)
# trace entries are mapped to debug since :trace is not supported by the
# Ruby or Rails Loggers
def log(log)
@logger.send(log.level == :trace ? :debug : log.level, formatter.call(log, self))
level = log.level == :trace ? :debug : log.level
@logger.send(level, formatter.call(log, self))
true
end

# Flush all pending logs to disk.
# Waits for all sent documents to be writted to disk
# Waits for all queued log messages to be written to disk.
def flush
@logger.flush if @logger.respond_to?(:flush)
end

# Close underlying log
# Waits for all queued log messages to be written to disk.
def close
@logger.close if @logger.respond_to?(:close)
end
end
end
end
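Besides the small readability change in `#log`, the Wrapper appender now forwards `#close` to the underlying logger. A hypothetical setup wrapping a plain Ruby `::Logger` (file name and logger name are example values):

```ruby
require "logger"
require "semantic_logger"

ruby_logger = ::Logger.new("development.log")

# Passing an existing logger creates a Wrapper appender around it.
SemanticLogger.add_appender(logger: ruby_logger)
SemanticLogger.default_level = :trace

log = SemanticLogger["MyClass"]
log.trace("Arrives in development.log as a debug entry") # :trace is mapped to :debug
log.info("Regular entry")

SemanticLogger.flush # flushes, and on shutdown now also closes, the wrapped logger
```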
10 changes: 7 additions & 3 deletions lib/semantic_logger/appenders.rb
@@ -6,6 +6,7 @@ class Appenders < Concurrent::Array
def initialize(logger = Processor.logger.dup)
@logger = logger
@logger.name = self.class.name
super()
end

def add(**args, &block)
Expand Down Expand Up @@ -45,18 +46,21 @@ def flush
end

def close
to_a.each do |appender|
closed_appenders = []
each do |appender|
logger.trace "Closing appender: #{appender.name}"
delete(appender)
closed_appenders << appender
appender.flush
appender.close
rescue Exception => e
logger.error "Failed to close appender: #{appender.name}", e
end
# Delete appenders outside the #each above which prevents changes during iteration.
closed_appenders.each { |appender| delete(appender) }
logger.trace "All appenders closed and removed from appender list"
end

# After a fork the appender thread is not running, start it if it is not running.
# After a fork reopen each appender.
def reopen
each do |appender|
next unless appender.respond_to?(:reopen)
5 changes: 3 additions & 2 deletions lib/semantic_logger/base.rb
@@ -342,9 +342,10 @@ def log_internal(level, index, message = nil, payload = nil, exception = nil)
# Add result of block to message or payload if not nil
if block_given?
result = yield(log)
if result.is_a?(String)
case result
when String
log.message = log.message.nil? ? result : "#{log.message} -- #{result}"
elsif result.is_a?(Hash)
when Hash
log.assign_hash(result)
end
end
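The `if/elsif` on the block's return value becomes a `case`, with unchanged behaviour: a `String` result is appended to the message, while a `Hash` result is assigned onto the log event. Hypothetical calls illustrating both shapes (appender setup and keys are example values):

```ruby
require "semantic_logger"
SemanticLogger.add_appender(io: $stdout, formatter: :color)

logger = SemanticLogger["Example"]

# String result: appended to the supplied message, logged as "Loaded -- 42 rows".
logger.info("Loaded") { "42 rows" }

# Hash result: assigned onto the log event (e.g. :payload, :metric).
logger.info("Loaded") { {payload: {rows: 42}, metric: "example/loaded"} }
```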
19 changes: 4 additions & 15 deletions lib/semantic_logger/debug_as_trace_logger.rb
@@ -3,20 +3,9 @@ module SemanticLogger
# This is useful for existing gems / libraries that log too much to debug
# when most of the debug logging should be at the trace level
class DebugAsTraceLogger < Logger
def debug(*args, &block)
trace(*args, &block)
end

def debug?
trace?
end

def measure_debug(*args, &block)
measure_trace(*args, &block)
end

def benchmark_debug(*args, &block)
measure_trace(*args, &block)
end
alias debug trace
alias debug? trace?
alias measure_debug measure_trace
alias benchmark_debug benchmark_trace
end
end
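The hand-written delegator methods are replaced with aliases; behaviour is unchanged, so `debug` calls from a noisy library are still recorded at `:trace`. A hypothetical usage sketch (logger name and levels are example values):

```ruby
require "semantic_logger"
SemanticLogger.add_appender(io: $stdout)
SemanticLogger.default_level = :debug

chatty = SemanticLogger::DebugAsTraceLogger.new("NoisyGem")

chatty.debug("Logged at :trace, so hidden at the :debug default level")
chatty.info("Logged normally")
chatty.debug? # => false here, since it now answers trace?
```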
2 changes: 1 addition & 1 deletion lib/semantic_logger/formatters/raw.rb
@@ -86,7 +86,7 @@ def message

# Payload
def payload
hash[:payload] = log.payload if log.payload&.respond_to?(:empty?) && !log.payload.empty?
hash[:payload] = log.payload if log.payload.respond_to?(:empty?) && !log.payload.empty?
end

# Exception
10 changes: 8 additions & 2 deletions lib/semantic_logger/formatters/signalfx.rb
@@ -82,7 +82,10 @@ def call(log, logger)
self.log = log
self.logger = logger

metric; time; value; format_dimensions
metric
time
value
format_dimensions

# gauge, counter, or cumulative_counter
data = {}
@@ -113,7 +116,10 @@ def batch(logs, logger)
self.hash = {}
self.log = log

metric; time; value; format_dimensions
metric
time
value
format_dimensions

if log.duration
gauges = (data[:gauge] ||= [])
4 changes: 3 additions & 1 deletion lib/semantic_logger/log.rb
@@ -144,7 +144,9 @@ def extract_arguments(payload, message = nil)
raise(ArgumentError, "payload must be a Hash") unless payload.is_a?(Hash)

message = nil if message == ""
return payload if payload.key?(:payload)
if payload.key?(:payload)
return message ? payload.merge(message: message) : payload
end

new_payload = {}
args = {}
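This is the fix referenced by #216: when the supplied hash already contains a `:payload` key, `extract_arguments` previously returned it unchanged and the separate message argument was dropped; it now merges the message back in. A hypothetical call showing the case that used to lose its message (appender setup and payload values are examples):

```ruby
require "semantic_logger"
SemanticLogger.add_appender(io: $stdout)

logger = SemanticLogger["Orders"]

# The named-argument form nests user data under :payload, so the hash passed to
# extract_arguments already contains a :payload key. Before this change the
# "Order created" message was dropped; it is now kept alongside the payload.
logger.info("Order created", payload: {order_id: 123, total: 49.95})
```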
1 change: 1 addition & 0 deletions lib/semantic_logger/metric/statsd.rb
@@ -28,6 +28,7 @@ class Statsd < Subscriber
# )
def initialize(url: "udp://localhost:8125")
@url = url
super()
end

def reopen
36 changes: 26 additions & 10 deletions lib/semantic_logger/test/capture_log_events.rb
@@ -4,20 +4,21 @@ module Test
#
# Example:
#
# class UserTest < ActiveSupport::TestCase
# describe User do
# let(:capture_logger) { SemanticLogger::Test::CaptureLogEvents.new }
# let(:user) { User.new }
# class UserTest < ActiveSupport::TestCase
# describe User do
# let(:logger) { SemanticLogger::Test::CaptureLogEvents.new }
# let(:user) { User.new }
#
# it "logs message" do
# user.stub(:logger, capture_logger) do
# user.enable!
# it "logs message" do
# user.stub(:logger, logger) do
# user.enable!
# end
# assert log = logger.events.first
# assert_equal "Hello World", log.message
# assert_equal :info, log.level
# end
# assert_equal "Hello World", capture_logger.events.last.message
# assert_equal :info, capture_logger.events.last.level
# end
# end
# end
class CaptureLogEvents < SemanticLogger::Subscriber
attr_accessor :events

@@ -28,12 +29,27 @@ def initialize(level: :trace, metrics: true)
end

def log(log)
Logger.call_subscribers(log)
@events << log
end

# Supports batching of log events
def batch(logs)
@events += logs
end

def clear
@events.clear
end

# Support silencing of log messages
def level_index
@level_index || SemanticLogger.default_level_index
end

def to_h
events.map(&:to_h)
end
end
end
end
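Beyond the stubbing pattern shown in the class comment, the capture class now supports batched log events, clearing captured events between assertions, silencing via `level_index`, and dumping events as hashes. A hypothetical direct usage sketch (message and payload are example values, assuming the class responds to the normal logging API as in the comment above):

```ruby
require "semantic_logger"

capture = SemanticLogger::Test::CaptureLogEvents.new

# Stands in for a regular logger, capturing events instead of writing them.
capture.info("Hello World", user_id: 42)

capture.events.last.message # => "Hello World"
capture.to_h.first[:level]  # => :info

capture.clear               # reset between assertions
capture.events.empty?       # => true
```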
