Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 9 additions & 1 deletion lib/yabeda/sidekiq.rb
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,6 @@ module Sidekiq
counter :jobs_enqueued_total, comment: "A counter of the total number of jobs sidekiq enqueued."

next unless ::Sidekiq.server?

counter :jobs_executed_total, comment: "A counter of the total number of jobs sidekiq executed."
counter :jobs_success_total, comment: "A counter of the total number of jobs successfully processed by sidekiq."
counter :jobs_failed_total, comment: "A counter of the total number of jobs failed in sidekiq."
Expand All @@ -32,6 +31,7 @@ module Sidekiq
gauge :jobs_dead_count, comment: "The number of jobs exceeded their retry count."
gauge :active_processes, comment: "The number of active Sidekiq worker processes."
gauge :jobs_latency, comment: "The job latency, the difference in seconds since the oldest job in the queue was enqueued"
gauge :memory_usage, comment: "The sidekiq process overall memory usage"

histogram :job_runtime, unit: :seconds, per: :job, comment: "A histogram of the job execution time.",
buckets: LONG_RUNNING_JOB_RUNTIME_BUCKETS
Expand All @@ -52,6 +52,8 @@ module Sidekiq
sidekiq_jobs_latency.set({ queue: queue.name }, queue.latency)
end

sidekiq_memory_usage.set({}, Yabeda::Sidekiq.process_memory_usage)

# That is quite slow if your retry set is large
# I don't want to enable it by default
# retries_by_queues =
Expand Down Expand Up @@ -90,6 +92,12 @@ def worker_class(worker, job)
end
(worker.is_a?(String) ? worker : worker.class).to_s
end

# Returns the resident set size (RSS) of the current process in bytes.
#
# Reads the `resident` field from /proc/<pid>/statm (Linux-only; values
# are in pages) and multiplies by the system memory page size.
#
# @return [Integer] resident memory usage in bytes
def process_memory_usage
  # statm fields are: size resident shared trs lrs drs dt (all in pages).
  # Only the second (resident) field is needed, so avoid building a hash.
  # File.read (not Kernel#open) avoids the pipe-exec footgun and closes
  # the file handle immediately instead of leaking it until GC.
  resident_pages = File.read("/proc/#{Process.pid}/statm").split[1].to_i
  resident_pages * memory_page_size
end

# Memory page size in bytes, memoized: this is consulted on every metrics
# collection tick, and shelling out to getconf each time is wasteful since
# the page size never changes for the lifetime of the process.
def memory_page_size
  @memory_page_size ||= `getconf PAGESIZE`.chomp.to_i
end
end
end
end