Skip to content
Closed
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 24 additions & 16 deletions cmd/devtool/analyze_logs.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package main

import (
"bufio"
"encoding/json"
"fmt"
"os"
"sort"
Expand Down Expand Up @@ -68,6 +69,13 @@ func (c *AnalyzeLogsCommand) Run(args []string) error {
return nil
}

// logEntry models one structured (JSON-formatted) log line. Only the
// fields this tool aggregates on are decoded; any other keys in the
// line are ignored by json.Unmarshal.
type logEntry struct {
	UserID   string `json:"user_id"`  // stable user identifier; aggregation key
	Username string `json:"username"` // display name recorded alongside UserID
	Job      string `json:"job"`      // job name whose occurrences are counted per user
	Msg      string `json:"msg"`      // free-text message; presumably matched elsewhere for job-status markers — verify against caller
}

func (c *AnalyzeLogsCommand) scanLogFile(file *os.File) (userJobs map[string]map[string]int, userNames map[string]string, err error) {
// userJobs[uid][job] -> count
userJobs = make(map[string]map[string]int)
Expand All @@ -80,10 +88,22 @@ func (c *AnalyzeLogsCommand) scanLogFile(file *os.File) (userJobs map[string]map
for scanner.Scan() {
line := scanner.Text()

uid := c.extractValue(line, "user_id")
uname := c.extractValue(line, "username")
job := c.extractValue(line, "job")
msg := c.extractValue(line, "msg")
var uid, uname, job, msg string

if strings.HasPrefix(line, "{") {
var entry logEntry
if err := json.Unmarshal([]byte(line), &entry); err == nil {
uid = entry.UserID
uname = entry.Username
job = entry.Job
msg = entry.Msg
}
} else {
uid = c.extractValue(line, "user_id")
uname = c.extractValue(line, "username")
job = c.extractValue(line, "job")
msg = c.extractValue(line, "msg")
}

if uid != "" && uname != "" {
userNames[uid] = uname
Expand Down Expand Up @@ -131,17 +151,5 @@ func (c *AnalyzeLogsCommand) extractValue(line, key string) string {
return line[start : start+end]
}

// Try json format "key":"value"
prefix = `"` + key + `":"`
idx = strings.Index(line, prefix)
if idx != -1 {
start := idx + len(prefix)
end := strings.Index(line[start:], `"`)
if end == -1 {
return line[start:]
}
return line[start : start+end]
}

return ""
}
Loading