feat(metrics): add safe type conversion functions and enhance metrics data retrieval
- Introduced safe type conversion functions for memory usage, status code stats, top paths, and recent requests to prevent panics when accessing metrics data.
- Updated MetricsHandler to use these safe functions, making metrics handling more robust.
- Refactored the GetStats method in Collector so that all fields are initialized and metrics retrieval is streamlined.
- Improved the overall structure of the metrics data for clearer, more reliable reporting.
This commit is contained in:
parent 9602034f9d
commit 91d4686713
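
All of the safe-conversion helpers introduced by this commit follow the same shape: check for nil, attempt the type assertion with the comma-ok form, and fall back to a default value instead of panicking. As a rough standalone sketch of that pattern (not the commit's own code: the generic helper safeAs, the example values, and the Go 1.18+ generics requirement are assumptions), the idea can be written once:

package main

import "fmt"

// safeAs is a hypothetical generic form of the pattern used by the helpers
// in this commit: return the asserted value, or the fallback when the
// interface value is nil or holds a different type.
func safeAs[T any](v interface{}, fallback T) T {
    if v == nil {
        return fallback
    }
    if t, ok := v.(T); ok {
        return t
    }
    return fallback
}

func main() {
    stats := map[string]interface{}{
        "memory_usage":   "12.3 MB",
        "total_requests": int64(42),
    }
    // Valid entries pass through; missing or mistyped keys yield the fallback.
    fmt.Println(safeAs(stats["memory_usage"], "0 B"))                   // 12.3 MB
    fmt.Println(safeAs(stats["total_requests"], int64(0)))              // 42
    fmt.Println(safeAs(stats["status_code_stats"], map[string]int64{})) // map[]
}

The diff below applies the same idea with one concrete helper per type: safeString, safeStatusCodeStats, safePathMetrics, and safeRequestLogs.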
@@ -68,6 +68,17 @@ func (h *ProxyHandler) MetricsHandler(w http.ResponseWriter, r *http.Request) {
        return 0
    }

    // add a safe string conversion helper
    safeString := func(v interface{}) string {
        if v == nil {
            return "0 B" // return a default value
        }
        if s, ok := v.(string); ok {
            return s
        }
        return "0 B" // return a default value
    }

    totalRequests := safeInt64(stats["total_requests"])
    metrics := Metrics{
        Uptime: uptime.String(),
@@ -76,15 +87,15 @@ func (h *ProxyHandler) MetricsHandler(w http.ResponseWriter, r *http.Request) {
        TotalErrors: safeInt64(stats["total_errors"]),
        ErrorRate: float64(safeInt64(stats["total_errors"])) / float64(max(totalRequests, 1)),
        NumGoroutine: safeInt(stats["num_goroutine"]),
        MemoryUsage: stats["memory_usage"].(string),
        MemoryUsage: safeString(stats["memory_usage"]), // use safe conversion
        AverageResponseTime: metrics.FormatDuration(time.Duration(safeInt64(stats["avg_latency"]))),
        TotalBytes: safeInt64(stats["total_bytes"]),
        BytesPerSecond: float64(safeInt64(stats["total_bytes"])) / metrics.Max(uptime.Seconds(), 1),
        RequestsPerSecond: float64(totalRequests) / metrics.Max(uptime.Seconds(), 1),
        StatusCodeStats: stats["status_code_stats"].(map[string]int64),
        TopPaths: stats["top_paths"].([]models.PathMetrics),
        RecentRequests: stats["recent_requests"].([]models.RequestLog),
        TopReferers: stats["top_referers"].([]models.PathMetrics),
        StatusCodeStats: safeStatusCodeStats(stats["status_code_stats"]), // safe conversion
        TopPaths: safePathMetrics(stats["top_paths"]), // safe conversion
        RecentRequests: safeRequestLogs(stats["recent_requests"]), // safe conversion
        TopReferers: safePathMetrics(stats["top_referers"]), // safe conversion
    }

    w.Header().Set("Content-Type", "application/json")
@@ -574,7 +585,7 @@ var metricsTemplate = `
    // refresh automatically every 5 seconds
    setInterval(refreshMetrics, 5000);

    // add chart-related code
    function loadHistoryData() {
        const hours = document.getElementById('timeRange').value;
        fetch('/metrics/history?hours=' + hours, {
@@ -796,3 +807,34 @@ func (h *ProxyHandler) MetricsHistoryHandler(w http.ResponseWriter, r *http.Request) {
    w.Header().Set("Content-Type", "application/json")
    json.NewEncoder(w).Encode(metrics)
}

// safe type conversion helper functions
func safeStatusCodeStats(v interface{}) map[string]int64 {
    if v == nil {
        return make(map[string]int64)
    }
    if m, ok := v.(map[string]int64); ok {
        return m
    }
    return make(map[string]int64)
}

func safePathMetrics(v interface{}) []models.PathMetrics {
    if v == nil {
        return []models.PathMetrics{}
    }
    if m, ok := v.([]models.PathMetrics); ok {
        return m
    }
    return []models.PathMetrics{}
}

func safeRequestLogs(v interface{}) []models.RequestLog {
    if v == nil {
        return []models.RequestLog{}
    }
    if m, ok := v.([]models.RequestLog); ok {
        return m
    }
    return []models.RequestLog{}
}
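
Because every value in the stats map arrives as interface{}, the fallback behavior of these helpers is easy to pin down with a table-driven test. A minimal sketch, assuming the helpers live in a package named handler (the package name and the test itself are hypothetical, not part of this commit):

package handler

import (
    "reflect"
    "testing"
)

// Hypothetical test, not part of this commit: checks that the safe
// conversion helper returns an empty map instead of panicking when the
// stats value is missing or has an unexpected type.
func TestSafeStatusCodeStats(t *testing.T) {
    cases := []struct {
        name string
        in   interface{}
        want map[string]int64
    }{
        {"nil value", nil, map[string]int64{}},
        {"wrong type", "not a map", map[string]int64{}},
        {"valid map", map[string]int64{"2xx": 10}, map[string]int64{"2xx": 10}},
    }
    for _, c := range cases {
        if got := safeStatusCodeStats(c.in); !reflect.DeepEqual(got, c.want) {
            t.Errorf("%s: got %v, want %v", c.name, got, c.want)
        }
    }
}
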
@@ -181,32 +181,43 @@ func (c *Collector) RecordRequest(path string, status int, latency time.Duration
}

func (c *Collector) GetStats() map[string]interface{} {
    stats := c.statsPool.Get().(map[string]interface{})
    defer func() {
        // clear the map and return it to the pool
        for k := range stats {
            delete(stats, k)
        }
        c.statsPool.Put(stats)
    }()

    // check the cache first
    if stats, ok := c.cache.Get("stats"); ok {
        return stats.(map[string]interface{})
        if statsMap, ok := stats.(map[string]interface{}); ok {
            return statsMap
        }
    }

    var m runtime.MemStats
    runtime.ReadMemStats(&m)

    uptime := time.Since(c.startTime)
    totalRequests := atomic.LoadInt64(&c.totalRequests)
    totalErrors := atomic.LoadInt64(&c.totalErrors)
    // make sure every field is initialized
    stats := make(map[string]interface{})

    // get status code statistics
    // basic metrics
    stats["active_requests"] = atomic.LoadInt64(&c.activeRequests)
    stats["total_requests"] = atomic.LoadInt64(&c.totalRequests)
    stats["total_errors"] = atomic.LoadInt64(&c.totalErrors)
    stats["total_bytes"] = c.totalBytes.Load()

    // system metrics
    stats["num_goroutine"] = runtime.NumGoroutine()
    stats["memory_usage"] = FormatBytes(m.Alloc)

    // latency metrics
    totalRequests := atomic.LoadInt64(&c.totalRequests)
    if totalRequests > 0 {
        stats["avg_latency"] = c.latencySum.Load() / totalRequests
    } else {
        stats["avg_latency"] = int64(0)
    }

    // status code statistics
    statusStats := make(map[string]int64)
    for i := range c.statusStats {
        statusStats[fmt.Sprintf("%dxx", i+1)] = c.statusStats[i].Load()
    }
    stats["status_code_stats"] = statusStats

    // get the top 10 path statistics
    var pathMetrics []models.PathMetrics
@@ -227,17 +238,20 @@ func (c *Collector) GetStats() map[string]interface{} {
        return true
    })

    // sort by request count
    // sort by request count and take the top 10
    sort.Slice(allPaths, func(i, j int) bool {
        return allPaths[i].RequestCount > allPaths[j].RequestCount
    })

    // take the top 10
    if len(allPaths) > 10 {
        pathMetrics = allPaths[:10]
    } else {
        pathMetrics = allPaths
    }
    stats["top_paths"] = pathMetrics

    // get recent requests
    stats["recent_requests"] = c.getRecentRequests()

    // get the top 10 referer sources
    var refererMetrics []models.PathMetrics
@@ -254,43 +268,17 @@ func (c *Collector) GetStats() map[string]interface{} {
        return true
    })

    // sort by request count
    // sort by request count and take the top 10
    sort.Slice(allReferers, func(i, j int) bool {
        return allReferers[i].RequestCount > allReferers[j].RequestCount
    })

    // take the top 10
    if len(allReferers) > 10 {
        refererMetrics = allReferers[:10]
    } else {
        refererMetrics = allReferers
    }

    result := map[string]interface{}{
        "uptime": uptime.String(),
        "active_requests": atomic.LoadInt64(&c.activeRequests),
        "total_requests": totalRequests,
        "total_errors": totalErrors,
        "error_rate": float64(totalErrors) / float64(totalRequests),
        "num_goroutine": runtime.NumGoroutine(),
        "memory_usage": FormatBytes(m.Alloc),
        "total_bytes": c.totalBytes.Load(),
        "bytes_per_second": float64(c.totalBytes.Load()) / Max(uptime.Seconds(), 1),
        "avg_latency": func() int64 {
            if totalRequests > 0 {
                return int64(c.latencySum.Load() / totalRequests)
            }
            return 0
        }(),
        "status_code_stats": statusStats,
        "top_paths": pathMetrics,
        "recent_requests": c.getRecentRequests(),
        "top_referers": refererMetrics,
    }

    for k, v := range result {
        stats[k] = v
    }
    stats["top_referers"] = refererMetrics

    // check alerts
    c.monitor.CheckMetrics(stats)
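
GetStats above takes its scratch map from c.statsPool and returns it to the pool in a defer after clearing it. A standalone sketch of that sync.Pool reuse pattern follows; statsPool, buildStats, and the final copy-out into a caller-owned map are illustrative assumptions, not the Collector's actual code:

package main

import (
    "fmt"
    "sync"
)

// statsPool hands out reusable map[string]interface{} values so that a
// frequently-called stats endpoint does not allocate a fresh map each time.
var statsPool = sync.Pool{
    New: func() interface{} { return make(map[string]interface{}) },
}

func buildStats() map[string]interface{} {
    scratch := statsPool.Get().(map[string]interface{})
    defer func() {
        // Clear the map before putting it back, mirroring the defer in GetStats.
        for k := range scratch {
            delete(scratch, k)
        }
        statsPool.Put(scratch)
    }()

    scratch["total_requests"] = int64(42)

    // Copy into a fresh map the caller owns, since scratch is recycled.
    out := make(map[string]interface{}, len(scratch))
    for k, v := range scratch {
        out[k] = v
    }
    return out
}

func main() {
    fmt.Println(buildStats()) // map[total_requests:42]
}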