fix metrics.

pull/233/head
shunza 6 years ago
parent 33da144c4a
commit d7a638bce5
1. tool/kratos-gen-bts/testdata/dao.bts.go (26 changed lines)
2. tool/kratos-gen-mc/testdata/mc.cache.go (20 changed lines)

--- a/tool/kratos-gen-bts/testdata/dao.bts.go
+++ b/tool/kratos-gen-bts/testdata/dao.bts.go
@@ -23,14 +23,12 @@ import (
 	"context"
 	"sync"

-	"github.com/bilibili/kratos/pkg/stat/metric"
+	"github.com/bilibili/kratos/pkg/cache"
 	"github.com/bilibili/kratos/pkg/sync/errgroup"
 )

 var (
 	_ _bts
-	_metricHits   = metric.NewBusinessMetricCount("hits_total", "NAME")
-	_metricMisses = metric.NewBusinessMetricCount("misses_total", "NAME")
 )

 // Demos get data from cache if miss will call source method, then add to cache.
@@ -50,7 +48,7 @@ func (d *Dao) Demos(c context.Context, keys []int64) (res map[int64]*Demo, err e
 			miss = append(miss, key)
 		}
 	}
-	_metricHits.Add(float64(len(keys)-len(miss)), "Demos")
+	cache.MetricHits.Add(float64(len(keys)-len(miss)), "bts:Demos")
 	for k, v := range res {
 		if v.ID == -1 {
 			delete(res, k)
@@ -61,7 +59,7 @@ func (d *Dao) Demos(c context.Context, keys []int64) (res map[int64]*Demo, err e
 		return
 	}
 	missData := make(map[int64]*Demo, missLen)
-	_metricMisses.Add(int64(missLen), "Demos")
+	cache.MetricMisses.Add(float64(missLen), "bts:Demos")
 	var mutex sync.Mutex
 	group := errgroup.WithCancel(c)
 	if missLen > 20 {
@@ -129,7 +127,7 @@ func (d *Dao) Demos1(c context.Context, keys []int64) (res map[int64]*Demo, err
 			miss = append(miss, key)
 		}
 	}
-	_metricHits.Add(float64(len(keys)-len(miss)), "Demos1")
+	cache.MetricHits.Add(float64(len(keys)-len(miss)), "bts:Demos1")
 	for k, v := range res {
 		if v.ID == -1 {
 			delete(res, k)
@@ -140,7 +138,7 @@ func (d *Dao) Demos1(c context.Context, keys []int64) (res map[int64]*Demo, err
 		return
 	}
 	missData := make(map[int64]*Demo, missLen)
-	_metricMisses.Add(int64(missLen), "Demos1")
+	cache.MetricMisses.Add(float64(missLen), "bts:Demos1")
 	var mutex sync.Mutex
 	group := errgroup.WithContext(c)
 	if missLen > 20 {
@@ -205,10 +203,10 @@ func (d *Dao) Demo(c context.Context, key int64) (res *Demo, err error) {
 		}
 	}()
 	if res != nil {
-		prom.CacheHit.Incr("Demo")
+		cache.MetricHits.Inc("bts:Demo")
 		return
 	}
-	prom.CacheMiss.Incr("Demo")
+	cache.MetricMisses.Inc("bts:Demo")
 	res, err = d.RawDemo(c, key)
 	if err != nil {
 		return
@@ -233,11 +231,11 @@ func (d *Dao) Demo1(c context.Context, key int64, pn int, ps int) (res *Demo, er
 		err = nil
 	}
 	if res != nil {
-		prom.CacheHit.Incr("Demo1")
+		cache.MetricHits.Inc("bts:Demo1")
 		return
 	}
 	var miss *Demo
-	prom.CacheMiss.Incr("Demo1")
+	cache.MetricMisses.Inc("bts:Demo1")
 	res, miss, err = d.RawDemo1(c, key, pn, ps)
 	if err != nil {
 		return
@@ -265,10 +263,10 @@ func (d *Dao) None(c context.Context) (res *Demo, err error) {
 		}
 	}()
 	if res != nil {
-		_metricHits.Incr("None")
+		cache.MetricHits.Inc("bts:None")
 		return
 	}
-	_metricMisses.Incr("None")
+	cache.MetricMisses.Inc("bts:None")
 	res, err = d.RawNone(c)
 	if err != nil {
 		return
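
A minimal, illustrative sketch (not part of the commit) of the single-key hit/miss pattern the regenerated bts testdata follows above. Only cache.MetricHits.Inc, cache.MetricMisses.Inc and the "bts:" label prefix are taken from the diff; Item, lookup and load are hypothetical stand-ins for the generated CacheDemo/RawDemo pair.

// Sketch only (assumptions labelled above, not from the commit): single-key
// hit/miss accounting in the style of the Demo/Demo1/None hunks.
package main

import (
	"context"
	"fmt"

	"github.com/bilibili/kratos/pkg/cache"
)

// Item is a placeholder for the cached value type (Demo in the testdata).
type Item struct{ ID int64 }

// get records a hit when lookup finds a value, otherwise records a miss and
// falls back to load.
func get(
	c context.Context,
	key int64,
	lookup func(context.Context, int64) (*Item, error),
	load func(context.Context, int64) (*Item, error),
) (*Item, error) {
	res, err := lookup(c, key)
	if err != nil {
		return nil, err
	}
	if res != nil {
		cache.MetricHits.Inc("bts:Item") // same call shape as cache.MetricHits.Inc("bts:Demo")
		return res, nil
	}
	cache.MetricMisses.Inc("bts:Item") // miss: fall through to the source method
	return load(c, key)
}

func main() {
	cached := map[int64]*Item{1: {ID: 1}}
	lookup := func(_ context.Context, k int64) (*Item, error) { return cached[k], nil }
	load := func(_ context.Context, k int64) (*Item, error) { return &Item{ID: k}, nil }

	hit, _ := get(context.Background(), 1, lookup, load)  // counted as a hit
	miss, _ := get(context.Background(), 2, lookup, load) // counted as a miss
	fmt.Println(hit.ID, miss.ID)
}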

--- a/tool/kratos-gen-mc/testdata/mc.cache.go
+++ b/tool/kratos-gen-mc/testdata/mc.cache.go
@@ -48,12 +48,10 @@ import (
 	"github.com/bilibili/kratos/pkg/cache/memcache"
 	"github.com/bilibili/kratos/pkg/log"
-	"github.com/bilibili/kratos/pkg/stat/metric"
 )

 var (
 	_ _mc
-	_metricErrCount = metric.NewBusinessMetricCount("mc_error_total", "NAME")
 )

 // CacheDemos get data from mc
@@ -71,7 +69,6 @@ func (d *Dao) CacheDemos(c context.Context, ids []int64) (res map[int64]*Demo, e
 	}
 	replies, err := d.mc.GetMulti(c, keys)
 	if err != nil {
-		_metricErrCount.Inc("CacheDemos")
 		log.Errorv(c, log.KV("CacheDemos", fmt.Sprintf("%+v", err)), log.KV("keys", keys))
 		return
 	}
@@ -79,7 +76,6 @@ func (d *Dao) CacheDemos(c context.Context, ids []int64) (res map[int64]*Demo, e
 		v := &Demo{}
 		err = replies.Scan(key, v)
 		if err != nil {
-			_metricErrCount.Inc("CacheDemos")
 			log.Errorv(c, log.KV("CacheDemos", fmt.Sprintf("%+v", err)), log.KV("key", key))
 			return
 		}
@@ -102,7 +98,6 @@ func (d *Dao) CacheDemo(c context.Context, id int64) (res *Demo, err error) {
 		}
 	}
 	if err != nil {
-		_metricErrCount.Inc("CacheDemo")
 		log.Errorv(c, log.KV("CacheDemo", fmt.Sprintf("%+v", err)), log.KV("key", key))
 		return
 	}
@@ -120,7 +115,6 @@ func (d *Dao) CacheDemo1(c context.Context, id int64, mid int64) (res *Demo, err
 		}
 	}
 	if err != nil {
-		_metricErrCount.Inc("CacheDemo1")
 		log.Errorv(c, log.KV("CacheDemo1", fmt.Sprintf("%+v", err)), log.KV("key", key))
 		return
 	}
@@ -139,7 +133,6 @@ func (d *Dao) CacheNone(c context.Context) (res *Demo, err error) {
 		}
 	}
 	if err != nil {
-		_metricErrCount.Inc("CacheNone")
 		log.Errorv(c, log.KV("CacheNone", fmt.Sprintf("%+v", err)), log.KV("key", key))
 		return
 	}
@@ -155,7 +148,6 @@ func (d *Dao) CacheString(c context.Context, id int64) (res string, err error) {
 			err = nil
 			return
 		}
-		_metricErrCount.Inc("CacheString")
 		log.Errorv(c, log.KV("CacheString", fmt.Sprintf("%+v", err)), log.KV("key", key))
 		return
 	}
@@ -171,7 +163,6 @@ func (d *Dao) AddCacheDemos(c context.Context, values map[int64]*Demo) (err erro
 		key := demoKey(id)
 		item := &memcache.Item{Key: key, Object: val, Expiration: d.demoExpire, Flags: memcache.FlagJSON}
 		if err = d.mc.Set(c, item); err != nil {
-			_metricErrCount.Inc("AddCacheDemos")
 			log.Errorv(c, log.KV("AddCacheDemos", fmt.Sprintf("%+v", err)), log.KV("key", key))
 			return
 		}
@@ -188,7 +179,6 @@ func (d *Dao) AddCacheDemos2(c context.Context, values map[int64]*Demo, tp int64
 		key := demo2Key(id, tp)
 		item := &memcache.Item{Key: key, Object: val, Expiration: d.demoExpire, Flags: memcache.FlagJSON}
 		if err = d.mc.Set(c, item); err != nil {
-			_metricErrCount.Inc("AddCacheDemos2")
 			log.Errorv(c, log.KV("AddCacheDemos2", fmt.Sprintf("%+v", err)), log.KV("key", key))
 			return
 		}
@@ -204,7 +194,6 @@ func (d *Dao) AddCacheDemo(c context.Context, id int64, val *Demo) (err error) {
 	key := demoKey(id)
 	item := &memcache.Item{Key: key, Object: val, Expiration: d.demoExpire, Flags: memcache.FlagJSON | memcache.FlagGzip}
 	if err = d.mc.Set(c, item); err != nil {
-		_metricErrCount.Incr("AddCacheDemo")
 		log.Errorv(c, log.KV("AddCacheDemo", fmt.Sprintf("%+v", err)), log.KV("key", key))
 		return
 	}
@@ -219,7 +208,6 @@ func (d *Dao) AddCacheDemo1(c context.Context, id int64, val *Demo, mid int64) (
 	key := keyMid(id, mid)
 	item := &memcache.Item{Key: key, Object: val, Expiration: d.demoExpire, Flags: memcache.FlagGOB}
 	if err = d.mc.Set(c, item); err != nil {
-		_metricErrCount.Incr("AddCacheDemo1")
 		log.Errorv(c, log.KV("AddCacheDemo1", fmt.Sprintf("%+v", err)), log.KV("key", key))
 		return
 	}
@@ -234,7 +222,6 @@ func (d *Dao) AddCacheNone(c context.Context, val *Demo) (err error) {
 	key := noneKey()
 	item := &memcache.Item{Key: key, Object: val, Expiration: d.demoExpire, Flags: memcache.FlagJSON}
 	if err = d.mc.Set(c, item); err != nil {
-		_metricErrCount.Inc("AddCacheNone")
 		log.Errorv(c, log.KV("AddCacheNone", fmt.Sprintf("%+v", err)), log.KV("key", key))
 		return
 	}
@@ -250,7 +237,6 @@ func (d *Dao) AddCacheString(c context.Context, id int64, val string) (err error
 	bs := []byte(val)
 	item := &memcache.Item{Key: key, Value: bs, Expiration: d.demoExpire, Flags: memcache.FlagRAW}
 	if err = d.mc.Set(c, item); err != nil {
-		_metricErrCount.Incr("AddCacheString")
 		log.Errorv(c, log.KV("AddCacheString", fmt.Sprintf("%+v", err)), log.KV("key", key))
 		return
 	}
@@ -269,7 +255,6 @@ func (d *Dao) DelCacheDemos(c context.Context, ids []int64) (err error) {
 				err = nil
 				continue
 			}
-			_metricErrCount.Inc("DelCacheDemos")
 			log.Errorv(c, log.KV("DelCacheDemos", fmt.Sprintf("%+v", err)), log.KV("key", key))
 			return
 		}
@@ -285,7 +270,6 @@ func (d *Dao) DelCacheDemo(c context.Context, id int64) (err error) {
 			err = nil
 			return
 		}
-		_metricErrCount.Incr("DelCacheDemo")
 		log.Errorv(c, log.KV("DelCacheDemo", fmt.Sprintf("%+v", err)), log.KV("key", key))
 		return
 	}
@@ -300,7 +284,6 @@ func (d *Dao) DelCacheDemo1(c context.Context, id int64, mid int64) (err error)
 			err = nil
 			return
 		}
-		_metricErrCount.Incr("DelCacheDemo1")
 		log.Errorv(c, log.KV("DelCacheDemo1", fmt.Sprintf("%+v", err)), log.KV("key", key))
 		return
 	}
@@ -315,7 +298,6 @@ func (d *Dao) DelCacheNone(c context.Context) (err error) {
 			err = nil
 			return
 		}
-		_metricErrCount.Inc("DelCacheNone")
 		log.Errorv(c, log.KV("DelCacheNone", fmt.Sprintf("%+v", err)), log.KV("key", key))
 		return
 	}
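
For the multi-key paths (Demos/Demos1 in the first file) the regenerated code adds hits and misses in bulk rather than per key. Below is a sketch of that accounting under the same assumptions as the earlier one: Item, batchLookup and batchLoad are hypothetical stand-ins, while cache.MetricHits.Add, cache.MetricMisses.Add and the "bts:" labels come from the diff. On the mc side the commit only removes the _metricErrCount counter; the log.Errorv error paths are unchanged.

// Sketch only (assumptions labelled above, not from the commit): bulk
// hit/miss accounting in the style of the Demos/Demos1 hunks.
package main

import (
	"context"

	"github.com/bilibili/kratos/pkg/cache"
)

// Item is a placeholder for the cached value type (Demo in the testdata).
type Item struct{ ID int64 }

// getAll counts every key found by batchLookup as a hit and the remainder as
// misses before loading the missing keys from source.
func getAll(
	c context.Context,
	keys []int64,
	batchLookup func(context.Context, []int64) (map[int64]*Item, error),
	batchLoad func(context.Context, []int64) (map[int64]*Item, error),
) (map[int64]*Item, error) {
	res, err := batchLookup(c, keys)
	if err != nil {
		return nil, err
	}
	var miss []int64
	for _, key := range keys {
		if res[key] == nil {
			miss = append(miss, key)
		}
	}
	// Same call shape as cache.MetricHits.Add(float64(len(keys)-len(miss)), "bts:Demos").
	cache.MetricHits.Add(float64(len(keys)-len(miss)), "bts:Item")
	if len(miss) == 0 {
		return res, nil
	}
	cache.MetricMisses.Add(float64(len(miss)), "bts:Item")
	missData, err := batchLoad(c, miss)
	if err != nil {
		return res, err
	}
	if res == nil {
		res = make(map[int64]*Item, len(keys))
	}
	for k, v := range missData {
		res[k] = v
	}
	return res, nil
}

func main() {
	// Wiring is omitted; see the single-key sketch above for a usage example.
}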
