Skip to content
This repository was archived by the owner on Apr 25, 2025. It is now read-only.

Commit 0c6b7f1

Browse files
committed
[FAB-11056] Fix caching in Fabric & Dynamic Selection
Both Fabric selection and Dynamic selection services cache GetEndorsersForChaincode requests even if the request contains a bad invocation chain. This patch adds an 'expiring' feature to lazycache that combines the features of lazycache with lazyref. Now if lazyref.Get() returns an error then the ref will not be cached. Change-Id: Ia5022c748d1dd504cd7d8a29f0bf0b5527edc76f Signed-off-by: Bob Stasyszyn <Bob.Stasyszyn@securekey.com>
1 parent 2ad5906 commit 0c6b7f1

File tree

10 files changed

+446
-101
lines changed

10 files changed

+446
-101
lines changed

pkg/client/common/selection/dynamicselection/dynamicselection.go

Lines changed: 3 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -90,13 +90,9 @@ func newService(context context.Client, channelID string, discovery fab.Discover
9090
service.pgResolvers = lazycache.New(
9191
"PG_Resolver_Cache",
9292
func(key lazycache.Key) (interface{}, error) {
93-
return lazyref.New(
94-
func() (interface{}, error) {
95-
return service.createPGResolver(key.(*resolverKey))
96-
},
97-
lazyref.WithAbsoluteExpiration(service.cacheTimeout),
98-
), nil
93+
return service.createPGResolver(key.(*resolverKey))
9994
},
95+
lazyref.WithAbsoluteExpiration(service.cacheTimeout),
10096
)
10197

10298
return service, nil
@@ -150,12 +146,7 @@ func (s *SelectionService) Close() {
150146
}
151147

152148
func (s *SelectionService) getPeerGroupResolver(chaincodeIDs []string) (pgresolver.PeerGroupResolver, error) {
153-
value, err := s.pgResolvers.Get(newResolverKey(s.channelID, chaincodeIDs...))
154-
if err != nil {
155-
return nil, err
156-
}
157-
lazyRef := value.(*lazyref.Reference)
158-
resolver, err := lazyRef.Get()
149+
resolver, err := s.pgResolvers.Get(newResolverKey(s.channelID, chaincodeIDs...))
159150
if err != nil {
160151
return nil, err
161152
}

pkg/client/common/selection/fabricselection/fabricselection.go

Lines changed: 12 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -100,10 +100,19 @@ func New(ctx contextAPI.Client, channelID string, discovery fab.DiscoveryService
100100
}
101101

102102
s.chResponseCache = lazycache.New(
103-
"Channel_Response_Cache",
103+
"Fabric_Selection_Cache",
104104
func(key lazycache.Key) (interface{}, error) {
105-
return s.newChannelResponseRef(key.(*cacheKey).chaincodes, options.refreshInterval), nil
105+
invocationChain := key.(*cacheKey).chaincodes
106+
if logging.IsEnabledFor(moduleName, logging.DEBUG) {
107+
key, err := json.Marshal(invocationChain)
108+
if err != nil {
109+
panic(fmt.Sprintf("marshal of chaincodes failed: %s", err))
110+
}
111+
logger.Debugf("Refreshing endorsers for chaincodes [%s] in channel [%s] from discovery service...", key, channelID)
112+
}
113+
return s.queryEndorsers(invocationChain)
106114
},
115+
lazyref.WithRefreshInterval(lazyref.InitImmediately, options.refreshInterval),
107116
)
108117

109118
return s, nil
@@ -158,30 +167,9 @@ func (s *Service) getEndorsers(chaincodes []*fab.ChaincodeCall, chResponse discc
158167
return endpoints, err
159168
}
160169

161-
func (s *Service) newChannelResponseRef(chaincodes []*fab.ChaincodeCall, refreshInterval time.Duration) *lazyref.Reference {
162-
return lazyref.New(
163-
func() (interface{}, error) {
164-
if logging.IsEnabledFor(moduleName, logging.DEBUG) {
165-
key, err := json.Marshal(chaincodes)
166-
if err != nil {
167-
panic(fmt.Sprintf("marshal of chaincodes failed: %s", err))
168-
}
169-
170-
logger.Debugf("Refreshing endorsers for chaincodes [%s] in channel [%s] from discovery service...", key, s.channelID)
171-
}
172-
return s.queryEndorsers(chaincodes)
173-
},
174-
lazyref.WithRefreshInterval(lazyref.InitImmediately, refreshInterval),
175-
)
176-
}
177-
178170
func (s *Service) getChannelResponse(chaincodes []*fab.ChaincodeCall) (discclient.ChannelResponse, error) {
179171
key := newCacheKey(chaincodes)
180-
ref, err := s.chResponseCache.Get(key)
181-
if err != nil {
182-
return nil, err
183-
}
184-
chResp, err := ref.(*lazyref.Reference).Get()
172+
chResp, err := s.chResponseCache.Get(key)
185173
if err != nil {
186174
return nil, err
187175
}

pkg/fabsdk/provider/chpvdr/chprovider.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ import (
2929
var logger = logging.NewLogger("fabsdk")
3030

3131
type cache interface {
32-
Get(lazycache.Key) (interface{}, error)
32+
Get(lazycache.Key, ...interface{}) (interface{}, error)
3333
Close()
3434
}
3535

pkg/fabsdk/provider/chpvdr/fabprovider_testing_env.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ func newChCfgRef(cfg fab.ChannelCfg) *chconfig.Ref {
4747
}
4848

4949
// Get mock channel config reference
50-
func (m *chCfgCache) Get(k lazycache.Key) (interface{}, error) {
50+
func (m *chCfgCache) Get(k lazycache.Key, data ...interface{}) (interface{}, error) {
5151
cfg, ok := m.cfgMap.Load(k.(chconfig.CacheKey).ChannelID())
5252
if !ok {
5353
return nil, errors.New("Channel config not found in cache")

pkg/util/concurrent/lazycache/lazycache.go

Lines changed: 98 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,9 @@ import (
1212
"sync/atomic"
1313

1414
"github.com/hyperledger/fabric-sdk-go/pkg/common/logging"
15+
"github.com/hyperledger/fabric-sdk-go/pkg/common/options"
1516
"github.com/hyperledger/fabric-sdk-go/pkg/util/concurrent/futurevalue"
17+
"github.com/hyperledger/fabric-sdk-go/pkg/util/concurrent/lazyref"
1618
"github.com/pkg/errors"
1719
)
1820

@@ -26,6 +28,12 @@ type Key interface {
2628
// EntryInitializer creates a cache value for the given key
2729
type EntryInitializer func(key Key) (interface{}, error)
2830

31+
// EntryInitializerWithData creates a cache value for the given key and the
32+
// additional data passed in from Get(). With expiring cache entries, the
33+
// initializer is called with the same key, but the latest data is passed from
34+
// the Get() call that triggered the data to be cached/re-cached.
35+
type EntryInitializerWithData func(key Key, data interface{}) (interface{}, error)
36+
2937
type future interface {
3038
Get() (interface{}, error)
3139
MustGet() interface{}
@@ -44,16 +52,42 @@ type Cache struct {
4452
// name is useful for debugging
4553
name string
4654
m sync.Map
47-
initializer EntryInitializer
55+
initializer EntryInitializerWithData
4856
closed int32
57+
useRef bool
58+
}
59+
60+
// New creates a new lazy cache.
61+
// - name is the name of the cache and is only used for debugging purposes
62+
// - initializer is invoked the first time an entry is being cached
63+
// - opts are options for the cache. If any lazyref option is passed then a lazy reference
64+
// is created for each of the cache entries to hold the actual value. This makes it possible
65+
// to have expiring values and values that proactively refresh.
66+
func New(name string, initializer EntryInitializer, opts ...options.Opt) *Cache {
67+
return NewWithData(name,
68+
func(key Key, data interface{}) (interface{}, error) {
69+
return initializer(key)
70+
},
71+
opts...,
72+
)
4973
}
5074

51-
// New creates a new lazy cache with the given name
52-
// (Note that the name is only used for debugging purpose)
53-
func New(name string, initializer EntryInitializer) *Cache {
75+
// NewWithData creates a new lazy cache. The provided initializer accepts optional data that
76+
// is passed in from Get().
77+
// - name is the name of the cache and is only used for debugging purposes
78+
// - initializer is invoked the first time an entry is being cached
79+
// - opts are options for the cache. If any lazyref option is passed then a lazy reference
80+
// is created for each of the cache entries to hold the actual value. This makes it possible
81+
// to have expiring values and values that proactively refresh.
82+
func NewWithData(name string, initializer EntryInitializerWithData, opts ...options.Opt) *Cache {
83+
useRef := useLazyRef(opts...)
84+
if useRef {
85+
initializer = newLazyRefInitializer(name, initializer, opts...)
86+
}
5487
return &Cache{
5588
name: name,
5689
initializer: initializer,
90+
useRef: useRef,
5791
}
5892
}
5993

@@ -67,12 +101,16 @@ func (c *Cache) Name() string {
67101
// to create the value, and the key is inserted. If the
68102
// initializer returns an error then the key is removed
69103
// from the cache.
70-
func (c *Cache) Get(key Key) (interface{}, error) {
104+
func (c *Cache) Get(key Key, data ...interface{}) (interface{}, error) {
71105
keyStr := key.String()
72106

73107
f, ok := c.m.Load(keyStr)
74108
if ok {
75-
return f.(future).Get()
109+
v, err := f.(future).Get()
110+
if err != nil {
111+
return nil, err
112+
}
113+
return c.value(v, first(data))
76114
}
77115

78116
// The key wasn't found. Attempt to add one.
@@ -81,24 +119,29 @@ func (c *Cache) Get(key Key) (interface{}, error) {
81119
if closed := atomic.LoadInt32(&c.closed); closed == 1 {
82120
return nil, errors.Errorf("%s - cache is closed", c.name)
83121
}
84-
return c.initializer(key)
122+
return c.initializer(key, first(data))
85123
},
86124
)
87125

88126
f, loaded := c.m.LoadOrStore(keyStr, newFuture)
89127
if loaded {
90128
// Another thread has added the key before us. Return the value.
91-
return f.(future).Get()
129+
v, err := f.(future).Get()
130+
if err != nil {
131+
return nil, err
132+
}
133+
return c.value(v, first(data))
92134
}
93135

94-
// We added the key. It must be initailized.
136+
// We added the key. It must be initialized.
95137
value, err := newFuture.Initialize()
96138
if err != nil {
97139
// Failed. Delete the key.
98140
logger.Debugf("%s - Failed to initialize key [%s]: %s. Deleting key.", c.name, keyStr, err)
99141
c.m.Delete(keyStr)
142+
return nil, err
100143
}
101-
return value, err
144+
return c.value(value, first(data))
102145
}
103146

104147
// MustGet returns the value for the given key. If the key doesn't
@@ -150,3 +193,48 @@ func (c *Cache) close(key string, f future) {
150193
}
151194
}
152195
}
196+
197+
func newLazyRefInitializer(name string, initializer EntryInitializerWithData, opts ...options.Opt) EntryInitializerWithData {
198+
return func(key Key, data interface{}) (interface{}, error) {
199+
logger.Debugf("%s - Calling initializer for [%s], data [%#v]", name, key, data)
200+
ref := lazyref.NewWithData(
201+
func(data interface{}) (interface{}, error) {
202+
logger.Debugf("%s - Calling lazyref initializer for [%s], data [%#v]", name, key, data)
203+
return initializer(key, data)
204+
},
205+
opts...,
206+
)
207+
208+
// Make sure no error is returned from lazyref.Get(). If there is
209+
// then return the error. We don't want to cache a reference that always
210+
// returns an error, especially if it's a refreshing reference.
211+
_, err := ref.Get(data)
212+
if err != nil {
213+
logger.Debugf("%s - Error returned from lazyref initializer [%s], data [%#v]: %s", name, key, data, err)
214+
return nil, err
215+
}
216+
logger.Debugf("%s - Returning lazyref for [%s], data [%#v]", name, key, data)
217+
return ref, nil
218+
}
219+
}
220+
221+
func (c *Cache) value(value interface{}, data interface{}) (interface{}, error) {
222+
if value != nil && c.useRef {
223+
return value.(*lazyref.Reference).Get(data)
224+
}
225+
return value, nil
226+
}
227+
228+
func first(data []interface{}) interface{} {
229+
if len(data) == 0 {
230+
return nil
231+
}
232+
return data[0]
233+
}
234+
235+
// useLazyRef returns true if the cache should use lazy references to hold the actual value
236+
func useLazyRef(opts ...options.Opt) bool {
237+
chk := &refOptCheck{}
238+
options.Apply(chk, opts)
239+
return chk.useRef
240+
}

0 commit comments

Comments
 (0)