@@ -1,5 +1,6 @@
 package org.dataloader;
 
+import org.dataloader.annotations.GuardedBy;
 import org.dataloader.annotations.Internal;
 import org.dataloader.impl.CompletableFutureKit;
 import org.dataloader.stats.StatisticsCollector;
@@ -287,6 +288,7 @@ private void possiblyClearCacheEntriesOnExceptions(List<K> keys) {
         }
     }
 
+    @GuardedBy("dataLoader")
     private CompletableFuture<V> loadFromCache(K key, Object loadContext, boolean batchingEnabled) {
         final Object cacheKey = loadContext == null ? getCacheKey(key) : getCacheKeyWithContext(key, loadContext);
 
@@ -296,15 +298,12 @@ private CompletableFuture<V> loadFromCache(K key, Object loadContext, boolean ba
             return futureCache.get(cacheKey);
         }
 
-        CompletableFuture<V> loadCallFuture;
-        synchronized (dataLoader) {
-            loadCallFuture = queueOrInvokeLoader(key, loadContext, batchingEnabled, true);
-        }
-
+        CompletableFuture<V> loadCallFuture = queueOrInvokeLoader(key, loadContext, batchingEnabled, true);
         futureCache.set(cacheKey, loadCallFuture);
         return loadCallFuture;
     }
 
+    @GuardedBy("dataLoader")
     private CompletableFuture<V> queueOrInvokeLoader(K key, Object loadContext, boolean batchingEnabled, boolean cachingEnabled) {
         if (batchingEnabled) {
             CompletableFuture<V> loadCallFuture = new CompletableFuture<>();
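
Below is a minimal sketch (not the library's actual code) of the locking discipline the new @GuardedBy("dataLoader") annotations document: the assumption is that the public entry point already takes synchronized (dataLoader) before delegating, so loadFromCache and queueOrInvokeLoader are always invoked with the lock held and the inner synchronized block removed in the diff above is redundant. The LoaderSketch class and its load entry point are hypothetical stand-ins; only the method names mirror the diff.

import java.util.concurrent.CompletableFuture;

// Hypothetical illustration class; bodies are placeholders, not the real implementation.
class LoaderSketch<K, V> {
    // stand-in for the DataLoader instance used as the monitor object
    private final Object dataLoader = new Object();

    // assumed public entry point: the only place that acquires the lock
    CompletableFuture<V> load(K key, Object loadContext, boolean cachingEnabled, boolean batchingEnabled) {
        synchronized (dataLoader) {
            if (cachingEnabled) {
                return loadFromCache(key, loadContext, batchingEnabled);
            }
            return queueOrInvokeLoader(key, loadContext, batchingEnabled, false);
        }
    }

    // @GuardedBy("dataLoader"): callers must already hold the lock,
    // so no inner synchronized (dataLoader) block is needed here
    private CompletableFuture<V> loadFromCache(K key, Object loadContext, boolean batchingEnabled) {
        return queueOrInvokeLoader(key, loadContext, batchingEnabled, true);
    }

    // @GuardedBy("dataLoader"): same contract as above
    private CompletableFuture<V> queueOrInvokeLoader(K key, Object loadContext, boolean batchingEnabled, boolean cachingEnabled) {
        return new CompletableFuture<>(); // placeholder body
    }
}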