@@ -331,22 +331,33 @@ CompletableFuture<List<V>> invokeLoader(List<K> keys, List<Object> keyContexts,
         CompletableFuture<List<Try<V>>> cacheCallCF = getFromValueCache(keys);
         return cacheCallCF.thenCompose(cachedValues -> {

-            assertState(keys.size() == cachedValues.size(), () -> "The size of the cached values MUST be the same size as the key list");
-
             // the following is NOT a Map because keys in data loader can repeat (by design)
             // and hence "a","b","c","b" is a valid set of keys
             List<Try<V>> valuesInKeyOrder = new ArrayList<>();
             List<Integer> missedKeyIndexes = new ArrayList<>();
             List<K> missedKeys = new ArrayList<>();
             List<Object> missedKeyContexts = new ArrayList<>();
-            for (int i = 0; i < keys.size(); i++) {
-                Try<V> cacheGet = cachedValues.get(i);
-                valuesInKeyOrder.add(cacheGet);
-                if (cacheGet.isFailure()) {
+
+            // if they return a ValueCachingNotSupported exception then we insert this special marker value, and it
+            // means it's a total miss, we need to get all these keys via the batch loader
+            if (cachedValues == NOT_SUPPORTED_LIST) {
+                for (int i = 0; i < keys.size(); i++) {
+                    valuesInKeyOrder.add(ALWAYS_FAILED);
                     missedKeyIndexes.add(i);
                     missedKeys.add(keys.get(i));
                     missedKeyContexts.add(keyContexts.get(i));
                 }
+            } else {
+                assertState(keys.size() == cachedValues.size(), () -> "The size of the cached values MUST be the same size as the key list");
+                for (int i = 0; i < keys.size(); i++) {
+                    Try<V> cacheGet = cachedValues.get(i);
+                    valuesInKeyOrder.add(cacheGet);
+                    if (cacheGet.isFailure()) {
+                        missedKeyIndexes.add(i);
+                        missedKeys.add(keys.get(i));
+                        missedKeyContexts.add(keyContexts.get(i));
+                    }
+                }
             }
             if (missedKeys.isEmpty()) {
                 //
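The identity check against NOT_SUPPORTED_LIST in the hunk above relies on a sentinel object rather than on list contents. Below is a minimal, standalone sketch of that pattern with illustrative names that are not part of the library: one shared immutable list marks "value caching not supported", and callers detect it with ==, so a genuinely empty answer from a real cache can never be mistaken for an opt-out.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class SentinelMarkerSketch {

    // the single shared marker instance - never returned as real cached data
    private static final List<String> NOT_SUPPORTED_LIST = Collections.emptyList();

    static List<String> getFromCache(boolean cachingSupported) {
        // a real cache would return its own list; new ArrayList keeps the identity check unambiguous here
        return cachingSupported ? new ArrayList<String>() : NOT_SUPPORTED_LIST;
    }

    public static void main(String[] args) {
        List<String> cachedValues = getFromCache(false);
        if (cachedValues == NOT_SUPPORTED_LIST) {
            // identity check: treat every key as a total miss and go to the batch loader
            System.out.println("cache opted out - batch load all keys");
        } else {
            System.out.println("cache answered with " + cachedValues.size() + " entries");
        }
    }
}

The == comparison is deliberate: an equals() check would treat any empty list as the marker, which is exactly the ambiguity the sentinel avoids.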
@@ -442,9 +453,16 @@ int dispatchDepth() {
         }
     }

+    private final List<Try<V>> NOT_SUPPORTED_LIST = emptyList();
+    private final CompletableFuture<List<Try<V>>> NOT_SUPPORTED = CompletableFuture.completedFuture(NOT_SUPPORTED_LIST);
+    private final Try<V> ALWAYS_FAILED = Try.alwaysFailed();
+
     private CompletableFuture<List<Try<V>>> getFromValueCache(List<K> keys) {
         try {
             return nonNull(valueCache.getValues(keys), () -> "Your ValueCache.getValues function MUST return a non null CompletableFuture");
+        } catch (ValueCache.ValueCachingNotSupported ignored) {
+            // use of a final field prevents CF object allocation for this special purpose
+            return NOT_SUPPORTED;
         } catch (RuntimeException e) {
             return CompletableFutureKit.failedFuture(e);
         }
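The comment about the final field refers to reusing one already-completed future instead of allocating a fresh one every time the opt-out path is hit. A small sketch of that allocation-saving idea, under assumed names rather than the library's API:

import java.util.Collections;
import java.util.List;
import java.util.concurrent.CompletableFuture;

public class SharedCompletedFuture {

    private static final List<String> NOT_SUPPORTED_LIST = Collections.emptyList();
    // built once: an already-completed future handed out on every opt-out call
    private static final CompletableFuture<List<String>> NOT_SUPPORTED =
            CompletableFuture.completedFuture(NOT_SUPPORTED_LIST);

    static CompletableFuture<List<String>> fetch(boolean supported) {
        if (!supported) {
            return NOT_SUPPORTED; // same instance every time - no per-call allocation
        }
        return CompletableFuture.completedFuture(Collections.singletonList("value"));
    }

    public static void main(String[] args) {
        System.out.println(fetch(false) == fetch(false)); // true
        System.out.println(fetch(false).join());          // []
    }
}

Sharing a completed future this way is safe as long as downstream code only reads or composes on it and never completes, obtrudes, or cancels the shared instance.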
@@ -456,16 +474,18 @@ private CompletableFuture<List<V>> setToValueCache(List<V> assembledValues, List
             if (completeValueAfterCacheSet) {
                 return nonNull(valueCache
                         .setValues(missedKeys, missedValues), () -> "Your ValueCache.setValues function MUST return a non null CompletableFuture")
-                        // we dont trust the set cache to give us the values back - we have them - lets use them
-                        // if the cache set fails - then they wont be in cache and maybe next time they will
+                        // we don't trust the set cache to give us the values back - we have them - lets use them
+                        // if the cache set fails - then they won't be in cache and maybe next time they will
                         .handle((ignored, setExIgnored) -> assembledValues);
             } else {
                 // no one is waiting for the set to happen here so if its truly async
-                // it will happen eventually but no result will be dependant on it
+                // it will happen eventually but no result will be dependent on it
                 valueCache.setValues(missedKeys, missedValues);
             }
+        } catch (ValueCache.ValueCachingNotSupported ignored) {
+            // ok no set caching is fine if they say so
         } catch (RuntimeException ignored) {
-            // if we cant set values back into the cache - so be it - this must be a faulty
+            // if we can't set values back into the cache - so be it - this must be a faulty
             // ValueCache implementation
         }
         return CompletableFuture.completedFuture(assembledValues);
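The try/catch shape in setToValueCache treats the value cache as strictly best-effort: an opt-out exception or a faulty implementation must never fail the load, and the caller always completes with the values it already holds. A self-contained sketch of that shape, using hypothetical stand-in types rather than the real ValueCache interface:

import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CompletableFuture;

public class BestEffortCacheWrite {

    // stand-in for ValueCache.ValueCachingNotSupported - illustrative only
    static class CachingNotSupported extends RuntimeException {
    }

    // stand-in for the batch write half of a value cache
    interface Cache {
        CompletableFuture<List<String>> setValues(List<String> keys, List<String> values);
    }

    static CompletableFuture<List<String>> writeThrough(Cache cache, List<String> keys, List<String> values) {
        try {
            return cache.setValues(keys, values)
                    // even if the write fails later, hand back the values we already hold
                    .handle((ignoredResult, ignoredEx) -> values);
        } catch (CachingNotSupported ignored) {
            // the cache declined to store anything - that is fine
        } catch (RuntimeException ignored) {
            // a faulty cache implementation must never fail the load
        }
        return CompletableFuture.completedFuture(values);
    }

    public static void main(String[] args) {
        Cache optedOut = (keys, values) -> {
            throw new CachingNotSupported();
        };
        System.out.println(writeThrough(optedOut, Arrays.asList("a"), Arrays.asList("1")).join()); // prints [1]
    }
}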