@@ -395,7 +395,7 @@ t.test('forceRefresh', async t => {
395 395     return status
396 396   }
397 397
398     -   const cache = new LRU<number, number>({
    398 +   const cache = new LRU<number, number, boolean>({
399 399     max: 10,
400 400     allowStale: true,
401 401     ttl: 100,
@@ -413,33 +413,52 @@ t.test('forceRefresh', async t => {
413 413   // put in some values that don't match what fetchMethod returns
414 414   cache.set(1, 100)
415 415   cache.set(2, 200)
416     -   t.equal(await cache.fetch(1), 100)
    416 +   t.equal(await cache.fetch(1, { context: true }), 100)
417 417   // still there, because we're allowing stale, and it's not stale
418 418   const status: LRUCache.Status<number, number> = {}
419 419   t.equal(
420 420     await cache.fetch(2, {
421 421       forceRefresh: true,
422 422       allowStale: false,
423 423       status,
    424 +       context: false,
424 425     }),
425 426     2,
426 427   )
427 428   t.equal(status.fetch, 'refresh', 'status reflects forced refresh')
428     -   t.equal(await cache.fetch(1, { forceRefresh: true }), 100)
    429 +   t.equal(
    430 +     await cache.fetch(1, { forceRefresh: true, context: false }),
    431 +     100,
    432 +   )
429 433   clock.advance(100)
430     -   t.equal(await cache.fetch(2, { forceRefresh: true, status: s() }), 2)
    434 +   t.equal(
    435 +     await cache.fetch(2, {
    436 +       forceRefresh: true,
    437 +       status: s(),
    438 +       context: false,
    439 +     }),
    440 +     2,
    441 +   )
431 442   t.equal(cache.peek(1), 100)
432 443   // if we don't allow stale though, then that means that we wait
433 444   // for the background fetch to complete, so we get the updated value.
434     -   t.equal(await cache.fetch(1, { allowStale: false, status: s() }), 1)
    445 +   t.equal(
    446 +     await cache.fetch(1, {
    447 +       allowStale: false,
    448 +       context: false,
    449 +       status: s(),
    450 +     }),
    451 +     1,
    452 +   )
435 453
436 454   cache.set(1, 100)
437     -   t.equal(await cache.fetch(1, { allowStale: false }), 100)
    455 +   t.equal(await cache.fetch(1, { allowStale: false, context: false }), 100)
438 456   t.equal(
439 457     await cache.fetch(1, {
440 458       forceRefresh: true,
441 459       allowStale: false,
442 460       status: s(),
    461 +       context: true,
443 462     }),
444 463     1,
445 464   )
@@ -599,7 +618,7 @@ t.test('abort, but then keep on fetching anyway', async t => {
599 618   let aborted: Error | undefined = undefined
600 619   let resolved: boolean = false
601 620   let returnUndefined: boolean = false
602     -   const cache = new LRU<number, number>({
    621 +   const cache = new LRU<number, number, boolean>({
603 622     max: 10,
604 623     ignoreFetchAbort: true,
605 624     fetchMethod: async (k, _, { signal, options }) => {
@@ -618,7 +637,7 @@ t.test('abort, but then keep on fetching anyway', async t => {
618 637   })
619 638   const ac = new AbortController()
620 639   const status: LRUCache.Status<number, number> = {}
621     -   const p = cache.fetch(1, { signal: ac.signal, status })
    640 +   const p = cache.fetch(1, { signal: ac.signal, status, context: true })
622 641   const er = new Error('ignored abort signal')
623 642   ac.abort(er)
624 643   clock.advance(100)
@@ -632,7 +651,7 @@ t.test('abort, but then keep on fetching anyway', async t => {
632 651   t.equal(ac.signal.reason, er)
633 652   t.equal(cache.get(1), 1)
634 653
635     -   const p2 = cache.forceFetch(2)
    654 +   const p2 = cache.forceFetch(2, { context: true })
636 655   t.equal(cache.get(2), undefined)
637 656   cache.delete(2)
638 657   t.equal(cache.get(2), undefined)
@@ -641,7 +660,7 @@ t.test('abort, but then keep on fetching anyway', async t => {
641 660   t.equal(cache.get(2), undefined)
642 661
643 662   // if aborted for cause, we don't save the fetched value
644     -   const p3 = cache.fetch(3)
    663 +   const p3 = cache.fetch(3, { context: true })
645 664   t.equal(cache.get(3), undefined)
646 665   cache.set(3, 33)
647 666   t.equal(cache.get(3), 33)
@@ -652,10 +671,10 @@ t.test('abort, but then keep on fetching anyway', async t => {
652 671   const e = expose(cache)
653 672   returnUndefined = true
654 673   const before = e.valList.slice()
655     -   const p4 = cache.fetch(4)
    674 +   const p4 = cache.fetch(4, { context: true })
656 675   clock.advance(100)
657 676   t.equal(await p4, undefined)
658     -   const p5 = cache.forceFetch(4)
    677 +   const p5 = cache.forceFetch(4, { context: true })
659 678   clock.advance(100)
660 679   await t.rejects(p5, { message: 'fetch() returned undefined' })
661 680   t.same(e.valList, before, 'did not update values with undefined')