@@ -23,25 +23,35 @@ export const fetchAllDocuments = ({ databaseTypeDefs, previousCache, schemaDef,
         S.chain((pages) =>
           pipe(
             S.effect(
-              T.forEachParN_(pages, os.cpus().length, (page) =>
-                makeCacheItem({
-                  page,
-                  documentTypeDef: schemaDef.documentTypeDefMap[databaseTypeDef.name]!,
-                  databaseTypeDef,
-                  previousCache,
-                  options,
-                }),
+              pipe(
+                T.forEachParN_(pages, os.cpus().length, (page) =>
+                  makeCacheItem({
+                    page,
+                    documentTypeDef: schemaDef.documentTypeDefMap[databaseTypeDef.name]!,
+                    databaseTypeDef,
+                    previousCache,
+                    options,
+                  }),
+                ),
+              ),
             ),
             OT.withStreamSpan('@contentlayer/source-notion/fetchData:makeCacheItems'),
           ),
         ),
         S.runCollect,
-        T.chain((chunks) => T.reduce_(chunks, [] as DataCache.CacheItem[], (z, a) => T.succeed([...z, ...a]))),
+        T.chain((chunks) =>
+          T.reduce_(chunks, [] as { fromCache: boolean; cacheItem: DataCache.CacheItem }[], (z, a) =>
+            T.succeed([...z, ...a]),
+          ),
+        ),
       ),
     ),
-    T.map((chunks) => Chunk.reduce_(chunks, [] as DataCache.CacheItem[], (z, a) => [...z, ...a])),
-    T.map((documents) => ({ cacheItemsMap: Object.fromEntries(documents.map((_) => [_.document._id, _])) })),
+    T.map((chunks) =>
+      Chunk.reduce_(chunks, [] as { fromCache: boolean; cacheItem: DataCache.CacheItem }[], (z, a) => [...z, ...a]),
+    ),
+    T.map((documents) => ({
+      cacheItemsMap: Object.fromEntries(documents.map((_) => [_.cacheItem.document._id, _.cacheItem])),
+    })),
     OT.withSpan('@contentlayer/source-notion/fetchData:fetchAllDocuments'),
    T.mapError((error) => new core.SourceFetchDataError({ error, alreadyHandled: false })),
  )
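
For context on the shape change: the collected stream now yields `{ fromCache, cacheItem }` pairs instead of bare `DataCache.CacheItem`s, and the flag is dropped when `cacheItemsMap` is built. Below is a minimal sketch of that flatten-and-index step outside the effect-ts pipeline; the types and helper names are simplified placeholders, not the actual contentlayer definitions.

```ts
// Simplified stand-in for DataCache.CacheItem: only the field used as the map key.
type CacheItem = { document: { _id: string } }

// New per-page element shape: the cache item plus a fromCache flag.
type FetchResult = { fromCache: boolean; cacheItem: CacheItem }

// Flatten the per-database chunks into one list, mirroring the reduce_ / Chunk.reduce_ steps.
const flatten = (chunks: FetchResult[][]): FetchResult[] =>
  chunks.reduce<FetchResult[]>((z, a) => [...z, ...a], [])

// Build cacheItemsMap keyed by document _id, discarding the fromCache flag.
const toCacheItemsMap = (results: FetchResult[]) => ({
  cacheItemsMap: Object.fromEntries(results.map((_) => [_.cacheItem.document._id, _.cacheItem])),
})

// Example: two chunks collapse into one map keyed by document _id.
const { cacheItemsMap } = toCacheItemsMap(
  flatten([
    [{ fromCache: true, cacheItem: { document: { _id: 'a' } } }],
    [{ fromCache: false, cacheItem: { document: { _id: 'b' } } }],
  ]),
)
// cacheItemsMap => { a: { document: { _id: 'a' } }, b: { document: { _id: 'b' } } }
```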