-- | The 'Cache' type and its operations: locating, fetching, speculating
-- on, writing and cleaning hashed files spread over a list of cache
-- locations (the current repo, remote repos, and cache directories).
module Darcs.Repository.Cache
    ( cacheHash
    , okayHash
    , Cache
    , mkCache
    , cacheEntries
    , CacheType(..)
    , CacheLoc(..)
    , WritableOrNot(..)
    , HashedDir(..)
    , hashedDir
    , bucketFolder
    , unionCaches
    , unionRemoteCaches
    , cleanCaches
    , cleanCachesWithHint
    , fetchFileUsingCache
    , speculateFileUsingCache
    , speculateFilesUsingCache
    , writeFileUsingCache
    , peekInCache
    , repo2cache
    , writable
    , isThisRepo
    , hashedFilePath
    , allHashedDirs
    , reportBadSources
    , closestWritableDirectory
    , dropNonRepos
    ) where

import Control.Concurrent.MVar ( MVar, newMVar, modifyMVar_, readMVar )
import Control.Monad ( liftM, when, unless, filterM, forM_, mplus )
import qualified Data.ByteString as B ( length, ByteString )
import Data.List ( nub, intercalate, sortBy )
import Data.Maybe ( catMaybes, fromMaybe, listToMaybe )
import System.FilePath.Posix ( (</>), joinPath, dropFileName )
import System.Directory ( createDirectoryIfMissing, removeFile, doesFileExist,
                          doesDirectoryExist, getDirectoryContents,
                          getPermissions )
import qualified System.Directory as SD ( writable )
import System.IO ( hPutStrLn, stderr )
import System.IO.Unsafe ( unsafePerformIO )
import System.Posix.Files ( createLink, linkCount, getSymbolicLinkStatus )

import Darcs.Prelude

import Darcs.Util.ByteString ( gzWriteFilePS )
import Darcs.Util.Global ( darcsdir, defaultRemoteDarcsCmd )
import Darcs.Util.External ( gzFetchFilePS, fetchFilePS
                           , speculateFileOrUrl, copyFileOrUrl
                           , Cachable( Cachable ) )
import Darcs.Repository.Flags ( Compression(..) )
import Darcs.Util.Lock ( writeAtomicFilePS, gzWriteAtomicFilePS,
                         withTemp )
import Darcs.Util.SignalHandler ( catchNonSignal )
import Darcs.Util.URL ( isValidLocalPath, isHttpUrl, isSshUrl )
import Darcs.Util.File ( withCurrentDirectory )
import Darcs.Util.Hash ( sha256sum )
import Darcs.Util.English ( englishNum, Noun(..), Pronoun(..) )
import Darcs.Util.Exception ( catchall )
import Darcs.Util.Progress ( progressList, debugMessage )
import qualified Darcs.Util.Download as Download ( ConnectionError )

-- | The kinds of hashed-storage subdirectory that a cache location
-- may contain (see 'hashedDir' for the on-disk names).
data HashedDir = HashedPristineDir
               | HashedPatchesDir
               | HashedInventoriesDir

-- | On-disk directory name for each 'HashedDir'.
hashedDir :: HashedDir -> String
hashedDir HashedPristineDir = "pristine.hashed"
hashedDir HashedPatchesDir = "patches"
hashedDir HashedInventoriesDir = "inventories"

-- | Every hashed subdirectory, useful for iterating over all of them.
allHashedDirs :: [HashedDir]
allHashedDirs = [ HashedPristineDir
                , HashedPatchesDir
                , HashedInventoriesDir
                ]

-- | Whether we may write new files into a cache location.
data WritableOrNot = Writable
                   | NotWritable
                   deriving ( Eq, Show )

-- | Is the location a darcs repository or a plain cache directory?
data CacheType = Repo
               | Directory
               deriving ( Eq, Show )

-- | One cache location: its type, its writability and its source
-- (a local path or a URL).
data CacheLoc = Cache
    { cacheType :: !CacheType
    , cacheWritable :: !WritableOrNot
    , cacheSource :: !String
    }

-- | Cache is an abstract type for hiding the underlying cache locations
newtype Cache = Ca [CacheLoc]

-- | Smart constructor for 'Cache': drops duplicate entries and sorts the
-- rest with 'compareByLocality' so closer/faster locations come first.
mkCache :: [CacheLoc] -> Cache
mkCache = Ca . sortBy compareByLocality . nub

cacheEntries :: Cache -> [CacheLoc]
cacheEntries (Ca entries) = entries

-- NOTE(review): equality deliberately ignores 'cacheWritable', so two
-- entries for the same source that differ only in writability compare
-- equal (and 'nub' keeps only the first of them).
instance Eq CacheLoc where
    (Cache aTy _ aSrc) == (Cache bTy _ bSrc) = aTy == bTy && aSrc == bSrc

-- | Rendered in the prefix syntax used by @_darcs/prefs/sources@.
instance Show CacheLoc where
    show (Cache Repo Writable a) = "thisrepo:" ++ a
    show (Cache Repo NotWritable a) = "repo:" ++ a
    show (Cache Directory Writable a) = "cache:" ++ a
    show (Cache Directory NotWritable a) = "readonly:" ++ a

instance Show Cache where
    show (Ca cs) = unlines $ map show cs

-- | Concatenate two caches, dropping duplicate entries.
unionCaches :: Cache -> Cache -> Cache
unionCaches (Ca a) (Ca b) = Ca (nub (a ++ b))

-- | unionRemoteCaches merges caches. It tries to do better than just blindly
-- copying remote cache entries:
--
-- * If remote repository is accessed through network, do not copy any cache
-- entries from it.
-- Taking local entries does not make sense and using
-- network entries can lead to darcs hang when it tries to get to
-- unaccessible host.
--
-- * If remote repository is local, copy all network cache entries. For local
-- cache entries if the cache directory exists and is writable it is added
-- as writable cache, if it exists but is not writable it is added as
-- read-only cache.
--
-- This approach should save us from bogus cache entries. One case it does
-- not work very well is when you fetch from partial repository over network.
-- Hopefully this is not a common case.
unionRemoteCaches :: Cache -> Cache -> String -> IO Cache
unionRemoteCaches local (Ca remote) repourl
    | isValidLocalPath repourl = do
        -- inspect every remote entry; an IO failure while probing an entry
        -- simply discards that entry
        kept <- mapM (\loc -> keepEntry loc `catchall` return Nothing) remote
        return $ local `unionCaches` Ca (catMaybes kept)
    | otherwise = return local
  where
    -- Decide whether (and in what form) a single remote entry is adopted.
    keepEntry :: CacheLoc -> IO (Maybe CacheLoc)
    keepEntry (Cache Repo Writable _) = return Nothing
    keepEntry c@(Cache ty _ url)
        | not (isValidLocalPath url) = return (Just c)
        | otherwise = do
            present <- doesDirectoryExist url
            if not present
                then return Nothing
                else do
                    perms <- getPermissions url
                    -- demote to read-only unless both the entry and the
                    -- directory's permissions say it is writable
                    return . Just $
                        if writable c && SD.writable perms
                            then c
                            else Cache ty NotWritable url

-- | Compares two caches, a remote cache is greater than a local one.
-- The order of the comparison is given by: local < http < ssh
compareByLocality :: CacheLoc -> CacheLoc -> Ordering
compareByLocality (Cache _ w x) (Cache _ z y)
    | isValidLocalPath x && isRemote y = LT
    | isRemote x && isValidLocalPath y = GT
    | isHttpUrl x && isSshUrl y = LT
    | isSshUrl x && isHttpUrl y = GT
    -- among local entries, a writable one sorts before a read-only one
    | isValidLocalPath x && isWritable w
      && isValidLocalPath y && isNotWritable z = LT
    | otherwise = EQ
  where
    isRemote r = isHttpUrl r || isSshUrl r
    isWritable = (==) Writable
    isNotWritable = (==) NotWritable

-- | A one-entry cache referring to a (remote) repository, read-only.
repo2cache :: String -> Cache
repo2cache r = Ca [Cache Repo NotWritable r]

-- | 'cacheHash' computes the cache hash (i.e. filename) of a packed string.
-- If the decimal size of the string needs more than 10 digits, the name is
-- the bare SHA-256 (64 chars); otherwise it is the zero-padded 10-digit
-- size, a dash, and the SHA-256 (75 chars in total).
cacheHash :: B.ByteString -> String
cacheHash ps = if sizeStrLen > 10
                   then shaOfPs
                   else replicate (10 - sizeStrLen) '0' ++ sizeStr
                        ++ '-' : shaOfPs
  where
    sizeStr = show $ B.length ps
    sizeStrLen = length sizeStr
    shaOfPs = sha256sum ps

-- | Does this string have the length of one of the two filename formats
-- produced by 'cacheHash' (bare SHA-256, or size-prefixed)?
okayHash :: String -> Bool
okayHash s = length s `elem` [64, 75]

-- | @checkHash hash contents@ verifies @contents@ against @hash@ for
-- either filename format produced by 'cacheHash'.  A malformed hash
-- (wrong length, or a non-numeric size prefix) yields 'False'.
checkHash :: String -> B.ByteString -> Bool
checkHash h s
    | length h == 64 = sha256sum s == h
    | length h == 75 =
        -- Use 'reads' instead of the partial 'read' so that a corrupted
        -- size prefix makes the check fail rather than throw.
        case reads (take 10 h) of
            [(len, "")] -> B.length s == len && sha256sum s == drop 11 h
            _ -> False
    | otherwise = False

-- |@fetchFileUsingCache cache dir hash@ receives a list of caches @cache@, the
-- directory for which that file belongs @dir@ and the @hash@ of the file to
-- fetch. It tries to fetch the file from one of the sources, trying them in
-- order one by one. If the file cannot be fetched from any of the sources,
-- this operation fails.
fetchFileUsingCache :: Cache -> HashedDir -> String
                    -> IO (String, B.ByteString)
fetchFileUsingCache = fetchFileUsingCachePrivate Anywhere

-- | May we write new files into this cache location?
writable :: CacheLoc -> Bool
writable (Cache _ NotWritable _) = False
writable (Cache _ Writable _) = True

-- | This keeps only 'Repo' 'NotWritable' entries.
dropNonRepos :: Cache -> Cache
dropNonRepos (Ca entries) = Ca (filter isRemoteRepo entries)
  where
    -- keep only read-only repo entries; cache directories and
    -- thisrepo: entries must not be written back to disk
    isRemoteRepo (Cache Directory _ _) = False
    isRemoteRepo (Cache Repo Writable _) = False
    isRemoteRepo _ = True

-- | The first (i.e. closest) writable directory-type cache entry, if any.
closestWritableDirectory :: Cache -> Maybe String
closestWritableDirectory (Ca cs) =
    listToMaybe [ dir | Cache Directory Writable dir <- cs ]

-- | Is this entry the current repository (a writable repo entry)?
isThisRepo :: CacheLoc -> Bool
isThisRepo loc =
    case loc of
        Cache Repo Writable _ -> True
        _ -> False

-- | The two-character bucket subdirectory a hashed file lives under:
-- the first two characters of the hash, after stripping any size prefix
-- (everything up to and including the first dash).
bucketFolder :: String -> String
bucketFolder f = take 2 (stripSizePrefix f)
  where
    stripSizePrefix fileName =
        case break (== '-') fileName of
            (_, '-' : rest) -> rest
            _ -> fileName

-- | @hashedFilePath cachelocation subdir hash@ returns the physical filename
-- of hash @hash@ in the @subdir@ section of @cachelocation@.
hashedFilePath :: CacheLoc -> HashedDir -> String -> String
hashedFilePath (Cache Directory _ d) s f =
    d </> hashedDir s </> bucketFolder f </> f
hashedFilePath (Cache Repo _ r) s f =
    r </> darcsdir </> hashedDir s </> f

-- | @hashedFilePathReadOnly cachelocation subdir hash@ returns the physical
-- filename of hash @hash@ in the @subdir@ section of @cachelocation@.
-- If directory, assume it is non-bucketed cache (old cache location).
hashedFilePathReadOnly :: CacheLoc -> HashedDir -> String -> String
hashedFilePathReadOnly (Cache Directory _ d) s f =
    joinPath [d, hashedDir s, f]
hashedFilePathReadOnly (Cache Repo _ r) s f =
    joinPath [r, darcsdir, hashedDir s, f]

-- | @peekInCache cache subdir hash@ tells whether @cache@ contains an
-- object with hash @hash@ in a writable position. Florent: why do we want it
-- to be in a writable position?
peekInCache :: Cache -> HashedDir -> String -> IO Bool
peekInCache (Ca cache) subdir f = cacheHasIt cache `catchall` return False
  where
    -- walk the entries in order; any IO error makes the answer False
    cacheHasIt [] = return False
    cacheHasIt (c : cs)
        | not $ writable c = cacheHasIt cs
        | otherwise = do
            ex <- doesFileExist $ hashedFilePath c subdir f
            if ex then return True else cacheHasIt cs

-- | @speculateFileUsingCache cache subdirectory name@ takes note that the file
-- @name@ is likely to be useful soon: pipelined downloads will add it to the
-- (low-priority) queue, for the rest it is a noop.
speculateFileUsingCache :: Cache -> HashedDir -> String -> IO ()
speculateFileUsingCache c sd h = do
    debugMessage $ "Speculating on " ++ h
    copyFileUsingCache OnlySpeculate c sd h

-- | Note that the files are likely to be useful soon: pipelined downloads will
-- add them to the (low-priority) queue, for the rest it is a noop.
speculateFilesUsingCache :: Cache -> HashedDir -> [String] -> IO ()
speculateFilesUsingCache _ _ [] = return ()
speculateFilesUsingCache cache sd hs = do
    debugMessage $ "Thinking about speculating on " ++ unwords hs
    -- only speculate on files we do not already have in a writable location
    hs' <- filterM (fmap not . peekInCache cache sd) hs
    unless (null hs') $ do
        debugMessage $ "Speculating on " ++ unwords hs'
        copyFilesUsingCache OnlySpeculate cache sd hs'

-- | Whether 'copyFileUsingCache' should actually copy the file or merely
-- queue it for a (low-priority) pipelined download.
data OrOnlySpeculate = ActuallyCopy
                     | OnlySpeculate
                     deriving ( Eq )

-- | We have a list of locations (@cache@) ordered from "closest/fastest"
-- (typically, the destination repo) to "farthest/slowest" (typically,
-- the source repo).
-- @copyFileUsingCache@ first checks whether given file @f@ is present
-- in some writeable location, if yes, do nothing. If no, it copies it
-- to the last writeable location, which would be the global cache
-- by default, or the destination repo if `--no-cache` is passed.
-- Function does nothing if there is no writeable location at all.
-- If the copy should occur between two locations of the same filesystem,
-- a hard link is actually made.
-- TODO document @oos@: what happens when we only speculate?
copyFileUsingCache :: OrOnlySpeculate -> Cache -> HashedDir -> String -> IO ()
copyFileUsingCache oos (Ca cache) subdir f =
    do
      debugMessage $
          "I'm doing copyFileUsingCache on " ++ hashedDir subdir </> f
      -- NOTE(review): a failed pattern match here (no writable location)
      -- appears to be caught by the trailing 'catchall', making the whole
      -- function a no-op in that case — confirm layout against upstream
      Just stickItHere <- cacheLoc cache
      createDirectoryIfMissing True
          (reverse $ dropWhile (/= '/') $ reverse stickItHere)
      debugMessage $ "Will effectively do copyFileUsingCache to: " ++ show stickItHere
      filterBadSources cache >>= sfuc stickItHere
    `catchall`
      return ()
  where
    -- return last writeable cache/repo location for file.
    -- usually returns the global cache unless `--no-cache` is passed.
    cacheLoc [] = return Nothing
    cacheLoc (c : cs)
        | not $ writable c = cacheLoc cs
        | otherwise = do
            let attemptPath = hashedFilePath c subdir f
            ex <- doesFileExist attemptPath
            if ex
                -- 'fail' aborts the whole operation (via the 'catchall'
                -- above): the file is already in a writable location
                then fail $ "File already present in writable location."
                else do
                    othercache <- cacheLoc cs
                    return $ othercache `mplus` Just attemptPath
    -- do the actual copy, or hard link, or put file in download queue
    sfuc _ [] = return ()
    sfuc out (c : cs)
        | not (writable c) =
            let cacheFile = hashedFilePathReadOnly c subdir f in
            if oos == OnlySpeculate
                then speculateFileOrUrl cacheFile out
                     `catchNonSignal`
                     \e -> checkCacheReachability (show e) c
                else do debugMessage $ "Copying from " ++ show cacheFile ++ " to " ++ show out
                        copyFileOrUrl defaultRemoteDarcsCmd cacheFile out Cachable
                            `catchNonSignal`
                            (\e -> do checkCacheReachability (show e) c
                                      sfuc out cs) -- try another read-only location
        | otherwise = sfuc out cs

-- | Apply 'copyFileUsingCache' to each of the given hashes.
copyFilesUsingCache :: OrOnlySpeculate -> Cache -> HashedDir -> [String]
                    -> IO ()
copyFilesUsingCache oos cache subdir hs =
    forM_ hs $ copyFileUsingCache oos cache subdir

-- | Whether 'fetchFileUsingCachePrivate' may go to the network or must
-- restrict itself to local sources.
data FromWhere = LocalOnly
               | Anywhere
               deriving ( Eq )

-- | Checks if a given cache entry is reachable or not. It receives an error
-- caught during execution and the cache entry. If the cache is not reachable
-- it is blacklisted and no longer tried for the rest of the session. If it is
-- reachable it is whitelisted and future errors with such cache get ignored.
-- To determine reachability:
--  * For a local cache, if the given source doesn't exist anymore, it is
--    blacklisted.
--  * For remote sources if the error is timeout, it is blacklisted, if not,
--    it checks if _darcs/hashed_inventory exist, if it does, the entry is
--    whitelisted, if it doesn't, it is blacklisted.
checkCacheReachability :: String -> CacheLoc -> IO ()
checkCacheReachability e cache
    | isValidLocalPath source = doUnreachableCheck $
        checkFileReachability (doesDirectoryExist source)
    | isHttpUrl source =
        doUnreachableCheck $ do
            -- try to extract a parenthesized value from the error message,
            -- e.g. "... (TimedOut) ..." — presumably the 'show'n
            -- 'Download.ConnectionError'; TODO confirm message format
            let err = case dropWhile (/= '(') e of
                    (_ : xs) -> fst (break (== ')') xs)
                    _ -> e
            case reads err :: [(Download.ConnectionError, String)] of
                -- a recognized connection error: blacklist immediately
                [(_, _)] -> addBadSource source
                -- otherwise probe the remote repo's hashed_inventory
                _ -> checkFileReachability
                         (checkHashedInventoryReachability cache)
    | isSshUrl source = doUnreachableCheck $
        checkFileReachability (checkHashedInventoryReachability cache)
    | otherwise = fail $ "unknown transport protocol for: " ++ source
  where
    source = cacheSource cache

    -- run the check only when the source is not already whitelisted
    doUnreachableCheck unreachableAction = do
        reachable <- isReachableSource
        unless (reachable source) unreachableAction

    -- whitelist or blacklist the source according to the check's outcome
    checkFileReachability doCheck = do
        reachable <- doCheck
        if reachable
            then addReachableSource source
            else addBadSource source

-- | Returns a list of reachables cache entries, removing blacklisted entries.
filterBadSources :: [CacheLoc] -> IO [CacheLoc]
filterBadSources cache = do
    badSource <- isBadSource
    return $ filter (not . badSource . cacheSource) cache

-- | Checks if the _darcs/hashed_inventory exist and is reachable
checkHashedInventoryReachability :: CacheLoc -> IO Bool
checkHashedInventoryReachability cache = withTemp $ \tempout ->
    do
      let f = cacheSource cache </> darcsdir </> "hashed_inventory"
      copyFileOrUrl defaultRemoteDarcsCmd f tempout Cachable
      return True
    `catchNonSignal` const (return False)

-- | Get contents of some hashed file taking advantage of the cache system.
-- We have a list of locations (@cache@) ordered from "closest/fastest"
-- (typically, the destination repo) to "farthest/slowest" (typically,
-- the source repo).
-- First, if possible it copies the file from remote location to local.
-- Then, it reads it contents, and links the file across all writeable
-- locations including the destination repository.
fetchFileUsingCachePrivate :: FromWhere -> Cache -> HashedDir -> String
                           -> IO (String, B.ByteString)
fetchFileUsingCachePrivate fromWhere (Ca cache) subdir f =
    do
      when (fromWhere == Anywhere) $
          copyFileUsingCache ActuallyCopy (Ca cache) subdir f
      filterBadSources cache >>= ffuc
    `catchall` fail ("Couldn't fetch " ++ f ++ "\nin subdir "
                     ++ hashedDir subdir ++ " from sources:\n\n"
                     ++ show (Ca cache))
  where
    -- try each location in turn, falling through to the remaining ones
    -- whenever a location errors out
    ffuc (c : cs)
        | not (writable c) &&
            (Anywhere == fromWhere || isValidLocalPath (hashedFilePathReadOnly c subdir f)) =
            do
              let cacheFile = hashedFilePathReadOnly c subdir f
              -- looks like `copyFileUsingCache` could not copy the file we wanted.
              -- this can happen if `--no-cache` is NOT passed and the global cache is not accessible
              debugMessage $ "In fetchFileUsingCachePrivate I'm directly grabbing file contents from "
                             ++ cacheFile
              x <- gzFetchFilePS cacheFile Cachable
              if not $ checkHash f x
                  then do
                      -- gz fetch gave a hash mismatch; retry uncompressed
                      x' <- fetchFilePS cacheFile Cachable
                      unless (checkHash f x') $ do
                          hPutStrLn stderr $ "Hash failure in " ++ cacheFile
                          fail $ "Hash failure in " ++ cacheFile
                      return (cacheFile, x')
                  else return (cacheFile, x) -- FIXME: create links in caches
            `catchNonSignal` \e -> do
                -- something bad happened, check if cache became unaccessible and try other ones
                checkCacheReachability (show e) c
                filterBadSources cs >>= ffuc
        | writable c =
            let cacheFile = hashedFilePath c subdir f in
            do
              debugMessage $ "About to gzFetchFilePS from " ++ show cacheFile
              x1 <- gzFetchFilePS cacheFile Cachable
              debugMessage $ "gzFetchFilePS done."
              x <- if not $ checkHash f x1
                  then do
                      x2 <- fetchFilePS cacheFile Cachable
                      unless (checkHash f x2) $ do
                          hPutStrLn stderr $ "Hash failure in " ++ cacheFile
                          -- a corrupt file in a writable location is deleted
                          removeFile cacheFile
                          fail $ "Hash failure in " ++ cacheFile
                      return x2
                  else return x1
              -- hard-link the file into the remaining writable locations
              mapM_ (tryLinking cacheFile) cs
              return (cacheFile, x)
            `catchNonSignal` \e -> do
                debugMessage "Caught exception, now attempt creating cache."
                createCache c subdir `catchall` return ()
                checkCacheReachability (show e) c
                (fname, x) <- filterBadSources cs >>= ffuc -- fetch file from remaining locations
                debugMessage $ "Attempt creating link from: " ++ show fname ++ " to " ++ show cacheFile
                (createLink fname cacheFile >> (debugMessage "successfully created link")
                                            >> return (cacheFile, x))
                  `catchall` do
                      debugMessage $ "Attempt writing file: " ++ show cacheFile
                      -- the following block is usually when files get actually written
                      -- inside of _darcs or global cache.
                      do createDirectoryIfMissing True (dropFileName cacheFile)
                         gzWriteFilePS cacheFile x
                         debugMessage $ "successfully wrote file"
                        `catchall` return ()
                      -- above block can fail if cache is not writeable
                      return (fname, x)
        | otherwise = ffuc cs

    ffuc [] = fail $ "No sources from which to fetch file " ++ f
                     ++ "\n"++ show (Ca cache)

    -- best-effort hard link of an already-fetched file into a writable
    -- directory cache; failures are ignored
    tryLinking ff c@(Cache Directory Writable d) =
        do
          createDirectoryIfMissing False (d </> hashedDir subdir)
          createLink ff (hashedFilePath c subdir f)
        `catchall`
          return ()
    tryLinking _ _ = return ()

-- | Create the @subdir@ directory of a directory-type cache location;
-- repo locations are left untouched.
createCache :: CacheLoc -> HashedDir -> IO ()
createCache (Cache Directory _ d) subdir =
    createDirectoryIfMissing True (d </> hashedDir subdir)
createCache _ _ = return ()

-- | @write compression filename content@ writes @content@ to the file
-- @filename@ according to the policy given by @compression@.
write :: Compression -> String -> B.ByteString -> IO ()
write NoCompression = writeAtomicFilePS
write GzipCompression = gzWriteAtomicFilePS

-- | @writeFileUsingCache cache compression subdir contents@ write the string
-- @contents@ to the directory subdir, except if it is already in the cache, in
-- which case it is a noop. Warning (?) this means that in case of a hash
-- collision, writing using writeFileUsingCache is a noop. The returned value
-- is the filename that was given to the string.
writeFileUsingCache :: Cache -> Compression -> HashedDir -> B.ByteString
                    -> IO String
writeFileUsingCache (Ca cache) compr subdir ps =
    do
      -- probe local sources first: if the hash is already present we
      -- are done (the 'catchall' chain falls through to 'wfuc' otherwise)
      _ <- fetchFileUsingCachePrivate LocalOnly (Ca cache) subdir hash
      return hash
    `catchall`
      wfuc cache
    `catchall`
      fail ("Couldn't write " ++ hash ++ "\nin subdir "
            ++ hashedDir subdir ++ " to sources:\n\n"++ show (Ca cache))
  where
    hash = cacheHash ps
    -- write to the first writable location
    wfuc (c : cs)
        | not $ writable c = wfuc cs
        | otherwise = do
            createCache c subdir
            -- FIXME: create links in caches
            write compr (hashedFilePath c subdir hash) ps
            return hash
    wfuc [] = fail $ "No location to write file " ++ (hashedDir subdir </> hash)

cleanCaches :: Cache -> HashedDir -> IO ()
cleanCaches c d = cleanCachesWithHint' c d Nothing

cleanCachesWithHint :: Cache -> HashedDir -> [String] -> IO ()
cleanCachesWithHint c d h = cleanCachesWithHint' c d (Just h)

-- | Remove obsolete files from writable directory caches.  With a hint,
-- only the hinted file names are considered; otherwise the whole
-- directory listing is.  Only files with a single hard link (i.e. not
-- shared with any repository) are deleted.
cleanCachesWithHint' :: Cache -> HashedDir -> Maybe [String] -> IO ()
cleanCachesWithHint' (Ca cs) subdir hint = mapM_ cleanCache cs
  where
    cleanCache (Cache Directory Writable d) =
        withCurrentDirectory (d </> hashedDir subdir) (do
            fs' <- getDirectoryContents "."
            let fs = filter okayHash $ fromMaybe fs' hint
                cleanMsg = "Cleaning cache " ++ d </> hashedDir subdir
            mapM_ clean $ progressList cleanMsg fs)
        `catchall`
        return ()
    cleanCache _ = return ()
    clean f =
        do
          lc <- linkCount `liftM` getSymbolicLinkStatus f
          when (lc < 2) $ removeFile f
        `catchall`
          return ()

-- | Prints an error message with a list of bad caches.
reportBadSources :: IO ()
reportBadSources = do
    sources <- getBadSourcesList
    let size = length sources
    unless (null sources) $ hPutStrLn stderr $
        concat [ "\nBy the way, I could not reach the following "
               , englishNum size (Noun "location") ":"
               , "\n"
               , intercalate "\n" (map (" " ++) sources)
               , "\nUnless you plan to restore access to "
               , englishNum size It ", you should delete "
               , "the corresponding "
               , englishNum size (Noun "entry") " from _darcs/prefs/sources."
               ]

-- * Global Variables

-- | Session-global blacklist of unreachable sources.  The
-- unsafePerformIO/NOINLINE pair is the usual top-level-MVar idiom.
badSourcesList :: MVar [String]
badSourcesList = unsafePerformIO $ newMVar []
{-# NOINLINE badSourcesList #-}

-- | Blacklist a source for the rest of the session.
addBadSource :: String -> IO ()
addBadSource cache = modifyMVarPure badSourcesList (cache:)

getBadSourcesList :: IO [String]
getBadSourcesList = readMVar badSourcesList

-- | A predicate over the current blacklist.
isBadSource :: IO (String -> Bool)
isBadSource = do
    badSources <- getBadSourcesList
    return (`elem` badSources)

-- | Session-global whitelist of sources known to be reachable.
reachableSourcesList :: MVar [String]
reachableSourcesList = unsafePerformIO $ newMVar []
{-# NOINLINE reachableSourcesList #-}

-- | Whitelist a source for the rest of the session.
addReachableSource :: String -> IO ()
addReachableSource src = modifyMVarPure reachableSourcesList (src:)

getReachableSources :: IO [String]
getReachableSources = readMVar reachableSourcesList

-- | A predicate over the current whitelist.
isReachableSource :: IO (String -> Bool)
isReachableSource = do
    reachableSources <- getReachableSources
    return (`elem` reachableSources)

-- | Apply a pure function to the contents of an 'MVar'.
modifyMVarPure :: MVar a -> (a -> a) -> IO ()
modifyMVarPure mvar f = modifyMVar_ mvar (return . f)