diff --git a/Annex.hs b/Annex.hs index 293e323eb2..f233712db2 100644 --- a/Annex.hs +++ b/Annex.hs @@ -27,6 +27,7 @@ module Annex ( inRepo, fromRepo, calcRepo, + calcRepo', getGitConfig, overrideGitConfig, changeGitRepo, @@ -361,6 +362,11 @@ calcRepo a = do s <- getState id liftIO $ a (repo s) (gitconfig s) +calcRepo' :: (Git.Repo -> GitConfig -> a) -> Annex a +calcRepo' f = do + s <- getState id + pure $ f (repo s) (gitconfig s) + {- Gets the GitConfig settings. -} getGitConfig :: Annex GitConfig getGitConfig = getState gitconfig diff --git a/Annex/AdjustedBranch/Merge.hs b/Annex/AdjustedBranch/Merge.hs index 79f86c2884..d3031cd478 100644 --- a/Annex/AdjustedBranch/Merge.hs +++ b/Annex/AdjustedBranch/Merge.hs @@ -87,7 +87,7 @@ mergeToAdjustedBranch tomerge (origbranch, adj) mergeconfig canresolvemerge comm whenM (doesFileExist src) $ do dest <- relPathDirToFile git_dir src' let dest' = toRawFilePath tmpgit P. dest - createDirectoryUnder git_dir + createDirectoryUnder [git_dir] (P.takeDirectory dest') void $ createLinkOrCopy src' dest' -- This reset makes git merge not care @@ -115,7 +115,7 @@ mergeToAdjustedBranch tomerge (origbranch, adj) mergeconfig canresolvemerge comm setup = do whenM (doesDirectoryExist d) $ removeDirectoryRecursive d - createDirectoryUnder git_dir (toRawFilePath d) + createDirectoryUnder [git_dir] (toRawFilePath d) cleanup _ = removeDirectoryRecursive d {- A merge commit has been made between the basisbranch and diff --git a/Annex/ChangedRefs.hs b/Annex/ChangedRefs.hs index 4450bd4c4c..83aa5561a7 100644 --- a/Annex/ChangedRefs.hs +++ b/Annex/ChangedRefs.hs @@ -83,7 +83,7 @@ watchChangedRefs = do g <- gitRepo let gittop = Git.localGitDir g let refdir = gittop P. "refs" - liftIO $ createDirectoryUnder gittop refdir + liftIO $ createDirectoryUnder [gittop] refdir let notifyhook = Just $ notifyHook chan let hooks = mkWatchHooks diff --git a/Annex/Common.hs b/Annex/Common.hs index 5dceb24ef4..2edc0788b4 100644 --- a/Annex/Common.hs +++ b/Annex/Common.hs @@ -6,7 +6,7 @@ import Common as X import Types as X import Key as X import Types.UUID as X -import Annex as X (gitRepo, inRepo, fromRepo, calcRepo) +import Annex as X (gitRepo, inRepo, fromRepo, calcRepo, calcRepo') import Annex.Locations as X import Annex.Debug as X (fastDebug, debug) import Messages as X diff --git a/Annex/Content/Presence.hs b/Annex/Content/Presence.hs index 9f44e3d840..52020a9902 100644 --- a/Annex/Content/Presence.hs +++ b/Annex/Content/Presence.hs @@ -137,7 +137,7 @@ withContentLockFile :: Key -> (Maybe RawFilePath -> Annex a) -> Annex a withContentLockFile k a = do v <- getVersion if versionNeedsWritableContentFiles v - then withSharedLock gitAnnexContentLockLock $ do + then fromRepo gitAnnexContentLockLock >>= \lck -> withSharedLock lck $ do {- While the lock is held, check to see if the git - config has changed, and reload it if so. This - updates the annex.version after the v10 upgrade, @@ -156,7 +156,7 @@ withContentLockFile k a = do reloadConfig getVersion go (v') - else (go v) + else go v where go v = contentLockFile k v >>= a diff --git a/Annex/Import.hs b/Annex/Import.hs index bc2f860ea0..d985e22cd5 100644 --- a/Annex/Import.hs +++ b/Annex/Import.hs @@ -412,8 +412,10 @@ importKeys remote importtreeconfig importcontent thirdpartypopulated importablec | importcontent = a | otherwise = reuseVectorClockWhile a - withciddb = withExclusiveLock gitAnnexContentIdentifierLock . 
-		bracket CIDDb.openDb CIDDb.closeDb
+	withciddb a = do
+		cidlck <- calcRepo' gitAnnexContentIdentifierLock
+		withExclusiveLock cidlck $
+			bracket CIDDb.openDb CIDDb.closeDb a
 
 	run cidmap importing db = do
 		largematcher <- largeFilesMatcher
diff --git a/Annex/Journal.hs b/Annex/Journal.hs
index 249fb20037..f2d3738f73 100644
--- a/Annex/Journal.hs
+++ b/Annex/Journal.hs
@@ -281,4 +281,6 @@ data JournalLocked = ProduceJournalLocked
 {- Runs an action that modifies the journal, using locking to avoid
  - contention with other git-annex processes. -}
 lockJournal :: (JournalLocked -> Annex a) -> Annex a
-lockJournal a = withExclusiveLock gitAnnexJournalLock $ a ProduceJournalLocked
+lockJournal a = do
+	lck <- fromRepo gitAnnexJournalLock
+	withExclusiveLock lck $ a ProduceJournalLocked
diff --git a/Annex/Locations.hs b/Annex/Locations.hs
index cbc9c52e89..f119c9dca0 100644
--- a/Annex/Locations.hs
+++ b/Annex/Locations.hs
@@ -37,7 +37,7 @@ module Annex.Locations (
 	gitAnnexBadDir,
 	gitAnnexBadLocation,
 	gitAnnexUnusedLog,
-	gitAnnexKeysDb,
+	gitAnnexKeysDbDir,
 	gitAnnexKeysDbLock,
 	gitAnnexKeysDbIndexCache,
 	gitAnnexFsckState,
@@ -321,38 +321,42 @@ gitAnnexUnusedLog :: RawFilePath -> Git.Repo -> RawFilePath
 gitAnnexUnusedLog prefix r = gitAnnexDir r P.</> (prefix <> "unused")
 
 {- .git/annex/keysdb/ contains a database of information about keys. -}
-gitAnnexKeysDb :: Git.Repo -> RawFilePath
-gitAnnexKeysDb r = gitAnnexDir r P.</> "keysdb"
+gitAnnexKeysDbDir :: Git.Repo -> GitConfig -> RawFilePath
+gitAnnexKeysDbDir r c = fromMaybe (gitAnnexDir r) (annexDbDir c) P.</> "keysdb"
 
 {- Lock file for the keys database. -}
-gitAnnexKeysDbLock :: Git.Repo -> RawFilePath
-gitAnnexKeysDbLock r = gitAnnexKeysDb r <> ".lck"
+gitAnnexKeysDbLock :: Git.Repo -> GitConfig -> RawFilePath
+gitAnnexKeysDbLock r c = gitAnnexKeysDbDir r c <> ".lck"
 
 {- Contains the stat of the last index file that was
  - reconciled with the keys database. -}
-gitAnnexKeysDbIndexCache :: Git.Repo -> RawFilePath
-gitAnnexKeysDbIndexCache r = gitAnnexKeysDb r <> ".cache"
+gitAnnexKeysDbIndexCache :: Git.Repo -> GitConfig -> RawFilePath
+gitAnnexKeysDbIndexCache r c = gitAnnexKeysDbDir r c <> ".cache"
 
 {- .git/annex/fsck/uuid/ is used to store information about incremental
  - fscks. -}
-gitAnnexFsckDir :: UUID -> Git.Repo -> RawFilePath
-gitAnnexFsckDir u r = gitAnnexDir r P.</> "fsck" P.</> fromUUID u
+gitAnnexFsckDir :: UUID -> Git.Repo -> Maybe GitConfig -> RawFilePath
+gitAnnexFsckDir u r mc = case annexDbDir =<< mc of
+	Nothing -> go (gitAnnexDir r)
+	Just d -> go d
+  where
+	go d = d P.</> "fsck" P.</> fromUUID u
 
 {- used to store information about incremental fscks. -}
 gitAnnexFsckState :: UUID -> Git.Repo -> RawFilePath
-gitAnnexFsckState u r = gitAnnexFsckDir u r P.</> "state"
+gitAnnexFsckState u r = gitAnnexFsckDir u r Nothing P.</> "state"
 
 {- Directory containing database used to record fsck info. -}
-gitAnnexFsckDbDir :: UUID -> Git.Repo -> RawFilePath
-gitAnnexFsckDbDir u r = gitAnnexFsckDir u r P.</> "fsckdb"
+gitAnnexFsckDbDir :: UUID -> Git.Repo -> GitConfig -> RawFilePath
+gitAnnexFsckDbDir u r c = gitAnnexFsckDir u r (Just c) P.</> "fsckdb"
 
 {- Directory containing old database used to record fsck info. -}
-gitAnnexFsckDbDirOld :: UUID -> Git.Repo -> RawFilePath
-gitAnnexFsckDbDirOld u r = gitAnnexFsckDir u r P.</> "db"
+gitAnnexFsckDbDirOld :: UUID -> Git.Repo -> GitConfig -> RawFilePath
+gitAnnexFsckDbDirOld u r c = gitAnnexFsckDir u r (Just c) P.</> "db"
 
 {- Lock file for the fsck database. -}
-gitAnnexFsckDbLock :: UUID -> Git.Repo -> RawFilePath
-gitAnnexFsckDbLock u r = gitAnnexFsckDir u r P.</> "fsck.lck"
+gitAnnexFsckDbLock :: UUID -> Git.Repo -> GitConfig -> RawFilePath
+gitAnnexFsckDbLock u r c = gitAnnexFsckDir u r (Just c) P.</> "fsck.lck"
 
 {- .git/annex/fsckresults/uuid is used to store results of git fscks -}
 gitAnnexFsckResultsLog :: UUID -> Git.Repo -> RawFilePath
@@ -384,20 +388,22 @@ gitAnnexMoveLock r = gitAnnexDir r P.</> "move.lck"
 
 {- .git/annex/export/ is used to store information about
  - exports to special remotes. -}
-gitAnnexExportDir :: Git.Repo -> RawFilePath
-gitAnnexExportDir r = gitAnnexDir r P.</> "export"
+gitAnnexExportDir :: Git.Repo -> GitConfig -> RawFilePath
+gitAnnexExportDir r c = fromMaybe (gitAnnexDir r) (annexDbDir c) P.</> "export"
 
 {- Directory containing database used to record export info. -}
-gitAnnexExportDbDir :: UUID -> Git.Repo -> RawFilePath
-gitAnnexExportDbDir u r = gitAnnexExportDir r P.</> fromUUID u P.</> "exportdb"
+gitAnnexExportDbDir :: UUID -> Git.Repo -> GitConfig -> RawFilePath
+gitAnnexExportDbDir u r c =
+	gitAnnexExportDir r c P.</> fromUUID u P.</> "exportdb"
 
-{- Lock file for export state for a special remote. -}
-gitAnnexExportLock :: UUID -> Git.Repo -> RawFilePath
-gitAnnexExportLock u r = gitAnnexExportDbDir u r <> ".lck"
+{- Lock file for export database. -}
+gitAnnexExportLock :: UUID -> Git.Repo -> GitConfig -> RawFilePath
+gitAnnexExportLock u r c = gitAnnexExportDbDir u r c <> ".lck"
 
-{- Lock file for updating the export state for a special remote. -}
-gitAnnexExportUpdateLock :: UUID -> Git.Repo -> RawFilePath
-gitAnnexExportUpdateLock u r = gitAnnexExportDbDir u r <> ".upl"
+{- Lock file for updating the export database with information from the
+ - repository. -}
+gitAnnexExportUpdateLock :: UUID -> Git.Repo -> GitConfig -> RawFilePath
+gitAnnexExportUpdateLock u r c = gitAnnexExportDbDir u r c <> ".upl"
 
 {- Log file used to keep track of files that were in the tree exported to a
  - remote, but were excluded by its preferred content settings. -}
@@ -409,12 +415,13 @@ gitAnnexExportExcludeLog u r = gitAnnexDir r P.</> "export.ex" P.</> fromUUID u
  - (This used to be "cid", but a problem with the database caused it to
  - need to be rebuilt with a new name.)
 -}
-gitAnnexContentIdentifierDbDir :: Git.Repo -> RawFilePath
-gitAnnexContentIdentifierDbDir r = gitAnnexDir r P.</> "cidsdb"
+gitAnnexContentIdentifierDbDir :: Git.Repo -> GitConfig -> RawFilePath
+gitAnnexContentIdentifierDbDir r c =
+	fromMaybe (gitAnnexDir r) (annexDbDir c) P.</> "cidsdb"
 
 {- Lock file for writing to the content id database. -}
-gitAnnexContentIdentifierLock :: Git.Repo -> RawFilePath
-gitAnnexContentIdentifierLock r = gitAnnexContentIdentifierDbDir r <> ".lck"
+gitAnnexContentIdentifierLock :: Git.Repo -> GitConfig -> RawFilePath
+gitAnnexContentIdentifierLock r c = gitAnnexContentIdentifierDbDir r c <> ".lck"
 
 {- .git/annex/schedulestate is used to store information about when
  - scheduled jobs were last run. -}
diff --git a/Annex/LockFile.hs b/Annex/LockFile.hs
index ad3ef53296..bd72e02ce2 100644
--- a/Annex/LockFile.hs
+++ b/Annex/LockFile.hs
@@ -21,7 +20,6 @@ module Annex.LockFile (
 import Annex.Common
 import Annex
 import Types.LockCache
-import qualified Git
 import Annex.Perms
 import Annex.LockPool
 
@@ -63,9 +62,8 @@ changeLockCache a = do
 
 {- Runs an action with a shared lock held. If an exclusive lock is held,
  - blocks until it becomes free.
-} -withSharedLock :: (Git.Repo -> RawFilePath) -> Annex a -> Annex a -withSharedLock getlockfile a = debugLocks $ do - lockfile <- fromRepo getlockfile +withSharedLock :: RawFilePath -> Annex a -> Annex a +withSharedLock lockfile a = debugLocks $ do createAnnexDirectory $ P.takeDirectory lockfile mode <- annexFileMode bracket (lock mode lockfile) (liftIO . dropLock) (const a) @@ -78,16 +76,15 @@ withSharedLock getlockfile a = debugLocks $ do {- Runs an action with an exclusive lock held. If the lock is already - held, blocks until it becomes free. -} -withExclusiveLock :: (Git.Repo -> RawFilePath) -> Annex a -> Annex a -withExclusiveLock getlockfile a = bracket - (takeExclusiveLock getlockfile) +withExclusiveLock :: RawFilePath -> Annex a -> Annex a +withExclusiveLock lockfile a = bracket + (takeExclusiveLock lockfile) (liftIO . dropLock) (const a) {- Takes an exclusive lock, blocking until it's free. -} -takeExclusiveLock :: (Git.Repo -> RawFilePath) -> Annex LockHandle -takeExclusiveLock getlockfile = debugLocks $ do - lockfile <- fromRepo getlockfile +takeExclusiveLock :: RawFilePath -> Annex LockHandle +takeExclusiveLock lockfile = debugLocks $ do createAnnexDirectory $ P.takeDirectory lockfile mode <- annexFileMode lock mode lockfile @@ -100,9 +97,8 @@ takeExclusiveLock getlockfile = debugLocks $ do {- Tries to take an exclusive lock and run an action. If the lock is - already held, returns Nothing. -} -tryExclusiveLock :: (Git.Repo -> RawFilePath) -> Annex a -> Annex (Maybe a) -tryExclusiveLock getlockfile a = debugLocks $ do - lockfile <- fromRepo getlockfile +tryExclusiveLock :: RawFilePath -> Annex a -> Annex (Maybe a) +tryExclusiveLock lockfile a = debugLocks $ do createAnnexDirectory $ P.takeDirectory lockfile mode <- annexFileMode bracket (lock mode lockfile) (liftIO . unlock) go diff --git a/Annex/Perms.hs b/Annex/Perms.hs index 63178883df..011ac8b085 100644 --- a/Annex/Perms.hs +++ b/Annex/Perms.hs @@ -106,13 +106,16 @@ annexFileMode = withShared $ return . go go _ = stdFileMode sharedmode = combineModes groupSharedModes -{- Creates a directory inside the gitAnnexDir, creating any parent - - directories up to and including the gitAnnexDir. +{- Creates a directory inside the gitAnnexDir (or possibly the dbdir), + - creating any parent directories up to and including the gitAnnexDir. - Makes directories with appropriate permissions. -} createAnnexDirectory :: RawFilePath -> Annex () createAnnexDirectory dir = do top <- parentDir <$> fromRepo gitAnnexDir - createDirectoryUnder' top dir createdir + tops <- annexDbDir <$> Annex.getGitConfig >>= return . \case + Nothing -> [top] + Just dbdir -> [top, parentDir dbdir] + createDirectoryUnder' tops dir createdir where createdir p = do liftIO $ R.createDirectory p @@ -126,7 +129,7 @@ createAnnexDirectory dir = do createWorkTreeDirectory :: RawFilePath -> Annex () createWorkTreeDirectory dir = do fromRepo repoWorkTree >>= liftIO . \case - Just wt -> createDirectoryUnder wt dir + Just wt -> createDirectoryUnder [wt] dir -- Should never happen, but let whatever tries to write -- to the directory be what throws an exception, as that -- will be clearer than an exception from here. diff --git a/Annex/Queue.hs b/Annex/Queue.hs index 2dc613a750..f11681cbaa 100644 --- a/Annex/Queue.hs +++ b/Annex/Queue.hs @@ -65,9 +65,11 @@ flush = do - git locking files. So, only one queue is allowed to flush at a time. 
-} flush' :: Git.Queue.Queue Annex -> Annex (Git.Queue.Queue Annex) -flush' q = withExclusiveLock gitAnnexGitQueueLock $ do - showStoringStateAction - Git.Queue.flush q =<< gitRepo +flush' q = do + lck <- fromRepo gitAnnexGitQueueLock + withExclusiveLock lck $ do + showStoringStateAction + Git.Queue.flush q =<< gitRepo {- Gets the size of the queue. -} size :: Annex Int diff --git a/Annex/ReplaceFile.hs b/Annex/ReplaceFile.hs index 21c6f29744..9f671cb9d6 100644 --- a/Annex/ReplaceFile.hs +++ b/Annex/ReplaceFile.hs @@ -33,7 +33,7 @@ replaceGitAnnexDirFile = replaceFile createAnnexDirectory replaceGitDirFile :: FilePath -> (FilePath -> Annex a) -> Annex a replaceGitDirFile = replaceFile $ \dir -> do top <- fromRepo localGitDir - liftIO $ createDirectoryUnder top dir + liftIO $ createDirectoryUnder [top] dir {- replaceFile on a worktree file. -} replaceWorkTreeFile :: FilePath -> (FilePath -> Annex a) -> Annex a diff --git a/Annex/Tmp.hs b/Annex/Tmp.hs index 720d1f4945..f602f56805 100644 --- a/Annex/Tmp.hs +++ b/Annex/Tmp.hs @@ -27,7 +27,7 @@ withOtherTmp a = do Annex.addCleanupAction OtherTmpCleanup cleanupOtherTmp tmpdir <- fromRepo gitAnnexTmpOtherDir tmplck <- fromRepo gitAnnexTmpOtherLock - withSharedLock (const tmplck) $ do + withSharedLock tmplck $ do void $ createAnnexDirectory tmpdir a tmpdir @@ -56,7 +56,7 @@ withEventuallyCleanedOtherTmp = bracket setup cleanup cleanupOtherTmp :: Annex () cleanupOtherTmp = do tmplck <- fromRepo gitAnnexTmpOtherLock - void $ tryIO $ tryExclusiveLock (const tmplck) $ do + void $ tryIO $ tryExclusiveLock tmplck $ do tmpdir <- fromRawFilePath <$> fromRepo gitAnnexTmpOtherDir void $ liftIO $ tryIO $ removeDirectoryRecursive tmpdir oldtmp <- fromRawFilePath <$> fromRepo gitAnnexTmpOtherDirOld diff --git a/Assistant/Threads/Merger.hs b/Assistant/Threads/Merger.hs index 23c9700df0..01f7200801 100644 --- a/Assistant/Threads/Merger.hs +++ b/Assistant/Threads/Merger.hs @@ -34,7 +34,7 @@ mergeThread = namedThread "Merger" $ do g <- liftAnnex gitRepo let gitd = Git.localGitDir g let dir = gitd P. "refs" - liftIO $ createDirectoryUnder gitd dir + liftIO $ createDirectoryUnder [gitd] dir let hook a = Just <$> asIO2 (runHandler a) changehook <- hook onChange errhook <- hook onErr diff --git a/CHANGELOG b/CHANGELOG index 9d2f01b94b..411bde1209 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -17,6 +17,9 @@ git-annex (10.20220725) UNRELEASED; urgency=medium * When bup split fails, display its stderr. * Avoid running multiple bup split processes concurrently, since bup is not concurrency safe. + * Added annex.dbdir config which can be used to move sqlite databases + to a different filesystem than the git-annex repo, when the repo is on + a filesystem that sqlite does not work well in. -- Joey Hess Mon, 25 Jul 2022 15:35:45 -0400 diff --git a/Command/Move.hs b/Command/Move.hs index 56da232d78..efb02b7c2f 100644 --- a/Command/Move.hs +++ b/Command/Move.hs @@ -375,12 +375,14 @@ logMove srcuuid destuuid deststartedwithcopy key a = go =<< setup setup = do logf <- fromRepo gitAnnexMoveLog + lckf <- fromRepo gitAnnexMoveLock -- Only log when there was no copy. unless deststartedwithcopy $ - appendLogFile logf gitAnnexMoveLock logline + appendLogFile logf lckf logline return logf cleanup logf = do + lck <- fromRepo gitAnnexMoveLock -- This buffers the log file content in memory. 
-- The log file length is limited to the number of -- concurrent jobs, times the number of times a move @@ -388,13 +390,14 @@ logMove srcuuid destuuid deststartedwithcopy key a = go =<< setup -- That could grow without bounds given enough time, -- so the log is also truncated to the most recent -- 100 items. - modifyLogFile logf gitAnnexMoveLock + modifyLogFile logf lck (filter (/= logline) . reverse . take 100 . reverse) go logf -- Only need to check log when there is a copy. | deststartedwithcopy = do - wasnocopy <- checkLogFile (fromRawFilePath logf) gitAnnexMoveLock + lck <- fromRepo gitAnnexMoveLock + wasnocopy <- checkLogFile (fromRawFilePath logf) lck (== logline) if wasnocopy then go' logf False diff --git a/Config/Smudge.hs b/Config/Smudge.hs index 3a33bcfda3..2b3303307f 100644 --- a/Config/Smudge.hs +++ b/Config/Smudge.hs @@ -41,7 +41,7 @@ configureSmudgeFilter = unlessM (fromRepo Git.repoIsLocalBare) $ do gfs <- readattr gf gittop <- Git.localGitDir <$> gitRepo liftIO $ unless ("filter=annex" `isInfixOf` (lfs ++ gfs)) $ do - createDirectoryUnder gittop (P.takeDirectory lf) + createDirectoryUnder [gittop] (P.takeDirectory lf) writeFile (fromRawFilePath lf) (lfs ++ "\n" ++ unlines stdattr) where readattr = liftIO . catchDefaultIO "" . readFileStrict . fromRawFilePath diff --git a/Database/ContentIdentifier.hs b/Database/ContentIdentifier.hs index 9dce98d104..aa595a98ce 100644 --- a/Database/ContentIdentifier.hs +++ b/Database/ContentIdentifier.hs @@ -78,7 +78,7 @@ AnnexBranch -} openDb :: Annex ContentIdentifierHandle openDb = do - dbdir <- fromRepo gitAnnexContentIdentifierDbDir + dbdir <- calcRepo' gitAnnexContentIdentifierDbDir let db = dbdir P. "db" ifM (liftIO $ not <$> R.doesPathExist db) ( initDb db $ void $ diff --git a/Database/Export.hs b/Database/Export.hs index 5caf312e19..b5c58afd0b 100644 --- a/Database/Export.hs +++ b/Database/Export.hs @@ -99,7 +99,7 @@ ExportTreeCurrent -} openDb :: UUID -> Annex ExportHandle openDb u = do - dbdir <- fromRepo (gitAnnexExportDbDir u) + dbdir <- calcRepo' (gitAnnexExportDbDir u) let db = dbdir P. "db" unlessM (liftIO $ R.doesPathExist db) $ do initDb db $ void $ @@ -263,8 +263,9 @@ updateExportDb = runExportDiffUpdater $ mkExportDiffUpdater removeold addnew -} writeLockDbWhile :: ExportHandle -> Annex a -> Annex a writeLockDbWhile db@(ExportHandle _ u) a = do - updatelck <- takeExclusiveLock (gitAnnexExportUpdateLock u) - withExclusiveLock (gitAnnexExportLock u) $ do + updatelck <- takeExclusiveLock =<< calcRepo' (gitAnnexExportUpdateLock u) + exlck <- calcRepo' (gitAnnexExportLock u) + withExclusiveLock exlck $ do bracket_ (setup updatelck) cleanup a where setup updatelck = do @@ -285,15 +286,17 @@ data ExportUpdateResult = ExportUpdateSuccess | ExportUpdateConflict - not. Either way, it will block until the update is complete. -} updateExportTreeFromLog :: ExportHandle -> Annex ExportUpdateResult -updateExportTreeFromLog db@(ExportHandle _ u) = +updateExportTreeFromLog db@(ExportHandle _ u) = do -- If another process or thread is performing the update, -- this will block until it's done. - withExclusiveLock (gitAnnexExportUpdateLock u) $ do + exlck <- calcRepo' (gitAnnexExportUpdateLock u) + withExclusiveLock exlck $ do + lck <- calcRepo' (gitAnnexExportLock u) -- If the database is locked by something else, -- this will not run the update. But, in that case, -- writeLockDbWhile is running, and has already -- completed the update, so we don't need to do anything. 
- mr <- tryExclusiveLock (gitAnnexExportLock u) $ + mr <- tryExclusiveLock lck $ updateExportTreeFromLog' db case mr of Just r -> return r diff --git a/Database/Fsck.hs b/Database/Fsck.hs index 4023390ace..61e932e3da 100644 --- a/Database/Fsck.hs +++ b/Database/Fsck.hs @@ -60,29 +60,31 @@ Fscked - or unknown behavior. -} newPass :: UUID -> Annex Bool -newPass u = isJust <$> tryExclusiveLock (gitAnnexFsckDbLock u) go +newPass u = do + lck <- calcRepo' (gitAnnexFsckDbLock u) + isJust <$> tryExclusiveLock lck go where go = do - removedb =<< fromRepo (gitAnnexFsckDbDir u) - removedb =<< fromRepo (gitAnnexFsckDbDirOld u) + removedb =<< calcRepo' (gitAnnexFsckDbDir u) + removedb =<< calcRepo' (gitAnnexFsckDbDirOld u) removedb = liftIO . void . tryIO . removeDirectoryRecursive . fromRawFilePath {- Opens the database, creating it if it doesn't exist yet. -} openDb :: UUID -> Annex FsckHandle openDb u = do - dbdir <- fromRepo (gitAnnexFsckDbDir u) + dbdir <- calcRepo' (gitAnnexFsckDbDir u) let db = dbdir P. "db" unlessM (liftIO $ R.doesPathExist db) $ do initDb db $ void $ runMigrationSilent migrateFsck - lockFileCached =<< fromRepo (gitAnnexFsckDbLock u) + lockFileCached =<< calcRepo' (gitAnnexFsckDbLock u) h <- liftIO $ H.openDbQueue db "fscked" return $ FsckHandle h u closeDb :: FsckHandle -> Annex () closeDb (FsckHandle h u) = do liftIO $ H.closeDbQueue h - unlockFile =<< fromRepo (gitAnnexFsckDbLock u) + unlockFile =<< calcRepo' (gitAnnexFsckDbLock u) addDb :: FsckHandle -> Key -> IO () addDb (FsckHandle h _) k = H.queueDb h checkcommit $ diff --git a/Database/Init.hs b/Database/Init.hs index 2f928a44bb..0ccde7b19f 100644 --- a/Database/Init.hs +++ b/Database/Init.hs @@ -10,6 +10,7 @@ module Database.Init where import Annex.Common +import qualified Annex import Annex.Perms import Utility.FileMode import Utility.Directory.Create @@ -34,9 +35,13 @@ initDb db migration = do let tmpdbdir = dbdir <> ".tmp" let tmpdb = tmpdbdir P. "db" let tdb = T.pack (fromRawFilePath tmpdb) + gc <- Annex.getGitConfig top <- parentDir <$> fromRepo gitAnnexDir + let tops = case annexDbDir gc of + Just topdbdir -> [top, parentDir topdbdir] + Nothing -> [top] liftIO $ do - createDirectoryUnder top tmpdbdir + createDirectoryUnder tops tmpdbdir runSqliteInfo (enableWAL tdb) migration setAnnexDirPerm tmpdbdir -- Work around sqlite bug that prevents it from honoring diff --git a/Database/Keys.hs b/Database/Keys.hs index 020336a057..218d2f39b9 100644 --- a/Database/Keys.hs +++ b/Database/Keys.hs @@ -118,15 +118,17 @@ runWriterIO a = runWriter (liftIO . a) openDb :: Bool -> DbState -> Annex DbState openDb _ st@(DbOpen _) = return st openDb False DbUnavailable = return DbUnavailable -openDb forwrite _ = catchPermissionDenied permerr $ withExclusiveLock gitAnnexKeysDbLock $ do - dbdir <- fromRepo gitAnnexKeysDb - let db = dbdir P. "db" - dbexists <- liftIO $ R.doesPathExist db - case dbexists of - True -> open db - False -> do - initDb db SQL.createTables - open db +openDb forwrite _ = do + lck <- calcRepo' gitAnnexKeysDbLock + catchPermissionDenied permerr $ withExclusiveLock lck $ do + dbdir <- calcRepo' gitAnnexKeysDbDir + let db = dbdir P. "db" + dbexists <- liftIO $ R.doesPathExist db + case dbexists of + True -> open db + False -> do + initDb db SQL.createTables + open db where -- If permissions don't allow opening the database, and it's being -- opened for read, treat it as if it does not exist. 
@@ -248,7 +250,7 @@ isInodeKnown i s = or <$> runReaderIO ((:[]) <$$> SQL.isInodeKnown i s) reconcileStaged :: H.DbQueue -> Annex () reconcileStaged qh = unlessM (Git.Config.isBare <$> gitRepo) $ do gitindex <- inRepo currentIndexFile - indexcache <- fromRawFilePath <$> fromRepo gitAnnexKeysDbIndexCache + indexcache <- fromRawFilePath <$> calcRepo' gitAnnexKeysDbIndexCache withTSDelta (liftIO . genInodeCache gitindex) >>= \case Just cur -> readindexcache indexcache >>= \case Nothing -> go cur indexcache =<< getindextree diff --git a/Git/Repair.hs b/Git/Repair.hs index 7e058e25df..3b51ff1479 100644 --- a/Git/Repair.hs +++ b/Git/Repair.hs @@ -269,7 +269,7 @@ explodePackedRefsFile r = do let gitd = localGitDir r let dest = gitd P. fromRef' ref let dest' = fromRawFilePath dest - createDirectoryUnder gitd (parentDir dest) + createDirectoryUnder [gitd] (parentDir dest) unlessM (doesFileExist dest') $ writeFile dest' (fromRef sha) diff --git a/Logs/File.hs b/Logs/File.hs index c62f16ce6d..be6aa72d15 100644 --- a/Logs/File.hs +++ b/Logs/File.hs @@ -1,6 +1,6 @@ {- git-annex log files - - - Copyright 2018-2020 Joey Hess + - Copyright 2018-2022 Joey Hess - - Licensed under the GNU AGPL version 3 or higher. -} @@ -20,7 +20,6 @@ import Annex.Common import Annex.Perms import Annex.LockFile import Annex.ReplaceFile -import qualified Git import Utility.Tmp import qualified Data.ByteString.Lazy as L @@ -51,7 +50,7 @@ withLogHandle f a = do -- | Appends a line to a log file, first locking it to prevent -- concurrent writers. -appendLogFile :: RawFilePath -> (Git.Repo -> RawFilePath) -> L.ByteString -> Annex () +appendLogFile :: RawFilePath -> RawFilePath -> L.ByteString -> Annex () appendLogFile f lck c = createDirWhenNeeded f $ withExclusiveLock lck $ do @@ -69,7 +68,7 @@ appendLogFile f lck c = -- -- The file is locked to prevent concurrent writers, and it is written -- atomically. -modifyLogFile :: RawFilePath -> (Git.Repo -> RawFilePath) -> ([L.ByteString] -> [L.ByteString]) -> Annex () +modifyLogFile :: RawFilePath -> RawFilePath -> ([L.ByteString] -> [L.ByteString]) -> Annex () modifyLogFile f lck modf = withExclusiveLock lck $ do ls <- liftIO $ fromMaybe [] <$> tryWhenExists (L8.lines <$> L.readFile f') @@ -89,7 +88,7 @@ modifyLogFile f lck modf = withExclusiveLock lck $ do -- action is concurrently modifying the file. It does not lock the file, -- for speed, but instead relies on the fact that a log file usually -- ends in a newline. -checkLogFile :: FilePath -> (Git.Repo -> RawFilePath) -> (L.ByteString -> Bool) -> Annex Bool +checkLogFile :: FilePath -> RawFilePath -> (L.ByteString -> Bool) -> Annex Bool checkLogFile f lck matchf = withExclusiveLock lck $ bracket setup cleanup go where setup = liftIO $ tryWhenExists $ openFile f ReadMode @@ -123,7 +122,7 @@ fullLines = go [] -- -- Locking is used to prevent writes to to the log file while this -- is running. 
-streamLogFile :: FilePath -> (Git.Repo -> RawFilePath) -> (String -> Annex ()) -> Annex () +streamLogFile :: FilePath -> RawFilePath -> (String -> Annex ()) -> Annex () streamLogFile f lck a = withExclusiveLock lck $ bracketOnError setup cleanup go where setup = liftIO $ tryWhenExists $ openFile f ReadMode diff --git a/Logs/Smudge.hs b/Logs/Smudge.hs index 90e5475f92..9cde95a1d9 100644 --- a/Logs/Smudge.hs +++ b/Logs/Smudge.hs @@ -19,7 +19,8 @@ import qualified Data.ByteString.Lazy as L smudgeLog :: Key -> TopFilePath -> Annex () smudgeLog k f = do logf <- fromRepo gitAnnexSmudgeLog - appendLogFile logf gitAnnexSmudgeLock $ L.fromStrict $ + lckf <- fromRepo gitAnnexSmudgeLock + appendLogFile logf lckf $ L.fromStrict $ serializeKey' k <> " " <> getTopFilePath f -- | Streams all smudged files, and then empties the log at the end. @@ -32,7 +33,8 @@ smudgeLog k f = do streamSmudged :: (Key -> TopFilePath -> Annex ()) -> Annex () streamSmudged a = do logf <- fromRepo gitAnnexSmudgeLog - streamLogFile (fromRawFilePath logf) gitAnnexSmudgeLock $ \l -> + lckf <- fromRepo gitAnnexSmudgeLock + streamLogFile (fromRawFilePath logf) lckf $ \l -> case parse l of Nothing -> noop Just (k, f) -> a k f diff --git a/Logs/Upgrade.hs b/Logs/Upgrade.hs index 62974f65c0..f1ff0bd56c 100644 --- a/Logs/Upgrade.hs +++ b/Logs/Upgrade.hs @@ -24,8 +24,9 @@ import Data.Time.Clock.POSIX writeUpgradeLog :: RepoVersion -> POSIXTime-> Annex () writeUpgradeLog v t = do - logfile <- fromRepo gitAnnexUpgradeLog - appendLogFile logfile gitAnnexUpgradeLock $ encodeBL $ + logf <- fromRepo gitAnnexUpgradeLog + lckf <- fromRepo gitAnnexUpgradeLock + appendLogFile logf lckf $ encodeBL $ show (fromRepoVersion v) ++ " " ++ show t readUpgradeLog :: Annex [(RepoVersion, POSIXTime)] diff --git a/Remote/Bup.hs b/Remote/Bup.hs index 9c35884b7e..e4608e8a50 100644 --- a/Remote/Bup.hs +++ b/Remote/Bup.hs @@ -336,5 +336,5 @@ lockBup writer r a = do let remoteid = fromUUID (uuid r) let lck = dir P. remoteid <> ".lck" if writer - then withExclusiveLock (const lck) a - else withSharedLock (const lck) a + then withExclusiveLock lck a + else withSharedLock lck a diff --git a/Remote/Directory.hs b/Remote/Directory.hs index 96490f4a9b..8220b57dda 100644 --- a/Remote/Directory.hs +++ b/Remote/Directory.hs @@ -182,7 +182,7 @@ storeKeyM :: RawFilePath -> ChunkConfig -> CopyCoWTried -> Storer storeKeyM d chunkconfig cow k c m = ifM (checkDiskSpaceDirectory d k) ( do - void $ liftIO $ tryIO $ createDirectoryUnder d tmpdir + void $ liftIO $ tryIO $ createDirectoryUnder [d] tmpdir store , giveup "Not enough free disk space." ) @@ -229,7 +229,7 @@ checkDiskSpaceDirectory d k = do finalizeStoreGeneric :: RawFilePath -> RawFilePath -> RawFilePath -> IO () finalizeStoreGeneric d tmp dest = do removeDirGeneric (fromRawFilePath d) dest' - createDirectoryUnder d (parentDir dest) + createDirectoryUnder [d] (parentDir dest) renameDirectory (fromRawFilePath tmp) dest' -- may fail on some filesystems void $ tryIO $ do @@ -309,7 +309,7 @@ checkPresentGeneric' d check = ifM check storeExportM :: RawFilePath -> CopyCoWTried -> FilePath -> Key -> ExportLocation -> MeterUpdate -> Annex () storeExportM d cow src _k loc p = do - liftIO $ createDirectoryUnder d (P.takeDirectory dest) + liftIO $ createDirectoryUnder [d] (P.takeDirectory dest) -- Write via temp file so that checkPresentGeneric will not -- see it until it's fully stored. 
viaTmp go (fromRawFilePath dest) () @@ -337,7 +337,7 @@ checkPresentExportM d _k loc = renameExportM :: RawFilePath -> Key -> ExportLocation -> ExportLocation -> Annex (Maybe ()) renameExportM d _k oldloc newloc = liftIO $ do - createDirectoryUnder d (P.takeDirectory dest) + createDirectoryUnder [d] (P.takeDirectory dest) renameFile (fromRawFilePath src) (fromRawFilePath dest) removeExportLocation d oldloc return (Just ()) @@ -502,7 +502,7 @@ retrieveExportWithContentIdentifierM ii dir cow loc cid dest gk p = storeExportWithContentIdentifierM :: IgnoreInodes -> RawFilePath -> CopyCoWTried -> FilePath -> Key -> ExportLocation -> [ContentIdentifier] -> MeterUpdate -> Annex ContentIdentifier storeExportWithContentIdentifierM ii dir cow src _k loc overwritablecids p = do - liftIO $ createDirectoryUnder dir (toRawFilePath destdir) + liftIO $ createDirectoryUnder [dir] (toRawFilePath destdir) withTmpFileIn destdir template $ \tmpf tmph -> do liftIO $ hClose tmph void $ liftIO $ fileCopier cow src tmpf p Nothing diff --git a/Remote/Directory/LegacyChunked.hs b/Remote/Directory/LegacyChunked.hs index 42a94f8d02..fe1065d76c 100644 --- a/Remote/Directory/LegacyChunked.hs +++ b/Remote/Directory/LegacyChunked.hs @@ -79,7 +79,7 @@ storeLegacyChunked' meterupdate chunksize (d:dests) bs c = do storeHelper :: FilePath -> (RawFilePath -> RawFilePath -> IO ()) -> Key -> ([FilePath] -> IO [FilePath]) -> FilePath -> FilePath -> IO () storeHelper repotop finalizer key storer tmpdir destdir = do void $ liftIO $ tryIO $ createDirectoryUnder - (toRawFilePath repotop) + [toRawFilePath repotop] (toRawFilePath tmpdir) Legacy.storeChunks key tmpdir destdir storer recorder (legacyFinalizer finalizer) where diff --git a/Remote/GCrypt.hs b/Remote/GCrypt.hs index e78a6c88ed..f758932bdc 100644 --- a/Remote/GCrypt.hs +++ b/Remote/GCrypt.hs @@ -388,7 +388,7 @@ store' repo r rsyncopts accessmethod | not $ Git.repoIsUrl repo = byteStorer $ \k b p -> guardUsable repo (giveup "cannot access remote") $ liftIO $ do let tmpdir = Git.repoPath repo P. "tmp" P. keyFile k - void $ tryIO $ createDirectoryUnder (Git.repoPath repo) tmpdir + void $ tryIO $ createDirectoryUnder [Git.repoPath repo] tmpdir let tmpf = tmpdir P. 
keyFile k meteredWriteFile p (fromRawFilePath tmpf) b let destdir = parentDir $ toRawFilePath $ gCryptLocation repo k diff --git a/Remote/Helper/ExportImport.hs b/Remote/Helper/ExportImport.hs index d753dee66a..f781275b88 100644 --- a/Remote/Helper/ExportImport.hs +++ b/Remote/Helper/ExportImport.hs @@ -245,7 +245,8 @@ adjustExportImport' isexport isimport r rs = do oldcids <- liftIO $ concat <$> mapM (ContentIdentifier.getContentIdentifiers db rs) oldks newcid <- storeExportWithContentIdentifier (importActions r) f k loc oldcids p - withExclusiveLock gitAnnexContentIdentifierLock $ do + cidlck <- calcRepo' gitAnnexContentIdentifierLock + withExclusiveLock cidlck $ do liftIO $ ContentIdentifier.recordContentIdentifier db rs newcid k liftIO $ ContentIdentifier.flushDbQueue db recordContentIdentifier rs newcid k @@ -280,8 +281,10 @@ adjustExportImport' isexport isimport r rs = do ( do db <- ContentIdentifier.openDb ContentIdentifier.needsUpdateFromLog db >>= \case - Just v -> withExclusiveLock gitAnnexContentIdentifierLock $ - ContentIdentifier.updateFromLog db v + Just v -> do + cidlck <- calcRepo' gitAnnexContentIdentifierLock + withExclusiveLock cidlck $ + ContentIdentifier.updateFromLog db v Nothing -> noop liftIO $ atomically $ putTMVar dbtv db return db diff --git a/Test.hs b/Test.hs index 79c6adcca6..2d32f34d55 100644 --- a/Test.hs +++ b/Test.hs @@ -817,7 +817,7 @@ test_lock_force = intmpclonerepo $ do Database.Keys.removeInodeCaches k Database.Keys.closeDb liftIO . removeWhenExistsWith R.removeLink - =<< Annex.fromRepo Annex.Locations.gitAnnexKeysDbIndexCache + =<< Annex.calcRepo' Annex.Locations.gitAnnexKeysDbIndexCache writecontent annexedfile "test_lock_force content" git_annex_shouldfail "lock" [annexedfile] "lock of modified file should not be allowed" git_annex "lock" ["--force", annexedfile] "lock --force of modified file" diff --git a/Types/GitConfig.hs b/Types/GitConfig.hs index a97fb00b68..d6b54e1852 100644 --- a/Types/GitConfig.hs +++ b/Types/GitConfig.hs @@ -119,6 +119,7 @@ data GitConfig = GitConfig , annexVerify :: Bool , annexPidLock :: Bool , annexPidLockTimeout :: Seconds + , annexDbDir :: Maybe RawFilePath , annexAddUnlocked :: GlobalConfigurable (Maybe String) , annexSecureHashesOnly :: Bool , annexRetry :: Maybe Integer @@ -212,6 +213,7 @@ extractGitConfig configsource r = GitConfig , annexPidLock = getbool (annexConfig "pidlock") False , annexPidLockTimeout = Seconds $ fromMaybe 300 $ getmayberead (annexConfig "pidlocktimeout") + , annexDbDir = toRawFilePath <$> getmaybe (annexConfig "dbdir") , annexAddUnlocked = configurable Nothing $ fmap Just $ getmaybe (annexConfig "addunlocked") , annexSecureHashesOnly = getbool (annexConfig "securehashesonly") False diff --git a/Upgrade/V7.hs b/Upgrade/V7.hs index 467d55442b..28e808b309 100644 --- a/Upgrade/V7.hs +++ b/Upgrade/V7.hs @@ -37,17 +37,17 @@ upgrade automatic = do -- The old content identifier database is deleted here, but the -- new database is not populated. It will be automatically -- populated from the git-annex branch the next time it is used. - removeOldDb gitAnnexContentIdentifierDbDirOld + removeOldDb . fromRawFilePath =<< fromRepo gitAnnexContentIdentifierDbDirOld liftIO . removeWhenExistsWith R.removeLink =<< fromRepo gitAnnexContentIdentifierLockOld -- The export databases are deleted here. The new databases -- will be populated by the next thing that needs them, the same -- way as they would be in a fresh clone. - removeOldDb gitAnnexExportDir + removeOldDb . 
fromRawFilePath =<< calcRepo' gitAnnexExportDir populateKeysDb - removeOldDb gitAnnexKeysDbOld + removeOldDb . fromRawFilePath =<< fromRepo gitAnnexKeysDbOld liftIO . removeWhenExistsWith R.removeLink =<< fromRepo gitAnnexKeysDbIndexCacheOld liftIO . removeWhenExistsWith R.removeLink @@ -72,9 +72,8 @@ gitAnnexContentIdentifierDbDirOld r = gitAnnexDir r P. "cids" gitAnnexContentIdentifierLockOld :: Git.Repo -> RawFilePath gitAnnexContentIdentifierLockOld r = gitAnnexContentIdentifierDbDirOld r <> ".lck" -removeOldDb :: (Git.Repo -> RawFilePath) -> Annex () -removeOldDb getdb = do - db <- fromRawFilePath <$> fromRepo getdb +removeOldDb :: FilePath -> Annex () +removeOldDb db = whenM (liftIO $ doesDirectoryExist db) $ do v <- liftIO $ tryNonAsync $ #if MIN_VERSION_directory(1,2,7) diff --git a/Upgrade/V9.hs b/Upgrade/V9.hs index f2878856a9..951feb9ab4 100644 --- a/Upgrade/V9.hs +++ b/Upgrade/V9.hs @@ -71,7 +71,8 @@ performUpgrade automatic = do {- Take a lock to ensure that there are no other git-annex - processes running that are using the old content locking method. -} - withExclusiveLock gitAnnexContentLockLock $ do + lck <- fromRepo gitAnnexContentLockLock + withExclusiveLock lck $ do {- When core.sharedRepository is set, object files - used to have their write bits set. That can now be - removed, if the user the upgrade is running as has diff --git a/Utility/Directory/Create.hs b/Utility/Directory/Create.hs index 32c0bcfd46..5650f96db1 100644 --- a/Utility/Directory/Create.hs +++ b/Utility/Directory/Create.hs @@ -31,10 +31,10 @@ import qualified Utility.RawFilePath as R import Utility.PartialPrelude {- Like createDirectoryIfMissing True, but it will only create - - missing parent directories up to but not including the directory - - in the first parameter. + - missing parent directories up to but not including a directory + - from the first parameter. - - - For example, createDirectoryUnder "/tmp/foo" "/tmp/foo/bar/baz" + - For example, createDirectoryUnder ["/tmp/foo"] "/tmp/foo/bar/baz" - will create /tmp/foo/bar if necessary, but if /tmp/foo does not exist, - it will throw an exception. - @@ -45,40 +45,43 @@ import Utility.PartialPrelude - FilePath (or the same as it), it will fail with an exception - even if the second FilePath's parent directory already exists. - - - Either or both of the FilePaths can be relative, or absolute. + - The FilePaths can be relative, or absolute. - They will be normalized as necessary. - - Note that, the second FilePath, if relative, is relative to the current - - working directory, not to the first FilePath. + - working directory. -} -createDirectoryUnder :: RawFilePath -> RawFilePath -> IO () -createDirectoryUnder topdir dir = - createDirectoryUnder' topdir dir R.createDirectory +createDirectoryUnder :: [RawFilePath] -> RawFilePath -> IO () +createDirectoryUnder topdirs dir = + createDirectoryUnder' topdirs dir R.createDirectory createDirectoryUnder' :: (MonadIO m, MonadCatch m) - => RawFilePath + => [RawFilePath] -> RawFilePath -> (RawFilePath -> m ()) -> m () -createDirectoryUnder' topdir dir0 mkdir = do - p <- liftIO $ relPathDirToFile topdir dir0 - let dirs = P.splitDirectories p - -- Catch cases where the dir is not beneath the topdir. +createDirectoryUnder' topdirs dir0 mkdir = do + relps <- liftIO $ forM topdirs $ \topdir -> relPathDirToFile topdir dir0 + let relparts = map P.splitDirectories relps + -- Catch cases where dir0 is not beneath a topdir. -- If the relative path between them starts with "..", -- it's not. 
And on Windows, if they are on different drives,
 	-- the path will not be relative.
-	if headMaybe dirs == Just ".." || P.isAbsolute p
-		then liftIO $ ioError $ customerror userErrorType
-			("createDirectoryFrom: not located in " ++ fromRawFilePath topdir)
-		-- If dir0 is the same as the topdir, don't try to create
-		-- it, but make sure it does exist.
-		else if null dirs
-			then liftIO $ unlessM (doesDirectoryExist (fromRawFilePath topdir)) $
-				ioError $ customerror doesNotExistErrorType
-					"createDirectoryFrom: does not exist"
-			else createdirs $
-				map (topdir P.</>) (reverse (scanl1 (P.</>) dirs))
+	let notbeneath = \(_topdir, (relp, dirs)) ->
+		headMaybe dirs /= Just ".." && not (P.isAbsolute relp)
+	case filter notbeneath $ zip topdirs (zip relps relparts) of
+		((topdir, (_relp, dirs)):_)
+			-- If dir0 is the same as the topdir, don't try to
+			-- create it, but make sure it does exist.
+			| null dirs ->
+				liftIO $ unlessM (doesDirectoryExist (fromRawFilePath topdir)) $
+					ioError $ customerror doesNotExistErrorType $
+						"createDirectoryFrom: " ++ fromRawFilePath topdir ++ " does not exist"
+			| otherwise -> createdirs $
+				map (topdir P.</>) (reverse (scanl1 (P.</>) dirs))
+		_ -> liftIO $ ioError $ customerror userErrorType
+			("createDirectoryFrom: not located in " ++ unwords (map fromRawFilePath topdirs))
   where
 	customerror t s = mkIOError t s Nothing (Just (fromRawFilePath dir0))
diff --git a/doc/git-annex.mdwn b/doc/git-annex.mdwn
index a53f7bf7eb..a401596ef3 100644
--- a/doc/git-annex.mdwn
+++ b/doc/git-annex.mdwn
@@ -1228,6 +1228,15 @@ repository, using [[git-annex-config]]. See its man page for a list.)
   file system. This timeout prevents git-annex waiting forever in such a
   situation.
 
+* `annex.dbdir`
+
+  The directory where git-annex should store its sqlite databases.
+  The default location is inside `.git/annex/`.
+
+  Certain filesystems, such as cifs, may not support the locking operations
+  that sqlite needs, and setting this to a directory on another filesystem
+  can work around such a problem.
+
 * `annex.cachecreds`
 
   When "true" (the default), git-annex will cache credentials used to
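
A minimal usage sketch for the new setting, assuming a repository whose `.git/annex` lives on a cifs mount; the destination path is purely an example and should point at a local filesystem where sqlite's locking works:

    # run inside the repository; the target path is hypothetical
    git config annex.dbdir /home/user/.cache/git-annex-dbs

With that set, the `gitAnnex*DbDir` functions above fall back to `gitAnnexDir` only when `annexDbDir` is unset, so the keys, content identifier, export, and fsck databases are looked up and created under the configured directory instead of inside `.git/annex/`.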