add annex.dbdir (WIP)
WIP: This is mostly complete, but there is a problem: createDirectoryUnder
throws an error when annex.dbdir is set to a directory outside the git repo.

annex.dbdir is a workaround for filesystems where sqlite does not work,
due to eg, the filesystem not properly supporting locking.

It's intended to be set before initializing the repository. Changing it in
an existing repository can be done, but would be the same as making a new
repository and moving all the annexed objects into it. While the databases
get recreated from the git-annex branch in that situation, any information
that is in the databases but not stored in the branch gets lost. It may be
that no information ever gets stored in the databases that cannot be
reconstructed from the branch, but I have not verified that.

Sponsored-by: Dartmouth College's Datalad project
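For orientation, a minimal usage sketch of the new setting (the paths and repo name are illustrative, not taken from this commit). Since annex.dbdir is meant to be in place before the repository is initialized, it would be set between `git init` and `git annex init`; per the WIP note above, pointing it outside the repository currently trips the createDirectoryUnder error:

    # hypothetical layout: the repo sits on a cifs mount where sqlite
    # locking fails, and /var/lib/annexdbs is on a local ext4 filesystem
    cd /mnt/cifs/myrepo
    git init
    git config annex.dbdir /var/lib/annexdbs/myrepo
    git annex init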
parent 425deaf615
commit e60766543f
26 changed files with 152 additions and 104 deletions
Annex.hs | 6
@@ -27,6 +27,7 @@ module Annex (
 	inRepo,
 	fromRepo,
 	calcRepo,
+	calcRepo',
 	getGitConfig,
 	overrideGitConfig,
 	changeGitRepo,
@@ -361,6 +362,11 @@ calcRepo a = do
 	s <- getState id
 	liftIO $ a (repo s) (gitconfig s)
 
+calcRepo' :: (Git.Repo -> GitConfig -> a) -> Annex a
+calcRepo' f = do
+	s <- getState id
+	pure $ f (repo s) (gitconfig s)
+
 {- Gets the GitConfig settings. -}
 getGitConfig :: Annex GitConfig
 getGitConfig = getState gitconfig

@@ -6,7 +6,7 @@ import Common as X
 import Types as X
 import Key as X
 import Types.UUID as X
-import Annex as X (gitRepo, inRepo, fromRepo, calcRepo)
+import Annex as X (gitRepo, inRepo, fromRepo, calcRepo, calcRepo')
 import Annex.Locations as X
 import Annex.Debug as X (fastDebug, debug)
 import Messages as X

@@ -137,7 +137,7 @@ withContentLockFile :: Key -> (Maybe RawFilePath -> Annex a) -> Annex a
 withContentLockFile k a = do
 	v <- getVersion
 	if versionNeedsWritableContentFiles v
-		then withSharedLock gitAnnexContentLockLock $ do
+		then fromRepo gitAnnexContentLockLock >>= \lck -> withSharedLock lck $ do
 			{- While the lock is held, check to see if the git
 			 - config has changed, and reload it if so. This
 			 - updates the annex.version after the v10 upgrade,
@@ -156,7 +156,7 @@ withContentLockFile k a = do
 			reloadConfig
 			getVersion
 			go (v')
-		else (go v)
+		else go v
   where
 	go v = contentLockFile k v >>= a

@@ -412,8 +412,10 @@ importKeys remote importtreeconfig importcontent thirdpartypopulated importablec
 		| importcontent = a
 		| otherwise = reuseVectorClockWhile a
 
-	withciddb = withExclusiveLock gitAnnexContentIdentifierLock .
-		bracket CIDDb.openDb CIDDb.closeDb
+	withciddb a = do
+		cidlck <- calcRepo' gitAnnexContentIdentifierLock
+		withExclusiveLock cidlck $
+			bracket CIDDb.openDb CIDDb.closeDb a
 
 	run cidmap importing db = do
 		largematcher <- largeFilesMatcher

@@ -281,4 +281,6 @@ data JournalLocked = ProduceJournalLocked
 {- Runs an action that modifies the journal, using locking to avoid
  - contention with other git-annex processes. -}
 lockJournal :: (JournalLocked -> Annex a) -> Annex a
-lockJournal a = withExclusiveLock gitAnnexJournalLock $ a ProduceJournalLocked
+lockJournal a = do
+	lck <- fromRepo gitAnnexJournalLock
+	withExclusiveLock lck $ a ProduceJournalLocked

@@ -37,7 +37,7 @@ module Annex.Locations (
 	gitAnnexBadDir,
 	gitAnnexBadLocation,
 	gitAnnexUnusedLog,
-	gitAnnexKeysDb,
+	gitAnnexKeysDbDir,
 	gitAnnexKeysDbLock,
 	gitAnnexKeysDbIndexCache,
 	gitAnnexFsckState,
@@ -321,38 +321,42 @@ gitAnnexUnusedLog :: RawFilePath -> Git.Repo -> RawFilePath
 gitAnnexUnusedLog prefix r = gitAnnexDir r P.</> (prefix <> "unused")
 
 {- .git/annex/keysdb/ contains a database of information about keys. -}
-gitAnnexKeysDb :: Git.Repo -> RawFilePath
-gitAnnexKeysDb r = gitAnnexDir r P.</> "keysdb"
+gitAnnexKeysDbDir :: Git.Repo -> GitConfig -> RawFilePath
+gitAnnexKeysDbDir r c = fromMaybe (gitAnnexDir r) (annexDbDir c) P.</> "keysdb"
 
 {- Lock file for the keys database. -}
-gitAnnexKeysDbLock :: Git.Repo -> RawFilePath
-gitAnnexKeysDbLock r = gitAnnexKeysDb r <> ".lck"
+gitAnnexKeysDbLock :: Git.Repo -> GitConfig -> RawFilePath
+gitAnnexKeysDbLock r c = gitAnnexKeysDbDir r c <> ".lck"
 
 {- Contains the stat of the last index file that was
  - reconciled with the keys database. -}
-gitAnnexKeysDbIndexCache :: Git.Repo -> RawFilePath
-gitAnnexKeysDbIndexCache r = gitAnnexKeysDb r <> ".cache"
+gitAnnexKeysDbIndexCache :: Git.Repo -> GitConfig -> RawFilePath
+gitAnnexKeysDbIndexCache r c = gitAnnexKeysDbDir r c <> ".cache"
 
 {- .git/annex/fsck/uuid/ is used to store information about incremental
  - fscks. -}
-gitAnnexFsckDir :: UUID -> Git.Repo -> RawFilePath
-gitAnnexFsckDir u r = gitAnnexDir r P.</> "fsck" P.</> fromUUID u
+gitAnnexFsckDir :: UUID -> Git.Repo -> Maybe GitConfig -> RawFilePath
+gitAnnexFsckDir u r mc = case annexDbDir =<< mc of
+	Nothing -> go (gitAnnexDir r)
+	Just d -> go d
+  where
+	go d = d P.</> "fsck" P.</> fromUUID u
 
 {- used to store information about incremental fscks. -}
 gitAnnexFsckState :: UUID -> Git.Repo -> RawFilePath
-gitAnnexFsckState u r = gitAnnexFsckDir u r P.</> "state"
+gitAnnexFsckState u r = gitAnnexFsckDir u r Nothing P.</> "state"
 
 {- Directory containing database used to record fsck info. -}
-gitAnnexFsckDbDir :: UUID -> Git.Repo -> RawFilePath
-gitAnnexFsckDbDir u r = gitAnnexFsckDir u r P.</> "fsckdb"
+gitAnnexFsckDbDir :: UUID -> Git.Repo -> GitConfig -> RawFilePath
+gitAnnexFsckDbDir u r c = gitAnnexFsckDir u r (Just c) P.</> "fsckdb"
 
 {- Directory containing old database used to record fsck info. -}
-gitAnnexFsckDbDirOld :: UUID -> Git.Repo -> RawFilePath
-gitAnnexFsckDbDirOld u r = gitAnnexFsckDir u r P.</> "db"
+gitAnnexFsckDbDirOld :: UUID -> Git.Repo -> GitConfig -> RawFilePath
+gitAnnexFsckDbDirOld u r c = gitAnnexFsckDir u r (Just c) P.</> "db"
 
 {- Lock file for the fsck database. -}
-gitAnnexFsckDbLock :: UUID -> Git.Repo -> RawFilePath
-gitAnnexFsckDbLock u r = gitAnnexFsckDir u r P.</> "fsck.lck"
+gitAnnexFsckDbLock :: UUID -> Git.Repo -> GitConfig -> RawFilePath
+gitAnnexFsckDbLock u r c = gitAnnexFsckDir u r (Just c) P.</> "fsck.lck"
 
 {- .git/annex/fsckresults/uuid is used to store results of git fscks -}
 gitAnnexFsckResultsLog :: UUID -> Git.Repo -> RawFilePath
@@ -384,20 +388,22 @@ gitAnnexMoveLock r = gitAnnexDir r P.</> "move.lck"
 
 {- .git/annex/export/ is used to store information about
  - exports to special remotes. -}
-gitAnnexExportDir :: Git.Repo -> RawFilePath
-gitAnnexExportDir r = gitAnnexDir r P.</> "export"
+gitAnnexExportDir :: Git.Repo -> GitConfig -> RawFilePath
+gitAnnexExportDir r c = fromMaybe (gitAnnexDir r) (annexDbDir c) P.</> "export"
 
 {- Directory containing database used to record export info. -}
-gitAnnexExportDbDir :: UUID -> Git.Repo -> RawFilePath
-gitAnnexExportDbDir u r = gitAnnexExportDir r P.</> fromUUID u P.</> "exportdb"
+gitAnnexExportDbDir :: UUID -> Git.Repo -> GitConfig -> RawFilePath
+gitAnnexExportDbDir u r c =
+	gitAnnexExportDir r c P.</> fromUUID u P.</> "exportdb"
 
-{- Lock file for export state for a special remote. -}
-gitAnnexExportLock :: UUID -> Git.Repo -> RawFilePath
-gitAnnexExportLock u r = gitAnnexExportDbDir u r <> ".lck"
+{- Lock file for export database. -}
+gitAnnexExportLock :: UUID -> Git.Repo -> GitConfig -> RawFilePath
+gitAnnexExportLock u r c = gitAnnexExportDbDir u r c <> ".lck"
 
-{- Lock file for updating the export state for a special remote. -}
-gitAnnexExportUpdateLock :: UUID -> Git.Repo -> RawFilePath
-gitAnnexExportUpdateLock u r = gitAnnexExportDbDir u r <> ".upl"
+{- Lock file for updating the export database with information from the
+ - repository. -}
+gitAnnexExportUpdateLock :: UUID -> Git.Repo -> GitConfig -> RawFilePath
+gitAnnexExportUpdateLock u r c = gitAnnexExportDbDir u r c <> ".upl"
 
 {- Log file used to keep track of files that were in the tree exported to a
  - remote, but were excluded by its preferred content settings. -}
@@ -409,12 +415,13 @@ gitAnnexExportExcludeLog u r = gitAnnexDir r P.</> "export.ex" P.</> fromUUID u
  - (This used to be "cid", but a problem with the database caused it to
  - need to be rebuilt with a new name.)
  -}
-gitAnnexContentIdentifierDbDir :: Git.Repo -> RawFilePath
-gitAnnexContentIdentifierDbDir r = gitAnnexDir r P.</> "cidsdb"
+gitAnnexContentIdentifierDbDir :: Git.Repo -> GitConfig -> RawFilePath
+gitAnnexContentIdentifierDbDir r c =
+	fromMaybe (gitAnnexDir r) (annexDbDir c) P.</> "cidsdb"
 
 {- Lock file for writing to the content id database. -}
-gitAnnexContentIdentifierLock :: Git.Repo -> RawFilePath
-gitAnnexContentIdentifierLock r = gitAnnexContentIdentifierDbDir r <> ".lck"
+gitAnnexContentIdentifierLock :: Git.Repo -> GitConfig -> RawFilePath
+gitAnnexContentIdentifierLock r c = gitAnnexContentIdentifierDbDir r c <> ".lck"
 
 {- .git/annex/schedulestate is used to store information about when
  - scheduled jobs were last run. -}

@@ -21,7 +21,6 @@ module Annex.LockFile (
 import Annex.Common
 import Annex
 import Types.LockCache
-import qualified Git
 import Annex.Perms
 import Annex.LockPool
 
@@ -63,9 +62,8 @@ changeLockCache a = do
 
 {- Runs an action with a shared lock held. If an exclusive lock is held,
  - blocks until it becomes free. -}
-withSharedLock :: (Git.Repo -> RawFilePath) -> Annex a -> Annex a
-withSharedLock getlockfile a = debugLocks $ do
-	lockfile <- fromRepo getlockfile
+withSharedLock :: RawFilePath -> Annex a -> Annex a
+withSharedLock lockfile a = debugLocks $ do
 	createAnnexDirectory $ P.takeDirectory lockfile
 	mode <- annexFileMode
 	bracket (lock mode lockfile) (liftIO . dropLock) (const a)
@@ -78,16 +76,15 @@ withSharedLock getlockfile a = do
 
 {- Runs an action with an exclusive lock held. If the lock is already
  - held, blocks until it becomes free. -}
-withExclusiveLock :: (Git.Repo -> RawFilePath) -> Annex a -> Annex a
-withExclusiveLock getlockfile a = bracket
-	(takeExclusiveLock getlockfile)
+withExclusiveLock :: RawFilePath -> Annex a -> Annex a
+withExclusiveLock lockfile a = bracket
+	(takeExclusiveLock lockfile)
 	(liftIO . dropLock)
 	(const a)
 
 {- Takes an exclusive lock, blocking until it's free. -}
-takeExclusiveLock :: (Git.Repo -> RawFilePath) -> Annex LockHandle
-takeExclusiveLock getlockfile = debugLocks $ do
-	lockfile <- fromRepo getlockfile
+takeExclusiveLock :: RawFilePath -> Annex LockHandle
+takeExclusiveLock lockfile = debugLocks $ do
 	createAnnexDirectory $ P.takeDirectory lockfile
 	mode <- annexFileMode
 	lock mode lockfile
@@ -100,9 +97,8 @@ takeExclusiveLock getlockfile = debugLocks $ do
 
 {- Tries to take an exclusive lock and run an action. If the lock is
  - already held, returns Nothing. -}
-tryExclusiveLock :: (Git.Repo -> RawFilePath) -> Annex a -> Annex (Maybe a)
-tryExclusiveLock getlockfile a = debugLocks $ do
-	lockfile <- fromRepo getlockfile
+tryExclusiveLock :: RawFilePath -> Annex a -> Annex (Maybe a)
+tryExclusiveLock lockfile a = debugLocks $ do
 	createAnnexDirectory $ P.takeDirectory lockfile
 	mode <- annexFileMode
 	bracket (lock mode lockfile) (liftIO . unlock) go

@@ -65,9 +65,11 @@ flush = do
  - git locking files. So, only one queue is allowed to flush at a time.
  -}
 flush' :: Git.Queue.Queue Annex -> Annex (Git.Queue.Queue Annex)
-flush' q = withExclusiveLock gitAnnexGitQueueLock $ do
-	showStoringStateAction
-	Git.Queue.flush q =<< gitRepo
+flush' q = do
+	lck <- fromRepo gitAnnexGitQueueLock
+	withExclusiveLock lck $ do
+		showStoringStateAction
+		Git.Queue.flush q =<< gitRepo
 
 {- Gets the size of the queue. -}
 size :: Annex Int

@@ -27,7 +27,7 @@ withOtherTmp a = do
 	Annex.addCleanupAction OtherTmpCleanup cleanupOtherTmp
 	tmpdir <- fromRepo gitAnnexTmpOtherDir
 	tmplck <- fromRepo gitAnnexTmpOtherLock
-	withSharedLock (const tmplck) $ do
+	withSharedLock tmplck $ do
 		void $ createAnnexDirectory tmpdir
 		a tmpdir
 
@@ -56,7 +56,7 @@ withEventuallyCleanedOtherTmp = bracket setup cleanup
 cleanupOtherTmp :: Annex ()
 cleanupOtherTmp = do
 	tmplck <- fromRepo gitAnnexTmpOtherLock
-	void $ tryIO $ tryExclusiveLock (const tmplck) $ do
+	void $ tryIO $ tryExclusiveLock tmplck $ do
 		tmpdir <- fromRawFilePath <$> fromRepo gitAnnexTmpOtherDir
 		void $ liftIO $ tryIO $ removeDirectoryRecursive tmpdir
 		oldtmp <- fromRawFilePath <$> fromRepo gitAnnexTmpOtherDirOld

@@ -17,6 +17,9 @@ git-annex (10.20220725) UNRELEASED; urgency=medium
   * When bup split fails, display its stderr.
   * Avoid running multiple bup split processes concurrently, since
     bup is not concurrency safe.
+  * Added annex.dbdir config which can be used to move sqlite databases
+    to a different filesystem than the git-annex repo, when the repo is on
+    a filesystem that sqlite does not work well in.
 
  -- Joey Hess <id@joeyh.name>  Mon, 25 Jul 2022 15:35:45 -0400
 

@@ -375,12 +375,14 @@ logMove srcuuid destuuid deststartedwithcopy key a = go =<< setup
 
 	setup = do
 		logf <- fromRepo gitAnnexMoveLog
+		lckf <- fromRepo gitAnnexMoveLock
 		-- Only log when there was no copy.
 		unless deststartedwithcopy $
-			appendLogFile logf gitAnnexMoveLock logline
+			appendLogFile logf lckf logline
 		return logf
 
 	cleanup logf = do
+		lck <- fromRepo gitAnnexMoveLock
 		-- This buffers the log file content in memory.
 		-- The log file length is limited to the number of
 		-- concurrent jobs, times the number of times a move
@@ -388,13 +390,14 @@ logMove srcuuid destuuid deststartedwithcopy key a = go =<< setup
 		-- That could grow without bounds given enough time,
 		-- so the log is also truncated to the most recent
 		-- 100 items.
-		modifyLogFile logf gitAnnexMoveLock
+		modifyLogFile logf lck
 			(filter (/= logline) . reverse . take 100 . reverse)
 
 	go logf
 		-- Only need to check log when there is a copy.
 		| deststartedwithcopy = do
-			wasnocopy <- checkLogFile (fromRawFilePath logf) gitAnnexMoveLock
+			lck <- fromRepo gitAnnexMoveLock
+			wasnocopy <- checkLogFile (fromRawFilePath logf) lck
 				(== logline)
 			if wasnocopy
 				then go' logf False

@@ -78,7 +78,7 @@ AnnexBranch
 -}
 openDb :: Annex ContentIdentifierHandle
 openDb = do
-	dbdir <- fromRepo gitAnnexContentIdentifierDbDir
+	dbdir <- calcRepo' gitAnnexContentIdentifierDbDir
 	let db = dbdir P.</> "db"
 	ifM (liftIO $ not <$> R.doesPathExist db)
 		( initDb db $ void $

@@ -99,7 +99,7 @@ ExportTreeCurrent
 -}
 openDb :: UUID -> Annex ExportHandle
 openDb u = do
-	dbdir <- fromRepo (gitAnnexExportDbDir u)
+	dbdir <- calcRepo' (gitAnnexExportDbDir u)
 	let db = dbdir P.</> "db"
 	unlessM (liftIO $ R.doesPathExist db) $ do
 		initDb db $ void $
@@ -263,8 +263,9 @@ updateExportDb = runExportDiffUpdater $ mkExportDiffUpdater removeold addnew
 -}
 writeLockDbWhile :: ExportHandle -> Annex a -> Annex a
 writeLockDbWhile db@(ExportHandle _ u) a = do
-	updatelck <- takeExclusiveLock (gitAnnexExportUpdateLock u)
-	withExclusiveLock (gitAnnexExportLock u) $ do
+	updatelck <- takeExclusiveLock =<< calcRepo' (gitAnnexExportUpdateLock u)
+	exlck <- calcRepo' (gitAnnexExportLock u)
+	withExclusiveLock exlck $ do
 		bracket_ (setup updatelck) cleanup a
   where
 	setup updatelck = do
@@ -285,15 +286,17 @@ data ExportUpdateResult = ExportUpdateSuccess | ExportUpdateConflict
  - not. Either way, it will block until the update is complete.
  -}
 updateExportTreeFromLog :: ExportHandle -> Annex ExportUpdateResult
-updateExportTreeFromLog db@(ExportHandle _ u) =
+updateExportTreeFromLog db@(ExportHandle _ u) = do
 	-- If another process or thread is performing the update,
 	-- this will block until it's done.
-	withExclusiveLock (gitAnnexExportUpdateLock u) $ do
+	exlck <- calcRepo' (gitAnnexExportUpdateLock u)
+	withExclusiveLock exlck $ do
+		lck <- calcRepo' (gitAnnexExportLock u)
 		-- If the database is locked by something else,
 		-- this will not run the update. But, in that case,
 		-- writeLockDbWhile is running, and has already
 		-- completed the update, so we don't need to do anything.
-		mr <- tryExclusiveLock (gitAnnexExportLock u) $
+		mr <- tryExclusiveLock lck $
 			updateExportTreeFromLog' db
 		case mr of
 			Just r -> return r

@@ -60,29 +60,31 @@ Fscked
  - or unknown behavior.
  -}
 newPass :: UUID -> Annex Bool
-newPass u = isJust <$> tryExclusiveLock (gitAnnexFsckDbLock u) go
+newPass u = do
+	lck <- calcRepo' (gitAnnexFsckDbLock u)
+	isJust <$> tryExclusiveLock lck go
   where
 	go = do
-		removedb =<< fromRepo (gitAnnexFsckDbDir u)
-		removedb =<< fromRepo (gitAnnexFsckDbDirOld u)
+		removedb =<< calcRepo' (gitAnnexFsckDbDir u)
+		removedb =<< calcRepo' (gitAnnexFsckDbDirOld u)
 	removedb = liftIO . void . tryIO . removeDirectoryRecursive . fromRawFilePath
 
 {- Opens the database, creating it if it doesn't exist yet. -}
 openDb :: UUID -> Annex FsckHandle
 openDb u = do
-	dbdir <- fromRepo (gitAnnexFsckDbDir u)
+	dbdir <- calcRepo' (gitAnnexFsckDbDir u)
 	let db = dbdir P.</> "db"
 	unlessM (liftIO $ R.doesPathExist db) $ do
 		initDb db $ void $
 			runMigrationSilent migrateFsck
-	lockFileCached =<< fromRepo (gitAnnexFsckDbLock u)
+	lockFileCached =<< calcRepo' (gitAnnexFsckDbLock u)
 	h <- liftIO $ H.openDbQueue db "fscked"
 	return $ FsckHandle h u
 
 closeDb :: FsckHandle -> Annex ()
 closeDb (FsckHandle h u) = do
 	liftIO $ H.closeDbQueue h
-	unlockFile =<< fromRepo (gitAnnexFsckDbLock u)
+	unlockFile =<< calcRepo' (gitAnnexFsckDbLock u)
 
 addDb :: FsckHandle -> Key -> IO ()
 addDb (FsckHandle h _) k = H.queueDb h checkcommit $

@@ -10,6 +10,7 @@
 module Database.Init where
 
 import Annex.Common
+import qualified Annex
 import Annex.Perms
 import Utility.FileMode
 import Utility.Directory.Create
@@ -34,7 +35,10 @@ initDb db migration = do
 	let tmpdbdir = dbdir <> ".tmp"
 	let tmpdb = tmpdbdir P.</> "db"
 	let tdb = T.pack (fromRawFilePath tmpdb)
-	top <- parentDir <$> fromRepo gitAnnexDir
+	gc <- Annex.getGitConfig
+	top <- case annexDbDir gc of
+		Just topdbdir -> pure $ parentDir $ topdbdir
+		Nothing -> parentDir <$> fromRepo gitAnnexDir
 	liftIO $ do
 		createDirectoryUnder top tmpdbdir
 		runSqliteInfo (enableWAL tdb) migration

@@ -118,15 +118,17 @@ runWriterIO a = runWriter (liftIO . a)
 openDb :: Bool -> DbState -> Annex DbState
 openDb _ st@(DbOpen _) = return st
 openDb False DbUnavailable = return DbUnavailable
-openDb forwrite _ = catchPermissionDenied permerr $ withExclusiveLock gitAnnexKeysDbLock $ do
-	dbdir <- fromRepo gitAnnexKeysDb
-	let db = dbdir P.</> "db"
-	dbexists <- liftIO $ R.doesPathExist db
-	case dbexists of
-		True -> open db
-		False -> do
-			initDb db SQL.createTables
-			open db
+openDb forwrite _ = do
+	lck <- calcRepo' gitAnnexKeysDbLock
+	catchPermissionDenied permerr $ withExclusiveLock lck $ do
+		dbdir <- calcRepo' gitAnnexKeysDbDir
+		let db = dbdir P.</> "db"
+		dbexists <- liftIO $ R.doesPathExist db
+		case dbexists of
+			True -> open db
+			False -> do
+				initDb db SQL.createTables
+				open db
   where
 	-- If permissions don't allow opening the database, and it's being
 	-- opened for read, treat it as if it does not exist.
@@ -248,7 +250,7 @@ isInodeKnown i s = or <$> runReaderIO ((:[]) <$$> SQL.isInodeKnown i s)
 reconcileStaged :: H.DbQueue -> Annex ()
 reconcileStaged qh = unlessM (Git.Config.isBare <$> gitRepo) $ do
 	gitindex <- inRepo currentIndexFile
-	indexcache <- fromRawFilePath <$> fromRepo gitAnnexKeysDbIndexCache
+	indexcache <- fromRawFilePath <$> calcRepo' gitAnnexKeysDbIndexCache
 	withTSDelta (liftIO . genInodeCache gitindex) >>= \case
 		Just cur -> readindexcache indexcache >>= \case
 			Nothing -> go cur indexcache =<< getindextree

Logs/File.hs | 11
@@ -1,6 +1,6 @@
 {- git-annex log files
  -
- - Copyright 2018-2020 Joey Hess <id@joeyh.name>
+ - Copyright 2018-2022 Joey Hess <id@joeyh.name>
  -
  - Licensed under the GNU AGPL version 3 or higher.
  -}
@@ -20,7 +20,6 @@ import Annex.Common
 import Annex.Perms
 import Annex.LockFile
 import Annex.ReplaceFile
-import qualified Git
 import Utility.Tmp
 
 import qualified Data.ByteString.Lazy as L
@@ -51,7 +50,7 @@ withLogHandle f a = do
 
 -- | Appends a line to a log file, first locking it to prevent
 -- concurrent writers.
-appendLogFile :: RawFilePath -> (Git.Repo -> RawFilePath) -> L.ByteString -> Annex ()
+appendLogFile :: RawFilePath -> RawFilePath -> L.ByteString -> Annex ()
 appendLogFile f lck c =
 	createDirWhenNeeded f $
 		withExclusiveLock lck $ do
@@ -69,7 +68,7 @@ appendLogFile f lck c =
 --
 -- The file is locked to prevent concurrent writers, and it is written
 -- atomically.
-modifyLogFile :: RawFilePath -> (Git.Repo -> RawFilePath) -> ([L.ByteString] -> [L.ByteString]) -> Annex ()
+modifyLogFile :: RawFilePath -> RawFilePath -> ([L.ByteString] -> [L.ByteString]) -> Annex ()
 modifyLogFile f lck modf = withExclusiveLock lck $ do
 	ls <- liftIO $ fromMaybe []
 		<$> tryWhenExists (L8.lines <$> L.readFile f')
@@ -89,7 +88,7 @@ modifyLogFile f lck modf = withExclusiveLock lck $ do
 -- action is concurrently modifying the file. It does not lock the file,
 -- for speed, but instead relies on the fact that a log file usually
 -- ends in a newline.
-checkLogFile :: FilePath -> (Git.Repo -> RawFilePath) -> (L.ByteString -> Bool) -> Annex Bool
+checkLogFile :: FilePath -> RawFilePath -> (L.ByteString -> Bool) -> Annex Bool
 checkLogFile f lck matchf = withExclusiveLock lck $ bracket setup cleanup go
   where
 	setup = liftIO $ tryWhenExists $ openFile f ReadMode
@@ -123,7 +122,7 @@ fullLines = go []
 --
 -- Locking is used to prevent writes to to the log file while this
 -- is running.
-streamLogFile :: FilePath -> (Git.Repo -> RawFilePath) -> (String -> Annex ()) -> Annex ()
+streamLogFile :: FilePath -> RawFilePath -> (String -> Annex ()) -> Annex ()
 streamLogFile f lck a = withExclusiveLock lck $ bracketOnError setup cleanup go
   where
 	setup = liftIO $ tryWhenExists $ openFile f ReadMode

@@ -19,7 +19,8 @@ import qualified Data.ByteString.Lazy as L
 smudgeLog :: Key -> TopFilePath -> Annex ()
 smudgeLog k f = do
 	logf <- fromRepo gitAnnexSmudgeLog
-	appendLogFile logf gitAnnexSmudgeLock $ L.fromStrict $
+	lckf <- fromRepo gitAnnexSmudgeLock
+	appendLogFile logf lckf $ L.fromStrict $
 		serializeKey' k <> " " <> getTopFilePath f
 
 -- | Streams all smudged files, and then empties the log at the end.
@@ -32,7 +33,8 @@ smudgeLog k f = do
 streamSmudged :: (Key -> TopFilePath -> Annex ()) -> Annex ()
 streamSmudged a = do
 	logf <- fromRepo gitAnnexSmudgeLog
-	streamLogFile (fromRawFilePath logf) gitAnnexSmudgeLock $ \l ->
+	lckf <- fromRepo gitAnnexSmudgeLock
+	streamLogFile (fromRawFilePath logf) lckf $ \l ->
 		case parse l of
 			Nothing -> noop
 			Just (k, f) -> a k f

@@ -24,8 +24,9 @@ import Data.Time.Clock.POSIX
 
 writeUpgradeLog :: RepoVersion -> POSIXTime-> Annex ()
 writeUpgradeLog v t = do
-	logfile <- fromRepo gitAnnexUpgradeLog
-	appendLogFile logfile gitAnnexUpgradeLock $ encodeBL $
+	logf <- fromRepo gitAnnexUpgradeLog
+	lckf <- fromRepo gitAnnexUpgradeLock
+	appendLogFile logf lckf $ encodeBL $
 		show (fromRepoVersion v) ++ " " ++ show t
 
 readUpgradeLog :: Annex [(RepoVersion, POSIXTime)]

@@ -336,5 +336,5 @@ lockBup writer r a = do
 	let remoteid = fromUUID (uuid r)
 	let lck = dir P.</> remoteid <> ".lck"
 	if writer
-		then withExclusiveLock (const lck) a
-		else withSharedLock (const lck) a
+		then withExclusiveLock lck a
+		else withSharedLock lck a

@@ -245,7 +245,8 @@ adjustExportImport' isexport isimport r rs = do
 			oldcids <- liftIO $ concat
 				<$> mapM (ContentIdentifier.getContentIdentifiers db rs) oldks
 			newcid <- storeExportWithContentIdentifier (importActions r) f k loc oldcids p
-			withExclusiveLock gitAnnexContentIdentifierLock $ do
+			cidlck <- calcRepo' gitAnnexContentIdentifierLock
+			withExclusiveLock cidlck $ do
 				liftIO $ ContentIdentifier.recordContentIdentifier db rs newcid k
 				liftIO $ ContentIdentifier.flushDbQueue db
 			recordContentIdentifier rs newcid k
@@ -280,8 +281,10 @@ adjustExportImport' isexport isimport r rs = do
 		( do
 			db <- ContentIdentifier.openDb
 			ContentIdentifier.needsUpdateFromLog db >>= \case
-				Just v -> withExclusiveLock gitAnnexContentIdentifierLock $
-					ContentIdentifier.updateFromLog db v
+				Just v -> do
+					cidlck <- calcRepo' gitAnnexContentIdentifierLock
+					withExclusiveLock cidlck $
+						ContentIdentifier.updateFromLog db v
 				Nothing -> noop
 			liftIO $ atomically $ putTMVar dbtv db
 			return db

Test.hs | 2
@@ -817,7 +817,7 @@ test_lock_force = intmpclonerepo $ do
 		Database.Keys.removeInodeCaches k
 		Database.Keys.closeDb
 	liftIO . removeWhenExistsWith R.removeLink
-		=<< Annex.fromRepo Annex.Locations.gitAnnexKeysDbIndexCache
+		=<< Annex.calcRepo' Annex.Locations.gitAnnexKeysDbIndexCache
 	writecontent annexedfile "test_lock_force content"
 	git_annex_shouldfail "lock" [annexedfile] "lock of modified file should not be allowed"
 	git_annex "lock" ["--force", annexedfile] "lock --force of modified file"

@@ -119,6 +119,7 @@ data GitConfig = GitConfig
 	, annexVerify :: Bool
 	, annexPidLock :: Bool
 	, annexPidLockTimeout :: Seconds
+	, annexDbDir :: Maybe RawFilePath
 	, annexAddUnlocked :: GlobalConfigurable (Maybe String)
 	, annexSecureHashesOnly :: Bool
 	, annexRetry :: Maybe Integer
@@ -212,6 +213,7 @@ extractGitConfig configsource r = GitConfig
 	, annexPidLock = getbool (annexConfig "pidlock") False
 	, annexPidLockTimeout = Seconds $ fromMaybe 300 $
 		getmayberead (annexConfig "pidlocktimeout")
+	, annexDbDir = toRawFilePath <$> getmaybe (annexConfig "dbdir")
 	, annexAddUnlocked = configurable Nothing $
 		fmap Just $ getmaybe (annexConfig "addunlocked")
 	, annexSecureHashesOnly = getbool (annexConfig "securehashesonly") False

@@ -37,17 +37,17 @@ upgrade automatic = do
 	-- The old content identifier database is deleted here, but the
 	-- new database is not populated. It will be automatically
 	-- populated from the git-annex branch the next time it is used.
-	removeOldDb gitAnnexContentIdentifierDbDirOld
+	removeOldDb . fromRawFilePath =<< fromRepo gitAnnexContentIdentifierDbDirOld
 	liftIO . removeWhenExistsWith R.removeLink
 		=<< fromRepo gitAnnexContentIdentifierLockOld
 
 	-- The export databases are deleted here. The new databases
 	-- will be populated by the next thing that needs them, the same
 	-- way as they would be in a fresh clone.
-	removeOldDb gitAnnexExportDir
+	removeOldDb . fromRawFilePath =<< calcRepo' gitAnnexExportDir
 
 	populateKeysDb
-	removeOldDb gitAnnexKeysDbOld
+	removeOldDb . fromRawFilePath =<< fromRepo gitAnnexKeysDbOld
 	liftIO . removeWhenExistsWith R.removeLink
 		=<< fromRepo gitAnnexKeysDbIndexCacheOld
 	liftIO . removeWhenExistsWith R.removeLink
@@ -72,9 +72,8 @@ gitAnnexContentIdentifierDbDirOld r = gitAnnexDir r P.</> "cids"
 gitAnnexContentIdentifierLockOld :: Git.Repo -> RawFilePath
 gitAnnexContentIdentifierLockOld r = gitAnnexContentIdentifierDbDirOld r <> ".lck"
 
-removeOldDb :: (Git.Repo -> RawFilePath) -> Annex ()
-removeOldDb getdb = do
-	db <- fromRawFilePath <$> fromRepo getdb
+removeOldDb :: FilePath -> Annex ()
+removeOldDb db =
 	whenM (liftIO $ doesDirectoryExist db) $ do
 		v <- liftIO $ tryNonAsync $
 #if MIN_VERSION_directory(1,2,7)

@@ -71,7 +71,8 @@ performUpgrade automatic = do
 
 	{- Take a lock to ensure that there are no other git-annex
 	 - processes running that are using the old content locking method. -}
-	withExclusiveLock gitAnnexContentLockLock $ do
+	lck <- fromRepo gitAnnexContentLockLock
+	withExclusiveLock lck $ do
 		{- When core.sharedRepository is set, object files
 		 - used to have their write bits set. That can now be
 		 - removed, if the user the upgrade is running as has

@@ -1228,6 +1228,15 @@ repository, using [[git-annex-config]]. See its man page for a list.)
   file system. This timeout prevents git-annex waiting forever in such a
   situation.
 
+* `annex.dbdir`
+
+  Set to a directory where git-annex should store its sqlite databases.
+  The default location is inside `.git/annex/`.
+
+  Certain filesystems, such as cifs, may not support locking operations
+  that sqlite needs, and setting this to a directory on another filesystem
+  can work around such a problem.
+
 * `annex.cachecreds`
 
   When "true" (the default), git-annex will cache credentials used to