git-annex/Logs/MetaData.hs
{- git-annex general metadata storage log and per-remote metadata storage log.
-
- A line of the log will look like "timestamp field [+-]value [...]"
-
 - (In the per-remote log, each field is prefixed with "uuid:"; see the
 - illustrative example just after this comment.)
-
- Note that unset values are preserved. Consider this case:
-
- We have:
-
- 100 foo +x
- 200 foo -x
-
- An unmerged remote has:
-
- 150 foo +x
-
 - After union merge, because the foo -x entry was preserved, we know
 - that foo was unset after the other remote redundantly set foo +x,
 - and so foo currently has no value.
-
-
- Licensed under the GNU AGPL version 3 or higher.
-}
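
{- For illustration only (the values below are made up), a line of the
 - general log could look like:
 -
 -   1422387398.30395s tag +work tag -personal
 -
 - and a line of the per-remote log prefixes each field with the remote's
 - uuid, for example:
 -
 -   1422387398.30395s 26339d22-446b-11e0-9101-002170d25c55:tag +work
 -}
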
module Logs.MetaData (
getCurrentMetaData,
getCurrentRemoteMetaData,
addMetaData,
addRemoteMetaData,
addMetaDataClocked,
currentMetaData,
copyMetaData,
) where

import Annex.Common
import Types.MetaData
import Annex.MetaData.StandardFields
import Annex.VectorClock
import qualified Annex.Branch
import qualified Annex
import Logs
import Utility.TimeStamp
import Logs.MetaData.Pure
import qualified Data.Set as S
import qualified Data.Map as M

{- Go through the log from oldest to newest, and combine it all
- into a single MetaData representing the current state.
-
- Automatically generates a lastchanged metadata for each field that's
- currently set, based on timestamps in the log.
-}
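{- As a hypothetical illustration: if the field "tag" is currently set and
 - its newest log entry is from 2019-03-13, the returned MetaData also
 - contains entries along the lines of
 -
 -   tag-lastchanged=2019-03-13@15-48-14
 -   lastchanged=2019-03-13@15-48-14
 -
 - with the values rendered by the showts helper below.
 -}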
getCurrentMetaData :: Key -> Annex MetaData
getCurrentMetaData = getCurrentMetaData' metaDataLogFile

getCurrentMetaData' :: (GitConfig -> Key -> FilePath) -> Key -> Annex MetaData
getCurrentMetaData' getlogfile k = do
config <- Annex.getGitConfig
ls <- S.toAscList <$> readLog (getlogfile config k)
let loggedmeta = logToCurrentMetaData ls
return $ currentMetaData $ unionMetaData loggedmeta
(lastchanged ls loggedmeta)
where
lastchanged [] _ = emptyMetaData
lastchanged ls (MetaData currentlyset) =
let m = foldl' (flip M.union) M.empty (map genlastchanged ls)
in MetaData $
			-- Add an overall lastchanged using the newest log
-- item (log is in ascending order).
M.insert lastChangedField (lastchangedval $ Prelude.last ls) $
M.mapKeys mkLastChangedField $
-- Only include fields that are currently set.
m `M.intersection` currentlyset
-- Makes each field have the timestamp as its value.
genlastchanged l =
let MetaData m = value l
ts = lastchangedval l
in M.map (const ts) m
lastchangedval l = S.singleton $ toMetaValue $ encodeBS $ showts $
case changed l of
VectorClock t -> t
Unknown -> 0
showts = formatPOSIXTime "%F@%H-%M-%S"

getCurrentRemoteMetaData :: UUID -> Key -> Annex RemoteMetaData
getCurrentRemoteMetaData u k = extractRemoteMetaData u <$>
getCurrentMetaData' remoteMetaDataLogFile k

{- Adds in some metadata, which can override existing values, or unset
- them, but otherwise leaves any existing metadata as-is. -}
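{- A minimal sketch of a caller (not part of this module), assuming the
 - builders exported by Types.MetaData (toMetaField, updateMetaData and
 - toMetaValue); the field "tag" and value "work" are only examples:
 -
 -   case toMetaField (encodeBS "tag") of
 -           Just field -> addMetaData k $ updateMetaData field
 -                   (toMetaValue (encodeBS "work")) emptyMetaData
 -           Nothing -> noop
 -}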
addMetaData :: Key -> MetaData -> Annex ()
addMetaData = addMetaData' metaDataLogFile

addMetaData' :: (GitConfig -> Key -> FilePath) -> Key -> MetaData -> Annex ()
addMetaData' getlogfile k metadata =
addMetaDataClocked' getlogfile k metadata =<< liftIO currentVectorClock

{- Reusing the same VectorClock when making changes to the metadata
- of multiple keys is a nice optimisation. The same metadata lines
- will tend to be generated across the different log files, and so
- git will be able to pack the data more efficiently. -}
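{- A sketch of that optimisation: fetch one vector clock and reuse it for
 - every key being changed (ks and newmeta are hypothetical):
 -
 -   c <- liftIO currentVectorClock
 -   forM_ ks $ \k -> addMetaDataClocked k newmeta c
 -}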
addMetaDataClocked :: Key -> MetaData -> VectorClock -> Annex ()
addMetaDataClocked = addMetaDataClocked' metaDataLogFile

addMetaDataClocked' :: (GitConfig -> Key -> FilePath) -> Key -> MetaData -> VectorClock -> Annex ()
addMetaDataClocked' getlogfile k d@(MetaData m) c
| d == emptyMetaData = noop
| otherwise = do
config <- Annex.getGitConfig
Annex.Branch.change (getlogfile config k) $
buildLog . simplifyLog
. S.insert (LogEntry c metadata)
. parseLog
where
metadata = MetaData $ M.filterWithKey (\f _ -> not (isLastChangedField f)) m

addRemoteMetaData :: Key -> RemoteMetaData -> Annex ()
addRemoteMetaData k m = do
addMetaData' remoteMetaDataLogFile k (fromRemoteMetaData m)
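
{- For example (illustrative uuid), addRemoteMetaData for a remote with
 - uuid 26339d22-446b-11e0-9101-002170d25c55 and field "tag" is logged
 - under the prefixed field "26339d22-446b-11e0-9101-002170d25c55:tag",
 - and getCurrentRemoteMetaData strips that prefix back off via
 - extractRemoteMetaData. -}
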

getMetaDataLog :: Key -> Annex (Log MetaData)
getMetaDataLog key = do
config <- Annex.getGitConfig
readLog $ metaDataLogFile config key

{- Copies the metadata from the old key to the new key.
-
- The exact content of the metadata file is copied, so that the timestamps
- remain the same, and because this is more space-efficient in the git
- repository.
-
- Any metadata already attached to the new key is not preserved.
-
- Returns True when metadata was copied.
-}
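{- A hypothetical call site (oldkey, newkey and the warning text are
 - placeholders):
 -
 -   copied <- copyMetaData oldkey newkey
 -   unless copied $ warning "no metadata to copy"
 -}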
copyMetaData :: Key -> Key -> Annex Bool
copyMetaData oldkey newkey
| oldkey == newkey = return False
| otherwise = do
l <- getMetaDataLog oldkey
if logToCurrentMetaData (S.toAscList l) == emptyMetaData
then return False
else do
config <- Annex.getGitConfig
Annex.Branch.change (metaDataLogFile config newkey) $
const $ buildLog l
return True

readLog :: FilePath -> Annex (Log MetaData)
readLog = parseLog <$$> Annex.Branch.get