Apply codespell -w throughout

Yaroslav Halchenko 2023-03-13 22:39:16 -04:00, committed by Joey Hess
parent 100f5aabb6
commit 84b0a3707a
108 changed files with 135 additions and 135 deletions
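For reference, codespell's -w flag writes its suggested fixes in place, so a sweep like this is normally produced by a single invocation at the top of the work tree. The exact invocation below, including the skip list, is an illustrative assumption, not something recorded in this commit:

    # fix known misspellings in place, leaving git metadata alone
    codespell -w --skip=.git

codespell only applies corrections its dictionary treats as unambiguous, which is why every hunk below is a single-word substitution.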


@@ -400,7 +400,7 @@ addGitConfigOverride v = do
r { Git.gitGlobalOpts = go (Git.gitGlobalOpts r) }
changeState $ \st -> st { gitconfigoverride = v : gitconfigoverride st }
where
- -- Remove any prior occurrance of the setting to avoid
+ -- Remove any prior occurrence of the setting to avoid
-- building up many of them when the adjustment is run repeatedly,
-- and add the setting to the end.
go [] = [Param "-c", Param v]


@@ -460,7 +460,7 @@ findAdjustingCommit (AdjBranch b) = go =<< catCommit b
_ -> return Nothing
{- Check for any commits present on the adjusted branch that have not yet
- - been propigated to the basis branch, and propigate them to the basis
+ - been propigated to the basis branch, and propagate them to the basis
- branch and from there on to the orig branch.
-
- After propigating the commits back to the basis branch,
@@ -536,7 +536,7 @@ rebaseOnTopMsg = "rebasing adjusted branch on top of updated original branch"
reverseAdjustedCommit :: Sha -> Adjustment -> (Sha, Commit) -> OrigBranch -> Annex (Either String Sha)
reverseAdjustedCommit commitparent adj (csha, basiscommit) origbranch
| length (commitParent basiscommit) > 1 = return $
Left $ "unable to propigate merge commit " ++ show csha ++ " back to " ++ show origbranch
Left $ "unable to propagate merge commit " ++ show csha ++ " back to " ++ show origbranch
| otherwise = do
cmode <- annexCommitMode <$> Annex.getGitConfig
treesha <- reverseAdjustedTree commitparent adj csha


@@ -396,7 +396,7 @@ getRef ref file = withIndex $ catFile ref file
{- Applies a function to modify the content of a file.
-
- Note that this does not cause the branch to be merged, it only
- - modifes the current content of the file on the branch.
+ - modifies the current content of the file on the branch.
-}
change :: Journalable content => RegardingUUID -> RawFilePath -> (L.ByteString -> content) -> Annex ()
change ru file f = lockJournal $ \jl -> f <$> getToChange ru file >>= set jl ru file
@@ -422,7 +422,7 @@ data ChangeOrAppend t = Change t | Append t
- value it provides is always appended to the journal file. That avoids
- reading the journal file, and so can be faster when many lines are being
- written to it. The information that is recorded will be effectively the
- - same, only obsolate log lines will not get compacted.
+ - same, only obsolete log lines will not get compacted.
-
- Currently, only appends when annex.alwayscompact=false. That is to
- avoid appending when an older version of git-annex is also in use in the
@@ -494,7 +494,7 @@ append jl f appendable toappend = do
invalidateCache
{- Commit message used when making a commit of whatever data has changed
- - to the git-annex brach. -}
+ - to the git-annex branch. -}
commitMessage :: Annex String
commitMessage = fromMaybe "update" . annexCommitMessage <$> Annex.getGitConfig
@@ -624,7 +624,7 @@ branchFiles' = Git.Command.pipeNullSplit' $
{- Populates the branch's index file with the current branch contents.
-
- This is only done when the index doesn't yet exist, and the index
- - is used to build up changes to be commited to the branch, and merge
+ - is used to build up changes to be committed to the branch, and merge
- in changes from other branches.
-}
genIndex :: Git.Repo -> IO ()


@@ -106,6 +106,6 @@ notifyHook chan reffile _
sha <- catchDefaultIO Nothing $
extractSha <$> S.readFile reffile
-- When the channel is full, there is probably no reader
- -- running, or ref changes have been occuring very fast,
+ -- running, or ref changes have been occurring very fast,
-- so it's ok to not write the change to it.
maybe noop (void . atomically . tryWriteTBMChan chan) sha


@@ -392,9 +392,9 @@ withTmp key action = do
- with colliding files it's their own fault and B) adding such a check
- would not catch all cases of colliding keys. For example, perhaps
- a remote has a key; if it's then added again with different content then
- - the overall system now has two different peices of content for that
+ - the overall system now has two different pieces of content for that
- key, and one of them will probably get deleted later. So, adding the
- - check here would only raise expectations that git-annex cannot truely
+ - check here would only raise expectations that git-annex cannot truly
- meet.
-
- May return false, when a particular variety of key is not being
@@ -555,7 +555,7 @@ sendAnnex key rollback sendobject = go =<< prepSendAnnex' key
{- Returns a file that contains an object's content,
- and a check to run after the transfer is complete.
-
- - When a file is unlocked, it's possble for its content to
+ - When a file is unlocked, it's possible for its content to
- change as it's being sent. The check detects this case
- and returns False.
-


@@ -164,7 +164,7 @@ contentLockFile :: Key -> Maybe RepoVersion -> Annex (Maybe RawFilePath)
#ifndef mingw32_HOST_OS
{- Older versions of git-annex locked content files themselves, but newer
- versions use a separate lock file, to better support repos shared
- - amoung users in eg a group. -}
+ - among users in eg a group. -}
contentLockFile key v
| versionNeedsWritableContentFiles v = pure Nothing
| otherwise = Just <$> calcRepo (gitAnnexContentLock key)


@@ -66,7 +66,7 @@ data CopyMethod = CopiedCoW | Copied
{- Copies from src to dest, updating a meter. Preserves mode and mtime.
- Uses copy-on-write if it is supported. If the the destination already
- - exists, an interruped copy will resume where it left off.
+ - exists, an interrupted copy will resume where it left off.
-
- The IncrementalVerifier is updated with the content of the file as it's
- being copied. But it is not finalized at the end.


@@ -492,7 +492,7 @@ importKeys remote importtreeconfig importcontent thirdpartypopulated importablec
startimport cidmap importing db i@(loc, (cid, _sz)) oldversion largematcher = getcidkey cidmap db cid >>= \case
(k:ks) ->
-- If the same content was imported before
- -- yeilding multiple different keys, it's not clear
+ -- yielding multiple different keys, it's not clear
-- which is best to use this time, so pick the
-- first in the list. But, if any of them is a
-- git sha, use it, because the content must


@@ -421,7 +421,7 @@ initSharedClone True = do
trustSet u UnTrusted
setConfig (annexConfig "hardlink") (Git.Config.boolConfig True)
- {- Propigate annex.securehashesonly from then global config to local
+ {- Propagate annex.securehashesonly from then global config to local
- config. This makes a clone inherit a parent's setting, but once
- a repository has a local setting, changes to the global config won't
- affect it. -}


@@ -73,7 +73,7 @@ privateUUIDsKnown' = not . S.null . annexPrivateRepos . Annex.gitconfig
{- Records content for a file in the branch to the journal.
-
- - Using the journal, rather than immediatly staging content to the index
+ - Using the journal, rather than immediately staging content to the index
- avoids git needing to rewrite the index after every change.
-
- The file in the journal is updated atomically. This avoids an


@@ -580,7 +580,7 @@ gitAnnexAssistantDefaultDir = "annex"
- dealing with characters that cause problems.
-
- This is used when a new Key is initially being generated, eg by genKey.
- - Unlike keyFile and fileKey, it does not need to be a reversable
+ - Unlike keyFile and fileKey, it does not need to be a reversible
- escaping. Also, it's ok to change this to add more problematic
- characters later. Unlike changing keyFile, which could result in the
- filenames used for existing keys changing and contents getting lost.
@@ -666,7 +666,7 @@ keyPath key hasher = hasher key P.</> f P.</> f
where
f = keyFile key
- {- All possibile locations to store a key in a special remote
+ {- All possible locations to store a key in a special remote
- using different directory hashes.
-
- This is compatible with the annexLocationsNonBare and annexLocationsBare,


@@ -203,7 +203,7 @@ numCopiesCheck' file vs have = do
data UnVerifiedCopy = UnVerifiedRemote Remote | UnVerifiedHere
deriving (Ord, Eq)
- {- Verifies that enough copies of a key exist amoung the listed remotes,
+ {- Verifies that enough copies of a key exist among the listed remotes,
- to safely drop it, running an action with a proof if so, and
- printing an informative message if not.
-}


@@ -95,7 +95,7 @@ gitAnnexGitConfigOverrides = concatMap (\c -> [Param "-c", Param c])
- to daemonize it. Used with Utility.Daemon.daemonize. -}
gitAnnexDaemonizeParams :: Annex [CommandParam]
gitAnnexDaemonizeParams = do
- -- This inclues -c parameters passed to git, as well as ones
+ -- This includes -c parameters passed to git, as well as ones
-- passed to git-annex.
cps <- gitAnnexGitConfigOverrides
-- Get every parameter git-annex was run with.


@@ -29,7 +29,7 @@ type MkViewedFile = FilePath -> ViewedFile
{- Converts a filepath used in a reference branch to the
- filename that will be used in the view.
-
- - No two filepaths from the same branch should yeild the same result,
+ - No two filepaths from the same branch should yield the same result,
- so all directory structure needs to be included in the output filename
- in some way.
-


@@ -58,7 +58,7 @@ youtubeDlNotAllowedMessage = unwords
-- and youtube-dl needs to finish up with only one file in the directory
-- so we know which one it downloaded.
--
- -- (Note that we can't use --output to specifiy the file to download to,
+ -- (Note that we can't use --output to specify the file to download to,
-- due to <https://github.com/rg3/youtube-dl/issues/14864>)
youtubeDl :: URLString -> FilePath -> MeterUpdate -> Annex (Either String (Maybe FilePath))
youtubeDl url workdir p = ifM ipAddressesUnlimited


@@ -35,7 +35,7 @@ type AlertPair = (AlertId, Alert)
- - High priority alerts, newest first
- - Medium priority Activity, newest first (mostly used for Activity)
- - Low priority alerts, newest first
- - - Filler priorty alerts, newest first
+ - - Filler priority alerts, newest first
- - Ties are broken by the AlertClass, with Errors etc coming first.
-}
compareAlertPairs :: AlertPair -> AlertPair -> Ordering


@@ -68,7 +68,7 @@ initRepo True primary_assistant_repo dir desc mgroup = inDir dir $ do
{- Repositories directly managed by the assistant use
- an adjusted unlocked branch with annex.thin set.
-
- - Automatic gc is disabled, as it can be slow. Insted, gc is done
+ - Automatic gc is disabled, as it can be slow. Instead, gc is done
- once a day.
-}
when primary_assistant_repo $ do


@@ -27,7 +27,7 @@ import Control.Concurrent
{- This is an arbitrary port in the dynamic port range, that could
- conceivably be used for some other broadcast messages.
- - If so, hope they ignore the garbage from us; we'll certianly
+ - If so, hope they ignore the garbage from us; we'll certainly
- ignore garbage from them. Wild wild west. -}
pairingPort :: PortNumber
pairingPort = 55556


@@ -361,7 +361,7 @@ setSshConfig sshdata config = do
- non-alphanumerics, other than "_"
-
- The real hostname is not normally encoded at all. This is done for
- - backwards compatability and to avoid unnecessary ugliness in the
+ - backwards compatibility and to avoid unnecessary ugliness in the
- filename. However, when it contains special characters
- (notably ":" which cannot be used on some filesystems), it is url
- encoded. To indicate it was encoded, the mangled hostname


@@ -107,7 +107,7 @@ reconnectRemotes rs = void $ do
- When there's a lot of activity, we may fail more than once.
- On the other hand, we may fail because the remote is not available.
- Rather than retrying indefinitely, after the first retry we enter a
- - fallback mode, where our push is guarenteed to succeed if the remote is
+ - fallback mode, where our push is guaranteed to succeed if the remote is
- reachable. If the fallback fails, the push is queued to be retried
- later.
-


@@ -95,7 +95,7 @@ refill cs = do
- runs an action to commit them. If more changes arrive while this is
- going on, they're handled intelligently, batching up changes into
- large commits where possible, doing rename detection, and
- - commiting immediately otherwise. -}
+ - committing immediately otherwise. -}
waitChangeTime :: (([Change], UTCTime) -> Assistant Int) -> Assistant ()
waitChangeTime a = waitchanges 0
where


@@ -111,7 +111,7 @@ startOneService client (x:xs) = do
, startOneService client xs
)
- {- Filter matching events recieved when drives are mounted and unmounted. -}
+ {- Filter matching events received when drives are mounted and unmounted. -}
mountChanged :: [MatchRule]
mountChanged = [udisks2mount, udisks2umount]
where


@@ -163,9 +163,9 @@ listenNMConnections client setconnected =
- ConnectResultsSent:
- Variant "success"
-
- - Diconnection example:
+ - Disconnection example:
- StatusChanged
- - [Variant 0, Variant [Varient ""]]
+ - [Variant 0, Variant [Variant ""]]
-}
listenWicdConnections :: Client -> (Bool -> IO ()) -> IO ()
listenWicdConnections client setconnected = do


@@ -80,7 +80,7 @@ onModify file = case parseTransferFile file of
\i -> i { bytesComplete = bytesComplete newinfo }
{- This thread can only watch transfer sizes when the DirWatcher supports
- - tracking modificatons to files. -}
+ - tracking modifications to files. -}
watchesTransferSize :: Bool
watchesTransferSize = modifyTracked


@@ -18,7 +18,7 @@ data RepoProblem = RepoProblem
, afterFix :: IO ()
}
- {- The afterFix actions are assumed to all be equivilant. -}
+ {- The afterFix actions are assumed to all be equivalent. -}
sameRepoProblem :: RepoProblem -> RepoProblem -> Bool
sameRepoProblem = (==) `on` problemUUID


@@ -79,7 +79,7 @@ expireUnused duration = do
lockContentForRemoval k noop removeAnnex
logStatus k InfoMissing
where
- boundry = durationToPOSIXTime <$> duration
- tooold now (_, mt) = case boundry of
+ boundary = durationToPOSIXTime <$> duration
+ tooold now (_, mt) = case boundary of
Nothing -> True
Just b -> maybe False (\t -> now - t >= b) mt


@@ -642,7 +642,7 @@ enableRsyncNetGCrypt sshinput reponame =
- used on rsync.net. If successful, runs an action with its SshData.
-
- To append the ssh key to rsync.net's authorized_keys, their
- - documentation recommends a dd methodd, where the line is fed
+ - documentation recommends a dd method, where the line is fed
- in to ssh over stdin.
-}
prepRsyncNet :: SshInput -> String -> (SshData -> Handler Html) -> Handler Html


@@ -52,7 +52,7 @@ getShutdownConfirmedR = do
{- Use a custom page to avoid putting long polling elements on it that will
- fail and cause thet web browser to show an error once the webapp is
- - truely stopped. -}
+ - truly stopped. -}
getNotRunningR :: Handler Html
getNotRunningR = customPage' False Nothing $
$(widgetFile "control/notrunning")


@@ -48,7 +48,7 @@ transfersDisplay = do
AssociatedFile (Just af) -> fromRawFilePath af
{- Simplifies a list of transfers, avoiding display of redundant
- - equivilant transfers. -}
+ - equivalent transfers. -}
simplifyTransfers :: [(Transfer, TransferInfo)] -> [(Transfer, TransferInfo)]
simplifyTransfers [] = []
simplifyTransfers (x:[]) = [x]


@@ -66,7 +66,7 @@ getSideBarR nid = do
waitNotifier getAlertBroadcaster nid
{- This 0.1 second delay avoids very transient notifications from
- - being displayed and churning the sidebar unnecesarily.
+ - being displayed and churning the sidebar unnecessarily.
-
- This needs to be below the level perceptable by the user,
- to avoid slowing down user actions like closing alerts. -}


@@ -34,7 +34,7 @@ import qualified Backend.External
import qualified Data.Map as M
- {- Build-in backends. Does not include externals. -}
+ {- Built-in backends. Does not include externals. -}
builtinList :: [Backend]
builtinList = Backend.Hash.backends ++ Backend.WORM.backends ++ Backend.URL.backends


@@ -158,7 +158,7 @@ sendMessage p m = liftIO $ do
where
line = unwords $ Proto.formatMessage m
- {- A response handler can yeild a result, or it can request that another
+ {- A response handler can yield a result, or it can request that another
- message be consumed from the external. -}
data ResponseHandlerResult a
= Result a
@@ -170,7 +170,7 @@ result :: a -> Maybe (Annex (ResponseHandlerResult a))
result = Just . return . Result
{- Waits for a message from the external backend, and passes it to the
- - apppropriate handler.
+ - appropriate handler.
-
- If the handler returns Nothing, this is a protocol error.
-}


@@ -281,7 +281,7 @@ md5Hasher = mkHasher md5 md5_context
descChecksum :: String
descChecksum = "checksum"
- {- A varient of the SHA256E backend, for testing that needs special keys
+ {- A variant of the SHA256E backend, for testing that needs special keys
- that cannot collide with legitimate keys in the repository.
-
- This is accomplished by appending a special extension to the key,


@@ -55,7 +55,7 @@ mklibs top _installedbins = do
where
-- hwcap lib dirs are things like foo/tls and foo/x86.
-- Hard to know if a directory is, so this is a heuristic
- -- looking for things that are certianly not. If this heuristic
+ -- looking for things that are certainly not. If this heuristic
-- fails, a minor optimisation will not happen, but there will be
-- no bad results.
hwcaplibdir d = not $ or


@@ -104,7 +104,7 @@ otool appbase installedbins replacement_libs libmap = do
-- ImageIO.framework uses libPng which is built against a
-- specific version of libz; other versions lack the
-- _inflateValidate symbol. So, avoid bundling libz unless
- -- this incompatability is resolved.
+ -- this incompatibility is resolved.
&& not ("libz." `isInfixOf` s)
lib_present s
| "@rpath" `isInfixOf` s = return True


@@ -40,7 +40,7 @@ cmdsMap = M.fromList $ map mk
readonlycmds = map addAnnexOptions
[ Command.ConfigList.cmd
, gitAnnexShellCheck Command.NotifyChanges.cmd
- -- p2pstdio checks the enviroment variables to
+ -- p2pstdio checks the environment variables to
-- determine the security policy to use
, gitAnnexShellCheck Command.P2PStdIO.cmd
, gitAnnexShellCheck Command.InAnnex.cmd


@@ -94,7 +94,7 @@ autoStart o = do
, putStrLn "failed"
)
return Nothing
- -- Wait for any foreground jobs to finish and propigate exit status.
+ -- Wait for any foreground jobs to finish and propagate exit status.
ifM (all (== True) <$> mapConcurrently checkSuccessProcess (catMaybes pids))
( exitSuccess
, exitFailure


@@ -16,7 +16,7 @@ cmd :: Command
cmd = noCommit $ noMessages $ dontCheck repoExists $
withAnnexOptions [backendOption] $
command "calckey" SectionPlumbing
"calulate key for a file"
"calculate key for a file"
(paramRepeating paramFile)
(batchable run (pure ()))


@@ -667,7 +667,7 @@ recordFsckTime inc key = withFsckDb inc $ \h -> liftIO $ FsckDb.addDb h key
{- Records the start time of an incremental fsck.
-
- - To guard against time stamp damange (for example, if an annex directory
+ - To guard against time stamp damage (for example, if an annex directory
- is copied without -a), the fsckstate file contains a time that should
- be identical to its modification time.
- (This is not possible to do on Windows, and so the timestamp in


@@ -519,7 +519,7 @@ feedState url = fromRepo $ gitAnnexFeedState $ fromUrl url Nothing
-
- So, it's not safe to use T.unpack to convert that to a String,
- because later use of that String by eg encodeBS will crash
- - with an encoding error. Use this instad.
+ - with an encoding error. Use this instead.
-
- This should not be used on a Text that is read using the
- filesystem encoding because it does not reverse that encoding.


@@ -356,7 +356,7 @@ fromToStart removewhen afile key ai si src dest = do
- drop the local copy, and finally drop from the src.
-
- Using a regular download of the local copy, rather than download to
- - some other file makes resuming an interruped download work as usual,
+ - some other file makes resuming an interrupted download work as usual,
- and simplifies implementation. It does mean that, if `git-annex get` of
- the same content is being run at the same time as this move, the content
- may end up locally present, or not. This is similar to the behavior


@@ -87,7 +87,7 @@ unusedPeerRemoteName = go (1 :: Integer) =<< usednames
-- Only addresses are output to stdout, to allow scripting.
genAddresses :: [P2PAddress] -> Annex ()
- genAddresses [] = giveup "No P2P networks are currrently available."
+ genAddresses [] = giveup "No P2P networks are currently available."
genAddresses addrs = do
authtoken <- liftIO $ genAuthToken 128
storeP2PAuthToken authtoken
@@ -124,7 +124,7 @@ linkRemote remotename = starting "p2p link" ai si $
AuthenticationError e -> giveup e
startPairing :: RemoteName -> [P2PAddress] -> CommandStart
- startPairing _ [] = giveup "No P2P networks are currrently available."
+ startPairing _ [] = giveup "No P2P networks are currently available."
startPairing remotename addrs = ifM (liftIO Wormhole.isInstalled)
( starting "p2p pair" ai si $
performPairing remotename addrs


@@ -454,7 +454,7 @@ updateBranches (Just branch, madj) = do
case madj' of
Nothing -> noop
Just adj -> updateadjustedbranch adj
- -- When in an adjusted branch, propigate any changes
+ -- When in an adjusted branch, propagate any changes
-- made to it back to the original branch.
Nothing -> case madj of
Just adj -> do
@@ -739,7 +739,7 @@ newer remote b = do
)
{- Without --all, only looks at files in the work tree.
- - (Or, when in an ajusted branch where some files are hidden, at files in
+ - (Or, when in an adjusted branch where some files are hidden, at files in
- the original branch.)
-
- With --all, when preferred content expressions look at filenames,


@@ -110,7 +110,7 @@ checkoutViewBranch view madj mkbranch = do
setView view
{- A git repo can easily have empty directories in it,
- and this pollutes the view, so remove them.
- - (However, emptry directories used by submodules are not
+ - (However, empty directories used by submodules are not
- removed.) -}
top <- liftIO . absPath =<< fromRepo Git.repoPath
(l, cleanup) <- inRepo $


@@ -103,7 +103,7 @@ start' allowauto o = do
liftIO $ firstRun o
{- If HOME is a git repo, even if it's initialized for git-annex,
- - the user almost certianly does not want to run the assistant there. -}
+ - the user almost certainly does not want to run the assistant there. -}
notHome :: Annex Bool
notHome = do
g <- Annex.gitRepo


@@ -1,7 +1,7 @@
{- git-annex crypto
-
- Currently using gpg; could later be modified to support different
- - crypto backends if neccessary.
+ - crypto backends if necessary.
-
- Copyright 2011-2022 Joey Hess <id@joeyh.name>
-
@@ -62,7 +62,7 @@ import qualified Data.ByteString.Short as S (toShort)
- (ideally) 64 and 128 bytes of entropy.
-
- The remaining characters (320 bytes of entropy) is enough for GnuPG's
- - symetric cipher; unlike weaker public key crypto, the key does not
+ - symmetric cipher; unlike weaker public key crypto, the key does not
- need to be too large.
-}
cipherBeginning :: Int
@@ -164,7 +164,7 @@ decryptCipher' cmd environ c (EncryptedCipher t variant _) =
type EncKey = Key -> Key
{- Generates an encrypted form of a Key. The encryption does not need to be
- - reversable, nor does it need to be the same type of encryption used
+ - reversible, nor does it need to be the same type of encryption used
- on content. It does need to be repeatable. -}
encryptKey :: Mac -> Cipher -> EncKey
encryptKey mac c k = mkKey $ \d -> d


@@ -33,7 +33,7 @@ import Data.Maybe
-- Note on indexes: KeyFileIndex etc are really uniqueness constraints,
-- which cause sqlite to automatically add indexes. So when adding indexes,
-- have to take care to only add ones that work as uniqueness constraints.
- -- (Unfortunatly persistent does not support indexes that are not
+ -- (Unfortunately persistent does not support indexes that are not
-- uniqueness constraints; https://github.com/yesodweb/persistent/issues/109)
--
-- To speed up queries for a key, there's KeyFileIndex,

Git.hs

@@ -1,6 +1,6 @@
{- git repository handling
-
- - This is written to be completely independant of git-annex and should be
+ - This is written to be completely independent of git-annex and should be
- suitable for other uses.
-
- Copyright 2010-2021 Joey Hess <id@joeyh.name>
@@ -79,7 +79,7 @@ repoPath Repo { location = Local { worktree = Just d } } = d
repoPath Repo { location = Local { gitdir = d } } = d
repoPath Repo { location = LocalUnknown dir } = dir
repoPath Repo { location = Unknown } = error "unknown repoPath"
- repoPath Repo { location = UnparseableUrl _u } = error "unknwon repoPath"
+ repoPath Repo { location = UnparseableUrl _u } = error "unknown repoPath"
repoWorkTree :: Repo -> Maybe RawFilePath
repoWorkTree Repo { location = Local { worktree = Just d } } = Just d


@@ -85,7 +85,7 @@ probeRepo loc baserepo = do
type GCryptId = String
- {- gcrypt gives each encrypted repository a uique gcrypt-id,
+ {- gcrypt gives each encrypted repository a unique gcrypt-id,
- which is stored in the repository (in encrypted form)
- and cached in a per-remote gcrypt-id configuration setting. -}
remoteRepoId :: Repo -> Maybe RemoteName -> Maybe GCryptId


@@ -325,7 +325,7 @@ reduceUnmerged c (i:is) = reduceUnmerged (new:c) rest
&& isus x || isus y
&& not (isus x && isus y)
- {- Gets the InodeCache equivilant information stored in the git index.
+ {- Gets the InodeCache equivalent information stored in the git index.
-
- Note that this uses a --debug option whose output could change at some
- point in the future. If the output is not as expected, will use Nothing.


@@ -33,7 +33,7 @@ import Utility.PartialPrelude
import Utility.FileSystemEncoding
{- This is a variable length binary string, but its size is limited to
- - maxPktLineLength. Its serialization includes a 4 byte hexidecimal
+ - maxPktLineLength. Its serialization includes a 4 byte hexadecimal
- prefix giving its total length, including that prefix. -}
newtype PktLine = PktLine B.ByteString
deriving (Show)


@@ -119,7 +119,7 @@ new lim tlim = do
{- Adds an git command to the queue.
-
- Git commands with the same subcommand but different parameters are
- - assumed to be equivilant enough to perform in any order with the same
+ - assumed to be equivalent enough to perform in any order with the same
- end result.
-}
addCommand :: MonadIO m => [CommandParam] -> String -> [CommandParam] -> [FilePath] -> Queue m -> Repo -> m (Queue m)


@@ -43,7 +43,7 @@ remoteKeyToRemoteName (ConfigKey k)
{- Construct a legal git remote name out of an arbitrary input string.
-
- There seems to be no formal definition of this in the git source,
- - just some ad-hoc checks, and some other things that fail with certian
+ - just some ad-hoc checks, and some other things that fail with certain
- types of names (like ones starting with '-').
-}
makeLegalName :: String -> RemoteName


@@ -103,7 +103,7 @@ explodePacks r = go =<< listPackFiles r
return True
{- Try to retrieve a set of missing objects, from the remotes of a
- - repository. Returns any that could not be retreived.
+ - repository. Returns any that could not be retrieved.
-
- If another clone of the repository exists locally, which might not be a
- remote of the repo being repaired, its path can be passed as a reference


@@ -283,7 +283,7 @@ matchLockStatus wantlocked (MatchingInfo p) =
matchLockStatus _ (MatchingUserInfo _) = pure False
{- Adds a limit to skip files not believed to be present
- - in a specfied repository. Optionally on a prior date. -}
+ - in a specified repository. Optionally on a prior date. -}
addIn :: String -> Annex ()
addIn s = do
u <- Remote.nameToUUID name


@@ -5,7 +5,7 @@
-
- It's possible for a single object to be stored multiple times on the
- same remote using different chunk sizes. So, while this is a MapLog, it
- - is not a normal UUIDBased log. Intead, it's a map from UUID and chunk
+ - is not a normal UUIDBased log. Instead, it's a map from UUID and chunk
- size to number of chunks.
-
- Format: "timestamp uuid:chunksize chunkcount"


@@ -106,7 +106,7 @@ recordExportUnderway remoteuuid ec = do
-- Record information about the export to the git-annex branch.
--
- -- This is equivilant to recordExportBeginning followed by
+ -- This is equivalent to recordExportBeginning followed by
-- recordExportUnderway, but without the ability to clean up from
-- interrupted exports.
recordExport :: UUID -> Git.Ref -> ExportChange -> Annex ()


@@ -33,7 +33,7 @@ import qualified Data.Attoparsec.ByteString.Lazy as A
import qualified Data.Attoparsec.ByteString.Char8 as A8
import Data.ByteString.Builder
- -- This constuctor is not itself exported to other modules, to enforce
+ -- This constructor is not itself exported to other modules, to enforce
-- consistent use of exportedTreeishes.
data Exported = Exported
{ exportedTreeish :: Git.Ref


@@ -4,7 +4,7 @@
- be union merged.
-
- The old format looks like: "UUID[ INFO[ timestamp=foo]]"
- - The timestamp is last for backwards compatability reasons,
+ - The timestamp is last for backwards compatibility reasons,
- and may not be present on very old log lines.
-
- New uuid based logs instead use the form: "timestamp UUID INFO"


@@ -5,7 +5,7 @@
- The format: "int key timestamp"
-
- The int is a short, stable identifier that the user can use to
- - refer to this key. (Equivilant to a filename.)
+ - refer to this key. (Equivalent to a filename.)
-
- The timestamp indicates when the key was first determined to be unused.
- Older versions of the log omit the timestamp.


@@ -136,7 +136,7 @@ showSideAction m = Annex.getState Annex.output >>= go
showStoringStateAction :: Annex ()
showStoringStateAction = showSideAction "recording state in git"
- {- Performs an action, supressing showSideAction messages. -}
+ {- Performs an action, suppressing showSideAction messages. -}
doQuietSideAction :: Annex a -> Annex a
doQuietSideAction = doSideAction' InBlock


@@ -263,7 +263,7 @@ data LocalF c
-- so, terminate the protocol connection.
--
-- If the validity check is provided and fails, the content was
- -- changed while it was being sent, so verificiation of the
+ -- changed while it was being sent, so verification of the
-- received content should be forced.
--
-- Note: The ByteString may not contain the entire remaining content
@@ -387,7 +387,7 @@ serverLoop a = do
-- gives up, since it's not clear what state the client
-- is in, and so not possible to recover.
Just (ERROR _) -> return Nothing
- -- When the client sends an unparseable message, the server
+ -- When the client sends an unparsable message, the server
-- responds with an error message, and loops. This allows
-- expanding the protocol with new messages.
Nothing -> do


@@ -99,7 +99,7 @@ remoteMap' mkv mkk = M.fromList . catMaybes <$> (mapM mk =<< remoteList)
Just k -> Just (k, mkv r)
{- Map of UUIDs of repositories and their descriptions.
- - The names of Remotes are added to suppliment any description that has
+ - The names of Remotes are added to supplement any description that has
- been set for a repository. -}
uuidDescriptions :: Annex UUIDDescMap
uuidDescriptions = M.unionWith addName


@@ -141,7 +141,7 @@ borgSetup _ mu _ c _gc = do
M.lookup borgrepoField c
-- The borgrepo is stored in git config, as well as this repo's
- -- persistant state, so it can vary between hosts.
+ -- persistent state, so it can vary between hosts.
gitConfigSpecialRemote u c [("borgrepo", borgrepo)]
return (c, u)


@@ -137,7 +137,7 @@ bupSetup _ mu _ c gc = do
storeBupUUID u buprepo
-- The buprepo is stored in git config, as well as this repo's
- -- persistant state, so it can vary between hosts.
+ -- persistent state, so it can vary between hosts.
gitConfigSpecialRemote u c' [("buprepo", buprepo)]
return (c', u)


@@ -120,7 +120,7 @@ ddarSetup _ mu _ c gc = do
(c', _encsetup) <- encryptionSetup c gc
-- The ddarrepo is stored in git config, as well as this repo's
- -- persistant state, so it can vary between hosts.
+ -- persistent state, so it can vary between hosts.
gitConfigSpecialRemote u c' [("ddarrepo", ddarrepo)]
return (c', u)


@@ -153,7 +153,7 @@ directorySetup _ mu _ c gc = do
(c', _encsetup) <- encryptionSetup c gc
-- The directory is stored in git config, not in this remote's
- -- persistant state, so it can vary between hosts.
+ -- persistent state, so it can vary between hosts.
gitConfigSpecialRemote u c' [("directory", absdir)]
return (M.delete directoryField c', u)


@@ -536,7 +536,7 @@ receiveMessageAddonProcess p = do
shutdownAddonProcess :: AddonProcess.ExternalAddonProcess -> Bool -> IO ()
shutdownAddonProcess = AddonProcess.externalShutdown
- {- A response handler can yeild a result, or it can request that another
+ {- A response handler can yield a result, or it can request that another
- message be consumed from the external. -}
data ResponseHandlerResult a
= Result a
@@ -548,7 +548,7 @@ result :: a -> Maybe (Annex (ResponseHandlerResult a))
result = Just . return . Result
{- Waits for a message from the external remote, and passes it to the
- - apppropriate handler.
+ - appropriate handler.
-
- If the handler returns Nothing, this is a protocol error.-}
receiveMessage


@@ -46,7 +46,7 @@ runRelayToExternalAsync external st annexrunner = do
, externalReceive = atomically (readTBMChan receiveq)
-- This shuts down the whole relay.
, externalShutdown = shutdown external st sendq sender receiver
- -- These three TMVars are shared amoung all
+ -- These three TMVars are shared among all
-- ExternalStates that use this relay; they're
-- common state about the external process.
, externalPrepared = externalPrepared st


@@ -84,7 +84,7 @@ urlField = Accepted "url"
gen :: Git.Repo -> UUID -> RemoteConfig -> RemoteGitConfig -> RemoteStateHandle -> Annex (Maybe Remote)
gen r u rc gc rs = do
c <- parsedRemoteConfig remote rc
- -- If the repo uses gcrypt, get the underlaying repo without the
+ -- If the repo uses gcrypt, get the underlying repo without the
-- gcrypt url, to do LFS endpoint discovery on.
r' <- if Git.GCrypt.isEncrypted r
then do


@@ -212,7 +212,7 @@ adjustExportImport' isexport isimport r rs = do
-- ones though, since they still allow accessing by Key.
-- And for thirdPartyPopulated, it depends on how the
-- content gets actually stored in the remote, so
- -- is not overriddden here.
+ -- is not overridden here.
, untrustworthy =
if versioned || thirdPartyPopulated (remotetype r)
then untrustworthy r
@@ -235,7 +235,7 @@ adjustExportImport' isexport isimport r rs = do
where
thirdpartypopulated = thirdPartyPopulated (remotetype r)
- -- exportActions adjusted to use the equivilant import actions,
+ -- exportActions adjusted to use the equivalent import actions,
-- which take ContentIdentifiers into account.
exportActionsForImport dbv ciddbv ea = ea
{ storeExport = \f k loc p -> do


@@ -151,7 +151,7 @@ rsyncParamsRemote r direction key file = do
{- The rsync shell parameter controls where rsync
- goes, so the source/dest parameter can be a dummy value,
- that just enables remote rsync mode.
- - For maximum compatability with some patched rsyncs,
+ - For maximum compatibility with some patched rsyncs,
- the dummy value needs to still contain a hostname,
- even though this hostname will never be used. -}
dummy = Param "dummy:"


@@ -205,7 +205,7 @@ rsyncSetup _ mu _ c gc = do
(c', _encsetup) <- encryptionSetup c gc
-- The rsyncurl is stored in git config, not only in this remote's
- -- persistant state, so it can vary between hosts.
+ -- persistent state, so it can vary between hosts.
gitConfigSpecialRemote u c' [("rsyncurl", url)]
return (c', u)
@@ -283,7 +283,7 @@ removeGeneric o includes = do
ps <- sendParams
opts <- rsyncOptions o
ok <- withRsyncScratchDir $ \tmp -> liftIO $ do
- {- Send an empty directory to rysnc to make it delete. -}
+ {- Send an empty directory to rsync to make it delete. -}
rsync $ opts ++ ps ++
map (\s -> Param $ "--include=" ++ s) includes ++
[ Param "--exclude=*" -- exclude everything else


@@ -868,7 +868,7 @@ giveupS3HandleProblem S3HandleNeedCreds u = do
warning $ needS3Creds u
giveup "No S3 credentials configured"
giveupS3HandleProblem S3HandleAnonymousOldAws _ =
giveup "This S3 special remote is configured with signature=anonymous, but git-annex is buit with too old a version of the aws library to support that."
giveup "This S3 special remote is configured with signature=anonymous, but git-annex is built with too old a version of the aws library to support that."
{- Prepares a S3Handle for later use. Does not connect to S3 or do anything
- else expensive. -}


@@ -237,7 +237,7 @@ removeExportDav hdl _k loc = case exportLocation loc of
Right p -> withDavHandle hdl $ \h -> runExport h $ \_dav ->
removeHelper p
-- When the exportLocation is not legal for webdav,
- -- the content is certianly not stored there, so it's ok for
+ -- the content is certainly not stored there, so it's ok for
-- removal to succeed. This allows recovery after failure to store
-- content there, as the user can rename the problem file and
-- this will be called to make sure it's gone.


@@ -66,7 +66,7 @@ keyTmpLocation = tmpLocation . fromRawFilePath . keyFile
-
- This could be just the keyTmpLocation, but when the file is in a
- subdirectory, the temp file is put in there. Partly this is to keep
- - it close to the final destination; also certian webdav servers
+ - it close to the final destination; also certain webdav servers
- seem to be buggy when renaming files from the root into a subdir,
- and so writing to the subdir avoids such problems.
-}


@@ -42,7 +42,7 @@ checkShouldFetch gc transporthandle shas
| remoteAnnexPull gc = checkNewShas transporthandle shas
| otherwise = return False
- -- Check if any of the shas are actally new in the local git repo,
+ -- Check if any of the shas are actually new in the local git repo,
-- to avoid unnecessary fetching.
checkNewShas :: TransportHandle -> [Git.Sha] -> IO Bool
checkNewShas transporthandle = check


@@ -51,7 +51,7 @@ data Emitted
| WARNING RemoteURI String
deriving (Show)
- -- Messages that the deamon consumes.
+ -- Messages that the daemon consumes.
data Consumed
= PAUSE
| LOSTNET


@@ -1164,7 +1164,7 @@ test_info = intmpclonerepo $ do
isjson
readonly_query isjson
-- When presented with an input that it does not support,
- -- info does not stop but processess subsequent inputs too.
+ -- info does not stop but processes subsequent inputs too.
git_annex'' (const True)
(sha1annexedfile `isInfixOf`)
"info"
@@ -1837,7 +1837,7 @@ test_crypto = do
testscheme scheme = Utility.Tmp.Dir.withTmpDir "gpgtmp" $ \gpgtmp -> do
-- Use the system temp directory as gpg temp directory because
-- it needs to be able to store the agent socket there,
- -- which can be problimatic when testing some filesystems.
+ -- which can be problematic when testing some filesystems.
absgpgtmp <- fromRawFilePath <$> absPath (toRawFilePath gpgtmp)
res <- testscheme' scheme absgpgtmp
-- gpg may still be running and would prevent


@@ -724,7 +724,7 @@ runFakeSsh (_host:cmd:[]) =
\_ _ _ pid -> exitWith =<< waitForProcess pid
runFakeSsh ps = error $ "fake ssh option parse error: " ++ show ps
- {- Tests each TestTree in parallel, and exits with succcess/failure.
+ {- Tests each TestTree in parallel, and exits with success/failure.
-
- Tasty supports parallel tests, but this does not use it, because
- many tests need to be run in test repos, and chdir would not be


@@ -23,7 +23,7 @@ data BackendA a = Backend
, verifyKeyContent :: Maybe (Key -> RawFilePath -> a Bool)
-- Incrementally verifies the content of a key, using the same
-- hash as verifyKeyContent, but with the content provided
- -- incrementally a peice at a time, until finalized.
+ -- incrementally a piece at a time, until finalized.
, verifyKeyContentIncrementally :: Maybe (Key -> a IncrementalVerifier)
-- Checks if a key can be upgraded to a better form.
, canUpgradeKey :: Maybe (Key -> Bool)


@@ -34,9 +34,9 @@ import Prelude
-- Describes differences from the v5 repository format.
--
-- The serialization is stored in difference.log, so avoid changes that
- -- would break compatability.
+ -- would break compatibility.
--
- -- Not breaking compatability is why a list of Differences is used, rather
+ -- Not breaking compatibility is why a list of Differences is used, rather
-- than a record type. With a record type, adding a new field for some future
-- difference would serialize to a value that an older version could not
-- parse, even if that new field was not used. With the Differences list,


@@ -98,7 +98,7 @@ data MatchFiles a = MatchFiles
type FileMatcher a = Matcher (MatchFiles a)
-- This is a matcher that can have tokens added to it while it's being
- -- built, and once complete is compiled to an unchangable matcher.
+ -- built, and once complete is compiled to an unchangeable matcher.
data ExpandableMatcher a
= BuildingMatcher [Token (MatchFiles a)]
| CompleteMatcher (Matcher (MatchFiles a))


@@ -58,7 +58,7 @@ data ImportableContents info = ImportableContents
{ importableContents :: [(ImportLocation, info)]
, importableHistory :: [ImportableContents info]
-- ^ Used by remotes that support importing historical versions of
- -- files that are stored in them. This is equivilant to a git
+ -- files that are stored in them. This is equivalent to a git
-- commit history.
--
-- When retrieving a historical version of a file,


@@ -137,7 +137,7 @@ data RemoteA a = Remote
, getRepo :: a Git.Repo
-- a Remote's configuration from git
, gitconfig :: RemoteGitConfig
- -- a Remote can be assocated with a specific local filesystem path
+ -- a Remote can be associated with a specific local filesystem path
, localpath :: Maybe FilePath
-- a Remote can be known to be readonly
, readonly :: Bool


@@ -40,7 +40,7 @@ setIndirect = do
coreworktree = ConfigKey "core.worktree"
indirectworktree = ConfigKey "core.indirect-worktree"
setbare = do
- -- core.worktree is not compatable with
+ -- core.worktree is not compatible with
-- core.bare; git does not allow both to be set, so
-- unset it when enabling direct mode, caching in
-- core.indirect-worktree


@@ -19,7 +19,7 @@ import Data.ByteString.UTF8 (fromString, toString)
import Data.Char
-- | This uses the FileSystemEncoding, so it can be used on Strings
- -- that repesent filepaths containing arbitrarily encoded characters.
+ -- that represent filepaths containing arbitrarily encoded characters.
toB64 :: String -> String
toB64 = toString . B64.encode . encodeBS


@@ -1,4 +1,4 @@
- {- bloomfilter compatability wrapper
+ {- bloomfilter compatibility wrapper
-
- Copyright 2014 Joey Hess <id@joeyh.name>
-


@@ -67,7 +67,7 @@ daemonize cmd params openlogfd pidfile changedirectory a = do
envvar = "DAEMONIZED"
#endif
- {- To run an action that is normally daemonized in the forground. -}
+ {- To run an action that is normally daemonized in the foreground. -}
#ifndef mingw32_HOST_OS
foreground :: IO Fd -> Maybe FilePath -> IO () -> IO ()
foreground openlogfd pidfile a = do


@@ -21,7 +21,7 @@
- error. This was bad.
-
- So, a committee was formed. And it arrived at a committee-like decision,
- - which satisfied noone, confused everyone, and made the world an uglier
+ - which satisfied no one, confused everyone, and made the world an uglier
- place. As with all committees, this was meh. Or in this case, "mib".
-
- And the drive manufacturers happily continued selling drives that are
@@ -82,7 +82,7 @@ storageUnits =
, Unit (p 4) "TB" "terabyte"
, Unit (p 3) "GB" "gigabyte"
, Unit (p 2) "MB" "megabyte"
, Unit (p 1) "kB" "kilobyte" -- weird capitalization thanks to committe
, Unit (p 1) "kB" "kilobyte" -- weird capitalization thanks to committee
, Unit 1 "B" "byte"
]
where
@@ -122,7 +122,7 @@ bandwidthUnits =
, Unit (p 4) "Tbit" "terabit"
, Unit (p 3) "Gbit" "gigabit"
, Unit (p 2) "Mbit" "megabit"
, Unit (p 1) "kbit" "kilobit" -- weird capitalization thanks to committe
, Unit (p 1) "kbit" "kilobit" -- weird capitalization thanks to committee
]
where
p :: Integer -> Integer


@@ -52,7 +52,7 @@ watchDir dir ignored scanevents hooks = do
else maybe (runhook delHook Nothing) handleadd
=<< getstatus (eventPath evt)
{- Add hooks are run when a file is modified for
- - compatability with INotify, which calls the add
+ - compatibility with INotify, which calls the add
- hook when a file is closed, and so tends to call
- both add and modify for file modifications. -}
when (hasflag eventFlagItemModified && not (hasflag eventFlagItemIsDir)) $ do


@@ -33,7 +33,7 @@ watchDir dir ignored scanevents hooks = do
(Modified _ _)
| isDirectory evt -> noop
{- Add hooks are run when a file is modified for
- - compatability with INotify, which calls the add
+ - compatibility with INotify, which calls the add
- hook when a file is closed, and so tends to call
- both add and modify for file modifications. -}
| otherwise -> do


@@ -284,7 +284,7 @@ props_macs_stable = map (\(desc, mac, result) -> (desc ++ " stable", calcMac mac
data IncrementalHasher = IncrementalHasher
{ updateIncrementalHasher :: S.ByteString -> IO ()
- -- ^ Called repeatedly on each peice of the content.
+ -- ^ Called repeatedly on each piece of the content.
, finalizeIncrementalHasher :: IO (Maybe String)
-- ^ Called once the full content has been sent, returns
-- the hash. (Nothing if unableIncremental was called.)


@@ -48,7 +48,7 @@ isHtmlBs = isHtml . B8.unpack
-- | Check if the file is html.
--
- -- It would be equivilant to use isHtml <$> readFile file,
+ -- It would be equivalent to use isHtml <$> readFile file,
-- but since that would not read all of the file, the handle
-- would remain open until it got garbage collected sometime later.
isHtmlFile :: FilePath -> IO Bool


@@ -53,13 +53,13 @@ isLoopbackAddress :: SockAddr -> Bool
isLoopbackAddress (SockAddrInet _ ipv4) = case hostAddressToTuple ipv4 of
-- localhost
(127,_,_,_) -> True
- -- current network; functions equivilant to loopback
+ -- current network; functions equivalent to loopback
(0,_,_, _) -> True
_ -> False
isLoopbackAddress (SockAddrInet6 _ _ ipv6 _) = case hostAddress6ToTuple ipv6 of
-- localhost
(0,0,0,0,0,0,0,1) -> True
- -- unspecified address; functions equivilant to loopback
+ -- unspecified address; functions equivalent to loopback
(0,0,0,0,0,0,0,0) -> True
v -> maybe False
(isLoopbackAddress . SockAddrInet 0)


@@ -131,7 +131,7 @@ replaceInode :: FileID -> InodeCache -> InodeCache
replaceInode inode (InodeCache (InodeCachePrim _ sz mtime)) =
InodeCache (InodeCachePrim inode sz mtime)
- {- For backwards compatability, support low-res mtime with no
+ {- For backwards compatibility, support low-res mtime with no
- fractional seconds. -}
data MTime = MTimeLowRes EpochTime | MTimeHighRes POSIXTime
deriving (Show, Ord)
@@ -249,7 +249,7 @@ data SentinalStatus = SentinalStatus
- On Windows, time stamp differences are ignored, since they change
- with the timezone.
-
- - When the sential file does not exist, InodeCaches canot reliably be
+ - When the sential file does not exist, InodeCaches cannot reliably be
- compared, so the assumption is that there is has been a change.
-}
checkSentinalFile :: SentinalFile -> IO SentinalStatus


@@ -81,7 +81,7 @@ dropLock :: LockHandle -> IO ()
dropLock = closeHandle
{- If the initial lock fails, this is a BUSY wait, and does not
- - guarentee FIFO order of waiters. In other news, Windows is a POS. -}
+ - guarantee FIFO order of waiters. In other news, Windows is a POS. -}
waitToLock :: IO (Maybe lockhandle) -> IO lockhandle
waitToLock locker = takelock
where


@@ -39,7 +39,7 @@ import Prelude
-- or the Seconds timeout if the pid lock is held by another process.
--
-- There are two levels of locks. A STM lock is used to handle
- -- fine-grained locking amoung threads, locking a specific lockfile,
+ -- fine-grained locking among threads, locking a specific lockfile,
-- but only in memory. The pid lock handles locking between processes.
--
-- The pid lock is only taken once, and LockShared is used for it,


@@ -38,7 +38,7 @@ lockExclusive file = fmap fst <$> tryMakeLockHandle P.lockPool file
(\f _ -> fmap mk <$> F.lockExclusive f)
{- If the initial lock fails, this is a BUSY wait, and does not
- - guarentee FIFO order of waiters. In other news, Windows is a POS. -}
+ - guarantee FIFO order of waiters. In other news, Windows is a POS. -}
waitToLock :: IO (Maybe lockhandle) -> IO lockhandle
waitToLock = F.waitToLock


@@ -37,7 +37,7 @@ import Common
import Data.Kind
{- A Token can be an Operation of an arbitrary type, or one of a few
- - predefined peices of syntax. -}
+ - predefined pieces of syntax. -}
data Token op = Operation op | And | Or | Not | Open | Close
deriving (Show, Eq)

Some files were not shown because too many files have changed in this diff.
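As a follow-up aside (a workflow assumption, not something this commit sets up): run without -w, codespell works as a regression check, since it prints each known misspelling it finds and exits nonzero when there are any:

    # report-only mode; a nonzero exit means new misspellings crept in
    codespell --skip=.git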