finished where indentation changes

Joey Hess 2012-12-13 00:24:19 -04:00
parent b77290cecc
commit f87a781aa6
68 changed files with 1619 additions and 1628 deletions

@ -72,18 +72,18 @@ create = void getBranch
{- Returns the ref of the branch, creating it first if necessary. -}
getBranch :: Annex Git.Ref
getBranch = maybe (hasOrigin >>= go >>= use) return =<< branchsha
where
go True = do
inRepo $ Git.Command.run "branch"
[Param $ show name, Param $ show originname]
fromMaybe (error $ "failed to create " ++ show name)
<$> branchsha
go False = withIndex' True $
inRepo $ Git.Branch.commit "branch created" fullname []
use sha = do
setIndexSha sha
return sha
branchsha = inRepo $ Git.Ref.sha fullname
{- Ensures that the branch and index are up-to-date; should be
- called before data is read from it. Runs only once per git-annex run. -}
@ -128,26 +128,26 @@ updateTo pairs = do
go branchref True [] []
else lockJournal $ go branchref dirty refs branches
return $ not $ null refs
where
isnewer (r, _) = inRepo $ Git.Branch.changed fullname r
go branchref dirty refs branches = withIndex $ do
cleanjournal <- if dirty then stageJournal else return noop
let merge_desc = if null branches
then "update"
else "merging " ++
unwords (map Git.Ref.describe branches) ++
" into " ++ show name
unless (null branches) $ do
showSideAction merge_desc
mergeIndex refs
ff <- if dirty
then return False
else inRepo $ Git.Branch.fastForward fullname refs
if ff
then updateIndex branchref
else commitBranch branchref merge_desc
(nub $ fullname:refs)
liftIO cleanjournal
{- Gets the content of a file, which may be in the journal, or committed
- to the branch. Due to limitatons of git cat-file, does *not* get content
@ -168,15 +168,14 @@ getStale = get' True
get' :: Bool -> FilePath -> Annex String
get' staleok file = fromjournal =<< getJournalFile file
where
fromjournal (Just content) = return content
fromjournal Nothing
| staleok = withIndex frombranch
| otherwise = do
update
frombranch
frombranch = withIndex $ L.unpack <$> catFile fullname file
{- Applies a function to modifiy the content of a file.
-
@ -228,27 +227,27 @@ commitBranch' branchref message parents = do
parentrefs <- commitparents <$> catObject committedref
when (racedetected branchref parentrefs) $
fixrace committedref parentrefs
where
-- look for "parent ref" lines and return the refs
commitparents = map (Git.Ref . snd) . filter isparent .
map (toassoc . L.unpack) . L.lines
toassoc = separate (== ' ')
isparent (k,_) = k == "parent"
{- The race can be detected by checking the commit's
- parent, which will be the newly pushed branch,
- instead of the expected ref that the index was updated to. -}
racedetected expectedref parentrefs
| expectedref `elem` parentrefs = False -- good parent
| otherwise = True -- race!
{- To recover from the race, union merge the lost refs
- into the index, and recommit on top of the bad commit. -}
fixrace committedref lostrefs = do
mergeIndex lostrefs
commitBranch committedref racemessage [committedref]
racemessage = message ++ " (recovery from race)"
{- Lists all files on the branch. There may be duplicates in the list. -}
files :: Annex [FilePath]
@ -345,9 +344,9 @@ stageJournal = withIndex $ do
[genstream dir h fs]
hashObjectStop h
return $ liftIO $ mapM_ removeFile $ map (dir </>) fs
where
genstream dir h fs streamer = forM_ fs $ \file -> do
let path = dir </> file
sha <- hashFile h path
streamer $ Git.UpdateIndex.updateIndexLine
sha FileBlob (asTopFilePath $ fileJournal file)

@ -37,8 +37,8 @@ catObjectDetails ref = do
catFileHandle :: Annex Git.CatFile.CatFileHandle
catFileHandle = maybe startup return =<< Annex.getState Annex.catfilehandle
where
startup = do
h <- inRepo Git.CatFile.catFileStart
Annex.changeState $ \s -> s { Annex.catfilehandle = Just h }
return h

@ -28,8 +28,8 @@ checkAttr attr file = do
checkAttrHandle :: Annex Git.CheckAttrHandle
checkAttrHandle = maybe startup return =<< Annex.getState Annex.checkattrhandle
where
startup = do
h <- inRepo $ Git.checkAttrStart annexAttrs
Annex.changeState $ \s -> s { Annex.checkattrhandle = Just h }
return h

@ -62,19 +62,19 @@ inAnnex' a key = do
- is not in the process of being removed. -}
inAnnexSafe :: Key -> Annex (Maybe Bool)
inAnnexSafe = inAnnex' $ \f -> openforlock f >>= check
where
openforlock f = catchMaybeIO $
openFd f ReadOnly Nothing defaultFileFlags
check Nothing = return is_missing
check (Just h) = do
v <- getLock h (ReadLock, AbsoluteSeek, 0, 0)
closeFd h
return $ case v of
Just _ -> is_locked
Nothing -> is_unlocked
is_locked = Nothing
is_unlocked = Just True
is_missing = Just False
{- Content is exclusively locked while running an action that might remove
- it. (If the content is not present, no locking is done.) -}
@ -82,25 +82,25 @@ lockContent :: Key -> Annex a -> Annex a
lockContent key a = do
file <- inRepo $ gitAnnexLocation key
bracketIO (openforlock file >>= lock) unlock a
where
{- Since files are stored with the write bit disabled, have
- to fiddle with permissions to open for an exclusive lock. -}
openforlock f = catchMaybeIO $ ifM (doesFileExist f)
( withModifiedFileMode f
(`unionFileModes` ownerWriteMode)
open
, open
)
where
open = openFd f ReadWrite Nothing defaultFileFlags
lock Nothing = return Nothing
lock (Just fd) = do
v <- tryIO $ setLock fd (WriteLock, AbsoluteSeek, 0, 0)
case v of
Left _ -> error "content is locked"
Right _ -> return $ Just fd
unlock Nothing = noop
unlock (Just l) = closeFd l
{- Calculates the relative path to use to link a file to a key. -}
calcGitLink :: FilePath -> Key -> Annex FilePath
@ -109,8 +109,8 @@ calcGitLink file key = do
let absfile = fromMaybe whoops $ absNormPath cwd file
loc <- inRepo $ gitAnnexLocation key
return $ relPathDirToFile (parentDir absfile) loc
where
whoops = error $ "unable to normalize " ++ file
{- Updates the Logs.Location when a key's presence changes in the current
- repository. -}
@ -186,13 +186,13 @@ checkDiskSpace destination key alreadythere = do
needmorespace (need + reserve - have - alreadythere)
return ok
_ -> return True
where
dir = maybe (fromRepo gitAnnexDir) return destination
needmorespace n =
warning $ "not enough free space, need " ++
roughSize storageUnits True n ++
" more" ++ forcemsg
forcemsg = " (use --force to override this check or adjust annex.diskreserve)"
{- Moves a file into .git/annex/objects/
-
@ -237,12 +237,12 @@ cleanObjectLoc :: Key -> Annex ()
cleanObjectLoc key = do
file <- inRepo $ gitAnnexLocation key
liftIO $ removeparents file (3 :: Int)
where
removeparents _ 0 = noop
removeparents file n = do
let dir = parentDir file
maybe noop (const $ removeparents dir (n-1))
<=< catchMaybeIO $ removeDirectory dir
{- Removes a key's file from .git/annex/objects/ -}
removeAnnex :: Key -> Annex ()
@ -278,19 +278,19 @@ moveBad key = do
{- List of keys whose content exists in .git/annex/objects/ -}
getKeysPresent :: Annex [Key]
getKeysPresent = liftIO . traverse (2 :: Int) =<< fromRepo gitAnnexObjectDir
where
traverse depth dir = do
contents <- catchDefaultIO [] (dirContents dir)
if depth == 0
then continue (mapMaybe (fileKey . takeFileName) contents) []
else do
let deeper = traverse (depth - 1)
continue [] (map deeper contents)
continue keys [] = return keys
continue keys (a:as) = do
{- Force lazy traversal with unsafeInterleaveIO. -}
morekeys <- unsafeInterleaveIO a
continue (morekeys++keys) as
{- Things to do to record changes to content when shutting down.
-
@ -303,9 +303,9 @@ saveState nocommit = doSideAction $ do
unless nocommit $
whenM alwayscommit $
Annex.Branch.commit "update"
where
alwayscommit = fromMaybe True . Git.Config.isTrue
<$> getConfig (annexConfig "alwayscommit") ""
{- Downloads content from any of a list of urls. -}
downloadUrl :: [Url.URLString] -> FilePath -> Annex Bool
@ -318,41 +318,41 @@ downloadUrl urls file = do
- This is used to speed up some rsyncs. -}
preseedTmp :: Key -> FilePath -> Annex Bool
preseedTmp key file = go =<< inAnnex key
where
go False = return False
go True = do
ok <- copy
when ok $ thawContent file
return ok
copy = ifM (liftIO $ doesFileExist file)
( return True
, do
s <- inRepo $ gitAnnexLocation key
liftIO $ copyFileExternal s file
)
{- Blocks writing to an annexed file. The file is made unwritable
- to avoid accidental edits. core.sharedRepository may change
- who can read it. -}
freezeContent :: FilePath -> Annex ()
freezeContent file = liftIO . go =<< fromRepo getSharedRepository
where
go GroupShared = modifyFileMode file $
removeModes writeModes .
addModes [ownerReadMode, groupReadMode]
go AllShared = modifyFileMode file $
removeModes writeModes .
addModes readModes
go _ = preventWrite file
{- Allows writing to an annexed file that freezeContent was called on
- before. -}
thawContent :: FilePath -> Annex ()
thawContent file = liftIO . go =<< fromRepo getSharedRepository
where
go GroupShared = groupWriteRead file
go AllShared = groupWriteRead file
go _ = allowWrite file
{- Blocks writing to the directory an annexed file is in, to prevent the
- file accidentially being deleted. However, if core.sharedRepository
@ -361,11 +361,11 @@ thawContent file = liftIO . go =<< fromRepo getSharedRepository
-}
freezeContentDir :: FilePath -> Annex ()
freezeContentDir file = liftIO . go =<< fromRepo getSharedRepository
where
dir = parentDir file
go GroupShared = groupWriteRead dir
go AllShared = groupWriteRead dir
go _ = preventWrite dir
{- Makes the directory tree to store an annexed file's content,
- with appropriate permissions on each level. -}
@ -375,5 +375,5 @@ createContentDir dest = do
createAnnexDirectory dir
-- might have already existed with restricted perms
liftIO $ allowWrite dir
where
dir = parentDir dest

@ -63,10 +63,10 @@ journalDirty = not . null <$> getJournalFiles
-}
journalFile :: FilePath -> Git.Repo -> FilePath
journalFile file repo = gitAnnexJournalDir repo </> concatMap mangle file
where
mangle '/' = "_"
mangle '_' = "__"
mangle c = [c]
{- Converts a journal file (relative to the journal dir) back to the
- filename on the branch. -}
@ -81,9 +81,9 @@ lockJournal a = do
createAnnexDirectory $ takeDirectory file
mode <- annexFileMode
bracketIO (lock file mode) unlock a
where
lock file mode = do
l <- noUmask mode $ createFile file mode
waitToSetLock l (WriteLock, AbsoluteSeek, 0, 0)
return l
unlock = closeFd

@ -17,21 +17,21 @@ import Annex.Perms
{- Create a specified lock file, and takes a shared lock. -}
lockFile :: FilePath -> Annex ()
lockFile file = go =<< fromPool file
where
go (Just _) = noop -- already locked
go Nothing = do
mode <- annexFileMode
fd <- liftIO $ noUmask mode $
openFd file ReadOnly (Just mode) defaultFileFlags
liftIO $ waitToSetLock fd (ReadLock, AbsoluteSeek, 0, 0)
changePool $ M.insert file fd
unlockFile :: FilePath -> Annex ()
unlockFile file = maybe noop go =<< fromPool file
where
go fd = do
liftIO $ closeFd fd
changePool $ M.delete file
getPool :: Annex (M.Map FilePath Fd)
getPool = getState lockpool

@ -21,11 +21,11 @@ import System.Posix.Types
withShared :: (SharedRepository -> Annex a) -> Annex a
withShared a = maybe startup a =<< Annex.getState Annex.shared
where
startup = do
shared <- fromRepo getSharedRepository
Annex.changeState $ \s -> s { Annex.shared = Just shared }
a shared
{- Sets appropriate file mode for a file or directory in the annex,
- other than the content files and content directory. Normally,
@ -33,38 +33,38 @@ withShared a = maybe startup a =<< Annex.getState Annex.shared
- allow the group to write, etc. -}
setAnnexPerm :: FilePath -> Annex ()
setAnnexPerm file = withShared $ liftIO . go
where
go GroupShared = groupWriteRead file
go AllShared = modifyFileMode file $ addModes $
[ ownerWriteMode, groupWriteMode ] ++ readModes
go _ = noop
{- Gets the appropriate mode to use for creating a file in the annex
- (other than content files, which are locked down more). -}
annexFileMode :: Annex FileMode
annexFileMode = withShared $ return . go
where
go GroupShared = sharedmode
go AllShared = combineModes (sharedmode:readModes)
go _ = stdFileMode
sharedmode = combineModes
[ ownerWriteMode, groupWriteMode
, ownerReadMode, groupReadMode
]
{- Creates a directory inside the gitAnnexDir, including any parent
- directories. Makes directories with appropriate permissions. -}
createAnnexDirectory :: FilePath -> Annex ()
createAnnexDirectory dir = traverse dir [] =<< top
where
top = parentDir <$> fromRepo gitAnnexDir
traverse d below stop
| d `equalFilePath` stop = done
| otherwise = ifM (liftIO $ doesDirectoryExist d)
( done
, traverse (parentDir d) (d:below) stop
)
where
done = forM_ below $ \p -> do
liftIO $ createDirectory p
setAnnexPerm p

@ -58,8 +58,8 @@ new = do
q <- Git.Queue.new <$> queuesize
store q
return q
where
queuesize = readish <$> getConfig (annexConfig "queuesize") ""
store :: Git.Queue.Queue -> Annex ()
store q = changeState $ \s -> s { repoqueue = Just q }

@ -27,19 +27,19 @@ import qualified Build.SysConfig as SysConfig
- port, with connection caching. -}
sshParams :: (String, Maybe Integer) -> [CommandParam] -> Annex [CommandParam]
sshParams (host, port) opts = go =<< sshInfo (host, port)
where
go (Nothing, params) = ret params
go (Just socketfile, params) = do
cleanstale
liftIO $ createDirectoryIfMissing True $ parentDir socketfile
lockFile $ socket2lock socketfile
ret params
ret ps = return $ ps ++ opts ++ portParams port ++ [Param host]
-- If the lock pool is empty, this is the first ssh of this
-- run. There could be stale ssh connections hanging around
-- from a previous git-annex run that was interrupted.
cleanstale = whenM (not . any isLock . M.keys <$> getPool) $
sshCleanup
sshInfo :: (String, Maybe Integer) -> Annex (Maybe FilePath, [CommandParam])
sshInfo (host, port) = ifM caching
@ -55,13 +55,13 @@ sshInfo (host, port) = ifM caching
else return (Nothing, [])
, return (Nothing, [])
)
where
#ifdef WITH_OLD_SSH
caching = return False
#else
caching = fromMaybe SysConfig.sshconnectioncaching
. Git.Config.isTrue
<$> getConfig (annexConfig "sshcaching") ""
#endif
cacheParams :: FilePath -> [CommandParam]
@ -81,34 +81,34 @@ sshCleanup = do
sockets <- filter (not . isLock) <$>
liftIO (catchDefaultIO [] $ dirContents dir)
forM_ sockets cleanup
where
cleanup socketfile = do
-- Drop any shared lock we have, and take an
-- exclusive lock, without blocking. If the lock
-- succeeds, nothing is using this ssh, and it can
-- be stopped.
let lockfile = socket2lock socketfile
unlockFile lockfile
mode <- annexFileMode
fd <- liftIO $ noUmask mode $
openFd lockfile ReadWrite (Just mode) defaultFileFlags
v <- liftIO $ tryIO $
setLock fd (WriteLock, AbsoluteSeek, 0, 0)
case v of
Left _ -> noop
Right _ -> stopssh socketfile
liftIO $ closeFd fd
stopssh socketfile = do
let (host, port) = socket2hostport socketfile
(_, params) <- sshInfo (host, port)
-- "ssh -O stop" is noisy on stderr even with -q
void $ liftIO $ catchMaybeIO $
withQuietOutput createProcessSuccess $
proc "ssh" $ toCommand $
[ Params "-O stop"
] ++ params ++ [Param host]
-- Cannot remove the lock file; other processes may
-- be waiting on our exclusive lock to use it.
hostport2socket :: String -> Maybe Integer -> FilePath
hostport2socket host Nothing = host
@ -118,8 +118,8 @@ socket2hostport :: FilePath -> (String, Maybe Integer)
socket2hostport socket
| null p = (h, Nothing)
| otherwise = (h, readish p)
where
(h, p) = separate (== '!') $ takeFileName socket
socket2lock :: FilePath -> FilePath
socket2lock socket = socket ++ lockExt

@ -34,10 +34,10 @@ configkey = annexConfig "uuid"
- so use the command line tool. -}
genUUID :: IO UUID
genUUID = gen . lines <$> readProcess command params
where
gen [] = error $ "no output from " ++ command
gen (l:_) = toUUID l
(command:params) = words SysConfig.uuid
{- Get current repository's UUID. -}
getUUID :: Annex UUID
@ -54,19 +54,19 @@ getRepoUUID r = do
updatecache u
return u
else return c
where
updatecache u = do
g <- gitRepo
when (g /= r) $ storeUUID cachekey u
cachekey = remoteConfig r "uuid"
removeRepoUUID :: Annex ()
removeRepoUUID = unsetConfig configkey
getUncachedUUID :: Git.Repo -> UUID
getUncachedUUID = toUUID . Git.Config.get key ""
where
(ConfigKey key) = configkey
{- Make sure that the repo has an annex.uuid setting. -}
prepUUID :: Annex ()

@ -26,9 +26,9 @@ versionField = annexConfig "version"
getVersion :: Annex (Maybe Version)
getVersion = handle <$> getConfig versionField ""
where
handle [] = Nothing
handle v = Just v
setVersion :: Annex ()
setVersion = setConfig versionField defaultVersion
@ -41,6 +41,6 @@ checkVersion v
| v `elem` supportedVersions = noop
| v `elem` upgradableVersions = err "Upgrade this repository: git-annex upgrade"
| otherwise = err "Upgrade git-annex."
where
err msg = error $ "Repository version " ++ v ++
" is not supported. " ++ msg

@ -33,11 +33,11 @@ similarityFloor = 7
fuzzymatches :: String -> (c -> String) -> [c] -> [c]
fuzzymatches input showchoice choices = fst $ unzip $
sortBy comparecost $ filter similarEnough $ zip choices costs
where
distance = restrictedDamerauLevenshteinDistance gitEditCosts input
costs = map (distance . showchoice) choices
comparecost a b = compare (snd a) (snd b)
similarEnough (_, cst) = cst < similarityFloor
{- Takes action based on git's autocorrect configuration, in preparation for
- an autocorrected command being run. -}
@ -49,23 +49,23 @@ prepare input showmatch matches r =
| n < 0 -> warn
| otherwise -> sleep n
Nothing -> list
where
list = error $ unlines $
[ "Unknown command '" ++ input ++ "'"
, ""
, "Did you mean one of these?"
] ++ map (\m -> "\t" ++ showmatch m) matches
warn =
hPutStr stderr $ unlines
[ "WARNING: You called a command named '" ++
input ++ "', which does not exist."
, "Continuing under the assumption that you meant '" ++
showmatch (Prelude.head matches) ++ "'"
]
sleep n = do
warn
hPutStrLn stderr $ unwords
[ "in"
, show (fromIntegral n / 10 :: Float)
, "seconds automatically..."]
threadDelay (n * 100000) -- deciseconds to microseconds

@ -36,10 +36,10 @@ current r = do
currentUnsafe :: Repo -> IO (Maybe Git.Ref)
currentUnsafe r = parse . firstLine
<$> pipeReadStrict [Param "symbolic-ref", Param "HEAD"] r
where
parse l
| null l = Nothing
| otherwise = Just $ Git.Ref l
{- Checks if the second branch has any commits not present on the first
- branch. -}
@ -47,12 +47,12 @@ changed :: Branch -> Branch -> Repo -> IO Bool
changed origbranch newbranch repo
| origbranch == newbranch = return False
| otherwise = not . null <$> diffs
where
diffs = pipeReadStrict
[ Param "log"
, Param (show origbranch ++ ".." ++ show newbranch)
, Params "--oneline -n1"
] repo
{- Given a set of refs that are all known to have commits not
- on the branch, tries to update the branch by a fast-forward.
@ -70,23 +70,23 @@ fastForward branch (first:rest) repo =
( no_ff
, maybe no_ff do_ff =<< findbest first rest
)
where
no_ff = return False
do_ff to = do
run "update-ref"
[Param $ show branch, Param $ show to] repo
return True
findbest c [] = return $ Just c
findbest c (r:rs)
| c == r = findbest c rs
| otherwise = do
better <- changed c r repo
worse <- changed r c repo
case (better, worse) of
(True, True) -> return Nothing -- divergent fail
(True, False) -> findbest r rs -- better
(False, True) -> findbest c rs -- worse
(False, False) -> findbest c rs -- same
{- Commits the index into the specified branch (or other ref),
- with the specified parent refs, and returns the committed sha -}
@ -99,5 +99,5 @@ commit message branch parentrefs repo = do
message repo
run "update-ref" [Param $ show branch, Param $ show sha] repo
return sha
where
ps = concatMap (\r -> ["-p", show r]) parentrefs

@ -48,28 +48,28 @@ catObject h object = maybe L.empty fst <$> catObjectDetails h object
{- Gets both the content of an object, and its Sha. -}
catObjectDetails :: CatFileHandle -> Ref -> IO (Maybe (L.ByteString, Sha))
catObjectDetails h object = CoProcess.query h send receive
where
send to = do
fileEncoding to
hPutStrLn to $ show object
receive from = do
fileEncoding from
header <- hGetLine from
case words header of
[sha, objtype, size]
| length sha == shaSize &&
isJust (readObjectType objtype) ->
case reads size of
[(bytes, "")] -> readcontent bytes from sha
_ -> dne
| otherwise -> dne
_
| header == show object ++ " missing" -> dne
| otherwise -> error $ "unknown response from git cat-file " ++ show (header, object)
readcontent bytes from sha = do
content <- S.hGet from bytes
c <- hGetChar from
when (c /= '\n') $
error "missing newline from git cat-file"
return $ Just (L.fromChunks [content], Ref sha)
dne = return Nothing

@ -24,12 +24,12 @@ checkAttrStart attrs repo = do
cwd <- getCurrentDirectory
h <- gitCoProcessStart params repo
return (h, attrs, cwd)
where
params =
[ Param "check-attr"
, Params "-z --stdin"
] ++ map Param attrs ++
[ Param "--" ]
checkAttrStop :: CheckAttrHandle -> IO ()
checkAttrStop (h, _, _) = CoProcess.stop h
@ -42,26 +42,26 @@ checkAttr (h, attrs, cwd) want file = do
case vals of
[v] -> return v
_ -> error $ "unable to determine " ++ want ++ " attribute of " ++ file
where
send to = do
fileEncoding to
hPutStr to $ file' ++ "\0"
receive from = forM attrs $ \attr -> do
fileEncoding from
l <- hGetLine from
return (attr, attrvalue attr l)
{- Before git 1.7.7, git check-attr worked best with
- absolute filenames; using them worked around some bugs
- with relative filenames.
-
- With newer git, git check-attr chokes on some absolute
- filenames, and the bugs that necessitated them were fixed,
- so use relative filenames. -}
oldgit = Git.Version.older "1.7.7"
file'
| oldgit = absPathFrom cwd file
| otherwise = relPathDirToFile cwd $ absPathFrom cwd file
attrvalue attr l = end bits !! 0
where
bits = split sep l
sep = ": " ++ attr ++ ": "

@ -17,11 +17,11 @@ import qualified Utility.CoProcess as CoProcess
{- Constructs a git command line operating on the specified repo. -}
gitCommandLine :: [CommandParam] -> Repo -> [CommandParam]
gitCommandLine params Repo { location = l@(Local _ _ ) } = setdir : settree ++ params
where
setdir = Param $ "--git-dir=" ++ gitdir l
settree = case worktree l of
Nothing -> []
Just t -> [Param $ "--work-tree=" ++ t]
gitCommandLine _ repo = assertLocal repo $ error "internal"
{- Runs git in the specified repo. -}
@ -49,8 +49,8 @@ pipeReadLazy params repo = assertLocal repo $ do
fileEncoding h
c <- hGetContents h
return (c, checkSuccessProcess pid)
where
p = gitCreateProcess params repo
{- Runs a git subcommand, and returns its output, strictly.
-
@ -63,8 +63,8 @@ pipeReadStrict params repo = assertLocal repo $
output <- hGetContentsStrict h
hClose h
return output
where
p = gitCreateProcess params repo
{- Runs a git subcommand, feeding it input, and returning its output,
- which is expected to be fairly small, since it's all read into memory
@ -85,8 +85,8 @@ pipeNullSplit :: [CommandParam] -> Repo -> IO ([String], IO Bool)
pipeNullSplit params repo = do
(s, cleanup) <- pipeReadLazy params repo
return (filter (not . null) $ split sep s, cleanup)
where
sep = "\0"
pipeNullSplitZombie :: [CommandParam] -> Repo -> IO [String]

@ -48,18 +48,18 @@ reRead r = read' $ r
-}
read' :: Repo -> IO Repo
read' repo = go repo
where
go Repo { location = Local { gitdir = d } } = git_config d
go Repo { location = LocalUnknown d } = git_config d
go _ = assertLocal repo $ error "internal"
git_config d = withHandle StdoutHandle createProcessSuccess p $
hRead repo
where
params = ["config", "--null", "--list"]
p = (proc "git" params)
{ cwd = Just d
, env = gitEnv repo
}
{- Gets the global git config, returning a dummy Repo containing it. -}
global :: IO (Maybe Repo)
@ -73,9 +73,9 @@ global = do
return $ Just repo'
, return Nothing
)
where
params = ["config", "--null", "--list", "--global"]
p = (proc "git" params)
{- Reads git config from a handle and populates a repo with it. -}
hRead :: Repo -> Handle -> IO Repo
@ -133,10 +133,10 @@ parse s
| all ('=' `elem`) (take 1 ls) = sep '=' ls
-- --null --list output separates keys from values with newlines
| otherwise = sep '\n' $ split "\0" s
where
ls = lines s
sep c = M.fromListWith (++) . map (\(k,v) -> (k, [v])) .
map (separate (== c))
{- Checks if a string from git config is a true value. -}
isTrue :: String -> Maybe Bool
@ -144,8 +144,8 @@ isTrue s
| s' == "true" = Just True
| s' == "false" = Just False
| otherwise = Nothing
where
s' = map toLower s
isBare :: Repo -> Bool
isBare r = fromMaybe False $ isTrue =<< getMaybe "core.bare" r

@ -33,15 +33,15 @@ import Utility.UserInfo
- directory. -}
fromCwd :: IO Repo
fromCwd = getCurrentDirectory >>= seekUp checkForRepo
where
norepo = error "Not in a git repository."
seekUp check dir = do
r <- check dir
case r of
Nothing -> case parentDir dir of
"" -> norepo
d -> seekUp check d
Just loc -> newFrom loc
{- Local Repo constructor, accepts a relative or absolute path. -}
fromPath :: FilePath -> IO Repo
@ -55,21 +55,21 @@ fromAbsPath dir
ifM (doesDirectoryExist dir') ( ret dir' , hunt )
| otherwise =
error $ "internal error, " ++ dir ++ " is not absolute"
where
ret = newFrom . LocalUnknown
{- Git always looks for "dir.git" in preference to
- to "dir", even if dir ends in a "/". -}
canondir = dropTrailingPathSeparator dir
dir' = canondir ++ ".git"
{- When dir == "foo/.git", git looks for "foo/.git/.git",
- and failing that, uses "foo" as the repository. -}
hunt
| "/.git" `isSuffixOf` canondir =
ifM (doesDirectoryExist $ dir </> ".git")
( ret dir
, ret $ takeDirectory canondir
)
| otherwise = ret dir
{- Remote Repo constructor. Throws exception on invalid url.
-
@ -85,9 +85,9 @@ fromUrlStrict :: String -> IO Repo
fromUrlStrict url
| startswith "file://" url = fromAbsPath $ uriPath u
| otherwise = newFrom $ Url u
where
u = fromMaybe bad $ parseURI url
bad = error $ "bad url " ++ url
{- Creates a repo that has an unknown location. -}
fromUnknown :: IO Repo
@ -100,21 +100,23 @@ localToUrl reference r
| not $ repoIsUrl reference = error "internal error; reference repo not url"
| repoIsUrl r = r
| otherwise = r { location = Url $ fromJust $ parseURI absurl }
where
absurl = concat
[ Url.scheme reference
, "//"
, Url.authority reference
, repoPath r
]
{- Calculates a list of a repo's configured remotes, by parsing its config. -}
fromRemotes :: Repo -> IO [Repo]
fromRemotes repo = mapM construct remotepairs
where
filterconfig f = filter f $ M.toList $ config repo
filterkeys f = filterconfig (\(k,_) -> f k)
remotepairs = filterkeys isremote
isremote k = startswith "remote." k && endswith ".url" k
construct (k,v) = remoteNamedFromKey k $ fromRemoteLocation v repo
{- Sets the name of a remote when constructing the Repo to represent it. -}
remoteNamed :: String -> IO Repo -> IO Repo
@ -126,50 +128,48 @@ remoteNamed n constructor = do
"remote.foo.url". -}
remoteNamedFromKey :: String -> IO Repo -> IO Repo
remoteNamedFromKey k = remoteNamed basename
where
basename = join "." $ reverse $ drop 1 $ reverse $ drop 1 $ split "." k
{- Constructs a new Repo for one of a Repo's remotes using a given
- location (ie, an url). -}
fromRemoteLocation :: String -> Repo -> IO Repo
fromRemoteLocation s repo = gen $ calcloc s
where
gen v
| scpstyle v = fromUrl $ scptourl v
| urlstyle v = fromUrl v
| otherwise = fromRemotePath v repo
-- insteadof config can rewrite remote location
calcloc l
| null insteadofs = l
| otherwise = replacement ++ drop (length bestvalue) l
where
replacement = drop (length prefix) $
take (length bestkey - length suffix) bestkey
(bestkey, bestvalue) = maximumBy longestvalue insteadofs
longestvalue (_, a) (_, b) = compare b a
insteadofs = filterconfig $ \(k, v) ->
startswith prefix k &&
endswith suffix k &&
startswith v l
filterconfig f = filter f $
concatMap splitconfigs $ M.toList $ fullconfig repo
splitconfigs (k, vs) = map (\v -> (k, v)) vs
(prefix, suffix) = ("url." , ".insteadof")
urlstyle v = isURI v || ":" `isInfixOf` v && "//" `isInfixOf` v
-- git remotes can be written scp style -- [user@]host:dir
-- but foo::bar is a git-remote-helper location instead
scpstyle v = ":" `isInfixOf` v
&& not ("//" `isInfixOf` v)
&& not ("::" `isInfixOf` v)
scptourl v = "ssh://" ++ host ++ slash dir
where
(host, dir) = separate (== ':') v
slash d | d == "" = "/~/" ++ d
| "/" `isPrefixOf` d = d
| "~" `isPrefixOf` d = '/':d
| otherwise = "/~/" ++ d
{- Constructs a Repo from the path specified in the git remotes of
- another Repo. -}
@ -191,25 +191,25 @@ repoAbsPath d = do
expandTilde :: FilePath -> IO FilePath
expandTilde = expandt True
where
expandt _ [] = return ""
expandt _ ('/':cs) = do
v <- expandt True cs
return ('/':v)
expandt True ('~':'/':cs) = do
h <- myHomeDir
return $ h </> cs
expandt True ('~':cs) = do
let (name, rest) = findname "" cs
u <- getUserEntryForName name
return $ homeDirectory u </> rest
expandt _ (c:cs) = do
v <- expandt False cs
return (c:v)
findname n [] = (n, "")
findname n (c:cs)
| c == '/' = (n, cs)
| otherwise = findname (n++[c]) cs
checkForRepo :: FilePath -> IO (Maybe RepoLocation)
checkForRepo dir =
@ -217,28 +217,28 @@ checkForRepo dir =
check gitDirFile $
check isBareRepo $
return Nothing
where
check test cont = maybe cont (return . Just) =<< test
checkdir c = ifM c
( return $ Just $ LocalUnknown dir
, return Nothing
)
isRepo = checkdir $ gitSignature $ ".git" </> "config"
isBareRepo = checkdir $ gitSignature "config"
<&&> doesDirectoryExist (dir </> "objects")
gitDirFile = do
c <- firstLine <$>
catchDefaultIO "" (readFile $ dir </> ".git")
return $ if gitdirprefix `isPrefixOf` c
then Just $ Local
{ gitdir = absPathFrom dir $
drop (length gitdirprefix) c
, worktree = Just dir
}
else Nothing
where
gitdirprefix = "gitdir: "
gitSignature file = doesFileExist $ dir </> file
newFrom :: RepoLocation -> IO Repo
newFrom l = return Repo

@ -39,23 +39,23 @@ get = do
unless (d `dirContains` cwd) $
changeWorkingDirectory d
return $ addworktree wt r
where
pathenv s = do
v <- getEnv s
case v of
Just d -> do
unsetEnv s
Just <$> absPath d
Nothing -> return Nothing
configure Nothing r = Git.Config.read r
configure (Just d) r = do
r' <- Git.Config.read r
-- Let GIT_DIR override the default gitdir.
absd <- absPath d
return $ changelocation r' $ Local
{ gitdir = absd
, worktree = worktree (location r')
}
addworktree w r = changelocation r $
Local { gitdir = gitdir (location r), worktree = w }
changelocation r l = r { location = l }

@ -29,17 +29,17 @@ hashObjectStop = CoProcess.stop
{- Injects a file into git, returning the Sha of the object. -}
hashFile :: HashObjectHandle -> FilePath -> IO Sha
hashFile h file = CoProcess.query h send receive
where
send to = do
fileEncoding to
hPutStrLn to file
receive from = getSha "hash-object" $ hGetLine from
{- Injects some content into git, returning its Sha. -}
hashObject :: ObjectType -> String -> Repo -> IO Sha
hashObject objtype content repo = getSha subcmd $ do
s <- pipeWriteRead (map Param params) content repo
return s
where
subcmd = "hash-object"
params = [subcmd, "-t", show objtype, "-w", "--stdin"]

@ -21,7 +21,7 @@ override index = do
res <- getEnv var
setEnv var index True
return $ reset res
where
var = "GIT_INDEX_FILE"
reset (Just v) = setEnv var v True
reset _ = unsetEnv var

@ -31,12 +31,12 @@ inRepo l = pipeNullSplit $ Params "ls-files --cached -z --" : map File l
{- Scans for files at the specified locations that are not checked into git. -}
notInRepo :: Bool -> [FilePath] -> Repo -> IO ([FilePath], IO Bool)
notInRepo include_ignored l repo = pipeNullSplit params repo
where
params = [Params "ls-files --others"] ++ exclude ++
[Params "-z --"] ++ map File l
exclude
| include_ignored = []
| otherwise = [Param "--exclude-standard"]
{- Returns a list of all files that are staged for commit. -}
staged :: [FilePath] -> Repo -> IO ([FilePath], IO Bool)
@ -49,15 +49,15 @@ stagedNotDeleted = staged' [Param "--diff-filter=ACMRT"]
staged' :: [CommandParam] -> [FilePath] -> Repo -> IO ([FilePath], IO Bool)
staged' ps l = pipeNullSplit $ prefix ++ ps ++ suffix
where
prefix = [Params "diff --cached --name-only -z"]
suffix = Param "--" : map File l
{- Returns a list of files that have unstaged changes. -}
changedUnstaged :: [FilePath] -> Repo -> IO ([FilePath], IO Bool)
changedUnstaged l = pipeNullSplit params
where
params = Params "diff --name-only -z --" : map File l
{- Returns a list of the files in the specified locations that are staged
- for commit, and whose type has changed. -}
@ -77,9 +77,9 @@ typeChanged' ps l repo = do
let top = repoPath repo
cwd <- getCurrentDirectory
return (map (\f -> relPathDirToFile cwd $ top </> f) fs, cleanup)
where
prefix = [Params "diff --name-only --diff-filter=T -z"]
suffix = Param "--" : map File l
{- A item in conflict has two possible values.
- Either can be Nothing, when that side deleted the file. -}
@ -108,8 +108,8 @@ unmerged :: [FilePath] -> Repo -> IO ([Unmerged], IO Bool)
unmerged l repo = do
(fs, cleanup) <- pipeNullSplit params repo
return (reduceUnmerged [] $ catMaybes $ map parseUnmerged fs, cleanup)
where
params = Params "ls-files --unmerged -z --" : map File l
data InternalUnmerged = InternalUnmerged
{ isus :: Bool
@ -131,28 +131,28 @@ parseUnmerged s
return $ InternalUnmerged (stage == 2) file
(Just blobtype) (Just sha)
_ -> Nothing
where
(metadata, file) = separate (== '\t') s
reduceUnmerged :: [Unmerged] -> [InternalUnmerged] -> [Unmerged]
reduceUnmerged c [] = c
reduceUnmerged c (i:is) = reduceUnmerged (new:c) rest
where
(rest, sibi) = findsib i is
(blobtypeA, blobtypeB, shaA, shaB)
| isus i = (iblobtype i, iblobtype sibi, isha i, isha sibi)
| otherwise = (iblobtype sibi, iblobtype i, isha sibi, isha i)
new = Unmerged
{ unmergedFile = ifile i
, unmergedBlobType = Conflicting blobtypeA blobtypeB
, unmergedSha = Conflicting shaA shaB
}
findsib templatei [] = ([], deleted templatei)
findsib templatei (l:ls)
| ifile l == ifile templatei = (ls, l)
| otherwise = (l:ls, deleted templatei)
deleted templatei = templatei
{ isus = not (isus templatei)
, iblobtype = Nothing
, isha = Nothing
}

@ -47,11 +47,11 @@ parseLsTree l = TreeItem
, sha = s
, file = Git.Filename.decode f
}
where
-- l = <mode> SP <type> SP <sha> TAB <file>
-- All fields are fixed, so we can pull them out of
-- specific positions in the line.
(m, past_m) = splitAt 7 l
(t, past_t) = splitAt 4 past_m
(s, past_s) = splitAt 40 $ Prelude.tail past_t
f = Prelude.tail past_s

@ -86,30 +86,30 @@ new lim = Queue 0 (fromMaybe defaultLimit lim) M.empty
addCommand :: String -> [CommandParam] -> [FilePath] -> Queue -> Repo -> IO Queue
addCommand subcommand params files q repo =
updateQueue action different (length newfiles) q repo
where
key = actionKey action
action = CommandAction
{ getSubcommand = subcommand
, getParams = params
, getFiles = newfiles
}
newfiles = files ++ maybe [] getFiles (M.lookup key $ items q)
different (CommandAction { getSubcommand = s }) = s /= subcommand
different _ = True
{- Adds an update-index streamer to the queue. -}
addUpdateIndex :: Git.UpdateIndex.Streamer -> Queue -> Repo -> IO Queue
addUpdateIndex streamer q repo =
updateQueue action different 1 q repo
where
key = actionKey action
-- the list is built in reverse order
action = UpdateIndexAction $ streamer : streamers
streamers = maybe [] getStreamers $ M.lookup key $ items q
different (UpdateIndexAction _) = False
different _ = True
{- Updates or adds an action in the queue. If the queue already contains a
- different action, it will be flushed; this is to ensure that conflicting
@ -118,15 +118,15 @@ updateQueue :: Action -> (Action -> Bool) -> Int -> Queue -> Repo -> IO Queue
updateQueue !action different sizeincrease q repo
| null (filter different (M.elems (items q))) = return $ go q
| otherwise = go <$> flush q repo
where
go q' = newq
where
!newq = q'
{ size = newsize
, items = newitems
}
!newsize = size q' + sizeincrease
!newitems = M.insertWith' const (actionKey action) action (items q')
{- Is a queue large enough that it should be flushed? -}
full :: Queue -> Bool
@ -153,8 +153,8 @@ runAction repo action@(CommandAction {}) =
fileEncoding h
hPutStr h $ join "\0" $ getFiles action
hClose h
where
p = (proc "xargs" params) { env = gitEnv repo }
params = "-0":"git":baseparams
baseparams = toCommand $ gitCommandLine
(Param (getSubcommand action):getParams action) repo

@ -21,10 +21,10 @@ describe = show . base
- Converts such a fully qualified ref into a base ref (eg: master). -}
base :: Ref -> Ref
base = Ref . remove "refs/heads/" . remove "refs/remotes/" . show
where
remove prefix s
| prefix `isPrefixOf` s = drop (length prefix) s
| otherwise = s
{- Given a directory such as "refs/remotes/origin", and a ref such as
- refs/heads/master, yields a version of that ref under the directory,
@ -40,51 +40,51 @@ exists ref = runBool "show-ref"
{- Get the sha of a fully qualified git ref, if it exists. -}
sha :: Branch -> Repo -> IO (Maybe Sha)
sha branch repo = process <$> showref repo
where
showref = pipeReadStrict [Param "show-ref",
Param "--hash", -- get the hash
Param $ show branch]
process [] = Nothing
process s = Just $ Ref $ firstLine s
{- List of (refs, branches) matching a given ref spec. -}
matching :: Ref -> Repo -> IO [(Ref, Branch)]
matching ref repo = map gen . lines <$>
pipeReadStrict [Param "show-ref", Param $ show ref] repo
where
gen l = let (r, b) = separate (== ' ') l
in (Ref r, Ref b)
{- List of (refs, branches) matching a given ref spec.
- Duplicate refs are filtered out. -}
matchingUniq :: Ref -> Repo -> IO [(Ref, Branch)]
matchingUniq ref repo = nubBy uniqref <$> matching ref repo
where
uniqref (a, _) (b, _) = a == b
{- Checks if a String is a legal git ref name.
-
- The rules for this are complex; see git-check-ref-format(1) -}
legal :: Bool -> String -> Bool
legal allowonelevel s = all (== False) illegal
where
illegal =
[ any ("." `isPrefixOf`) pathbits
, any (".lock" `isSuffixOf`) pathbits
, not allowonelevel && length pathbits < 2
, contains ".."
, any (\c -> contains [c]) illegalchars
, begins "/"
, ends "/"
, contains "//"
, ends "."
, contains "@{"
, null s
]
contains v = v `isInfixOf` s
ends v = v `isSuffixOf` s
begins v = v `isPrefixOf` s
pathbits = split "/" s
illegalchars = " ~^:?*[\\" ++ controlchars
controlchars = chr 0o177 : [chr 0 .. chr (0o40-1)]
pathbits = split "/" s
illegalchars = " ~^:?*[\\" ++ controlchars
controlchars = chr 0o177 : [chr 0 .. chr (0o40-1)]
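A rough sketch of how legal behaves on a few names, worked out by hand from the rules listed above (illustrative only):

    legal False "refs/heads/master"    -- True: two path components, no forbidden characters
    legal True  "master"               -- True
    legal False "master"               -- False: one-level names need allowonelevel
    legal False "refs/heads/foo..bar"  -- False: contains ".."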
View file
@ -14,8 +14,8 @@ import Git.Types
any trailing newline, returning the sha. -}
getSha :: String -> IO String -> IO Sha
getSha subcommand a = maybe bad return =<< extractSha <$> a
where
bad = error $ "failed to read sha from git " ++ subcommand
where
bad = error $ "failed to read sha from git " ++ subcommand
{- Extracts the Sha from a string. There can be a trailing newline after
- it, but nothing else. -}
@ -24,12 +24,12 @@ extractSha s
| len == shaSize = val s
| len == shaSize + 1 && length s' == shaSize = val s'
| otherwise = Nothing
where
len = length s
s' = firstLine s
val v
| all (`elem` "1234567890ABCDEFabcdef") v = Just $ Ref v
| otherwise = Nothing
where
len = length s
s' = firstLine s
val v
| all (`elem` "1234567890ABCDEFabcdef") v = Just $ Ref v
| otherwise = Nothing
{- Size of a git sha. -}
shaSize :: Int
View file
@ -62,11 +62,11 @@ doMerge ch differ repo streamer = do
(diff, cleanup) <- pipeNullSplit (map Param differ) repo
go diff
void $ cleanup
where
go [] = noop
go (info:file:rest) = mergeFile info file ch repo >>=
maybe (go rest) (\l -> streamer l >> go rest)
go (_:[]) = error $ "parse error " ++ show differ
where
go [] = noop
go (info:file:rest) = mergeFile info file ch repo >>=
maybe (go rest) (\l -> streamer l >> go rest)
go (_:[]) = error $ "parse error " ++ show differ
{- Given an info line from a git raw diff, and the filename, generates
- a line suitable for update-index that union merges the two sides of the
@ -78,16 +78,16 @@ mergeFile info file h repo = case filter (/= nullSha) [Ref asha, Ref bsha] of
shas -> use
=<< either return (\s -> hashObject BlobObject (unlines s) repo)
=<< calcMerge . zip shas <$> mapM getcontents shas
where
[_colonmode, _bmode, asha, bsha, _status] = words info
use sha = return $ Just $
updateIndexLine sha FileBlob $ asTopFilePath file
-- We don't know how the file is encoded, but need to
-- split it into lines to union merge. Using the
-- FileSystemEncoding for this is a hack, but ensures there
-- are no decoding errors. Note that this works because
-- hashObject sets fileEncoding on its write handle.
getcontents s = lines . encodeW8 . L.unpack <$> catObject h s
where
[_colonmode, _bmode, asha, bsha, _status] = words info
use sha = return $ Just $
updateIndexLine sha FileBlob $ asTopFilePath file
-- We don't know how the file is encoded, but need to
-- split it into lines to union merge. Using the
-- FileSystemEncoding for this is a hack, but ensures there
-- are no decoding errors. Note that this works because
-- hashObject sets fileEncoding on its write handle.
getcontents s = lines . encodeW8 . L.unpack <$> catObject h s
{- Calculates a union merge between a list of refs, with contents.
-
@ -98,7 +98,7 @@ calcMerge :: [(Ref, [String])] -> Either Ref [String]
calcMerge shacontents
| null reuseable = Right $ new
| otherwise = Left $ fst $ Prelude.head reuseable
where
reuseable = filter (\c -> sorteduniq (snd c) == new) shacontents
new = sorteduniq $ concat $ map snd shacontents
sorteduniq = S.toList . S.fromList
where
reuseable = filter (\c -> sorteduniq (snd c) == new) shacontents
new = sorteduniq $ concat $ map snd shacontents
sorteduniq = S.toList . S.fromList
View file
@ -38,12 +38,12 @@ streamUpdateIndex repo as = pipeWrite params repo $ \h -> do
fileEncoding h
forM_ as (stream h)
hClose h
where
params = map Param ["update-index", "-z", "--index-info"]
stream h a = a (streamer h)
streamer h s = do
hPutStr h s
hPutStr h "\0"
where
params = map Param ["update-index", "-z", "--index-info"]
stream h a = a (streamer h)
streamer h s = do
hPutStr h s
hPutStr h "\0"
{- A streamer that adds the current tree for a ref. Useful for eg, copying
- and modifying branches. -}
@ -52,8 +52,8 @@ lsTree (Ref x) repo streamer = do
(s, cleanup) <- pipeNullSplit params repo
mapM_ streamer s
void $ cleanup
where
params = map Param ["ls-tree", "-z", "-r", "--full-tree", x]
where
params = map Param ["ls-tree", "-z", "-r", "--full-tree", x]
{- Generates a line suitable to be fed into update-index, to add
- a given file with a given sha. -}
View file
@ -28,13 +28,13 @@ scheme repo = notUrl repo
- <http://trac.haskell.org/network/ticket/40> -}
uriRegName' :: URIAuth -> String
uriRegName' a = fixup $ uriRegName a
where
fixup x@('[':rest)
| rest !! len == ']' = take len rest
| otherwise = x
where
len = length rest - 1
fixup x = x
where
fixup x@('[':rest)
| rest !! len == ']' = take len rest
| otherwise = x
where
len = length rest - 1
fixup x = x
{- Hostname of an URL repo. -}
host :: Repo -> String
@ -55,14 +55,14 @@ hostuser r = authpart uriUserInfo r ++ authpart uriRegName' r
{- The full authority portion an URL repo. (ie, "user@host:port") -}
authority :: Repo -> String
authority = authpart assemble
where
assemble a = uriUserInfo a ++ uriRegName' a ++ uriPort a
where
assemble a = uriUserInfo a ++ uriRegName' a ++ uriPort a
{- Applies a function to extract part of the uriAuthority of an URL repo. -}
authpart :: (URIAuth -> a) -> Repo -> a
authpart a Repo { location = Url u } = a auth
where
auth = fromMaybe (error $ "bad url " ++ show u) (uriAuthority u)
where
auth = fromMaybe (error $ "bad url " ++ show u) (uriAuthority u)
authpart _ repo = notUrl repo
notUrl :: Repo -> a
View file
@ -26,13 +26,13 @@ normalize :: String -> Integer
normalize = sum . mult 1 . reverse .
extend precision . take precision .
map readi . split "."
where
extend n l = l ++ replicate (n - length l) 0
mult _ [] = []
mult n (x:xs) = (n*x) : mult (n*10^width) xs
readi :: String -> Integer
readi s = case reads s of
((x,_):_) -> x
_ -> 0
precision = 10 -- number of segments of the version to compare
width = length "yyyymmddhhmmss" -- maximum width of a segment
where
extend n l = l ++ replicate (n - length l) 0
mult _ [] = []
mult n (x:xs) = (n*x) : mult (n*10^width) xs
readi :: String -> Integer
readi s = case reads s of
((x,_):_) -> x
_ -> 0
precision = 10 -- number of segments of the version to compare
width = length "yyyymmddhhmmss" -- maximum width of a segment
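A small sketch of why normalize is used instead of plain string comparison for versions (values follow from the zero-padding and per-segment widths above):

    normalize "1.9" < normalize "1.10"   -- True: segment 10 outranks segment 9
    "1.9" < "1.10"                       -- False: lexicographic ordering gets this wrong
    normalize "1" == normalize "1.0"     -- True: missing segments are padded with zeros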
View file
@ -15,4 +15,4 @@ toB64 = encode . s2w8
fromB64 :: String -> String
fromB64 s = maybe bad w82s $ decode s
where bad = error "bad base64 encoded data"
where bad = error "bad base64 encoded data"
View file
@ -17,9 +17,9 @@ copyFileExternal src dest = do
whenM (doesFileExist dest) $
removeFile dest
boolSystem "cp" $ params ++ [File src, File dest]
where
params = map snd $ filter fst
[ (SysConfig.cp_reflink_auto, Param "--reflink=auto")
, (SysConfig.cp_a, Param "-a")
, (SysConfig.cp_p && not SysConfig.cp_a, Param "-p")
]
where
params = map snd $ filter fst
[ (SysConfig.cp_reflink_auto, Param "--reflink=auto")
, (SysConfig.cp_a, Param "-a")
, (SysConfig.cp_p && not SysConfig.cp_a, Param "-p")
]
View file
@ -57,10 +57,10 @@ runClient getaddr clientaction = do
e <- takeMVar mv
disconnect client
throw e
where
threadrunner storeerr io = loop
where
loop = catchClientError (io >> loop) storeerr
where
threadrunner storeerr io = loop
where
loop = catchClientError (io >> loop) storeerr
{- Connects to the bus, and runs the client action.
-
@ -73,10 +73,10 @@ persistentClient getaddr v onretry clientaction =
{- runClient can fail with not just ClientError, but also other
- things, if dbus is not running. Let async exceptions through. -}
runClient getaddr clientaction `catchNonAsync` retry
where
retry e = do
v' <- onretry e v
persistentClient getaddr v' onretry clientaction
where
retry e = do
v' <- onretry e v
persistentClient getaddr v' onretry clientaction
{- Catches only ClientError -}
catchClientError :: IO () -> (ClientError -> IO ()) -> IO ()
View file
@ -22,27 +22,27 @@ daemonize logfd pidfile changedirectory a = do
maybe noop checkalreadyrunning pidfile
_ <- forkProcess child1
out
where
checkalreadyrunning f = maybe noop (const $ alreadyRunning)
=<< checkDaemon f
child1 = do
_ <- createSession
_ <- forkProcess child2
out
child2 = do
maybe noop lockPidFile pidfile
when changedirectory $
setCurrentDirectory "/"
nullfd <- openFd "/dev/null" ReadOnly Nothing defaultFileFlags
_ <- redir nullfd stdInput
mapM_ (redir logfd) [stdOutput, stdError]
closeFd logfd
a
out
redir newh h = do
closeFd h
dupTo newh h
out = exitImmediately ExitSuccess
where
checkalreadyrunning f = maybe noop (const $ alreadyRunning)
=<< checkDaemon f
child1 = do
_ <- createSession
_ <- forkProcess child2
out
child2 = do
maybe noop lockPidFile pidfile
when changedirectory $
setCurrentDirectory "/"
nullfd <- openFd "/dev/null" ReadOnly Nothing defaultFileFlags
_ <- redir nullfd stdInput
mapM_ (redir logfd) [stdOutput, stdError]
closeFd logfd
a
out
redir newh h = do
closeFd h
dupTo newh h
out = exitImmediately ExitSuccess
{- Locks the pid file, with an exclusive, non-blocking lock.
- Writes the pid to the file, fully atomically.
@ -62,8 +62,8 @@ lockPidFile file = do
_ <- fdWrite fd' =<< show <$> getProcessID
renameFile newfile file
closeFd fd
where
newfile = file ++ ".new"
where
newfile = file ++ ".new"
alreadyRunning :: IO ()
alreadyRunning = error "Daemon is already running."
@ -82,19 +82,19 @@ checkDaemon pidfile = do
p <- readish <$> readFile pidfile
return $ check locked p
Nothing -> return Nothing
where
check Nothing _ = Nothing
check _ Nothing = Nothing
check (Just (pid, _)) (Just pid')
| pid == pid' = Just pid
| otherwise = error $
"stale pid in " ++ pidfile ++
" (got " ++ show pid' ++
"; expected " ++ show pid ++ " )"
where
check Nothing _ = Nothing
check _ Nothing = Nothing
check (Just (pid, _)) (Just pid')
| pid == pid' = Just pid
| otherwise = error $
"stale pid in " ++ pidfile ++
" (got " ++ show pid' ++
"; expected " ++ show pid ++ " )"
{- Stops the daemon, safely. -}
stopDaemon :: FilePath -> IO ()
stopDaemon pidfile = go =<< checkDaemon pidfile
where
go Nothing = noop
go (Just pid) = signalProcess sigTERM pid
where
go Nothing = noop
go (Just pid) = signalProcess sigTERM pid
View file
@ -72,9 +72,9 @@ storageUnits =
, Unit (p 1) "kB" "kilobyte" -- weird capitalization thanks to committee
, Unit (p 0) "B" "byte"
]
where
p :: Integer -> Integer
p n = 1000^n
where
p :: Integer -> Integer
p n = 1000^n
{- Memory units are (stupidly named) powers of 2. -}
memoryUnits :: [Unit]
@ -89,9 +89,9 @@ memoryUnits =
, Unit (p 1) "KiB" "kibibyte"
, Unit (p 0) "B" "byte"
]
where
p :: Integer -> Integer
p n = 2^(n*10)
where
p :: Integer -> Integer
p n = 2^(n*10)
{- Bandwidth units are only measured in bits if you're some crazy telco. -}
bandwidthUnits :: [Unit]
@ -100,32 +100,32 @@ bandwidthUnits = error "stop trying to rip people off"
{- Do you yearn for the days when men were men and megabytes were megabytes? -}
oldSchoolUnits :: [Unit]
oldSchoolUnits = zipWith (curry mingle) storageUnits memoryUnits
where
mingle (Unit _ a n, Unit s' _ _) = Unit s' a n
where
mingle (Unit _ a n, Unit s' _ _) = Unit s' a n
{- approximate display of a particular number of bytes -}
roughSize :: [Unit] -> Bool -> ByteSize -> String
roughSize units abbrev i
| i < 0 = '-' : findUnit units' (negate i)
| otherwise = findUnit units' i
where
units' = reverse $ sort units -- largest first
where
units' = reverse $ sort units -- largest first
findUnit (u@(Unit s _ _):us) i'
| i' >= s = showUnit i' u
| otherwise = findUnit us i'
findUnit [] i' = showUnit i' (last units') -- bytes
findUnit (u@(Unit s _ _):us) i'
| i' >= s = showUnit i' u
| otherwise = findUnit us i'
findUnit [] i' = showUnit i' (last units') -- bytes
showUnit i' (Unit s a n) = let num = chop i' s in
show num ++ " " ++
(if abbrev then a else plural num n)
showUnit i' (Unit s a n) = let num = chop i' s in
show num ++ " " ++
(if abbrev then a else plural num n)
chop :: Integer -> Integer -> Integer
chop i' d = round $ (fromInteger i' :: Double) / fromInteger d
chop :: Integer -> Integer -> Integer
chop i' d = round $ (fromInteger i' :: Double) / fromInteger d
plural n u
| n == 1 = u
| otherwise = u ++ "s"
plural n u
| n == 1 = u
| otherwise = u ++ "s"
{- displays comparison of two sizes -}
compareSizes :: [Unit] -> Bool -> ByteSize -> ByteSize -> String
@ -139,22 +139,22 @@ readSize :: [Unit] -> String -> Maybe ByteSize
readSize units input
| null parsednum || null parsedunit = Nothing
| otherwise = Just $ round $ number * fromIntegral multiplier
where
(number, rest) = head parsednum
multiplier = head parsedunit
unitname = takeWhile isAlpha $ dropWhile isSpace rest
where
(number, rest) = head parsednum
multiplier = head parsedunit
unitname = takeWhile isAlpha $ dropWhile isSpace rest
parsednum = reads input :: [(Double, String)]
parsedunit = lookupUnit units unitname
parsednum = reads input :: [(Double, String)]
parsedunit = lookupUnit units unitname
lookupUnit _ [] = [1] -- no unit given, assume bytes
lookupUnit [] _ = []
lookupUnit (Unit s a n:us) v
| a ~~ v || n ~~ v = [s]
| plural n ~~ v || a ~~ byteabbrev v = [s]
| otherwise = lookupUnit us v
lookupUnit _ [] = [1] -- no unit given, assume bytes
lookupUnit [] _ = []
lookupUnit (Unit s a n:us) v
| a ~~ v || n ~~ v = [s]
| plural n ~~ v || a ~~ byteabbrev v = [s]
| otherwise = lookupUnit us v
a ~~ b = map toLower a == map toLower b
a ~~ b = map toLower a == map toLower b
plural n = n ++ "s"
byteabbrev a = a ++ "b"
plural n = n ++ "s"
byteabbrev a = a ++ "b"
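A quick sketch of readSize, relying only on the kB and KiB entries shown above (hand-computed):

    readSize storageUnits "1.5 kB"  -- Just 1500  (kilobyte = 1000)
    readSize memoryUnits  "2 KiB"   -- Just 2048  (kibibyte = 1024)
    readSize storageUnits "100"     -- Just 100   (no unit given, assume bytes)
    readSize storageUnits "oops"    -- Nothing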
View file
@ -44,46 +44,46 @@ dirContentsRecursive' (dir:dirs) = unsafeInterleaveIO $ do
(files, dirs') <- collect [] [] =<< catchDefaultIO [] (dirContents dir)
files' <- dirContentsRecursive' (dirs' ++ dirs)
return (files ++ files')
where
collect files dirs' [] = return (reverse files, reverse dirs')
collect files dirs' (entry:entries)
| dirCruft entry = collect files dirs' entries
| otherwise = do
ifM (doesDirectoryExist entry)
( collect files (entry:dirs') entries
, collect (entry:files) dirs' entries
)
where
collect files dirs' [] = return (reverse files, reverse dirs')
collect files dirs' (entry:entries)
| dirCruft entry = collect files dirs' entries
| otherwise = do
ifM (doesDirectoryExist entry)
( collect files (entry:dirs') entries
, collect (entry:files) dirs' entries
)
{- Moves one filename to another.
- First tries a rename, but falls back to moving across devices if needed. -}
moveFile :: FilePath -> FilePath -> IO ()
moveFile src dest = tryIO (rename src dest) >>= onrename
where
onrename (Right _) = noop
onrename (Left e)
| isPermissionError e = rethrow
| isDoesNotExistError e = rethrow
| otherwise = do
-- copyFile is likely not as optimised as
-- the mv command, so we'll use the latter.
-- But, mv will move into a directory if
-- dest is one, which is not desired.
whenM (isdir dest) rethrow
viaTmp mv dest undefined
where
rethrow = throw e
mv tmp _ = do
ok <- boolSystem "mv" [Param "-f",
Param src, Param tmp]
unless ok $ do
-- delete any partial
_ <- tryIO $ removeFile tmp
rethrow
isdir f = do
r <- tryIO $ getFileStatus f
case r of
(Left _) -> return False
(Right s) -> return $ isDirectory s
where
onrename (Right _) = noop
onrename (Left e)
| isPermissionError e = rethrow
| isDoesNotExistError e = rethrow
| otherwise = do
-- copyFile is likely not as optimised as
-- the mv command, so we'll use the latter.
-- But, mv will move into a directory if
-- dest is one, which is not desired.
whenM (isdir dest) rethrow
viaTmp mv dest undefined
where
rethrow = throw e
mv tmp _ = do
ok <- boolSystem "mv" [Param "-f", Param src, Param tmp]
unless ok $ do
-- delete any partial
_ <- tryIO $ removeFile tmp
rethrow
isdir f = do
r <- tryIO $ getFileStatus f
case r of
(Left _) -> return False
(Right s) -> return $ isDirectory s
{- Removes a file, which may or may not exist.
-
View file
@ -25,5 +25,5 @@ getDiskFree path = withFilePath path $ \c_path -> do
( return $ Just $ toInteger free
, return Nothing
)
where
safeErrno (Errno v) = v == 0
where
safeErrno (Errno v) = v == 0
View file
@ -10,9 +10,9 @@ module Utility.Dot where -- import qualified
{- generates a graph description from a list of lines -}
graph :: [String] -> String
graph s = unlines $ [header] ++ map indent s ++ [footer]
where
header = "digraph map {"
footer= "}"
where
header = "digraph map {"
footer= "}"
{- a node in the graph -}
graphNode :: String -> String -> String
@ -21,8 +21,8 @@ graphNode nodeid desc = label desc $ quote nodeid
{- an edge between two nodes -}
graphEdge :: String -> String -> Maybe String -> String
graphEdge fromid toid desc = indent $ maybe edge (`label` edge) desc
where
edge = quote fromid ++ " -> " ++ quote toid
where
edge = quote fromid ++ " -> " ++ quote toid
{- adds a label to a node or edge -}
label :: String -> String -> String
@ -46,18 +46,18 @@ subGraph subid l color s =
ii setcolor ++
ii s ++
indent "}"
where
-- the "cluster_" makes dot draw a box
name = quote ("cluster_" ++ subid)
setlabel = "label=" ++ quote l
setfilled = "style=" ++ quote "filled"
setcolor = "fillcolor=" ++ quote color
ii x = indent (indent x) ++ "\n"
where
-- the "cluster_" makes dot draw a box
name = quote ("cluster_" ++ subid)
setlabel = "label=" ++ quote l
setfilled = "style=" ++ quote "filled"
setcolor = "fillcolor=" ++ quote color
ii x = indent (indent x) ++ "\n"
indent ::String -> String
indent s = '\t' : s
quote :: String -> String
quote s = "\"" ++ s' ++ "\""
where
s' = filter (/= '"') s
where
s' = filter (/= '"') s
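A minimal sketch of the output, pieced together from graph, graphEdge, indent and quote above (graph indents each supplied line once more):

    graphEdge "a" "b" Nothing   -- "\t\"a\" -> \"b\""
    graph [graphEdge "a" "b" Nothing]
    -- "digraph map {\n\t\t\"a\" -> \"b\"\n}\n"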
View file
@ -37,10 +37,10 @@ removeModes ms m = m `intersectFileModes` complement (combineModes ms)
{- Runs an action after changing a file's mode, then restores the old mode. -}
withModifiedFileMode :: FilePath -> (FileMode -> FileMode) -> IO a -> IO a
withModifiedFileMode file convert a = bracket setup cleanup go
where
setup = modifyFileMode' file convert
cleanup oldmode = modifyFileMode file (const oldmode)
go _ = a
where
setup = modifyFileMode' file convert
cleanup oldmode = modifyFileMode file (const oldmode)
go _ = a
writeModes :: [FileMode]
writeModes = [ownerWriteMode, groupWriteMode, otherWriteMode]
@ -83,10 +83,10 @@ noUmask :: FileMode -> IO a -> IO a
noUmask mode a
| mode == stdFileMode = a
| otherwise = bracket setup cleanup go
where
setup = setFileCreationMask nullFileMode
cleanup = setFileCreationMask
go _ = a
where
setup = setFileCreationMask nullFileMode
cleanup = setFileCreationMask
go _ = a
combineModes :: [FileMode] -> FileMode
combineModes [] = undefined
View file
@ -43,19 +43,19 @@ type Variables = M.Map String String
- This can be repeatedly called, efficiently. -}
format :: Format -> Variables -> String
format f vars = concatMap expand f
where
expand (Const s) = s
expand (Var name j)
| "escaped_" `isPrefixOf` name =
justify j $ encode_c_strict $
getvar $ drop (length "escaped_") name
| otherwise = justify j $ getvar name
getvar name = fromMaybe "" $ M.lookup name vars
justify UnJustified s = s
justify (LeftJustified i) s = s ++ pad i s
justify (RightJustified i) s = pad i s ++ s
pad i s = take (i - length s) spaces
spaces = repeat ' '
where
expand (Const s) = s
expand (Var name j)
| "escaped_" `isPrefixOf` name =
justify j $ encode_c_strict $
getvar $ drop (length "escaped_") name
| otherwise = justify j $ getvar name
getvar name = fromMaybe "" $ M.lookup name vars
justify UnJustified s = s
justify (LeftJustified i) s = s ++ pad i s
justify (RightJustified i) s = pad i s ++ s
pad i s = take (i - length s) spaces
spaces = repeat ' '
{- Generates a Format that can be used to expand variables in a
- format string, such as "${foo} ${bar;10} ${baz;-10}\n"
@ -64,37 +64,37 @@ format f vars = concatMap expand f
-}
gen :: FormatString -> Format
gen = filter (not . empty) . fuse [] . scan [] . decode_c
where
-- The Format is built up in reverse, for efficiency,
-- and can have many adjacent Consts. Fusing it fixes both
-- problems.
fuse f [] = f
fuse f (Const c1:Const c2:vs) = fuse f $ Const (c2++c1) : vs
fuse f (v:vs) = fuse (v:f) vs
where
-- The Format is built up in reverse, for efficiency,
-- and can have many adjacent Consts. Fusing it fixes both
-- problems.
fuse f [] = f
fuse f (Const c1:Const c2:vs) = fuse f $ Const (c2++c1) : vs
fuse f (v:vs) = fuse (v:f) vs
scan f (a:b:cs)
| a == '$' && b == '{' = invar f [] cs
| otherwise = scan (Const [a] : f ) (b:cs)
scan f v = Const v : f
scan f (a:b:cs)
| a == '$' && b == '{' = invar f [] cs
| otherwise = scan (Const [a] : f ) (b:cs)
scan f v = Const v : f
invar f var [] = Const (novar var) : f
invar f var (c:cs)
| c == '}' = foundvar f var UnJustified cs
| isAlphaNum c || c == '_' = invar f (c:var) cs
| c == ';' = inpad "" f var cs
| otherwise = scan ((Const $ novar $ c:var):f) cs
invar f var [] = Const (novar var) : f
invar f var (c:cs)
| c == '}' = foundvar f var UnJustified cs
| isAlphaNum c || c == '_' = invar f (c:var) cs
| c == ';' = inpad "" f var cs
| otherwise = scan ((Const $ novar $ c:var):f) cs
inpad p f var (c:cs)
| c == '}' = foundvar f var (readjustify $ reverse p) cs
| otherwise = inpad (c:p) f var cs
inpad p f var [] = Const (novar $ p++";"++var) : f
readjustify = getjustify . fromMaybe 0 . readish
getjustify i
| i == 0 = UnJustified
| i < 0 = LeftJustified (-1 * i)
| otherwise = RightJustified i
novar v = "${" ++ reverse v
foundvar f v p = scan (Var (reverse v) p : f)
inpad p f var (c:cs)
| c == '}' = foundvar f var (readjustify $ reverse p) cs
| otherwise = inpad (c:p) f var cs
inpad p f var [] = Const (novar $ p++";"++var) : f
readjustify = getjustify . fromMaybe 0 . readish
getjustify i
| i == 0 = UnJustified
| i < 0 = LeftJustified (-1 * i)
| otherwise = RightJustified i
novar v = "${" ++ reverse v
foundvar f v p = scan (Var (reverse v) p : f)
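A hand-traced sketch of gen and format together, with M standing for Data.Map (spacing follows the justification rules above):

    format (gen "${foo} ${bar;-5}!") (M.fromList [("foo","hi"),("bar","x")])
    -- "hi x    !"   (bar left-justified in a 5-character field)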
empty :: Frag -> Bool
empty (Const "") = True
@ -106,36 +106,34 @@ empty _ = False
decode_c :: FormatString -> FormatString
decode_c [] = []
decode_c s = unescape ("", s)
where
e = '\\'
unescape (b, []) = b
-- look for escapes starting with '\'
unescape (b, v) = b ++ fst pair ++ unescape (handle $ snd pair)
where
pair = span (/= e) v
isescape x = x == e
-- \NNN is an octal encoded character
handle (x:n1:n2:n3:rest)
| isescape x && alloctal = (fromoctal, rest)
where
alloctal = isOctDigit n1 &&
isOctDigit n2 &&
isOctDigit n3
fromoctal = [chr $ readoctal [n1, n2, n3]]
readoctal o = Prelude.read $ "0o" ++ o :: Int
-- \C is used for a few special characters
handle (x:nc:rest)
| isescape x = ([echar nc], rest)
where
echar 'a' = '\a'
echar 'b' = '\b'
echar 'f' = '\f'
echar 'n' = '\n'
echar 'r' = '\r'
echar 't' = '\t'
echar 'v' = '\v'
echar a = a
handle n = ("", n)
where
e = '\\'
unescape (b, []) = b
-- look for escapes starting with '\'
unescape (b, v) = b ++ fst pair ++ unescape (handle $ snd pair)
where
pair = span (/= e) v
isescape x = x == e
-- \NNN is an octal encoded character
handle (x:n1:n2:n3:rest)
| isescape x && alloctal = (fromoctal, rest)
where
alloctal = isOctDigit n1 && isOctDigit n2 && isOctDigit n3
fromoctal = [chr $ readoctal [n1, n2, n3]]
readoctal o = Prelude.read $ "0o" ++ o :: Int
-- \C is used for a few special characters
handle (x:nc:rest)
| isescape x = ([echar nc], rest)
where
echar 'a' = '\a'
echar 'b' = '\b'
echar 'f' = '\f'
echar 'n' = '\n'
echar 'r' = '\r'
echar 't' = '\t'
echar 'v' = '\v'
echar a = a
handle n = ("", n)
{- Inverse of decode_c. -}
encode_c :: FormatString -> FormatString
@ -147,28 +145,28 @@ encode_c_strict = encode_c' isSpace
encode_c' :: (Char -> Bool) -> FormatString -> FormatString
encode_c' p = concatMap echar
where
e c = '\\' : [c]
echar '\a' = e 'a'
echar '\b' = e 'b'
echar '\f' = e 'f'
echar '\n' = e 'n'
echar '\r' = e 'r'
echar '\t' = e 't'
echar '\v' = e 'v'
echar '\\' = e '\\'
echar '"' = e '"'
echar c
| ord c < 0x20 = e_asc c -- low ascii
| ord c >= 256 = e_utf c -- unicode
| ord c > 0x7E = e_asc c -- high ascii
| p c = e_asc c -- unprintable ascii
| otherwise = [c] -- printable ascii
-- unicode character is decomposed to individual Word8s,
-- and each is shown in octal
e_utf c = showoctal =<< (Codec.Binary.UTF8.String.encode [c] :: [Word8])
e_asc c = showoctal $ ord c
showoctal i = '\\' : printf "%03o" i
where
e c = '\\' : [c]
echar '\a' = e 'a'
echar '\b' = e 'b'
echar '\f' = e 'f'
echar '\n' = e 'n'
echar '\r' = e 'r'
echar '\t' = e 't'
echar '\v' = e 'v'
echar '\\' = e '\\'
echar '"' = e '"'
echar c
| ord c < 0x20 = e_asc c -- low ascii
| ord c >= 256 = e_utf c -- unicode
| ord c > 0x7E = e_asc c -- high ascii
| p c = e_asc c -- unprintable ascii
| otherwise = [c] -- printable ascii
-- unicode character is decomposed to individual Word8s,
-- and each is shown in octal
e_utf c = showoctal =<< (Codec.Binary.UTF8.String.encode [c] :: [Word8])
e_asc c = showoctal $ ord c
showoctal i = '\\' : printf "%03o" i
{- for quickcheck -}
prop_idempotent_deencode :: String -> Bool
View file
@ -51,8 +51,8 @@ toString(NumericV f) = show f
toString (ListV l)
| null l = ""
| otherwise = (intercalate ";" $ map (escapesemi . toString) l) ++ ";"
where
escapesemi = join "\\;" . split ";"
where
escapesemi = join "\\;" . split ";"
genDesktopEntry :: String -> String -> Bool -> FilePath -> [String] -> DesktopEntry
genDesktopEntry name comment terminal program categories =
@ -64,13 +64,13 @@ genDesktopEntry name comment terminal program categories =
, item "Exec" StringV program
, item "Categories" ListV (map StringV categories)
]
where
item x c y = (x, c y)
where
item x c y = (x, c y)
buildDesktopMenuFile :: DesktopEntry -> String
buildDesktopMenuFile d = unlines ("[Desktop Entry]" : map keyvalue d) ++ "\n"
where
keyvalue (k, v) = k ++ "=" ++ toString v
where
keyvalue (k, v) = k ++ "=" ++ toString v
writeDesktopMenuFile :: DesktopEntry -> String -> IO ()
writeDesktopMenuFile d file = do
@ -115,11 +115,10 @@ userConfigDir = xdgEnvHome "CONFIG_HOME" ".config"
- to ~/Desktop. -}
userDesktopDir :: IO FilePath
userDesktopDir = maybe fallback return =<< (parse <$> xdg_user_dir)
where
parse = maybe Nothing (headMaybe . lines)
xdg_user_dir = catchMaybeIO $
readProcess "xdg-user-dir" ["DESKTOP"]
fallback = xdgEnvHome "DESKTOP_DIR" "Desktop"
where
parse = maybe Nothing (headMaybe . lines)
xdg_user_dir = catchMaybeIO $ readProcess "xdg-user-dir" ["DESKTOP"]
fallback = xdgEnvHome "DESKTOP_DIR" "Desktop"
xdgEnvHome :: String -> String -> IO String
xdgEnvHome envbase homedef = do
View file
@ -29,9 +29,9 @@ stdParams params = do
then []
else ["--batch", "--no-tty", "--use-agent"]
return $ batch ++ defaults ++ toCommand params
where
-- be quiet, even about checking the trustdb
defaults = ["--quiet", "--trust-model", "always"]
where
-- be quiet, even about checking the trustdb
defaults = ["--quiet", "--trust-model", "always"]
{- Runs gpg with some params and returns its stdout, strictly. -}
readStrict :: [CommandParam] -> IO String
@ -74,22 +74,22 @@ feedRead params passphrase feeder reader = do
params' <- stdParams $ passphrasefd ++ params
closeFd frompipe `after`
withBothHandles createProcessSuccess (proc "gpg" params') go
where
go (to, from) = do
void $ forkIO $ do
feeder to
hClose to
reader from
where
go (to, from) = do
void $ forkIO $ do
feeder to
hClose to
reader from
{- Finds gpg public keys matching some string. (Could be an email address,
- a key id, or a name.) -}
findPubKeys :: String -> IO KeyIds
findPubKeys for = KeyIds . parse <$> readStrict params
where
params = [Params "--with-colons --list-public-keys", Param for]
parse = catMaybes . map (keyIdField . split ":") . lines
keyIdField ("pub":_:_:_:f:_) = Just f
keyIdField _ = Nothing
where
params = [Params "--with-colons --list-public-keys", Param for]
parse = catMaybes . map (keyIdField . split ":") . lines
keyIdField ("pub":_:_:_:f:_) = Just f
keyIdField _ = Nothing
{- Creates a block of high-quality random data suitable to use as a cipher.
- It is armored, to avoid newlines, since gpg only reads ciphers up to the
@ -100,9 +100,9 @@ genRandom size = readStrict
, Param $ show randomquality
, Param $ show size
]
where
-- 1 is /dev/urandom; 2 is /dev/random
randomquality = 1 :: Int
where
-- 1 is /dev/urandom; 2 is /dev/random
randomquality = 1 :: Int
{- A test key. This is provided pre-generated since generating a new gpg
- key is too much work (requires too much entropy) for a test suite to
@ -173,10 +173,10 @@ keyBlock public ls = unlines
, unlines ls
, "-----END PGP "++t++" KEY BLOCK-----"
]
where
t
| public = "PUBLIC"
| otherwise = "PRIVATE"
where
t
| public = "PUBLIC"
| otherwise = "PRIVATE"
{- Runs an action using gpg in a test harness, in which gpg does
- not use ~/.gpg/, but a directory with the test key set up to be used. -}
@ -184,20 +184,20 @@ testHarness :: IO a -> IO a
testHarness a = do
orig <- getEnv var
bracket setup (cleanup orig) (const a)
where
var = "GNUPGHOME"
where
var = "GNUPGHOME"
setup = do
base <- getTemporaryDirectory
dir <- mktmpdir $ base </> "gpgtmpXXXXXX"
setEnv var dir True
_ <- pipeStrict [Params "--import -q"] $ unlines
[testSecretKey, testKey]
return dir
setup = do
base <- getTemporaryDirectory
dir <- mktmpdir $ base </> "gpgtmpXXXXXX"
setEnv var dir True
_ <- pipeStrict [Params "--import -q"] $ unlines
[testSecretKey, testKey]
return dir
cleanup orig tmpdir = removeDirectoryRecursive tmpdir >> reset orig
reset (Just v) = setEnv var v True
reset _ = unsetEnv var
cleanup orig tmpdir = removeDirectoryRecursive tmpdir >> reset orig
reset (Just v) = setEnv var v True
reset _ = unsetEnv var
{- Tests the test harness. -}
testTestHarness :: IO Bool
View file
@ -17,10 +17,10 @@ parseDuration s = do
num <- readish s :: Maybe Integer
units <- findUnits =<< lastMaybe s
return $ fromIntegral num * units
where
findUnits 's' = Just 1
findUnits 'm' = Just 60
findUnits 'h' = Just $ 60 * 60
findUnits 'd' = Just $ 60 * 60 * 24
findUnits 'y' = Just $ 60 * 60 * 24 * 365
findUnits _ = Nothing
where
findUnits 's' = Just 1
findUnits 'm' = Just 60
findUnits 'h' = Just $ 60 * 60
findUnits 'd' = Just $ 60 * 60 * 24
findUnits 'y' = Just $ 60 * 60 * 24 * 365
findUnits _ = Nothing
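A short sketch of parseDuration, with the seconds taken from the unit table above:

    parseDuration "300s"  -- Just 300
    parseDuration "5m"    -- Just 300
    parseDuration "2h"    -- Just 7200
    parseDuration "1y"    -- Just 31536000
    parseDuration "5"     -- Nothing (no unit suffix)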
View file
@ -59,116 +59,116 @@ watchDir i dir ignored hooks
withLock lock $
mapM_ scan =<< filter (not . dirCruft) <$>
getDirectoryContents dir
where
recurse d = watchDir i d ignored hooks
where
recurse d = watchDir i d ignored hooks
-- Select only inotify events required by the enabled
-- hooks, but always include Create so new directories can
-- be scanned.
watchevents = Create : addevents ++ delevents ++ modifyevents
addevents
| hashook addHook || hashook addSymlinkHook = [MoveIn, CloseWrite]
| otherwise = []
delevents
| hashook delHook || hashook delDirHook = [MoveOut, Delete]
| otherwise = []
modifyevents
| hashook modifyHook = [Modify]
| otherwise = []
-- Select only inotify events required by the enabled
-- hooks, but always include Create so new directories can
-- be scanned.
watchevents = Create : addevents ++ delevents ++ modifyevents
addevents
| hashook addHook || hashook addSymlinkHook = [MoveIn, CloseWrite]
| otherwise = []
delevents
| hashook delHook || hashook delDirHook = [MoveOut, Delete]
| otherwise = []
modifyevents
| hashook modifyHook = [Modify]
| otherwise = []
scan f = unless (ignored f) $ do
ms <- getstatus f
case ms of
Nothing -> return ()
Just s
| Files.isDirectory s ->
recurse $ indir f
| Files.isSymbolicLink s ->
runhook addSymlinkHook f ms
| Files.isRegularFile s ->
runhook addHook f ms
| otherwise ->
noop
scan f = unless (ignored f) $ do
ms <- getstatus f
case ms of
Nothing -> return ()
Just s
| Files.isDirectory s ->
recurse $ indir f
| Files.isSymbolicLink s ->
runhook addSymlinkHook f ms
| Files.isRegularFile s ->
runhook addHook f ms
| otherwise ->
noop
-- Ignore creation events for regular files, which won't be
-- done being written when initially created, but handle for
-- directories and symlinks.
go (Created { isDirectory = isd, filePath = f })
| isd = recurse $ indir f
| hashook addSymlinkHook =
checkfiletype Files.isSymbolicLink addSymlinkHook f
| otherwise = noop
-- Closing a file is assumed to mean it's done being written.
go (Closed { isDirectory = False, maybeFilePath = Just f }) =
checkfiletype Files.isRegularFile addHook f
-- When a file or directory is moved in, scan it to add new
-- stuff.
go (MovedIn { filePath = f }) = scan f
go (MovedOut { isDirectory = isd, filePath = f })
| isd = runhook delDirHook f Nothing
| otherwise = runhook delHook f Nothing
-- Verify that the deleted item really doesn't exist,
-- since there can be spurious deletion events for items
-- in a directory that has been moved out, but is still
-- being watched.
go (Deleted { isDirectory = isd, filePath = f })
| isd = guarded $ runhook delDirHook f Nothing
| otherwise = guarded $ runhook delHook f Nothing
where
guarded = unlessM (filetype (const True) f)
go (Modified { isDirectory = isd, maybeFilePath = Just f })
| isd = noop
| otherwise = runhook modifyHook f Nothing
go _ = noop
-- Ignore creation events for regular files, which won't be
-- done being written when initially created, but handle for
-- directories and symlinks.
go (Created { isDirectory = isd, filePath = f })
| isd = recurse $ indir f
| hashook addSymlinkHook =
checkfiletype Files.isSymbolicLink addSymlinkHook f
| otherwise = noop
-- Closing a file is assumed to mean it's done being written.
go (Closed { isDirectory = False, maybeFilePath = Just f }) =
checkfiletype Files.isRegularFile addHook f
-- When a file or directory is moved in, scan it to add new
-- stuff.
go (MovedIn { filePath = f }) = scan f
go (MovedOut { isDirectory = isd, filePath = f })
| isd = runhook delDirHook f Nothing
| otherwise = runhook delHook f Nothing
-- Verify that the deleted item really doesn't exist,
-- since there can be spurious deletion events for items
-- in a directory that has been moved out, but is still
-- being watched.
go (Deleted { isDirectory = isd, filePath = f })
| isd = guarded $ runhook delDirHook f Nothing
| otherwise = guarded $ runhook delHook f Nothing
where
guarded = unlessM (filetype (const True) f)
go (Modified { isDirectory = isd, maybeFilePath = Just f })
| isd = noop
| otherwise = runhook modifyHook f Nothing
go _ = noop
hashook h = isJust $ h hooks
hashook h = isJust $ h hooks
runhook h f s
| ignored f = noop
| otherwise = maybe noop (\a -> a (indir f) s) (h hooks)
runhook h f s
| ignored f = noop
| otherwise = maybe noop (\a -> a (indir f) s) (h hooks)
indir f = dir </> f
indir f = dir </> f
getstatus f = catchMaybeIO $ getSymbolicLinkStatus $ indir f
checkfiletype check h f = do
ms <- getstatus f
case ms of
Just s
| check s -> runhook h f ms
_ -> noop
filetype t f = catchBoolIO $ t <$> getSymbolicLinkStatus (indir f)
getstatus f = catchMaybeIO $ getSymbolicLinkStatus $ indir f
checkfiletype check h f = do
ms <- getstatus f
case ms of
Just s
| check s -> runhook h f ms
_ -> noop
filetype t f = catchBoolIO $ t <$> getSymbolicLinkStatus (indir f)
-- Inotify fails when there are too many watches with a
-- disk full error.
failedaddwatch e
| isFullError e =
case errHook hooks of
Nothing -> throw e
Just hook -> tooManyWatches hook dir
| otherwise = throw e
-- Inotify fails when there are too many watches with a
-- disk full error.
failedaddwatch e
| isFullError e =
case errHook hooks of
Nothing -> throw e
Just hook -> tooManyWatches hook dir
| otherwise = throw e
tooManyWatches :: (String -> Maybe FileStatus -> IO ()) -> FilePath -> IO ()
tooManyWatches hook dir = do
sysctlval <- querySysctl [Param maxwatches] :: IO (Maybe Integer)
hook (unlines $ basewarning : maybe withoutsysctl withsysctl sysctlval) Nothing
where
maxwatches = "fs.inotify.max_user_watches"
basewarning = "Too many directories to watch! (Not watching " ++ dir ++")"
withoutsysctl = ["Increase the value in /proc/sys/fs/inotify/max_user_watches"]
withsysctl n = let new = n * 10 in
[ "Increase the limit permanently by running:"
, " echo " ++ maxwatches ++ "=" ++ show new ++
" | sudo tee -a /etc/sysctl.conf; sudo sysctl -p"
, "Or temporarily by running:"
, " sudo sysctl -w " ++ maxwatches ++ "=" ++ show new
]
where
maxwatches = "fs.inotify.max_user_watches"
basewarning = "Too many directories to watch! (Not watching " ++ dir ++")"
withoutsysctl = ["Increase the value in /proc/sys/fs/inotify/max_user_watches"]
withsysctl n = let new = n * 10 in
[ "Increase the limit permanently by running:"
, " echo " ++ maxwatches ++ "=" ++ show new ++
" | sudo tee -a /etc/sysctl.conf; sudo sysctl -p"
, "Or temporarily by running:"
, " sudo sysctl -w " ++ maxwatches ++ "=" ++ show new
]
querySysctl :: Read a => [CommandParam] -> IO (Maybe a)
querySysctl ps = getM go ["sysctl", "/sbin/sysctl", "/usr/sbin/sysctl"]
where
go p = do
v <- catchMaybeIO $ readProcess p (toCommand ps)
case v of
Nothing -> return Nothing
Just s -> return $ parsesysctl s
parsesysctl s = readish =<< lastMaybe (words s)
where
go p = do
v <- catchMaybeIO $ readProcess p (toCommand ps)
case v of
Nothing -> return Nothing
Just s -> return $ parsesysctl s
parsesysctl s = readish =<< lastMaybe (words s)
View file
@ -21,15 +21,15 @@ start :: JSON a => [(String, a)] -> String
start l
| last s == endchar = init s
| otherwise = bad s
where
s = encodeStrict $ toJSObject l
where
s = encodeStrict $ toJSObject l
add :: JSON a => [(String, a)] -> String
add l
| head s == startchar = ',' : drop 1 s
| otherwise = bad s
where
s = start l
where
s = start l
end :: String
end = [endchar, '\n']
View file
@ -78,44 +78,44 @@ getDirInfo dir = do
l <- filter (not . dirCruft) <$> getDirectoryContents dir
contents <- S.fromList . catMaybes <$> mapM getDirEnt l
return $ DirInfo dir contents
where
getDirEnt f = catchMaybeIO $ do
s <- getFileStatus (dir </> f)
return $ DirEnt f (fileID s) (isDirectory s)
where
getDirEnt f = catchMaybeIO $ do
s <- getFileStatus (dir </> f)
return $ DirEnt f (fileID s) (isDirectory s)
{- Difference between the dirCaches of two DirInfos. -}
(//) :: DirInfo -> DirInfo -> [Change]
oldc // newc = deleted ++ added
where
deleted = calc gendel oldc newc
added = calc genadd newc oldc
gendel x = (if isSubDir x then DeletedDir else Deleted) $
dirName oldc </> dirEnt x
genadd x = Added $ dirName newc </> dirEnt x
calc a x y = map a $ S.toList $
S.difference (dirCache x) (dirCache y)
where
deleted = calc gendel oldc newc
added = calc genadd newc oldc
gendel x = (if isSubDir x then DeletedDir else Deleted) $
dirName oldc </> dirEnt x
genadd x = Added $ dirName newc </> dirEnt x
calc a x y = map a $ S.toList $
S.difference (dirCache x) (dirCache y)
{- Builds a map of directories in a tree, possibly pruning some.
- Opens each directory in the tree, and records its current contents. -}
scanRecursive :: FilePath -> Pruner -> IO DirMap
scanRecursive topdir prune = M.fromList <$> walk [] [topdir]
where
walk c [] = return c
walk c (dir:rest)
| prune dir = walk c rest
| otherwise = do
minfo <- catchMaybeIO $ getDirInfo dir
case minfo of
Nothing -> walk c rest
Just info -> do
mfd <- catchMaybeIO $
openFd dir ReadOnly Nothing defaultFileFlags
case mfd of
Nothing -> walk c rest
Just fd -> do
let subdirs = map (dir </>) . map dirEnt $
S.toList $ dirCache info
walk ((fd, info):c) (subdirs ++ rest)
where
walk c [] = return c
walk c (dir:rest)
| prune dir = walk c rest
| otherwise = do
minfo <- catchMaybeIO $ getDirInfo dir
case minfo of
Nothing -> walk c rest
Just info -> do
mfd <- catchMaybeIO $
openFd dir ReadOnly Nothing defaultFileFlags
case mfd of
Nothing -> walk c rest
Just fd -> do
let subdirs = map (dir </>) . map dirEnt $
S.toList $ dirCache info
walk ((fd, info):c) (subdirs ++ rest)
{- Adds a list of subdirectories (and all their children), unless pruned, to a
- directory map. Adding a subdirectory that's already in the map will
@ -131,16 +131,16 @@ removeSubDir :: DirMap -> FilePath -> IO DirMap
removeSubDir dirmap dir = do
mapM_ closeFd $ M.keys toremove
return rest
where
(toremove, rest) = M.partition (dirContains dir . dirName) dirmap
where
(toremove, rest) = M.partition (dirContains dir . dirName) dirmap
findDirContents :: DirMap -> FilePath -> [FilePath]
findDirContents dirmap dir = concatMap absolutecontents $ search
where
absolutecontents i = map (dirName i </>)
(map dirEnt $ S.toList $ dirCache i)
search = map snd $ M.toList $
M.filter (\i -> dirName i == dir) dirmap
where
absolutecontents i = map (dirName i </>)
(map dirEnt $ S.toList $ dirCache i)
search = map snd $ M.toList $
M.filter (\i -> dirName i == dir) dirmap
foreign import ccall safe "libkqueue.h init_kqueue" c_init_kqueue
:: IO Fd
@ -181,8 +181,8 @@ waitChange kq@(Kqueue h _ dirmap _) = do
else case M.lookup changedfd dirmap of
Nothing -> nochange
Just info -> handleChange kq changedfd info
where
nochange = return (kq, [])
where
nochange = return (kq, [])
{- The kqueue interface does not tell what type of change took place in
- the directory; it could be an added file, a deleted file, a renamed
@ -196,36 +196,36 @@ waitChange kq@(Kqueue h _ dirmap _) = do
handleChange :: Kqueue -> Fd -> DirInfo -> IO (Kqueue, [Change])
handleChange kq@(Kqueue _ _ dirmap pruner) fd olddirinfo =
go =<< catchMaybeIO (getDirInfo $ dirName olddirinfo)
where
go (Just newdirinfo) = do
let changes = filter (not . pruner . changedFile) $
olddirinfo // newdirinfo
let (added, deleted) = partition isAdd changes
where
go (Just newdirinfo) = do
let changes = filter (not . pruner . changedFile) $
olddirinfo // newdirinfo
let (added, deleted) = partition isAdd changes
-- Scan newly added directories to add to the map.
-- (Newly added files will fail getDirInfo.)
newdirinfos <- catMaybes <$>
mapM (catchMaybeIO . getDirInfo . changedFile) added
newmap <- addSubDirs dirmap pruner $ map dirName newdirinfos
-- Scan newly added directories to add to the map.
-- (Newly added files will fail getDirInfo.)
newdirinfos <- catMaybes <$>
mapM (catchMaybeIO . getDirInfo . changedFile) added
newmap <- addSubDirs dirmap pruner $ map dirName newdirinfos
-- Remove deleted directories from the map.
newmap' <- foldM removeSubDir newmap (map changedFile deleted)
-- Remove deleted directories from the map.
newmap' <- foldM removeSubDir newmap (map changedFile deleted)
-- Update the cached dirinfo just looked up.
let newmap'' = M.insertWith' const fd newdirinfo newmap'
-- Update the cached dirinfo just looked up.
let newmap'' = M.insertWith' const fd newdirinfo newmap'
-- When new directories were added, need to update
-- the kqueue to watch them.
let kq' = kq { kqueueMap = newmap'' }
unless (null newdirinfos) $
updateKqueue kq'
-- When new directories were added, need to update
-- the kqueue to watch them.
let kq' = kq { kqueueMap = newmap'' }
unless (null newdirinfos) $
updateKqueue kq'
return (kq', changes)
go Nothing = do
-- The directory has been moved or deleted, so
-- remove it from our map.
newmap <- removeSubDir dirmap (dirName olddirinfo)
return (kq { kqueueMap = newmap }, [])
return (kq', changes)
go Nothing = do
-- The directory has been moved or deleted, so
-- remove it from our map.
newmap <- removeSubDir dirmap (dirName olddirinfo)
return (kq { kqueueMap = newmap }, [])
{- Processes changes on the Kqueue, calling the hooks as appropriate.
- Never returns. -}
@ -235,35 +235,33 @@ runHooks kq hooks = do
-- to catch any files created beforehand.
recursiveadd (kqueueMap kq) (Added $ kqueueTop kq)
loop kq
where
loop q = do
(q', changes) <- waitChange q
forM_ changes $ dispatch (kqueueMap q')
loop q'
where
loop q = do
(q', changes) <- waitChange q
forM_ changes $ dispatch (kqueueMap q')
loop q'
dispatch _ change@(Deleted _) =
callhook delHook Nothing change
dispatch _ change@(DeletedDir _) =
callhook delDirHook Nothing change
dispatch dirmap change@(Added _) =
withstatus change $ dispatchadd dirmap
dispatch _ change@(Deleted _) =
callhook delHook Nothing change
dispatch _ change@(DeletedDir _) =
callhook delDirHook Nothing change
dispatch dirmap change@(Added _) =
withstatus change $ dispatchadd dirmap
dispatchadd dirmap change s
| Files.isSymbolicLink s =
callhook addSymlinkHook (Just s) change
| Files.isDirectory s = recursiveadd dirmap change
| Files.isRegularFile s =
callhook addHook (Just s) change
| otherwise = noop
dispatchadd dirmap change s
| Files.isSymbolicLink s = callhook addSymlinkHook (Just s) change
| Files.isDirectory s = recursiveadd dirmap change
| Files.isRegularFile s = callhook addHook (Just s) change
| otherwise = noop
recursiveadd dirmap change = do
let contents = findDirContents dirmap $ changedFile change
forM_ contents $ \f ->
withstatus (Added f) $ dispatchadd dirmap
recursiveadd dirmap change = do
let contents = findDirContents dirmap $ changedFile change
forM_ contents $ \f ->
withstatus (Added f) $ dispatchadd dirmap
callhook h s change = case h hooks of
Nothing -> noop
Just a -> a (changedFile change) s
callhook h s change = case h hooks of
Nothing -> noop
Just a -> a (changedFile change) s
withstatus change a = maybe noop (a change) =<<
(catchMaybeIO (getSymbolicLinkStatus (changedFile change)))
withstatus change a = maybe noop (a change) =<<
(catchMaybeIO (getSymbolicLinkStatus (changedFile change)))
View file
@ -23,9 +23,9 @@ rotateLog logfile num
| otherwise = whenM (doesFileExist currfile) $ do
rotateLog logfile (num + 1)
renameFile currfile nextfile
where
currfile = filename num
nextfile = filename (num + 1)
filename n
| n == 0 = logfile
| otherwise = logfile ++ "." ++ show n
where
currfile = filename num
nextfile = filename (num + 1)
filename n
| n == 0 = logfile
| otherwise = logfile ++ "." ++ show n
View file
@ -36,8 +36,8 @@ query :: [String] -> IO [(FilePath, LsofOpenMode, ProcessInfo)]
query opts =
withHandle StdoutHandle (createProcessChecked checkSuccessProcess) p $ \h -> do
parse <$> hGetContentsStrict h
where
p = proc "lsof" ("-F0can" : opts)
where
p = proc "lsof" ("-F0can" : opts)
{- Parsing null-delimited output like:
-
@ -51,38 +51,36 @@ query opts =
-}
parse :: String -> [(FilePath, LsofOpenMode, ProcessInfo)]
parse s = bundle $ go [] $ lines s
where
bundle = concatMap (\(fs, p) -> map (\(f, m) -> (f, m, p)) fs)
where
bundle = concatMap (\(fs, p) -> map (\(f, m) -> (f, m, p)) fs)
go c [] = c
go c ((t:r):ls)
| t == 'p' =
let (fs, ls') = parsefiles [] ls
in go ((fs, parseprocess r):c) ls'
| otherwise = parsefail
go _ _ = parsefail
go c [] = c
go c ((t:r):ls)
| t == 'p' =
let (fs, ls') = parsefiles [] ls
in go ((fs, parseprocess r):c) ls'
| otherwise = parsefail
go _ _ = parsefail
parseprocess l =
case splitnull l of
[pid, 'c':cmdline, ""] ->
case readish pid of
(Just n) -> ProcessInfo n cmdline
Nothing -> parsefail
_ -> parsefail
parseprocess l = case splitnull l of
[pid, 'c':cmdline, ""] ->
case readish pid of
(Just n) -> ProcessInfo n cmdline
Nothing -> parsefail
_ -> parsefail
parsefiles c [] = (c, [])
parsefiles c (l:ls) =
case splitnull l of
['a':mode, 'n':file, ""] ->
parsefiles ((file, parsemode mode):c) ls
(('p':_):_) -> (c, l:ls)
_ -> parsefail
parsefiles c [] = (c, [])
parsefiles c (l:ls) = case splitnull l of
['a':mode, 'n':file, ""] ->
parsefiles ((file, parsemode mode):c) ls
(('p':_):_) -> (c, l:ls)
_ -> parsefail
parsemode ('r':_) = OpenReadOnly
parsemode ('w':_) = OpenWriteOnly
parsemode ('u':_) = OpenReadWrite
parsemode _ = OpenUnknown
parsemode ('r':_) = OpenReadOnly
parsemode ('w':_) = OpenWriteOnly
parsemode ('u':_) = OpenReadWrite
parsemode _ = OpenUnknown
splitnull = split "\0"
splitnull = split "\0"
parsefail = error $ "failed to parse lsof output: " ++ show s
parsefail = error $ "failed to parse lsof output: " ++ show s
View file
@ -58,36 +58,36 @@ tokens = words "and or not ( )"
{- Converts a list of Tokens into a Matcher. -}
generate :: [Token op] -> Matcher op
generate = go MAny
where
go m [] = m
go m ts = uncurry go $ consume m ts
where
go m [] = m
go m ts = uncurry go $ consume m ts
{- Consumes one or more Tokens, constructs a new Matcher,
- and returns unconsumed Tokens. -}
consume :: Matcher op -> [Token op] -> (Matcher op, [Token op])
consume m [] = (m, [])
consume m (t:ts) = go t
where
go And = cont $ m `MAnd` next
go Or = cont $ m `MOr` next
go Not = cont $ m `MAnd` MNot next
go Open = let (n, r) = consume next rest in (m `MAnd` n, r)
go Close = (m, ts)
go (Operation o) = (m `MAnd` MOp o, ts)
where
go And = cont $ m `MAnd` next
go Or = cont $ m `MOr` next
go Not = cont $ m `MAnd` MNot next
go Open = let (n, r) = consume next rest in (m `MAnd` n, r)
go Close = (m, ts)
go (Operation o) = (m `MAnd` MOp o, ts)
(next, rest) = consume MAny ts
cont v = (v, rest)
(next, rest) = consume MAny ts
cont v = (v, rest)
{- Checks if a Matcher matches, using a supplied function to check
- the value of Operations. -}
match :: (op -> v -> Bool) -> Matcher op -> v -> Bool
match a m v = go m
where
go MAny = True
go (MAnd m1 m2) = go m1 && go m2
go (MOr m1 m2) = go m1 || go m2
go (MNot m1) = not $ go m1
go (MOp o) = a o v
where
go MAny = True
go (MAnd m1 m2) = go m1 && go m2
go (MOr m1 m2) = go m1 || go m2
go (MNot m1) = not $ go m1
go (MOp o) = a o v
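A small sketch of generate and match with predicate operations, applying each stored predicate via ($):

    m = generate [Operation (> 3), And, Not, Operation (> 10)] :: Matcher (Int -> Bool)
    match ($) m 5    -- True:  5 > 3 and not (5 > 10)
    match ($) m 15   -- False: rejected by the Not clause
    match ($) m 2    -- False: fails (> 3)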
{- Runs a monadic Matcher, where Operations are actions in the monad. -}
matchM :: Monad m => Matcher (v -> m Bool) -> v -> m Bool
@ -98,12 +98,12 @@ matchM m v = matchMrun m $ \o -> o v
- parameter. -}
matchMrun :: forall o (m :: * -> *). Monad m => Matcher o -> (o -> m Bool) -> m Bool
matchMrun m run = go m
where
go MAny = return True
go (MAnd m1 m2) = go m1 <&&> go m2
go (MOr m1 m2) = go m1 <||> go m2
go (MNot m1) = liftM not (go m1)
go (MOp o) = run o
where
go MAny = return True
go (MAnd m1 m2) = go m1 <&&> go m2
go (MOr m1 m2) = go m1 <||> go m2
go (MNot m1) = liftM not (go m1)
go (MOp o) = run o
{- Checks if a matcher contains no limits. -}
isEmpty :: Matcher a -> Bool
View file
@ -33,10 +33,10 @@ readFileStrict = readFile >=> \s -> length s `seq` return s
-}
separate :: (a -> Bool) -> [a] -> ([a], [a])
separate c l = unbreak $ break c l
where
unbreak r@(a, b)
| null b = r
| otherwise = (a, tail b)
where
unbreak r@(a, b)
| null b = r
| otherwise = (a, tail b)
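A quick sketch of separate, read straight off the break/unbreak definition above:

    separate (== ':') "foo:bar"  -- ("foo","bar")
    separate (== ':') ":bar"     -- ("","bar")
    separate (== ':') "foobar"   -- ("foobar","")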
{- Breaks out the first line. -}
firstLine :: String -> String
@ -47,11 +47,11 @@ firstLine = takeWhile (/= '\n')
- Segments may be empty. -}
segment :: (a -> Bool) -> [a] -> [[a]]
segment p l = map reverse $ go [] [] l
where
go c r [] = reverse $ c:r
go c r (i:is)
| p i = go [] (c:r) is
| otherwise = go (i:c) r is
where
go c r [] = reverse $ c:r
go c r (i:is)
| p i = go [] (c:r) is
| otherwise = go (i:c) r is
prop_segment_regressionTest :: Bool
prop_segment_regressionTest = all id
@ -64,11 +64,11 @@ prop_segment_regressionTest = all id
{- Includes the delimiters as segments of their own. -}
segmentDelim :: (a -> Bool) -> [a] -> [[a]]
segmentDelim p l = map reverse $ go [] [] l
where
go c r [] = reverse $ c:r
go c r (i:is)
| p i = go [] ([i]:c:r) is
| otherwise = go (i:c) r is
where
go c r [] = reverse $ c:r
go c r (i:is)
| p i = go [] ([i]:c:r) is
| otherwise = go (i:c) r is
{- Given two orderings, returns the second if the first is EQ and returns
- the first otherwise.
@ -96,9 +96,9 @@ hGetSomeString h sz = do
fp <- mallocForeignPtrBytes sz
len <- withForeignPtr fp $ \buf -> hGetBufSome h buf sz
map (chr . fromIntegral) <$> withForeignPtr fp (peekbytes len)
where
peekbytes :: Int -> Ptr Word8 -> IO [Word8]
peekbytes len buf = mapM (peekElemOff buf) [0..pred len]
where
peekbytes :: Int -> Ptr Word8 -> IO [Word8]
peekbytes len buf = mapM (peekElemOff buf) [0..pred len]
{- Reaps any zombie git processes.
-
View file
@ -41,21 +41,21 @@ getMounts = do
_ <- c_mounts_end h
return mntent
where
getmntent h c = do
ptr <- c_mounts_next h
if (ptr == nullPtr)
then return $ reverse c
else do
mnt_fsname_str <- #{peek struct mntent, mnt_fsname} ptr >>= peekCString
mnt_dir_str <- #{peek struct mntent, mnt_dir} ptr >>= peekCString
mnt_type_str <- #{peek struct mntent, mnt_type} ptr >>= peekCString
let ent = Mntent
{ mnt_fsname = mnt_fsname_str
, mnt_dir = mnt_dir_str
, mnt_type = mnt_type_str
}
getmntent h (ent:c)
where
getmntent h c = do
ptr <- c_mounts_next h
if (ptr == nullPtr)
then return $ reverse c
else do
mnt_fsname_str <- #{peek struct mntent, mnt_fsname} ptr >>= peekCString
mnt_dir_str <- #{peek struct mntent, mnt_dir} ptr >>= peekCString
mnt_type_str <- #{peek struct mntent, mnt_type} ptr >>= peekCString
let ent = Mntent
{ mnt_fsname = mnt_fsname_str
, mnt_dir = mnt_dir_str
, mnt_type = mnt_type_str
}
getmntent h (ent:c)
{- Using unsafe imports because the C functions are believed to never block.
- Note that getmntinfo is called with MNT_NOWAIT to avoid possibly blocking;
View file
@ -17,6 +17,5 @@ import Control.Applicative
- use uname -n when available. -}
getHostname :: IO (Maybe String)
getHostname = catchMaybeIO uname_node
where
uname_node = takeWhile (/= '\n') <$>
readProcess "uname" ["-n"]
where
uname_node = takeWhile (/= '\n') <$> readProcess "uname" ["-n"]
View file
@ -45,13 +45,13 @@ newNotificationHandle :: NotificationBroadcaster -> IO NotificationHandle
newNotificationHandle b = NotificationHandle
<$> pure b
<*> addclient
where
addclient = do
s <- newEmptySV
atomically $ do
l <- takeTMVar b
putTMVar b $ l ++ [s]
return $ NotificationId $ length l
where
addclient = do
s <- newEmptySV
atomically $ do
l <- takeTMVar b
putTMVar b $ l ++ [s]
return $ NotificationId $ length l
{- Extracts the identifier from a notification handle.
- This can be used to eg, pass the identifier through to a WebApp. -}
@ -66,8 +66,8 @@ sendNotification :: NotificationBroadcaster -> IO ()
sendNotification b = do
l <- atomically $ readTMVar b
mapM_ notify l
where
notify s = writeSV s ()
where
notify s = writeSV s ()
{- Used by a client to block until a new notification is available since
- the last time it tried. -}
View file
@ -23,13 +23,13 @@ inParallel a l = do
mvars <- mapM thread l
statuses <- mapM takeMVar mvars
return $ reduce $ partition snd $ zip l statuses
where
reduce (x,y) = (map fst x, map fst y)
thread v = do
mvar <- newEmptyMVar
_ <- forkIO $ do
r <- try (a v) :: IO (Either SomeException Bool)
case r of
Left _ -> putMVar mvar False
Right b -> putMVar mvar b
return mvar
where
reduce (x,y) = (map fst x, map fst y)
thread v = do
mvar <- newEmptyMVar
_ <- forkIO $ do
r <- try (a v) :: IO (Either SomeException Bool)
case r of
Left _ -> putMVar mvar False
Right b -> putMVar mvar b
return mvar
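A tiny sketch of inParallel in action, using a pure predicate lifted into IO; an exception in the action also counts as failure:

    inParallel (return . even) [1..5 :: Int]   -- IO action yielding ([2,4],[1,3,5])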
View file
@ -23,18 +23,18 @@ parentDir :: FilePath -> FilePath
parentDir dir
| not $ null dirs = slash ++ join s (init dirs)
| otherwise = ""
where
dirs = filter (not . null) $ split s dir
slash = if isAbsolute dir then s else ""
s = [pathSeparator]
where
dirs = filter (not . null) $ split s dir
slash = if isAbsolute dir then s else ""
s = [pathSeparator]
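A short sketch of parentDir, assuming a POSIX pathSeparator:

    parentDir "/tmp/foo/bar"  -- "/tmp/foo"
    parentDir "/tmp"          -- "/"
    parentDir "foo"           -- ""
    parentDir "/"             -- ""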
prop_parentDir_basics :: FilePath -> Bool
prop_parentDir_basics dir
| null dir = True
| dir == "/" = parentDir dir == ""
| otherwise = p /= dir
where
p = parentDir dir
where
p = parentDir dir
{- Checks if the first FilePath is, or could be said to contain the second.
- For example, "foo/" contains "foo/bar". Also, "foo", "./foo", "foo/" etc
@ -42,10 +42,10 @@ prop_parentDir_basics dir
-}
dirContains :: FilePath -> FilePath -> Bool
dirContains a b = a == b || a' == b' || (a'++"/") `isPrefixOf` b'
where
norm p = fromMaybe "" $ absNormPath p "."
a' = norm a
b' = norm b
where
norm p = fromMaybe "" $ absNormPath p "."
a' = norm a
b' = norm b
{- Converts a filename into a normalized, absolute path.
-
@ -60,8 +60,8 @@ absPath file = do
- from the specified cwd. -}
absPathFrom :: FilePath -> FilePath -> FilePath
absPathFrom cwd file = fromMaybe bad $ absNormPath cwd file
where
bad = error $ "unable to normalize " ++ file
where
bad = error $ "unable to normalize " ++ file
{- Constructs a relative path from the CWD to a file.
-
@ -78,31 +78,31 @@ relPathCwdToFile f = relPathDirToFile <$> getCurrentDirectory <*> absPath f
-}
relPathDirToFile :: FilePath -> FilePath -> FilePath
relPathDirToFile from to = join s $ dotdots ++ uncommon
where
s = [pathSeparator]
pfrom = split s from
pto = split s to
common = map fst $ takeWhile same $ zip pfrom pto
same (c,d) = c == d
uncommon = drop numcommon pto
dotdots = replicate (length pfrom - numcommon) ".."
numcommon = length common
where
s = [pathSeparator]
pfrom = split s from
pto = split s to
common = map fst $ takeWhile same $ zip pfrom pto
same (c,d) = c == d
uncommon = drop numcommon pto
dotdots = replicate (length pfrom - numcommon) ".."
numcommon = length common
prop_relPathDirToFile_basics :: FilePath -> FilePath -> Bool
prop_relPathDirToFile_basics from to
| from == to = null r
| otherwise = not (null r)
where
r = relPathDirToFile from to
where
r = relPathDirToFile from to
prop_relPathDirToFile_regressionTest :: Bool
prop_relPathDirToFile_regressionTest = same_dir_shortcurcuits_at_difference
where
{- Two paths have the same directory component at the same
- location, but it's not really the same directory.
- Code used to get this wrong. -}
same_dir_shortcurcuits_at_difference =
relPathDirToFile "/tmp/r/lll/xxx/yyy/18" "/tmp/r/.git/annex/objects/18/gk/SHA256-foo/SHA256-foo" == "../../../../.git/annex/objects/18/gk/SHA256-foo/SHA256-foo"
where
{- Two paths have the same directory component at the same
- location, but it's not really the same directory.
- Code used to get this wrong. -}
same_dir_shortcurcuits_at_difference =
relPathDirToFile "/tmp/r/lll/xxx/yyy/18" "/tmp/r/.git/annex/objects/18/gk/SHA256-foo/SHA256-foo" == "../../../../.git/annex/objects/18/gk/SHA256-foo/SHA256-foo"
{- Given an original list of paths, and an expanded list derived from it,
- generates a list of lists, where each sublist corresponds to one of the
@@ -114,8 +114,8 @@ segmentPaths :: [FilePath] -> [FilePath] -> [[FilePath]]
segmentPaths [] new = [new]
segmentPaths [_] new = [new] -- optimisation
segmentPaths (l:ls) new = [found] ++ segmentPaths ls rest
where
(found, rest)=partition (l `dirContains`) new
where
(found, rest)=partition (l `dirContains`) new
{- This assumes that it's cheaper to call segmentPaths on the result,
- than it would be to run the action separately with each path. In
@@ -135,8 +135,8 @@ relHome path = do
{- Checks if a command is available in PATH. -}
inPath :: String -> IO Bool
inPath command = getSearchPath >>= anyM indir
where
indir d = doesFileExist $ d </> command
where
indir d = doesFileExist $ d </> command
{- Checks if a filename is a unix dotfile. All files inside dotdirs
- count as dotfiles. -}
@@ -146,5 +146,5 @@ dotfile file
| f == ".." = False
| f == "" = False
| otherwise = "." `isPrefixOf` f || dotfile (takeDirectory file)
where
f = takeFileName file
where
f = takeFileName file
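As a quick illustration of the path helpers above (an editor's sketch, not part of the commit), the definitions give for example:

--   parentDir "/tmp/foo/bar"            == "/tmp/foo"
--   dirContains "foo/" "foo/bar"        == True
--   relPathDirToFile "/a/b/c" "/a/d/e"  == "../../d/e"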

View file

@@ -28,11 +28,11 @@ showPercentage :: Int -> Percentage -> String
showPercentage precision (Percentage p)
| precision == 0 || remainder == 0 = go $ show int
| otherwise = go $ show int ++ "." ++ strip0s (show remainder)
where
go v = v ++ "%"
int :: Integer
(int, frac) = properFraction (fromRational p)
remainder = floor (frac * multiplier) :: Integer
strip0s = reverse . dropWhile (== '0') . reverse
multiplier :: Float
multiplier = 10 ** (fromIntegral precision)
where
go v = v ++ "%"
int :: Integer
(int, frac) = properFraction (fromRational p)
remainder = floor (frac * multiplier) :: Integer
strip0s = reverse . dropWhile (== '0') . reverse
multiplier :: Float
multiplier = 10 ** (fromIntegral precision)
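A hedged sketch of showPercentage's precision handling, assuming Percentage simply wraps a Rational percentage value (Data.Ratio's % below):

--   showPercentage 0 (Percentage (100 % 3))  == "33%"
--   showPercentage 1 (Percentage (100 % 3))  == "33.3%"
--   showPercentage 2 (Percentage (50 % 1))   == "50%"   -- remainder 0, so no decimals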

View file

@@ -59,11 +59,11 @@ readProcessEnv cmd args environ =
output <- hGetContentsStrict h
hClose h
return output
where
p = (proc cmd args)
{ std_out = CreatePipe
, env = environ
}
where
p = (proc cmd args)
{ std_out = CreatePipe
, env = environ
}
{- Writes a string to a process on its stdin,
- returns its output, and also allows specifying the environment.
@@ -99,13 +99,13 @@ writeReadProcessEnv cmd args environ input adjusthandle = do
return output
where
p = (proc cmd args)
{ std_in = CreatePipe
, std_out = CreatePipe
, std_err = Inherit
, env = environ
}
where
p = (proc cmd args)
{ std_in = CreatePipe
, std_out = CreatePipe
, std_err = Inherit
, env = environ
}
{- Waits for a ProcessHandle, and throws an IOError if the process
- did not exit successfully. -}
@@ -156,19 +156,19 @@ withHandle
-> (Handle -> IO a)
-> IO a
withHandle h creator p a = creator p' $ a . select
where
base = p
{ std_in = Inherit
, std_out = Inherit
, std_err = Inherit
}
(select, p')
| h == StdinHandle =
(stdinHandle, base { std_in = CreatePipe })
| h == StdoutHandle =
(stdoutHandle, base { std_out = CreatePipe })
| h == StderrHandle =
(stderrHandle, base { std_err = CreatePipe })
where
base = p
{ std_in = Inherit
, std_out = Inherit
, std_err = Inherit
}
(select, p')
| h == StdinHandle =
(stdinHandle, base { std_in = CreatePipe })
| h == StdoutHandle =
(stdoutHandle, base { std_out = CreatePipe })
| h == StderrHandle =
(stderrHandle, base { std_err = CreatePipe })
{- Like withHandle, but passes (stdin, stdout) handles to the action. -}
withBothHandles
@@ -177,12 +177,12 @@ withBothHandles
-> ((Handle, Handle) -> IO a)
-> IO a
withBothHandles creator p a = creator p' $ a . bothHandles
where
p' = p
{ std_in = CreatePipe
, std_out = CreatePipe
, std_err = Inherit
}
where
p' = p
{ std_in = CreatePipe
, std_out = CreatePipe
, std_err = Inherit
}
{- Forces the CreateProcessRunner to run quietly;
- both stdout and stderr are discarded. -}
@@ -223,21 +223,21 @@ debugProcess p = do
[ action ++ ":"
, showCmd p
]
where
action
| piped (std_in p) && piped (std_out p) = "chat"
| piped (std_in p) = "feed"
| piped (std_out p) = "read"
| otherwise = "call"
piped Inherit = False
piped _ = True
where
action
| piped (std_in p) && piped (std_out p) = "chat"
| piped (std_in p) = "feed"
| piped (std_out p) = "read"
| otherwise = "call"
piped Inherit = False
piped _ = True
{- Shows the command that a CreateProcess will run. -}
showCmd :: CreateProcess -> String
showCmd = go . cmdspec
where
go (ShellCommand s) = s
go (RawCommand c ps) = c ++ " " ++ show ps
where
go (ShellCommand s) = s
go (RawCommand c ps) = c ++ " " ++ show ps
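For reference, an illustrative sketch (not from the commit) of what the two definitions above produce:

--   showCmd (proc "rsync" ["--progress", "src", "dst"])
--       == "rsync [\"--progress\",\"src\",\"dst\"]"
--   debugProcess labels a process "chat" when both std_in and std_out are piped,
--   "feed" for std_in only, "read" for std_out only, and "call" otherwise.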
{- Wrappers for System.Process functions that do debug logging.
-

View file

@@ -15,11 +15,11 @@ import Data.Char
- shell. -}
rsyncShell :: [CommandParam] -> [CommandParam]
rsyncShell command = [Param "-e", Param $ unwords $ map escape (toCommand command)]
where
{- rsync requires some weird, non-shell like quoting in
- here. A doubled single quote inside the single quoted
- string is a single quote. -}
escape s = "'" ++ join "''" (split "'" s) ++ "'"
where
{- rsync requires some weird, non-shell like quoting in
- here. A doubled single quote inside the single quoted
- string is a single quote. -}
escape s = "'" ++ join "''" (split "'" s) ++ "'"
{- Runs rsync in server mode to send a file. -}
rsyncServerSend :: FilePath -> IO Bool
@@ -60,22 +60,22 @@ rsyncProgress callback params = do
- on. Reap the resulting zombie. -}
reapZombies
return r
where
p = proc "rsync" (toCommand params)
feedprogress prev buf h = do
s <- hGetSomeString h 80
if null s
then return True
else do
putStr s
hFlush stdout
let (mbytes, buf') = parseRsyncProgress (buf++s)
case mbytes of
Nothing -> feedprogress prev buf' h
(Just bytes) -> do
when (bytes /= prev) $
callback bytes
feedprogress bytes buf' h
where
p = proc "rsync" (toCommand params)
feedprogress prev buf h = do
s <- hGetSomeString h 80
if null s
then return True
else do
putStr s
hFlush stdout
let (mbytes, buf') = parseRsyncProgress (buf++s)
case mbytes of
Nothing -> feedprogress prev buf' h
(Just bytes) -> do
when (bytes /= prev) $
callback bytes
feedprogress bytes buf' h
{- Checks if an rsync url involves the remote shell (ssh or rsh).
- Use of such urls with rsync requires additional shell
@@ -84,13 +84,13 @@ rsyncUrlIsShell :: String -> Bool
rsyncUrlIsShell s
| "rsync://" `isPrefixOf` s = False
| otherwise = go s
where
-- host::dir is rsync protocol, while host:dir is ssh/rsh
go [] = False
go (c:cs)
| c == '/' = False -- got to directory with no colon
| c == ':' = not $ ":" `isPrefixOf` cs
| otherwise = go cs
where
-- host::dir is rsync protocol, while host:dir is ssh/rsh
go [] = False
go (c:cs)
| c == '/' = False -- got to directory with no colon
| c == ':' = not $ ":" `isPrefixOf` cs
| otherwise = go cs
{- Checks if a rsync url is really just a local path. -}
rsyncUrlIsPath :: String -> Bool
@@ -113,19 +113,19 @@ rsyncUrlIsPath s
-}
parseRsyncProgress :: String -> (Maybe Integer, String)
parseRsyncProgress = go [] . reverse . progresschunks
where
go remainder [] = (Nothing, remainder)
go remainder (x:xs) = case parsebytes (findbytesstart x) of
Nothing -> go (delim:x++remainder) xs
Just b -> (Just b, remainder)
where
go remainder [] = (Nothing, remainder)
go remainder (x:xs) = case parsebytes (findbytesstart x) of
Nothing -> go (delim:x++remainder) xs
Just b -> (Just b, remainder)
delim = '\r'
{- Find chunks that each start with delim.
- The first chunk doesn't start with it
- (it's empty when delim is at the start of the string). -}
progresschunks = drop 1 . split [delim]
findbytesstart s = dropWhile isSpace s
parsebytes s = case break isSpace s of
([], _) -> Nothing
(_, []) -> Nothing
(b, _) -> readish b
delim = '\r'
{- Find chunks that each start with delim.
- The first chunk doesn't start with it
- (it's empty when delim is at the start of the string). -}
progresschunks = drop 1 . split [delim]
findbytesstart s = dropWhile isSpace s
parsebytes s = case break isSpace s of
([], _) -> Nothing
(_, []) -> Nothing
(b, _) -> readish b
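A few illustrative equalities for the rsync helpers above (an editor's sketch, not part of the commit):

--   escape "it's"                       == "'it''s'"
--   rsyncUrlIsShell "host:dir"          == True     -- ssh/rsh syntax
--   rsyncUrlIsShell "host::dir"         == False    -- rsync protocol
--   rsyncUrlIsShell "rsync://host/dir"  == False
--   rsyncUrlIsShell "/local/path"       == False
--   parseRsyncProgress "\r  1024  0%\r  2048  1%"  == (Just 2048, "")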

View file

@@ -74,11 +74,11 @@ lookupSRV (SRV srv) = do
r <- withResolver seed $ flip DNS.lookupSRV $ B8.fromString srv
print r
return $ maybe [] (orderHosts . map tohosts) r
where
tohosts (priority, weight, port, hostname) =
( (priority, weight)
, (B8.toString hostname, PortNumber $ fromIntegral port)
)
where
tohosts (priority, weight, port, hostname) =
( (priority, weight)
, (B8.toString hostname, PortNumber $ fromIntegral port)
)
#else
lookupSRV = lookupSRVHost
#endif
@@ -93,21 +93,21 @@ lookupSRVHost (SRV srv) = catchDefaultIO [] $
parseSrvHost :: String -> [HostPort]
parseSrvHost = orderHosts . catMaybes . map parse . lines
where
parse l = case words l of
[_, _, _, _, spriority, sweight, sport, hostname] -> do
let v =
( readish sport :: Maybe Int
, readish spriority :: Maybe Int
, readish sweight :: Maybe Int
where
parse l = case words l of
[_, _, _, _, spriority, sweight, sport, hostname] -> do
let v =
( readish sport :: Maybe Int
, readish spriority :: Maybe Int
, readish sweight :: Maybe Int
)
case v of
(Just port, Just priority, Just weight) -> Just
( (priority, weight)
, (hostname, PortNumber $ fromIntegral port)
)
case v of
(Just port, Just priority, Just weight) -> Just
( (priority, weight)
, (hostname, PortNumber $ fromIntegral port)
)
_ -> Nothing
_ -> Nothing
_ -> Nothing
_ -> Nothing
orderHosts :: [(PriorityWeight, HostPort)] -> [HostPort]
orderHosts = map snd . sortBy (compare `on` fst)

View file

@@ -25,13 +25,13 @@ data CommandParam = Params String | Param String | File FilePath
- a command and expects Strings. -}
toCommand :: [CommandParam] -> [String]
toCommand = (>>= unwrap)
where
unwrap (Param s) = [s]
unwrap (Params s) = filter (not . null) (split " " s)
-- Files that start with a dash are modified to avoid
-- the command interpreting them as options.
unwrap (File s@('-':_)) = ["./" ++ s]
unwrap (File s) = [s]
where
unwrap (Param s) = [s]
unwrap (Params s) = filter (not . null) (split " " s)
-- Files that start with a dash are modified to avoid
-- the command interpreting them as options.
unwrap (File s@('-':_)) = ["./" ++ s]
unwrap (File s) = [s]
{- Run a system command, and returns True or False
- if it succeeded or failed.
@@ -41,9 +41,9 @@ boolSystem command params = boolSystemEnv command params Nothing
boolSystemEnv :: FilePath -> [CommandParam] -> Maybe [(String, String)] -> IO Bool
boolSystemEnv command params environ = dispatch <$> safeSystemEnv command params environ
where
dispatch ExitSuccess = True
dispatch _ = False
where
dispatch ExitSuccess = True
dispatch _ = False
{- Runs a system command, returning the exit status. -}
safeSystem :: FilePath -> [CommandParam] -> IO ExitCode
@@ -59,26 +59,26 @@ safeSystemEnv command params environ = do
- the shell. -}
shellEscape :: String -> String
shellEscape f = "'" ++ escaped ++ "'"
where
-- replace ' with '"'"'
escaped = join "'\"'\"'" $ split "'" f
where
-- replace ' with '"'"'
escaped = join "'\"'\"'" $ split "'" f
{- Unescapes a set of shellEscaped words or filenames. -}
shellUnEscape :: String -> [String]
shellUnEscape [] = []
shellUnEscape s = word : shellUnEscape rest
where
(word, rest) = findword "" s
findword w [] = (w, "")
findword w (c:cs)
| c == ' ' = (w, cs)
| c == '\'' = inquote c w cs
| c == '"' = inquote c w cs
| otherwise = findword (w++[c]) cs
inquote _ w [] = (w, "")
inquote q w (c:cs)
| c == q = findword w cs
| otherwise = inquote q (w++[c]) cs
where
(word, rest) = findword "" s
findword w [] = (w, "")
findword w (c:cs)
| c == ' ' = (w, cs)
| c == '\'' = inquote c w cs
| c == '"' = inquote c w cs
| otherwise = findword (w++[c]) cs
inquote _ w [] = (w, "")
inquote q w (c:cs)
| c == q = findword w cs
| otherwise = inquote q (w++[c]) cs
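Some illustrative equalities for the helpers above (an editor's sketch, not part of the commit):

--   toCommand [Param "-m", Params "-a -v", File "-dashed"]
--       == ["-m", "-a", "-v", "./-dashed"]
--   shellEscape "it's"             == "'it'\"'\"'s'"
--   shellUnEscape "foo 'bar baz'"  == ["foo", "bar baz"]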
{- For quickcheck. -}
prop_idempotent_shellEscape :: String -> Bool

View file

@@ -23,12 +23,12 @@ getTSet :: TSet a -> IO [a]
getTSet tset = runTSet $ do
c <- readTChan tset
go [c]
where
go l = do
v <- tryReadTChan tset
case v of
Nothing -> return l
Just c -> go (c:l)
where
go l = do
v <- tryReadTChan tset
case v of
Nothing -> return l
Just c -> go (c:l)
{- Puts items into a TSet. -}
putTSet :: TSet a -> [a] -> IO ()
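A behavioural note (editor's sketch, assuming putTSet queues items in list order): getTSet blocks only for the first item, then drains whatever else is already in the channel, consing as it goes, so the most recently queued item comes back first.

--   -- with "a" queued before "b":
--   getTSet tset   -- returns ["b", "a"]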

View file

@@ -32,11 +32,11 @@ instance IsString TenseText where
renderTense :: Tense -> TenseText -> Text
renderTense tense (TenseText chunks) = T.concat $ map render chunks
where
render (Tensed present past)
| tense == Present = present
| otherwise = past
render (UnTensed s) = s
where
render (Tensed present past)
| tense == Present = present
| otherwise = past
render (UnTensed s) = s
{- Builds up a TenseText, separating chunks with spaces.
-
@@ -45,13 +45,13 @@ renderTense tense (TenseText chunks) = T.concat $ map render chunks
-}
tenseWords :: [TenseChunk] -> TenseText
tenseWords = TenseText . go []
where
go c [] = reverse c
go c (w:[]) = reverse (w:c)
go c ((UnTensed w):ws) = go (UnTensed (addspace w) : c) ws
go c ((Tensed w1 w2):ws) =
go (Tensed (addspace w1) (addspace w2) : c) ws
addspace w = T.append w " "
where
go c [] = reverse c
go c (w:[]) = reverse (w:c)
go c ((UnTensed w):ws) = go (UnTensed (addspace w) : c) ws
go c ((Tensed w1 w2):ws) =
go (Tensed (addspace w1) (addspace w2) : c) ws
addspace w = T.append w " "
unTensed :: Text -> TenseText
unTensed t = TenseText [UnTensed t]
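An illustrative rendering (assuming OverloadedStrings for the Text literals); any tense other than Present selects the past forms:

--   renderTense Present (tenseWords [Tensed "pushing" "pushed", UnTensed "to origin"])
--       == "pushing to origin"    -- the past forms render as "pushed to origin"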

View file

@@ -26,8 +26,8 @@ runEvery n a = forever $ do
threadDelaySeconds :: Seconds -> IO ()
threadDelaySeconds (Seconds n) = unboundDelay (fromIntegral n * oneSecond)
where
oneSecond = 1000000 -- microseconds
where
oneSecond = 1000000 -- microseconds
{- Like threadDelay, but not bounded by an Int.
-
@@ -52,6 +52,6 @@ waitForTermination = do
whenM (queryTerminal stdInput) $
check keyboardSignal lock
takeMVar lock
where
check sig lock = void $
installHandler sig (CatchOnce $ putMVar lock ()) Nothing
where
check sig lock = void $
installHandler sig (CatchOnce $ putMVar lock ()) Nothing

View file

@@ -48,9 +48,9 @@ at_symlink_nofollow = #const AT_SYMLINK_NOFOLLOW
instance Storable TimeSpec where
-- use the larger alignment of the two types in the struct
alignment _ = max sec_alignment nsec_alignment
where
sec_alignment = alignment (undefined::CTime)
nsec_alignment = alignment (undefined::CLong)
where
sec_alignment = alignment (undefined::CTime)
nsec_alignment = alignment (undefined::CLong)
sizeOf _ = #{size struct timespec}
peek ptr = do
sec <- #{peek struct timespec, tv_sec} ptr
@@ -70,10 +70,10 @@ touchBoth file atime mtime follow =
pokeArray ptr [atime, mtime]
r <- c_utimensat at_fdcwd f ptr flags
when (r /= 0) $ throwErrno "touchBoth"
where
flags = if follow
then 0
else at_symlink_nofollow
where
flags
| follow = 0
| otherwise = at_symlink_nofollow
#else
#if 0
@@ -108,10 +108,10 @@ touchBoth file atime mtime follow =
r <- syscall f ptr
when (r /= 0) $
throwErrno "touchBoth"
where
syscall = if follow
then c_lutimes
else c_utimes
where
syscall
| follow = c_lutimes
| otherwise = c_utimes
#else
#warning "utimensat and lutimes not available; building without symlink timestamp preservation support"

View file

@@ -29,10 +29,10 @@ type Headers = [String]
- also checking that its size, if available, matches a specified size. -}
check :: URLString -> Headers -> Maybe Integer -> IO Bool
check url headers expected_size = handle <$> exists url headers
where
handle (False, _) = False
handle (True, Nothing) = True
handle (True, s) = expected_size == s
where
handle (False, _) = False
handle (True, Nothing) = True
handle (True, s) = expected_size == s
{- Checks that an url exists and could be successfully downloaded,
- also returning its size if available. -}
@@ -50,8 +50,8 @@ exists url headers = case parseURI url of
case rspCode r of
(2,_,_) -> return (True, size r)
_ -> return (False, Nothing)
where
size = liftM Prelude.read . lookupHeader HdrContentLength . rspHeaders
where
size = liftM Prelude.read . lookupHeader HdrContentLength . rspHeaders
{- Used to download large files, such as the contents of keys.
-
@@ -66,17 +66,17 @@ download :: URLString -> Headers -> [CommandParam] -> FilePath -> IO Bool
download url headers options file
| "file://" `isPrefixOf` url = curl
| otherwise = ifM (inPath "wget") (wget , curl)
where
headerparams = map (\h -> Param $ "--header=" ++ h) headers
wget = go "wget" $ headerparams ++ [Params "-c -O"]
{- Uses the -# progress display, because the normal
- one is very confusing when resuming, showing
- the remainder to download as the whole file,
- and not indicating how much percent was
- downloaded before the resume. -}
curl = go "curl" $ headerparams ++ [Params "-L -C - -# -o"]
go cmd opts = boolSystem cmd $
options++opts++[File file, File url]
where
headerparams = map (\h -> Param $ "--header=" ++ h) headers
wget = go "wget" $ headerparams ++ [Params "-c -O"]
{- Uses the -# progress display, because the normal
- one is very confusing when resuming, showing
- the remainder to download as the whole file,
- and not indicating how much percent was
- downloaded before the resume. -}
curl = go "curl" $ headerparams ++ [Params "-L -C - -# -o"]
go cmd opts = boolSystem cmd $
options++opts++[File file, File url]
{- Downloads a small file. -}
get :: URLString -> Headers -> IO String
@@ -98,36 +98,36 @@ get url headers =
-}
request :: URI -> Headers -> RequestMethod -> IO (Response String)
request url headers requesttype = go 5 url
where
go :: Int -> URI -> IO (Response String)
go 0 _ = error "Too many redirects "
go n u = do
rsp <- Browser.browse $ do
Browser.setErrHandler ignore
Browser.setOutHandler ignore
Browser.setAllowRedirects False
let req = mkRequest requesttype u :: Request_String
snd <$> Browser.request (addheaders req)
case rspCode rsp of
(3,0,x) | x /= 5 -> redir (n - 1) u rsp
_ -> return rsp
ignore = const noop
redir n u rsp = case retrieveHeaders HdrLocation rsp of
[] -> return rsp
(Header _ newu:_) ->
case parseURIReference newu of
Nothing -> return rsp
Just newURI -> go n newURI_abs
where
where
go :: Int -> URI -> IO (Response String)
go 0 _ = error "Too many redirects "
go n u = do
rsp <- Browser.browse $ do
Browser.setErrHandler ignore
Browser.setOutHandler ignore
Browser.setAllowRedirects False
let req = mkRequest requesttype u :: Request_String
snd <$> Browser.request (addheaders req)
case rspCode rsp of
(3,0,x) | x /= 5 -> redir (n - 1) u rsp
_ -> return rsp
ignore = const noop
redir n u rsp = case retrieveHeaders HdrLocation rsp of
[] -> return rsp
(Header _ newu:_) ->
case parseURIReference newu of
Nothing -> return rsp
Just newURI -> go n newURI_abs
where
#if defined VERSION_network
#if ! MIN_VERSION_network(2,4,0)
#define WITH_OLD_URI
#endif
#endif
#ifdef WITH_OLD_URI
newURI_abs = fromMaybe newURI (newURI `relativeTo` u)
newURI_abs = fromMaybe newURI (newURI `relativeTo` u)
#else
newURI_abs = newURI `relativeTo` u
newURI_abs = newURI `relativeTo` u
#endif
addheaders req = setHeaders req (rqHeaders req ++ userheaders)
userheaders = rights $ map parseHeader headers
addheaders req = setHeaders req (rqHeaders req ++ userheaders)
userheaders = rights $ map parseHeader headers
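Two illustrative points about the helpers above (editor's sketch, not part of the commit): check succeeds only on a 2xx response whose Content-Length, if reported, matches the expected size; and when curl is selected, download ends up invoking roughly the command shown below.

--   check "http://example.com/f" [] (Just 100)   -- True iff 2xx and size is 100 (or unreported)
--   curl --header=<h> -L -C - -# -o <file> <url>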

View file

@@ -26,7 +26,7 @@ myUserName = myVal ["USER", "LOGNAME"] userName
myVal :: [String] -> (UserEntry -> String) -> IO String
myVal envvars extract = maybe (extract <$> getpwent) return =<< check envvars
where
check [] = return Nothing
check (v:vs) = maybe (check vs) (return . Just) =<< getEnv v
getpwent = getUserEntryForID =<< getEffectiveUserID
where
check [] = return Nothing
check (v:vs) = maybe (check vs) (return . Just) =<< getEnv v
getpwent = getUserEntryForID =<< getEffectiveUserID

View file

@@ -33,5 +33,5 @@ calcDigest v secret = showDigest $ hmacSha1 secret $ fromString v
{- for quickcheck -}
prop_verifiable_sane :: String -> String -> Bool
prop_verifiable_sane a s = verify (mkVerifiable a secret) secret
where
secret = fromString s
where
secret = fromString s

View file

@@ -43,11 +43,11 @@ localhost = "localhost"
- Note: The url *will* be visible to an attacker. -}
runBrowser :: String -> (Maybe [(String, String)]) -> IO Bool
runBrowser url env = boolSystemEnv cmd [Param url] env
where
where
#ifdef darwin_HOST_OS
cmd = "open"
cmd = "open"
#else
cmd = "xdg-open"
cmd = "xdg-open"
#endif
{- Binds to a socket on localhost, and runs a webapp on it.
@@ -75,25 +75,25 @@ localSocket = do
(v4addr:_, _) -> go v4addr
(_, v6addr:_) -> go v6addr
_ -> error "unable to bind to a local socket"
where
hints = defaultHints
{ addrFlags = [AI_ADDRCONFIG]
, addrSocketType = Stream
}
{- Repeated attempts because bind sometimes fails for an
- unknown reason on OSX. -}
go addr = go' 100 addr
go' :: Int -> AddrInfo -> IO Socket
go' 0 _ = error "unable to bind to local socket"
go' n addr = do
r <- tryIO $ bracketOnError (open addr) sClose (use addr)
either (const $ go' (pred n) addr) return r
open addr = socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)
use addr sock = do
setSocketOption sock ReuseAddr 1
bindSocket sock (addrAddress addr)
listen sock maxListenQueue
return sock
where
hints = defaultHints
{ addrFlags = [AI_ADDRCONFIG]
, addrSocketType = Stream
}
{- Repeated attempts because bind sometimes fails for an
- unknown reason on OSX. -}
go addr = go' 100 addr
go' :: Int -> AddrInfo -> IO Socket
go' 0 _ = error "unable to bind to local socket"
go' n addr = do
r <- tryIO $ bracketOnError (open addr) sClose (use addr)
either (const $ go' (pred n) addr) return r
open addr = socket (addrFamily addr) (addrSocketType addr) (addrProtocol addr)
use addr sock = do
setSocketOption sock ReuseAddr 1
bindSocket sock (addrAddress addr)
listen sock maxListenQueue
return sock
{- Checks if debugging is actually enabled. -}
debugEnabled :: IO Bool
@@ -121,8 +121,8 @@ logRequest req = do
--, frombs $ lookupRequestField "referer" req
, frombs $ lookupRequestField "user-agent" req
]
where
frombs v = toString $ L.fromChunks [v]
where
frombs v = toString $ L.fromChunks [v]
lookupRequestField :: CI.CI B.ByteString -> Wai.Request -> B.ByteString
lookupRequestField k req = fromMaybe "" . lookup k $ Wai.requestHeaders req
@@ -179,12 +179,12 @@ insertAuthToken :: forall y. (y -> T.Text)
-> Builder
insertAuthToken extractToken predicate webapp root pathbits params =
fromText root `mappend` encodePath pathbits' encodedparams
where
pathbits' = if null pathbits then [T.empty] else pathbits
encodedparams = map (TE.encodeUtf8 *** go) params'
go "" = Nothing
go x = Just $ TE.encodeUtf8 x
authparam = (T.pack "auth", extractToken webapp)
params'
| predicate pathbits = authparam:params
| otherwise = params
where
pathbits' = if null pathbits then [T.empty] else pathbits
encodedparams = map (TE.encodeUtf8 *** go) params'
go "" = Nothing
go x = Just $ TE.encodeUtf8 x
authparam = (T.pack "auth", extractToken webapp)
params'
| predicate pathbits = authparam:params
| otherwise = params