diff --git a/Annex/Drop.hs b/Annex/Drop.hs
index 49c15746c4..285ddf50c3 100644
--- a/Annex/Drop.hs
+++ b/Annex/Drop.hs
@@ -108,7 +108,7 @@ handleDropsFrom locs rs reason fromhere key afile si preverified runner = do
 			[ "dropped"
 			, case afile of
 				AssociatedFile Nothing -> serializeKey key
-				AssociatedFile (Just af) -> fromRawFilePath af
+				AssociatedFile (Just af) -> fromOsPath af
 			, "(from " ++ maybe "here" show u ++ ")"
 			, "(copies now " ++ show (have - 1) ++ ")"
 			, ": " ++ reason
diff --git a/Annex/Import.hs b/Annex/Import.hs
index 587d866a96..497a868c15 100644
--- a/Annex/Import.hs
+++ b/Annex/Import.hs
@@ -69,7 +69,6 @@ import Control.Concurrent.STM
 import qualified Data.Map.Strict as M
 import qualified Data.Set as S
 import qualified System.FilePath.Posix.ByteString as Posix
-import qualified System.FilePath.ByteString as P
 import qualified Data.ByteArray.Encoding as BA
 
 {- Configures how to build an import tree. -}
@@ -154,7 +153,7 @@ recordImportTree remote importtreeconfig addunlockedmatcher imported = do
 		let subtreeref = Ref $
 			fromRef' finaltree
 				<> ":"
-				<> getTopFilePath dir
+				<> fromOsPath (getTopFilePath dir)
 		in fromMaybe emptyTree
 			<$> inRepo (Git.Ref.tree subtreeref)
 	updateexportdb importedtree
@@ -349,11 +348,11 @@ mkImportTreeItem maddunlockedmatcher msubdir loc v = case v of
 	lf = fromImportLocation loc
 	treepath = asTopFilePath lf
 	topf = asTopFilePath $
-		maybe lf (\sd -> getTopFilePath sd P.</> lf) msubdir
+		maybe lf (\sd -> getTopFilePath sd </> lf) msubdir
 	mklink k = do
 		relf <- fromRepo $ fromTopFilePath topf
 		symlink <- calcRepo $ gitAnnexLink relf k
-		linksha <- hashSymlink symlink
+		linksha <- hashSymlink (fromOsPath symlink)
 		return $ TreeItem treepath (fromTreeItemType TreeSymlink) linksha
 	mkpointer k = TreeItem treepath (fromTreeItemType TreeFile)
 		<$> hashPointerFile k
@@ -429,7 +428,8 @@ buildImportTreesGeneric converttree basetree msubdir importable@(ImportableConte
 	-- Full directory prefix where the sub tree is located.
 	let fullprefix = asTopFilePath $ case msubdir of
 		Nothing -> subdir
-		Just d -> getTopFilePath d Posix.</> subdir
+		Just d -> toOsPath $
+			fromOsPath (getTopFilePath d) Posix.</> fromOsPath subdir
 	Tree ts <- converttree (Just fullprefix) $
 		map (\(p, i) -> (mkImportLocation p, i))
 			(importableContentsSubTree c)
@@ -853,7 +853,7 @@ importKeys remote importtreeconfig importcontent thirdpartypopulated importablec
 			let af = AssociatedFile (Just f)
 			let downloader p' tmpfile = do
 				_ <- Remote.retrieveExportWithContentIdentifier
-					ia loc [cid] (fromRawFilePath tmpfile)
+					ia loc [cid] tmpfile
 					(Left k)
 					(combineMeterUpdate p' p)
 				ok <- moveAnnex k af tmpfile
@@ -871,7 +871,7 @@ importKeys remote importtreeconfig importcontent thirdpartypopulated importablec
 	doimportsmall cidmap loc cid sz p = do
 		let downloader tmpfile = do
 			(k, _) <- Remote.retrieveExportWithContentIdentifier
-				ia loc [cid] (fromRawFilePath tmpfile)
+				ia loc [cid] tmpfile
 				(Right (mkkey tmpfile))
 				p
 			case keyGitSha k of
@@ -894,7 +894,7 @@ importKeys remote importtreeconfig importcontent thirdpartypopulated importablec
 		let af = AssociatedFile (Just f)
 		let downloader tmpfile p = do
 			(k, _) <- Remote.retrieveExportWithContentIdentifier
-				ia loc [cid] (fromRawFilePath tmpfile)
+				ia loc [cid] tmpfile
 				(Right (mkkey tmpfile))
 				p
 			case keyGitSha k of
@@ -950,7 +950,7 @@ importKeys remote importtreeconfig importcontent thirdpartypopulated importablec
 		case importtreeconfig of
 			ImportTree -> fromImportLocation loc
			ImportSubTree subdir _ ->
-				getTopFilePath subdir P.</> fromImportLocation loc
+				getTopFilePath subdir </> fromImportLocation loc
 
 	getcidkey cidmap db cid = liftIO $
 		-- Avoiding querying the database when it's empty speeds up
@@ -1091,7 +1091,7 @@ getImportableContents r importtreeconfig ci matcher = do
 		isknown <||> (matches <&&> notignored)
 	  where
 		-- Checks, from least to most expensive.
-		ingitdir = ".git" `elem` Posix.splitDirectories (fromImportLocation loc)
+		ingitdir = ".git" `elem` Posix.splitDirectories (fromOsPath (fromImportLocation loc))
 		matches = matchesImportLocation matcher loc sz
 		isknown = isKnownImportLocation dbhandle loc
 		notignored = notIgnoredImportLocation importtreeconfig ci loc
@@ -1120,6 +1120,6 @@ notIgnoredImportLocation importtreeconfig ci loc = not <$> checkIgnored ci f
   where
 	f = case importtreeconfig of
 		ImportSubTree dir _ ->
-			getTopFilePath dir P.</> fromImportLocation loc
+			getTopFilePath dir </> fromImportLocation loc
 		ImportTree -> fromImportLocation loc
diff --git a/CmdLine/GitRemoteAnnex.hs b/CmdLine/GitRemoteAnnex.hs
index 91bdc0b263..79d6befd5b 100644
--- a/CmdLine/GitRemoteAnnex.hs
+++ b/CmdLine/GitRemoteAnnex.hs
@@ -66,7 +66,6 @@ import Data.Char
 import qualified Data.ByteString as B
 import qualified Data.ByteString.Char8 as B8
 import qualified Data.Map.Strict as M
-import qualified System.FilePath.ByteString as P
 import qualified Data.Set as S
 
 run :: [String] -> IO ()
@@ -146,13 +145,14 @@ list st rmt forpush = do
 			else downloadManifestOrFail rmt
 	l <- forM (inManifest manifest) $ \k -> do
 		b <- downloadGitBundle rmt k
-		heads <- inRepo $ Git.Bundle.listHeads b
+		let b' = fromOsPath b
+		heads <- inRepo $ Git.Bundle.listHeads b'
 		-- Get all the objects from the bundle. This is done here
 		-- so that the tracking refs can be updated with what is
 		-- listed, and so what when a full repush is done, all
 		-- objects are available to be pushed.
 		when forpush $
-			inRepo $ Git.Bundle.unbundle b
+			inRepo $ Git.Bundle.unbundle b'
 		-- The bundle may contain tracking refs, or regular refs,
 		-- make sure we're operating on regular refs.
 		return $ map (\(s, r) -> (fromTrackingRef rmt r, s)) heads
@@ -202,7 +202,8 @@ fetch' :: State -> Remote -> Annex ()
 fetch' st rmt = do
 	manifest <- maybe (downloadManifestOrFail rmt) pure (manifestCache st)
 	forM_ (inManifest manifest) $ \k ->
-		downloadGitBundle rmt k >>= inRepo . Git.Bundle.unbundle
+		downloadGitBundle rmt k
+			>>= inRepo . Git.Bundle.unbundle . fromOsPath
 	-- Newline indicates end of fetch.
 	liftIO $ do
 		putStrLn ""
@@ -496,10 +497,9 @@ resolveSpecialRemoteWebUrl :: String -> Annex (Maybe String)
 resolveSpecialRemoteWebUrl url
 	| "http://" `isPrefixOf` lcurl || "https://" `isPrefixOf` lcurl =
 		Url.withUrlOptionsPromptingCreds $ \uo ->
-			withTmpFile (toOsPath "git-remote-annex") $ \tmp h -> do
+			withTmpFile (literalOsPath "git-remote-annex") $ \tmp h -> do
 				liftIO $ hClose h
-				let tmp' = fromRawFilePath $ fromOsPath tmp
-				Url.download' nullMeterUpdate Nothing url tmp' uo >>= \case
+				Url.download' nullMeterUpdate Nothing url tmp uo >>= \case
 					Left err -> giveup $ url ++ " " ++ err
 					Right () -> liftIO $
 						fmap decodeBS
@@ -728,9 +728,9 @@ downloadManifest rmt = get mkmain >>= maybe (get mkbak) (pure . Just)
 	-- it needs to re-download it fresh every time, and the object
 	-- file should not be stored locally.
 	gettotmp dl = withOtherTmp $ \othertmp ->
-		withTmpFileIn (toOsPath othertmp) (toOsPath "GITMANIFEST") $ \tmp tmph -> do
+		withTmpFileIn othertmp (literalOsPath "GITMANIFEST") $ \tmp tmph -> do
 			liftIO $ hClose tmph
-			_ <- dl (fromRawFilePath (fromOsPath tmp))
+			_ <- dl tmp
 			b <- liftIO (F.readFile' tmp)
 			case parseManifest b of
 				Right m -> Just <$> verifyManifest rmt m
@@ -778,7 +778,7 @@ uploadManifest rmt manifest = do
 			dropKey' rmt mk
 			put mk
 
-	put mk = withTmpFile (toOsPath "GITMANIFEST") $ \tmp tmph -> do
+	put mk = withTmpFile (literalOsPath "GITMANIFEST") $ \tmp tmph -> do
 		liftIO $ B8.hPut tmph (formatManifest manifest)
 		liftIO $ hClose tmph
 		-- Uploading needs the key to be in the annex objects
@@ -789,13 +789,13 @@ uploadManifest rmt manifest = do
 		-- keys, which it is not.
 		objfile <- calcRepo (gitAnnexLocation mk)
 		modifyContentDir objfile $
-			linkOrCopy mk (fromOsPath tmp) objfile Nothing >>= \case
+			linkOrCopy mk tmp objfile Nothing >>= \case
 				-- Important to set the right perms even
 				-- though the object is only present
 				-- briefly, since sending objects may rely
 				-- on or even copy file perms.
 				Just _ -> do
-					liftIO $ R.setFileMode objfile
+					liftIO $ R.setFileMode (fromOsPath objfile)
 						=<< defaultFileMode
 					freezeContent objfile
 				Nothing -> uploadfailed
@@ -843,9 +843,11 @@ parseManifest b =
 - interrupted before updating the manifest on the remote, or when a race
 - causes the uploaded manigest to be overwritten.
 -}
-lastPushedManifestFile :: UUID -> Git.Repo -> RawFilePath
-lastPushedManifestFile u r = gitAnnexDir r P.</> "git-remote-annex"
-	P.</> fromUUID u P.</> "manifest"
+lastPushedManifestFile :: UUID -> Git.Repo -> OsPath
+lastPushedManifestFile u r = gitAnnexDir r
+	</> literalOsPath "git-remote-annex"
+	</> fromUUID u
+	</> literalOsPath "manifest"
 
 {- Call before uploading anything. The returned manifest has added
 - to it any bundle keys that were in the lastPushedManifestFile
@@ -861,7 +863,7 @@ startPush' rmt manifest = do
 	f <- fromRepo (lastPushedManifestFile (Remote.uuid rmt))
 	oldmanifest <- liftIO $
 		fromRight mempty . parseManifest
-			<$> F.readFile' (toOsPath f)
+			<$> F.readFile' f
 			`catchNonAsync` (const (pure mempty))
 	let oldmanifest' = mkManifest [] $
 		S.fromList (inManifest oldmanifest)
@@ -911,7 +913,7 @@ verifyManifest rmt manifest =
 -- and so more things pulled from it, etc.
 -- 3. Git bundle objects are not usually transferred between repositories
 -- except special remotes (although the user can if they want to).
-downloadGitBundle :: Remote -> Key -> Annex FilePath
+downloadGitBundle :: Remote -> Key -> Annex OsPath
 downloadGitBundle rmt k = getKeyExportLocations rmt k >>= \case
 	Nothing -> dlwith $
 		download rmt k (AssociatedFile Nothing) stdRetry noNotification
@@ -919,7 +921,7 @@ downloadGitBundle rmt k = getKeyExportLocations rmt k >>= \case
 		anyM getexport locs
   where
 	dlwith a = ifM a
-		( decodeBS <$> calcRepo (gitAnnexLocation k)
+		( calcRepo (gitAnnexLocation k)
 		, giveup $ "Failed to download " ++ serializeKey k
 		)
@@ -927,7 +929,7 @@ downloadGitBundle rmt k = getKeyExportLocations rmt k >>= \case
 	getexport' loc =
 		getViaTmp rsp vc k (AssociatedFile Nothing) Nothing $ \tmp -> do
 			v <- Remote.retrieveExport (Remote.exportActions rmt)
-				k loc (decodeBS tmp) nullMeterUpdate
+				k loc tmp nullMeterUpdate
 			return (True, v)
 	rsp = Remote.retrievalSecurityPolicy rmt
 	vc = Remote.RemoteVerify rmt
@@ -952,7 +954,7 @@ checkPresentGitBundle rmt k =
 uploadGitObject :: Remote -> Key -> Annex ()
 uploadGitObject rmt k = getKeyExportLocations rmt k >>= \case
 	Just (loc:_) -> do
-		objfile <- fromRawFilePath <$> calcRepo (gitAnnexLocation k)
+		objfile <- calcRepo (gitAnnexLocation k)
 		Remote.storeExport (Remote.exportActions rmt)
 			objfile k loc nullMeterUpdate
 	_ -> unlessM (upload rmt k (AssociatedFile Nothing) retry noNotification) $
@@ -977,15 +979,14 @@ generateGitBundle
 	-> Manifest
 	-> Annex (Key, Annex ())
 generateGitBundle rmt bs manifest =
-	withTmpFile (toOsPath "GITBUNDLE") $ \tmp tmph -> do
-		let tmp' = fromOsPath tmp
+	withTmpFile (literalOsPath "GITBUNDLE") $ \tmp tmph -> do
 		liftIO $ hClose tmph
-		inRepo $ Git.Bundle.create (fromRawFilePath tmp') bs
+		inRepo $ Git.Bundle.create (fromOsPath tmp) bs
 		bundlekey <- genGitBundleKey (Remote.uuid rmt)
-			tmp' nullMeterUpdate
+			tmp nullMeterUpdate
 		if (bundlekey `notElem` inManifest manifest)
 			then do
-				unlessM (moveAnnex bundlekey (AssociatedFile Nothing) tmp') $
+				unlessM (moveAnnex bundlekey (AssociatedFile Nothing) tmp) $
 					giveup "Unable to push"
 				return (bundlekey, uploadaction bundlekey)
 			else return (bundlekey, noop)
@@ -1025,7 +1026,7 @@ getKeyExportLocations rmt k = do
 keyExportLocations :: Remote -> Key -> GitConfig -> UUID -> Maybe [ExportLocation]
 keyExportLocations rmt k cfg uuid
 	| exportTree (Remote.config rmt) || importTree (Remote.config rmt) =
-		Just $ map (\p -> mkExportLocation (".git" P.</> p)) $
+		Just $ map (\p -> mkExportLocation (literalOsPath ".git" </> p)) $
 			concatMap (`annexLocationsBare` k) cfgs
 	| otherwise = Nothing
   where
@@ -1094,7 +1095,7 @@ getRepo = getEnv "GIT_WORK_TREE" >>= \case
 	Nothing -> fixup <$> Git.CurrentRepo.get
   where
 	fixup r@(Repo { location = loc@(Local { worktree = Just _ }) }) =
-		r { location = loc { worktree = Just (P.takeDirectory (gitdir loc)) } }
+		r { location = loc { worktree = Just (takeDirectory (gitdir loc)) } }
 	fixup r = r
 
 -- Records what the git-annex branch was at the beginning of this command.
@@ -1127,11 +1128,11 @@ startAnnexBranch = ifM (null <$> Annex.Branch.siblingBranches)
 -- journal writes to a temporary directory, so that all writes
 -- to the git-annex branch by the action will be discarded.
 specialRemoteFromUrl :: StartAnnexBranch -> Annex a -> Annex a
-specialRemoteFromUrl sab a = withTmpDir (toOsPath "journal") $ \tmpdir -> do
+specialRemoteFromUrl sab a = withTmpDir (literalOsPath "journal") $ \tmpdir -> do
 	Annex.overrideGitConfig $ \c ->
 		c { annexAlwaysCommit = False }
 	Annex.BranchState.changeState $ \st ->
-		st { alternateJournal = Just (toRawFilePath tmpdir) }
+		st { alternateJournal = Just tmpdir }
 	a `finally` cleanupInitialization sab tmpdir
 
 -- If the git-annex branch did not exist when this command started,
@@ -1165,16 +1166,15 @@ specialRemoteFromUrl sab a = withTmpDir (toOsPath "journal") $ \tmpdir -> do
 -- involve checking out an adjusted branch. But git clone wants to do its
 -- own checkout. So no initialization is done then, and the git bundle
 -- objects are deleted.
-cleanupInitialization :: StartAnnexBranch -> FilePath -> Annex ()
+cleanupInitialization :: StartAnnexBranch -> OsPath -> Annex ()
 cleanupInitialization sab alternatejournaldir = void $ tryNonAsync $ do
-	liftIO $ mapM_ R.removeLink
-		=<< dirContents (toRawFilePath alternatejournaldir)
+	liftIO $ mapM_ removeFile =<< dirContents alternatejournaldir
 	case sab of
 		AnnexBranchExistedAlready _ -> noop
 		AnnexBranchCreatedEmpty r ->
 			whenM ((r ==) <$> Annex.Branch.getBranch) $ do
 				indexfile <- fromRepo gitAnnexIndex
-				liftIO $ removeWhenExistsWith R.removeLink indexfile
+				liftIO $ removeWhenExistsWith removeFile indexfile
 				-- When cloning failed and this is being
 				-- run as an exception is thrown, HEAD will
 				-- not be set to a valid value, which will
@@ -1202,7 +1202,7 @@ cleanupInitialization sab alternatejournaldir = void $ tryNonAsync $ do
 			forM_ ks $ \k -> case fromKey keyVariety k of
 				GitBundleKey -> lockContentForRemoval k noop removeAnnex
 				_ -> noop
-			void $ liftIO $ tryIO $ removeDirectory (decodeBS annexobjectdir)
+			void $ liftIO $ tryIO $ removeDirectory annexobjectdir
 
 	notcrippledfilesystem = not <$> probeCrippledFileSystem
diff --git a/Command/Assist.hs b/Command/Assist.hs
index bcdac9ae67..6e25fb3457 100644
--- a/Command/Assist.hs
+++ b/Command/Assist.hs
@@ -28,7 +28,8 @@ myseek o = do
 	Command.Sync.prepMerge
 	Command.Add.seek Command.Add.AddOptions
-		{ Command.Add.addThese = Command.Sync.contentOfOption o
+		{ Command.Add.addThese = map fromOsPath $
+			Command.Sync.contentOfOption o
 		, Command.Add.batchOption = NoBatch
 		, Command.Add.updateOnly = False
 		, Command.Add.largeFilesOverride = Nothing
diff --git a/Command/Import.hs b/Command/Import.hs
index c35055927e..7375b807df 100644
--- a/Command/Import.hs
+++ b/Command/Import.hs
@@ -129,7 +129,7 @@ seek :: ImportOptions -> CommandSeek
 seek o@(LocalImportOptions {}) = startConcurrency commandStages $ do
 	repopath <- liftIO . absPath =<< fromRepo Git.repoPath
 	inrepops <- liftIO $ filter (dirContains repopath)
-		<$> mapM (absPath . toRawFilePath) (importFiles o)
+		<$> mapM (absPath . toOsPath) (importFiles o)
 	unless (null inrepops) $ do
 		qp <- coreQuotePath <$> Annex.getGitConfig
 		giveup $ decodeBS $ quote qp $
@@ -145,7 +145,7 @@ seek o@(RemoteImportOptions {}) = startConcurrency commandStages $ do
 		giveup "That remote does not support imports."
 	subdir <- maybe (pure Nothing)
-		(Just <$$> inRepo . toTopFilePath . toRawFilePath)
+		(Just <$$> inRepo . toTopFilePath . toOsPath)
 		(importToSubDir o)
 	addunlockedmatcher <- addUnlockedMatcher
 	seekRemote r (importToBranch o) subdir (importContent o)
@@ -153,9 +153,9 @@ seek o@(RemoteImportOptions {}) = startConcurrency commandStages $ do
 		addunlockedmatcher (messageOption o)
 
-startLocal :: ImportOptions -> AddUnlockedMatcher -> GetFileMatcher -> DuplicateMode -> (RawFilePath, RawFilePath) -> CommandStart
+startLocal :: ImportOptions -> AddUnlockedMatcher -> GetFileMatcher -> DuplicateMode -> (OsPath, OsPath) -> CommandStart
 startLocal o addunlockedmatcher largematcher mode (srcfile, destfile) =
-	ifM (liftIO $ isRegularFile <$> R.getSymbolicLinkStatus srcfile)
+	ifM (liftIO $ isRegularFile <$> R.getSymbolicLinkStatus (fromOsPath srcfile))
 		( starting "import" ai si pickaction
 		, stop
 		)
@@ -167,7 +167,7 @@ startLocal o addunlockedmatcher largematcher mode (srcfile, destfile) =
 		showNote $ UnquotedString $ "duplicate of " ++ serializeKey k
 		verifyExisting k destfile
 			( do
-				liftIO $ R.removeLink srcfile
+				liftIO $ removeFile srcfile
 				next $ return True
 			, do
 				warning "Could not verify that the content is still present in the annex; not removing from the import location."
@@ -183,26 +183,26 @@ startLocal o addunlockedmatcher largematcher mode (srcfile, destfile) =
 				warning $ "not importing " <> QuotedPath destfile <> " which is .gitignored (use --no-check-gitignore to override)"
 				stop
 			else do
-				existing <- liftIO (catchMaybeIO $ R.getSymbolicLinkStatus destfile)
+				existing <- liftIO (catchMaybeIO $ R.getSymbolicLinkStatus (fromOsPath destfile))
 				case existing of
 					Nothing -> importfilechecked ld k
 					Just s
 						| isDirectory s -> notoverwriting "(is a directory)"
 						| isSymbolicLink s -> ifM (Annex.getRead Annex.force)
 							( do
-								liftIO $ removeWhenExistsWith R.removeLink destfile
+								liftIO $ removeWhenExistsWith removeFile destfile
 								importfilechecked ld k
 							, notoverwriting "(is a symlink)"
 							)
 						| otherwise -> ifM (Annex.getRead Annex.force)
 							( do
-								liftIO $ removeWhenExistsWith R.removeLink destfile
+								liftIO $ removeWhenExistsWith removeFile destfile
 								importfilechecked ld k
 							, notoverwriting "(use --force to override, or a duplication option such as --deduplicate to clean up)"
 							)
 	checkdestdir cont = do
 		let destdir = parentDir destfile
-		existing <- liftIO (catchMaybeIO $ R.getSymbolicLinkStatus destdir)
+		existing <- liftIO (catchMaybeIO $ R.getSymbolicLinkStatus (fromOsPath destdir))
 		case existing of
 			Nothing -> cont
 			Just s
@@ -217,10 +217,8 @@ startLocal o addunlockedmatcher largematcher mode (srcfile, destfile) =
 		createWorkTreeDirectory (parentDir destfile)
 		unwind <- liftIO $ if mode == Duplicate || mode == SkipDuplicates
 			then do
-				void $ copyFileExternal CopyAllMetaData
-					(fromRawFilePath srcfile)
-					(fromRawFilePath destfile)
-				return $ removeWhenExistsWith R.removeLink destfile
+				void $ copyFileExternal CopyAllMetaData srcfile destfile
+				return $ removeWhenExistsWith removeFile destfile
 			else do
 				moveFile srcfile destfile
 				return $ moveFile destfile srcfile
@@ -241,7 +239,7 @@ startLocal o addunlockedmatcher largematcher mode (srcfile, destfile) =
 			-- weakly the same as the originally locked down file's
 			-- inode cache. (Since the file may have been copied,
 			-- its inodes may not be the same.)
-			s <- liftIO $ R.getSymbolicLinkStatus destfile
+			s <- liftIO $ R.getSymbolicLinkStatus (fromOsPath destfile)
 			newcache <- withTSDelta $ \d -> liftIO $ toInodeCache d destfile s
 			let unchanged = case (newcache, inodeCache (keySource ld)) of
 				(_, Nothing) -> True
@@ -287,7 +285,7 @@ startLocal o addunlockedmatcher largematcher mode (srcfile, destfile) =
 			-- the file gets copied into the repository.
 			, checkWritePerms = False
 			}
-		v <- lockDown cfg (fromRawFilePath srcfile)
+		v <- lockDown cfg srcfile
 		case v of
 			Just ld -> do
 				backend <- chooseBackend destfile
@@ -314,7 +312,7 @@ startLocal o addunlockedmatcher largematcher mode (srcfile, destfile) =
 		showNote (s <> "; skipping")
 		next (return True)
 
-verifyExisting :: Key -> RawFilePath -> (CommandPerform, CommandPerform) -> CommandPerform
+verifyExisting :: Key -> OsPath -> (CommandPerform, CommandPerform) -> CommandPerform
 verifyExisting key destfile (yes, no) = do
 	-- Look up the numcopies setting for the file that it would be
 	-- imported to, if it were imported.
diff --git a/Command/PostReceive.hs b/Command/PostReceive.hs
index 3ad80d8321..fd1c6b035d 100644
--- a/Command/PostReceive.hs
+++ b/Command/PostReceive.hs
@@ -9,6 +9,7 @@
 
 module Command.PostReceive where
 
+import Common
 import Command
 import qualified Annex
 import Annex.UpdateInstead
@@ -107,12 +108,11 @@ fixPostReceiveHookEnv :: Annex ()
 fixPostReceiveHookEnv = do
 	g <- Annex.gitRepo
 	case location g of
-		Local { gitdir = ".", worktree = Just "." } ->
+		l@(Local {}) | gitdir l == literalOsPath "." && worktree l == Just (literalOsPath ".") ->
 			Annex.adjustGitRepo $ \g' -> pure $ g'
 				{ location = case location g' of
 					loc@(Local {}) -> loc
-						{ worktree = Just ".." }
+						{ worktree = Just (literalOsPath "..") }
 					loc -> loc
 				}
 		_ -> noop
-
diff --git a/Command/Sync.hs b/Command/Sync.hs
index 5b2fa3c380..7b74f83b71 100644
--- a/Command/Sync.hs
+++ b/Command/Sync.hs
@@ -110,7 +110,7 @@ data SyncOptions = SyncOptions
 	, pushOption :: Bool
 	, contentOption :: Maybe Bool
 	, noContentOption :: Maybe Bool
-	, contentOfOption :: [FilePath]
+	, contentOfOption :: [OsPath]
 	, cleanupOption :: Bool
 	, keyOptions :: Maybe KeyOptions
 	, resolveMergeOverride :: Bool
@@ -201,7 +201,7 @@ optParser mode desc = SyncOptions
 			<> short 'g'
 			<> help "do not transfer annexed file contents"
 		)))
-	<*> many (strOption
+	<*> many (stringToOsPath <$> strOption
 		( long "content-of"
 		<> short 'C'
 		<> help "transfer contents of annexed files in a given location"
@@ -248,7 +248,7 @@ instance DeferredParseClass SyncOptions where
 		<*> pure (pushOption v)
 		<*> pure (contentOption v)
 		<*> pure (noContentOption v)
-		<*> liftIO (mapM (fromRawFilePath <$$> absPath . toRawFilePath) (contentOfOption v))
+		<*> liftIO (mapM absPath (contentOfOption v))
 		<*> pure (cleanupOption v)
 		<*> pure (keyOptions v)
 		<*> pure (resolveMergeOverride v)
@@ -340,7 +340,7 @@ seek' o = startConcurrency transferStages $ do
 - of the repo. This also means that sync always acts on all files in the
 - repository, not just on a subdirectory. -}
 prepMerge :: Annex ()
-prepMerge = Annex.changeDirectory . fromRawFilePath =<< fromRepo Git.repoPath
+prepMerge = Annex.changeDirectory =<< fromRepo Git.repoPath
 
 mergeConfig :: Bool -> Annex [Git.Merge.MergeConfig]
 mergeConfig mergeunrelated = do
@@ -681,7 +681,7 @@ pushRemote o remote (Just branch, _) = do
 			Nothing -> return True
 			Just wt -> ifM needemulation
 				( gitAnnexChildProcess "post-receive" []
-					(\cp -> cp { cwd = Just (fromRawFilePath wt) })
+					(\cp -> cp { cwd = Just (fromOsPath wt) })
 					(\_ _ _ pid -> waitForProcess pid >>= return . \case
 						ExitSuccess -> True
 						_ -> False
@@ -820,11 +820,13 @@ seekSyncContent o rs currbranch = do
 			)
 		_ -> case currbranch of
 			(Just origbranch, Just adj) | adjustmentHidesFiles adj -> do
-				l <- workTreeItems' (AllowHidden True) ww (contentOfOption o)
+				l <- workTreeItems' (AllowHidden True) ww
+					(map fromOsPath (contentOfOption o))
 				seekincludinghidden origbranch mvar l (const noop)
 				pure Nothing
 			_ -> do
-				l <- workTreeItems ww (contentOfOption o)
+				l <- workTreeItems ww
+					(map fromOsPath (contentOfOption o))
 				seekworktree mvar l (const noop)
 				pure Nothing
 	waitForAllRunningCommandActions
@@ -1013,7 +1015,7 @@ seekExportContent' o rs (mcurrbranch, madj)
 			mtree <- inRepo $ Git.Ref.tree b
 			let addsubdir = case snd (splitRemoteAnnexTrackingBranchSubdir b) of
 				Just subdir -> \cb -> Git.Ref $
-					Git.fromRef' cb <> ":" <> getTopFilePath subdir
+					Git.fromRef' cb <> ":" <> fromOsPath (getTopFilePath subdir)
 				Nothing -> id
 			mcurrtree <- maybe (pure Nothing)
 				(inRepo . Git.Ref.tree . addsubdir)
diff --git a/Remote/Borg.hs b/Remote/Borg.hs
index d8d17355f9..aa68455b85 100644
--- a/Remote/Borg.hs
+++ b/Remote/Borg.hs
@@ -233,7 +233,7 @@ listImportableContentsM u borgrepo c = prompt $ do
 			-- importable keys, so avoids needing to buffer all
 			-- the rest of the files in memory.
 			in case ThirdPartyPopulated.importKey' loc reqsz of
-				Just k -> (fromOsPath loc, (borgContentIdentifier, retsz k))
+				Just k -> (loc, (borgContentIdentifier, retsz k))
 					: parsefilelist archivename rest
 				Nothing -> parsefilelist archivename rest
 	parsefilelist _ _ = []
@@ -296,7 +296,7 @@ extractImportLocation loc = go $ splitDirectories $
 -- last imported tree. And the contents of those archives can be retrieved
 -- by listing the subtree recursively, which will likely be quite a lot
 -- faster than running borg.
-getImported :: UUID -> Annex (M.Map BorgArchiveName (Annex [(RawFilePath, (ContentIdentifier, ByteSize))]))
+getImported :: UUID -> Annex (M.Map BorgArchiveName (Annex [(OsPath, (ContentIdentifier, ByteSize))]))
 getImported u = M.unions <$> (mapM go . exportedTreeishes =<< getExport u)
   where
 	go t = M.fromList . mapMaybe mk
@@ -317,7 +317,7 @@ getImported u = M.unions <$> (mapM go . exportedTreeishes =<< getExport u)
 			mkImportLocation $ getTopFilePath $ LsTree.file ti
 		k <- fileKey (takeFileName f)
 		return
-			( fromOsPath (genImportLocation f)
+			( genImportLocation f
 			, ( borgContentIdentifier
 			  -- defaulting to 0 size is ok, this size
diff --git a/Types/Import.hs b/Types/Import.hs
index 032b920f8b..c17adb4115 100644
--- a/Types/Import.hs
+++ b/Types/Import.hs
@@ -94,7 +94,7 @@ data ImportableContentsChunkable m info
 - of the main tree. Nested subtrees are not allowed. -}
 data ImportableContentsChunk m info = ImportableContentsChunk
 	{ importableContentsSubDir :: ImportChunkSubDir
-	, importableContentsSubTree :: [(RawFilePath, info)]
+	, importableContentsSubTree :: [(OsPath, info)]
 	-- ^ locations are relative to importableContentsSubDir
 	, importableContentsNextChunk :: m (Maybe (ImportableContentsChunk m info))
 	-- ^ Continuation to get the next chunk.
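
The pattern applied throughout the patch is uniform: paths are carried as OsPath values, literal path components are built with helpers such as literalOsPath, and fromOsPath/toOsPath conversions are pushed out to the few call sites that still need a FilePath or ByteString. A minimal sketch of that style, written against the public System.OsPath (filepath) and System.File.OsPath (file-io) APIs rather than git-annex's internal wrappers; the function names and the "git-remote-annex/manifest" layout below are illustrative only:

    {-# LANGUAGE QuasiQuotes #-}
    module Main where

    import System.OsPath (OsPath, osp, (</>), encodeFS, decodeFS)
    import qualified System.File.OsPath as F
    import qualified Data.ByteString as B

    -- Join a runtime directory with literal components, staying in OsPath;
    -- encodeFS converts an incoming FilePath once, at the boundary.
    manifestPath :: FilePath -> IO OsPath
    manifestPath repodir = do
        repo <- encodeFS repodir
        pure (repo </> [osp|git-remote-annex|] </> [osp|manifest|])

    -- file-io reads OsPath values directly, so no conversion is needed for I/O.
    readManifest :: OsPath -> IO B.ByteString
    readManifest = F.readFile'

    main :: IO ()
    main = do
        p <- manifestPath "/tmp/demo"
        -- Convert back to FilePath only where a String is actually required.
        putStrLn =<< decodeFS p

git-annex's own helpers seen in the diff (literalOsPath, fromOsPath, toOsPath, and its F module for file I/O) play the same roles as osp, decodeFS, and encodeFS here.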