Merge branch 'master' into concurrentprogress

Conflicts:
	Command/Fsck.hs
	Messages.hs
	Remote/Directory.hs
	Remote/Git.hs
	Remote/Helper/Special.hs
	Types/Remote.hs
	debian/changelog
	git-annex.cabal

commit e27b97d364
378 changed files with 4978 additions and 1158 deletions

@@ -162,9 +162,13 @@ retrieveCheap _ (UnpaddedChunks _) _ _ _ = return False
 retrieveCheap _ (LegacyChunks _) _ _ _ = return False
 #ifndef mingw32_HOST_OS
 retrieveCheap d NoChunks k _af f = liftIO $ catchBoolIO $ do
-	file <- getLocation d k
-	createSymbolicLink file f
-	return True
+	file <- absPath =<< getLocation d k
+	ifM (doesFileExist file)
+		( do
+			createSymbolicLink file f
+			return True
+		, return False
+		)
 #else
 retrieveCheap _ _ _ _ _ = return False
 #endif
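The retrieveCheap change above (apparently the Remote/Directory.hs conflict listed in the merge message) stops claiming success when the object file is missing: it resolves the location to an absolute path and only symlinks when the file exists. A minimal standalone sketch of that pattern using plain directory/unix calls; cheapLink is an illustrative stand-in for git-annex's absPath/catchBoolIO handling, makeAbsolute assumes directory >= 1.2.2, and like the #ifndef above it is POSIX-only:

import Control.Exception (IOException, try)
import System.Directory (doesFileExist, makeAbsolute)
import System.Posix.Files (createSymbolicLink)

-- Symlink dest to an absolute path for src, but only report success
-- when src really exists; any IO error also comes back as False.
cheapLink :: FilePath -> FilePath -> IO Bool
cheapLink src dest = do
    r <- try go :: IO (Either IOException Bool)
    return (either (const False) id r)
  where
    go = do
        file <- makeAbsolute src
        exists <- doesFileExist file
        if exists
            then createSymbolicLink file dest >> return True
            else return False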
@@ -397,7 +397,7 @@ getGCryptId fast r gc
 	| Git.repoIsLocal r || Git.repoIsLocalUnknown r = extract <$>
 		liftIO (catchMaybeIO $ Git.Config.read r)
 	| not fast = extract . liftM fst <$> getM (eitherToMaybe <$>)
-		[ Ssh.onRemote r (Git.Config.fromPipe r, return (Left undefined)) "configlist" [] []
+		[ Ssh.onRemote r (Git.Config.fromPipe r, return (Left $ error "configlist failed")) "configlist" [] []
		, getConfigViaRsync r gc
		]
	| otherwise = return (Nothing, r)
@@ -200,7 +200,7 @@ tryGitConfigRead :: Git.Repo -> Annex Git.Repo
 tryGitConfigRead r
 	| haveconfig r = return r -- already read
 	| Git.repoIsSsh r = store $ do
-		v <- Ssh.onRemote r (pipedconfig, return (Left undefined)) "configlist" [] []
+		v <- Ssh.onRemote r (pipedconfig, return (Left $ error "configlist failed")) "configlist" [] []
		case v of
			Right r'
				| haveconfig r' -> return r'
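The two hunks above replace a placeholder undefined in the ssh fallback with a labelled error. Presumably the value is only forced when running configlist on the remote fails, so when that path is hit the user now sees a message naming the failed command instead of a bare Prelude.undefined exception. A tiny illustration of the difference (fallback/describe are made up for this note):

-- Forcing the contents of the fallback is what raises the exception,
-- so a labelled error gives a far better diagnostic than undefined.
fallback :: Either String a
fallback = Left (error "configlist failed")

describe :: Either String a -> String
describe (Left msg) = "fallback used: " ++ msg  -- forcing msg raises the error
describe (Right _) = "remote command succeeded"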
@@ -229,9 +229,10 @@ tryGitConfigRead r
 		uo <- Url.getUrlOptions
 		v <- liftIO $ withTmpFile "git-annex.tmp" $ \tmpfile h -> do
 			hClose h
-			ifM (Url.downloadQuiet (Git.repoLocation r ++ "/config") tmpfile uo)
+			let url = Git.repoLocation r ++ "/config"
+			ifM (Url.downloadQuiet url tmpfile uo)
 				( pipedconfig "git" [Param "config", Param "--null", Param "--list", Param "--file", File tmpfile]
-				, return $ Left undefined
+				, return $ Left $ error $ "unable to load config from " ++ url
 				)
 		case v of
 			Left _ -> do
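The hunk above names the downloaded config's URL so failures can report it, and still hands parsing off to git itself via git config --null --list --file. A hedged sketch of that parsing step using System.Process directly, outside git-annex's pipedconfig plumbing (dumpConfig and splitNul are made-up names for this note):

import System.Process (readProcess)

-- Ask git to dump the given config file as NUL-terminated entries,
-- which is unambiguous to split even when values contain newlines.
dumpConfig :: FilePath -> IO [String]
dumpConfig tmpfile = do
    out <- readProcess "git" ["config", "--null", "--list", "--file", tmpfile] ""
    return (filter (not . null) (splitNul out))
  where
    splitNul = foldr step [[]]
      where
        step '\0' acc = [] : acc
        step c (cur:rest) = (c : cur) : rest
        step _ [] = [[]]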
@@ -450,10 +451,17 @@ copyFromRemote' r key file dest meterupdate
 copyFromRemoteCheap :: Remote -> Key -> AssociatedFile -> FilePath -> Annex Bool
 #ifndef mingw32_HOST_OS
 copyFromRemoteCheap r key af file
-	| not $ Git.repoIsUrl (repo r) = guardUsable (repo r) (return False) $ do
-		loc <- liftIO $ gitAnnexLocation key (repo r) $
+	| not $ Git.repoIsUrl (repo r) = guardUsable (repo r) (return False) $ liftIO $ do
+		loc <- gitAnnexLocation key (repo r) $
 			fromJust $ remoteGitConfig $ gitconfig r
-		liftIO $ catchBoolIO $ createSymbolicLink loc file >> return True
+		ifM (doesFileExist loc)
+			( do
+				absloc <- absPath loc
+				catchBoolIO $ do
+					createSymbolicLink absloc file
+					return True
+			, return False
+			)
 	| Git.repoIsSsh (repo r) =
 		ifM (Annex.Content.preseedTmp key file)
 			( parallelMetered Nothing key af $

@@ -72,7 +72,7 @@ chunkKeyStream basek chunksize = ChunkKeyStream $ map mk [1..]
 
 nextChunkKeyStream :: ChunkKeyStream -> (Key, ChunkKeyStream)
 nextChunkKeyStream (ChunkKeyStream (k:l)) = (k, ChunkKeyStream l)
-nextChunkKeyStream (ChunkKeyStream []) = undefined -- stream is infinite!
+nextChunkKeyStream (ChunkKeyStream []) = error "expected infinite ChunkKeyStream"
 
 takeChunkKeyStream :: ChunkCount -> ChunkKeyStream -> [Key]
 takeChunkKeyStream n (ChunkKeyStream l) = genericTake n l
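nextChunkKeyStream's empty case is unreachable, because chunkKeyStream builds the stream from the infinite list map mk [1..]; the change only replaces undefined with an error message that says so. A self-contained sketch of the same shape, with a type parameter standing in for git-annex's Key:

import Data.List (genericTake)

newtype ChunkStream a = ChunkStream [a]

-- Built over an infinite list, so the empty case below can never fire.
chunkStream :: (Integer -> a) -> ChunkStream a
chunkStream mk = ChunkStream (map mk [1..])

nextChunk :: ChunkStream a -> (a, ChunkStream a)
nextChunk (ChunkStream (x:xs)) = (x, ChunkStream xs)
nextChunk (ChunkStream []) = error "expected infinite ChunkStream"

takeChunks :: Integer -> ChunkStream a -> [a]
takeChunks n (ChunkStream xs) = genericTake n xs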
@@ -20,7 +20,8 @@ module Remote.Helper.Encryptable (
 ) where
 
 import qualified Data.Map as M
-import qualified "dataenc" Codec.Binary.Base64 as B64
+import qualified "sandi" Codec.Binary.Base64 as B64
+import qualified Data.ByteString as B
 import Data.Bits.Utils
 
 import Common.Annex
@@ -172,12 +173,12 @@ describeEncryption c = case extractCipher c of
 	]
 
 {- Not using Utility.Base64 because these "Strings" are really
- - bags of bytes and that would convert to unicode and not roung-trip
+ - bags of bytes and that would convert to unicode and not round-trip
  - cleanly. -}
 toB64bs :: String -> String
-toB64bs = B64.encode . s2w8
+toB64bs = w82s . B.unpack . B64.encode . B.pack . s2w8
 
 fromB64bs :: String -> String
-fromB64bs s = fromMaybe bad $ w82s <$> B64.decode s
+fromB64bs s = either (const bad) (w82s . B.unpack) (B64.decode $ B.pack $ s2w8 s)
   where
	bad = error "bad base64 encoded data"
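The switch from dataenc to sandi changes the Base64 API from list-based encode/decode to strict ByteString functions, with decode returning an Either. A rough standalone sketch of the byte-level round-trip, using Data.ByteString.Char8 in place of git-annex's s2w8/w82s helpers (so characters above 0xFF would be truncated here, whereas the real code converts explicitly); toB64bytes/fromB64bytes are illustrative names:

import qualified Codec.Binary.Base64 as B64  -- from the sandi package
import qualified Data.ByteString.Char8 as B8

-- Encode a "bag of bytes" String without going through a unicode
-- encoding, so encode followed by decode round-trips byte for byte.
toB64bytes :: String -> String
toB64bytes = B8.unpack . B64.encode . B8.pack

fromB64bytes :: String -> String
fromB64bytes = either (const bad) B8.unpack . B64.decode . B8.pack
  where
    bad = error "bad base64 encoded data"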
@@ -199,7 +199,7 @@ specialRemote' cfg c preparestorer prepareretriever prepareremover preparecheckp
 			readBytes $ \encb ->
 				storer (enck k) (ByteContent encb) p
 
-	-- call retrieve-r to get chunks; decrypt them; stream to dest file
+	-- call retriever to get chunks; decrypt them; stream to dest file
 	retrieveKeyFileGen k f dest p enc =
 		safely $ prepareretriever k $ safely . go
 	  where

Remote/S3.hs (54 changed lines)
@@ -28,6 +28,8 @@ import Control.Monad.Trans.Resource
 import Control.Monad.Catch
 import Data.Conduit
 import Data.IORef
+import Data.Bits.Utils
+import System.Log.Logger
 
 import Common.Annex
 import Types.Remote
@@ -88,13 +90,7 @@ gen r u c gc = do
 		, availability = GloballyAvailable
 		, remotetype = remote
 		, mkUnavailable = gen r u (M.insert "host" "!dne!" c) gc
-		, getInfo = includeCredsInfo c (AWS.creds u) $ catMaybes
-			[ Just ("bucket", fromMaybe "unknown" (getBucketName c))
-			, if configIA c
-				then Just ("internet archive item", iaItemUrl $ fromMaybe "unknown" $ getBucketName c)
-				else Nothing
-			, Just ("partsize", maybe "unlimited" (roughSize storageUnits False) (getPartSize c))
-			]
+		, getInfo = includeCredsInfo c (AWS.creds u) (s3Info c)
 		, claimUrl = Nothing
 		, checkUrl = Nothing
 		}
@@ -102,9 +98,9 @@ gen r u c gc = do
 s3Setup :: Maybe UUID -> Maybe CredPair -> RemoteConfig -> Annex (RemoteConfig, UUID)
 s3Setup mu mcreds c = do
 	u <- maybe (liftIO genUUID) return mu
-	s3Setup' u mcreds c
-s3Setup' :: UUID -> Maybe CredPair -> RemoteConfig -> Annex (RemoteConfig, UUID)
-s3Setup' u mcreds c = if configIA c then archiveorg else defaulthost
+	s3Setup' (isNothing mu) u mcreds c
+s3Setup' :: Bool -> UUID -> Maybe CredPair -> RemoteConfig -> Annex (RemoteConfig, UUID)
+s3Setup' new u mcreds c = if configIA c then archiveorg else defaulthost
   where
	remotename = fromJust (M.lookup "name" c)
	defbucket = remotename ++ "-" ++ fromUUID u
@@ -124,7 +120,8 @@ s3Setup' u mcreds c = if configIA c then archiveorg else defaulthost
 		(c', encsetup) <- encryptionSetup c
 		c'' <- setRemoteCredPair encsetup c' (AWS.creds u) mcreds
 		let fullconfig = c'' `M.union` defaults
-		genBucket fullconfig u
+		when new $
+			genBucket fullconfig u
 		use fullconfig
 
 	archiveorg = do
@@ -132,7 +129,7 @@ s3Setup' u mcreds c = if configIA c then archiveorg else defaulthost
 		c' <- setRemoteCredPair noEncryptionUsed c (AWS.creds u) mcreds
 		-- Ensure user enters a valid bucket name, since
 		-- this determines the name of the archive.org item.
-		let validbucket = replace " " "-" $ map toLower $
+		let validbucket = replace " " "-" $
 			fromMaybe (error "specify bucket=") $
 			getBucketName c'
 		let archiveconfig =
@@ -149,7 +146,7 @@ s3Setup' u mcreds c = if configIA c then archiveorg else defaulthost
 		writeUUIDFile archiveconfig u
 		use archiveconfig
 
--- Sets up a http connection manager for S3 encdpoint, which allows
+-- Sets up a http connection manager for S3 endpoint, which allows
 -- http connections to be reused across calls to the helper.
 prepareS3 :: Remote -> S3Info -> (S3Handle -> helper) -> Preparer helper
 prepareS3 r info = resourcePrepare $ const $
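The comment fixed above describes the general technique: create one http-client Manager and route every request through it, so connections to the same endpoint can be reused instead of being reopened per call. A minimal sketch of that pattern with the http-client package, independent of git-annex's Preparer/resourcePrepare machinery (withSharedManager is a made-up name; newer http-client versions make closeManager a no-op, but it is what the diff's bracketIO uses):

import Control.Exception (bracket)
import Network.HTTP.Client
    (Manager, closeManager, defaultManagerSettings, newManager)

-- Run an action with a shared connection manager; every request made
-- through mgr can reuse pooled connections to the same host.
withSharedManager :: (Manager -> IO a) -> IO a
withSharedManager = bracket (newManager defaultManagerSettings) closeManager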
@@ -388,13 +385,13 @@ sendS3Handle'
 	=> S3Handle
 	-> r
 	-> ResourceT IO a
-sendS3Handle' h = AWS.pureAws (hawscfg h) (hs3cfg h) (hmanager h)
+sendS3Handle' h r = AWS.pureAws (hawscfg h) (hs3cfg h) (hmanager h) r
 
 withS3Handle :: RemoteConfig -> UUID -> S3Info -> (S3Handle -> Annex a) -> Annex a
 withS3Handle c u info a = do
 	creds <- getRemoteCredPairFor "S3" c (AWS.creds u)
 	awscreds <- liftIO $ genCredentials $ fromMaybe nocreds creds
-	let awscfg = AWS.Configuration AWS.Timestamp awscreds (AWS.defaultLog AWS.Error)
+	let awscfg = AWS.Configuration AWS.Timestamp awscreds debugMapper
 	bracketIO (newManager httpcfg) closeManager $ \mgr ->
 		a $ S3Handle mgr awscfg s3cfg info
   where
@@ -450,7 +447,7 @@ extractS3Info c = do
 	}
 
 getBucketName :: RemoteConfig -> Maybe BucketName
-getBucketName = M.lookup "bucket"
+getBucketName = map toLower <$$> M.lookup "bucket"
 
 getStorageClass :: RemoteConfig -> S3.StorageClass
 getStorageClass c = case M.lookup "storageclass" c of
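getBucketName now normalizes the configured bucket name to lower case. The <$$> operator comes from git-annex's utility modules; as used here it maps a pure function over the result of a lookup that returns a Maybe. A rough standalone equivalent, with the operator defined locally (its exact definition in git-annex is assumed for this sketch):

import Data.Char (toLower)
import qualified Data.Map as M

-- Local stand-in for the utility operator used above: apply f inside
-- the functor produced by g.
(<$$>) :: Functor f => (a -> b) -> (c -> f a) -> c -> f b
f <$$> g = fmap f . g

getBucket :: M.Map String String -> Maybe String
getBucket = map toLower <$$> M.lookup "bucket"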
@@ -486,7 +483,7 @@ iaMunge = (>>= munge)
   where
	munge c
		| isAsciiUpper c || isAsciiLower c || isNumber c = [c]
-		| c `elem` "_-.\"" = [c]
+		| c `elem` ("_-.\"" :: String) = [c]
		| isSpace c = []
		| otherwise = "&" ++ show (ord c) ++ ";"
 
@@ -518,3 +515,26 @@ genCredentials (keyid, secret) = AWS.Credentials
 mkLocationConstraint :: AWS.Region -> S3.LocationConstraint
 mkLocationConstraint "US" = S3.locationUsClassic
 mkLocationConstraint r = r
+
+debugMapper :: AWS.Logger
+debugMapper level t = forward "S3" (T.unpack t)
+  where
+	forward = case level of
+		AWS.Debug -> debugM
+		AWS.Info -> infoM
+		AWS.Warning -> warningM
+		AWS.Error -> errorM
+
+s3Info :: RemoteConfig -> [(String, String)]
+s3Info c = catMaybes
+	[ Just ("bucket", fromMaybe "unknown" (getBucketName c))
+	, Just ("endpoint", w82s (S.unpack (S3.s3Endpoint s3c)))
+	, Just ("port", show (S3.s3Port s3c))
+	, Just ("storage class", show (getStorageClass c))
+	, if configIA c
+		then Just ("internet archive item", iaItemUrl $ fromMaybe "unknown" $ getBucketName c)
+		else Nothing
+	, Just ("partsize", maybe "unlimited" (roughSize storageUnits False) (getPartSize c))
+	]
+  where
+	s3c = s3Configuration c

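debugMapper above forwards the aws library's log messages into hslogger under the "S3" logger name at the matching priority. How those messages are surfaced is up to the program; a hedged sketch of one way to see them when experimenting (enableDebugLogging is a made-up name, and git-annex wires this into its own --debug handling rather than doing exactly this):

import System.Log.Logger
    (Priority(DEBUG), debugM, rootLoggerName, setLevel, updateGlobalLogger)

-- Lower the root logger to DEBUG so messages forwarded by debugMapper
-- (and anything else logged through hslogger) are actually printed.
enableDebugLogging :: IO ()
enableDebugLogging = do
    updateGlobalLogger rootLoggerName (setLevel DEBUG)
    debugM "S3" "debug logging enabled"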
@@ -177,7 +177,7 @@ getSharedConvergenceSecret configdir = go (60 :: Int)
 			v <- catchMaybeIO (readFile f)
 			case v of
 				Just s | "\n" `isSuffixOf` s || "\r" `isSuffixOf` s ->
-					return $ takeWhile (`notElem` "\n\r") s
+					return $ takeWhile (`notElem` ("\n\r" :: String)) s
 				_ -> do
 					threadDelaySeconds (Seconds 1)
 					go (n - 1)
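This hunk, like the iaMunge one above, pins a string literal to String. Such annotations are typically needed when OverloadedStrings is in effect and elem/notElem are the Foldable-generalized versions, since a bare literal's container type is then ambiguous; whether that is the exact motivation here is an assumption. A tiny self-contained example of the same annotation:

{-# LANGUAGE OverloadedStrings #-}

-- Without the (:: String) annotation, "\n\r" is IsString a => a and the
-- Foldable-generalized notElem leaves its container type ambiguous.
stripEol :: String -> String
stripEol = takeWhile (`notElem` ("\n\r" :: String))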