Merge branch 'master' into ghc7.0

This commit is contained in:
Joey Hess 2012-03-04 10:51:30 -04:00
commit cd1cdd0c46
24 changed files with 375 additions and 104 deletions

View file

@ -47,6 +47,7 @@ start = do
let (name, action) = case from of
Nothing -> (".", checkUnused)
Just "." -> (".", checkUnused)
Just "here" -> (".", checkUnused)
Just n -> (n, checkRemoteUnused n)
showStart "unused" name
next action

View file

@ -1,6 +1,6 @@
PREFIX=/usr
IGNORE=-ignore-package monads-fd
GHCFLAGS=-O2 -Wall $(IGNORE) -fspec-constr-count=8
GHCFLAGS=-O2 -Wall $(IGNORE)
ifdef PROFILE
GHCFLAGS=-prof -auto-all -rtsopts -caf-all -fforce-recomp $(IGNORE)

View file

@ -10,6 +10,8 @@ module Messages (
showNote,
showAction,
showProgress,
metered,
MeterUpdate,
showSideAction,
showOutput,
showLongNote,
@ -29,9 +31,13 @@ module Messages (
) where
import Text.JSON
import Data.Progress.Meter
import Data.Progress.Tracker
import Data.Quantity
import Common
import Types
import Types.Key
import qualified Annex
import qualified Messages.JSON as JSON
@ -46,10 +52,29 @@ showNote s = handle (JSON.note s) $
showAction :: String -> Annex ()
showAction s = showNote $ s ++ "..."
{- Progress dots. -}
showProgress :: Annex ()
showProgress = handle q $
flushed $ putStr "."
{- Shows a progress meter while performing a transfer of a key.
- The action is passed a callback to use to update the meter. -}
type MeterUpdate = Integer -> IO ()
metered :: Key -> (MeterUpdate -> Annex a) -> Annex a
metered key a = Annex.getState Annex.output >>= go (keySize key)
where
go (Just size) Annex.NormalOutput = do
progress <- liftIO $ newProgress "" size
meter <- liftIO $ newMeter progress "B" 25 (renderNums binaryOpts 1)
showOutput
liftIO $ displayMeter stdout meter
r <- a $ \n -> liftIO $ do
incrP progress n
displayMeter stdout meter
liftIO $ clearMeter stdout meter
return r
go _ _ = a (const $ return ())
showSideAction :: String -> Annex ()
showSideAction s = handle q $
putStrLn $ "(" ++ s ++ "...)"

View file

@ -90,6 +90,7 @@ byName' n = do
- .git/config. -}
nameToUUID :: String -> Annex UUID
nameToUUID "." = getUUID -- special case for current repo
nameToUUID "here" = getUUID
nameToUUID "" = error "no remote specified"
nameToUUID n = byName' n >>= go
where

View file

@ -129,8 +129,8 @@ retrieve buprepo k f = do
retrieveCheap :: BupRepo -> Key -> FilePath -> Annex Bool
retrieveCheap _ _ _ = return False
retrieveEncrypted :: BupRepo -> (Cipher, Key) -> FilePath -> Annex Bool
retrieveEncrypted buprepo (cipher, enck) f = do
retrieveEncrypted :: BupRepo -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
retrieveEncrypted buprepo (cipher, enck) _ f = do
let params = bupParams "join" buprepo [Param $ show enck]
liftIO $ catchBoolIO $ do
(pid, h) <- hPipeFrom "bup" $ toCommand params

View file

@ -1,6 +1,6 @@
{- A "remote" that is just a filesystem directory.
-
- Copyright 2011 Joey Hess <joey@kitenet.net>
- Copyright 2011-2012 Joey Hess <joey@kitenet.net>
-
- Licensed under the GNU GPL version 3 or higher.
-}
@ -8,10 +8,11 @@
module Remote.Directory (remote) where
import qualified Data.ByteString.Lazy.Char8 as L
import qualified Data.ByteString.Char8 as S
import qualified Data.Map as M
import Control.Exception (bracket)
import Common.Annex
import Utility.CopyFile
import Types.Remote
import qualified Git
import Config
@ -19,6 +20,8 @@ import Utility.FileMode
import Remote.Helper.Special
import Remote.Helper.Encryptable
import Crypto
import Utility.DataUnits
import Data.Int
remote :: RemoteType
remote = RemoteType {
@ -32,24 +35,39 @@ gen :: Git.Repo -> UUID -> Maybe RemoteConfig -> Annex Remote
gen r u c = do
dir <- getConfig r "directory" (error "missing directory")
cst <- remoteCost r cheapRemoteCost
let chunksize = chunkSize c
return $ encryptableRemote c
(storeEncrypted dir)
(retrieveEncrypted dir)
(storeEncrypted dir chunksize)
(retrieveEncrypted dir chunksize)
Remote {
uuid = u,
cost = cst,
name = Git.repoDescribe r,
storeKey = store dir,
retrieveKeyFile = retrieve dir,
retrieveKeyFileCheap = retrieveCheap dir,
removeKey = remove dir,
hasKey = checkPresent dir,
storeKey = store dir chunksize,
retrieveKeyFile = retrieve dir chunksize,
retrieveKeyFileCheap = retrieveCheap dir chunksize,
removeKey = remove dir chunksize,
hasKey = checkPresent dir chunksize,
hasKeyCheap = True,
whereisKey = Nothing,
config = Nothing,
repo = r,
remotetype = remote
}
where
type ChunkSize = Maybe Int64
chunkSize :: Maybe RemoteConfig -> ChunkSize
chunkSize Nothing = Nothing
chunkSize (Just m) =
case M.lookup "chunksize" m of
Nothing -> Nothing
Just v -> case readSize dataUnits v of
Nothing -> error "bad chunksize"
Just size
| size <= 0 -> error "bad chunksize"
| otherwise -> Just $ fromInteger size
directorySetup :: UUID -> RemoteConfig -> Annex RemoteConfig
directorySetup u c = do
@ -69,69 +87,182 @@ directorySetup u c = do
locations :: FilePath -> Key -> [FilePath]
locations d k = map (d </>) (keyPaths k)
withCheckedFile :: (FilePath -> IO Bool) -> FilePath -> Key -> (FilePath -> IO Bool) -> IO Bool
withCheckedFile _ [] _ _ = return False
withCheckedFile check d k a = go $ locations d k
{- An infinite stream of chunks to use for a given file. -}
chunkStream :: FilePath -> [FilePath]
chunkStream f = map (\n -> f ++ ".chunk" ++ show n) [1 :: Integer ..]
{- A file that records the number of chunks used. -}
chunkCount :: FilePath -> FilePath
chunkCount f = f ++ ".chunkcount"
withCheckedFiles :: (FilePath -> IO Bool) -> ChunkSize -> FilePath -> Key -> ([FilePath] -> IO Bool) -> IO Bool
withCheckedFiles _ _ [] _ _ = return False
withCheckedFiles check Nothing d k a = go $ locations d k
where
go [] = return False
go (f:fs) = do
use <- check f
if use
then a f
then a [f]
else go fs
withStoredFile :: FilePath -> Key -> (FilePath -> IO Bool) -> IO Bool
withStoredFile = withCheckedFile doesFileExist
store :: FilePath -> Key -> Annex Bool
store d k = do
src <- inRepo $ gitAnnexLocation k
liftIO $ catchBoolIO $ storeHelper d k $ copyFileExternal src
storeEncrypted :: FilePath -> (Cipher, Key) -> Key -> Annex Bool
storeEncrypted d (cipher, enck) k = do
src <- inRepo $ gitAnnexLocation k
liftIO $ catchBoolIO $ storeHelper d enck $ encrypt src
withCheckedFiles check (Just _) d k a = go $ locations d k
where
encrypt src dest = do
withEncryptedContent cipher (L.readFile src) $ L.writeFile dest
return True
go [] = return False
go (f:fs) = do
let chunkcount = chunkCount f
use <- check chunkcount
if use
then do
count <- readcount chunkcount
let chunks = take count $ chunkStream f
ok <- all id <$> mapM check chunks
if ok
then a chunks
else return False
else go fs
readcount f = fromMaybe (error $ "cannot parse " ++ f)
. (readish :: String -> Maybe Int)
<$> readFile f
storeHelper :: FilePath -> Key -> (FilePath -> IO Bool) -> IO Bool
storeHelper d key a = do
let dest = Prelude.head $ locations d key
let tmpdest = dest ++ ".tmp"
let dir = parentDir dest
withStoredFiles :: ChunkSize -> FilePath -> Key -> ([FilePath] -> IO Bool) -> IO Bool
withStoredFiles = withCheckedFiles doesFileExist
store :: FilePath -> ChunkSize -> Key -> Annex Bool
store d chunksize k = do
src <- inRepo $ gitAnnexLocation k
metered k $ \meterupdate ->
liftIO $ catchBoolIO $ storeHelper d chunksize k $ \dests ->
case chunksize of
Nothing -> do
let dest = Prelude.head dests
meteredWriteFile meterupdate dest
=<< L.readFile src
return [dest]
Just _ ->
storeSplit meterupdate chunksize dests
=<< L.readFile src
storeEncrypted :: FilePath -> ChunkSize -> (Cipher, Key) -> Key -> Annex Bool
storeEncrypted d chunksize (cipher, enck) k = do
src <- inRepo $ gitAnnexLocation k
metered k $ \meterupdate ->
liftIO $ catchBoolIO $ storeHelper d chunksize enck $ \dests ->
withEncryptedContent cipher (L.readFile src) $ \s ->
case chunksize of
Nothing -> do
let dest = Prelude.head dests
meteredWriteFile meterupdate dest s
return [dest]
Just _ -> storeSplit meterupdate chunksize dests s
{- Splits a ByteString into chunks and writes to dests, obeying configured
- chunk size (not to be confused with the L.ByteString chunk size).
- Note: Must always write at least one file, even for empty ByteString. -}
storeSplit :: MeterUpdate -> ChunkSize -> [FilePath] -> L.ByteString -> IO [FilePath]
storeSplit _ Nothing _ _ = error "bad storeSplit call"
storeSplit _ _ [] _ = error "bad storeSplit call"
storeSplit meterupdate (Just chunksize) alldests@(firstdest:_) b
| L.null b = do
-- must always write at least one file, even for empty
L.writeFile firstdest b
return [firstdest]
| otherwise = storeSplit' meterupdate chunksize alldests (L.toChunks b) []
storeSplit' :: MeterUpdate -> Int64 -> [FilePath] -> [S.ByteString] -> [FilePath] -> IO [FilePath]
storeSplit' _ _ [] _ _ = error "ran out of dests"
storeSplit' _ _ _ [] c = return $ reverse c
storeSplit' meterupdate chunksize (d:dests) bs c = do
bs' <- bracket (openFile d WriteMode) hClose (feed chunksize bs)
storeSplit' meterupdate chunksize dests bs' (d:c)
where
feed _ [] _ = return []
feed sz (l:ls) h = do
let s = fromIntegral $ S.length l
if s <= sz
then do
S.hPut h l
meterupdate $ toInteger s
feed (sz - s) ls h
else return (l:ls)
{- Write a L.ByteString to a file, updating a progress meter
- after each chunk of the L.ByteString, typically every 64 kb or so. -}
meteredWriteFile :: MeterUpdate -> FilePath -> L.ByteString -> IO ()
meteredWriteFile meterupdate dest b =
bracket (openFile dest WriteMode) hClose (feed $ L.toChunks b)
where
feed [] _ = return ()
feed (l:ls) h = do
S.hPut h l
meterupdate $ toInteger $ S.length l
feed ls h
{- Generates a list of destinations to write to in order to store a key.
- When chunksize is specified, this list will be a list of chunks.
- The action should store the file, and return a list of the destinations
- it stored it to, or [] on error.
- The stored files are only put into their final place once storage is
- complete.
-}
storeHelper :: FilePath -> ChunkSize -> Key -> ([FilePath] -> IO [FilePath]) -> IO Bool
storeHelper d chunksize key a = do
let dir = parentDir desttemplate
createDirectoryIfMissing True dir
allowWrite dir
ok <- a tmpdest
when ok $ do
renameFile tmpdest dest
stored <- a tmpdests
forM_ stored $ \f -> do
let dest = detmpprefix f
renameFile f dest
preventWrite dest
preventWrite dir
return ok
when (chunksize /= Nothing) $ do
let chunkcount = chunkCount desttemplate
_ <- tryIO $ allowWrite chunkcount
writeFile chunkcount (show $ length stored)
preventWrite chunkcount
preventWrite dir
return (not $ null stored)
where
desttemplate = Prelude.head $ locations d key
tmpdests = case chunksize of
Nothing -> [desttemplate ++ tmpprefix]
Just _ -> map (++ tmpprefix) (chunkStream desttemplate)
tmpprefix = ".tmp"
detmpprefix f = take (length f - tmpprefixlen) f
tmpprefixlen = length tmpprefix
retrieve :: FilePath -> Key -> FilePath -> Annex Bool
retrieve d k f = liftIO $ withStoredFile d k $ \file -> copyFileExternal file f
retrieve :: FilePath -> ChunkSize -> Key -> FilePath -> Annex Bool
retrieve d chunksize k f = metered k $ \meterupdate ->
liftIO $ withStoredFiles chunksize d k $ \files ->
catchBoolIO $ do
meteredWriteFile meterupdate f =<<
(L.concat <$> mapM L.readFile files)
return True
retrieveCheap :: FilePath -> Key -> FilePath -> Annex Bool
retrieveCheap d k f = liftIO $ withStoredFile d k $ \file ->
catchBoolIO $ createSymbolicLink file f >> return True
retrieveEncrypted :: FilePath -> ChunkSize -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
retrieveEncrypted d chunksize (cipher, enck) k f = metered k $ \meterupdate ->
liftIO $ withStoredFiles chunksize d enck $ \files ->
catchBoolIO $ do
withDecryptedContent cipher (L.concat <$> mapM L.readFile files) $
meteredWriteFile meterupdate f
return True
retrieveEncrypted :: FilePath -> (Cipher, Key) -> FilePath -> Annex Bool
retrieveEncrypted d (cipher, enck) f =
liftIO $ withStoredFile d enck $ \file -> catchBoolIO $ do
withDecryptedContent cipher (L.readFile file) $ L.writeFile f
return True
retrieveCheap :: FilePath -> ChunkSize -> Key -> FilePath -> Annex Bool
retrieveCheap _ (Just _) _ _ = return False -- no cheap retrieval for chunks
retrieveCheap d _ k f = liftIO $ withStoredFiles Nothing d k go
where
go [file] = catchBoolIO $ createSymbolicLink file f >> return True
go _files = return False
remove :: FilePath -> Key -> Annex Bool
remove d k = liftIO $ withStoredFile d k $ \file -> catchBoolIO $ do
let dir = parentDir file
allowWrite dir
removeFile file
removeDirectory dir
return True
remove :: FilePath -> ChunkSize -> Key -> Annex Bool
remove d chunksize k = liftIO $ withStoredFiles chunksize d k go
where
go files = all id <$> mapM removefile files
removefile file = catchBoolIO $ do
let dir = parentDir file
allowWrite dir
removeFile file
_ <- tryIO $ removeDirectory dir
return True
checkPresent :: FilePath -> Key -> Annex (Either String Bool)
checkPresent d k = liftIO $ catchMsgIO $ withStoredFile d k $
const $ return True -- withStoredFile checked that it exists
checkPresent :: FilePath -> ChunkSize -> Key -> Annex (Either String Bool)
checkPresent d chunksize k = liftIO $ catchMsgIO $ withStoredFiles chunksize d k $
const $ return True -- withStoredFiles checked that it exists
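
The chunking machinery above — `chunkStream` for the `.chunkN` names, `storeSplit`/`storeSplit'` to spread a lazy ByteString across those destinations, and the `.chunkcount` file recording how many were written — boils down to splitting a byte stream into size-limited pieces. A standalone sketch of just that splitting step (not the remote's code: it cuts exactly at the byte limit, whereas `storeSplit'` works at the granularity of the lazy string's strict chunks, and `splitBySize` is an invented name):

import qualified Data.ByteString.Lazy as L
import Data.Int (Int64)

-- Cut a lazy ByteString into pieces of at most `size` bytes (size assumed
-- positive, as the remote rejects non-positive chunksize), always producing
-- at least one piece, mirroring the "must always write at least one file"
-- rule noted above.
splitBySize :: Int64 -> L.ByteString -> [L.ByteString]
splitBySize size b
    | L.null b = [L.empty]
    | otherwise = go b
  where
    go s
        | L.null s = []
        | otherwise = let (piece, rest) = L.splitAt size s in piece : go rest

Pairing the resulting pieces with `chunkStream`'s names and writing their count to the `.chunkcount` file gives the layout that `retrieve` reassembles with `L.concat <$> mapM L.readFile files`.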

View file

@ -40,7 +40,7 @@ encryptionSetup c =
encryptableRemote
:: Maybe RemoteConfig
-> ((Cipher, Key) -> Key -> Annex Bool)
-> ((Cipher, Key) -> FilePath -> Annex Bool)
-> ((Cipher, Key) -> Key -> FilePath -> Annex Bool)
-> Remote
-> Remote
encryptableRemote c storeKeyEncrypted retrieveKeyFileEncrypted r =
@ -58,7 +58,7 @@ encryptableRemote c storeKeyEncrypted retrieveKeyFileEncrypted r =
(`storeKeyEncrypted` k)
retrieve k f = cip k >>= maybe
(retrieveKeyFile r k f)
(`retrieveKeyFileEncrypted` f)
(\enck -> retrieveKeyFileEncrypted enck k f)
retrieveCheap k f = cip k >>= maybe
(retrieveKeyFileCheap r k f)
(\_ -> return False)

View file

@ -114,8 +114,8 @@ retrieve h k f = runHook h "retrieve" k (Just f) $ return True
retrieveCheap :: String -> Key -> FilePath -> Annex Bool
retrieveCheap _ _ _ = return False
retrieveEncrypted :: String -> (Cipher, Key) -> FilePath -> Annex Bool
retrieveEncrypted h (cipher, enck) f = withTmp enck $ \tmp ->
retrieveEncrypted :: String -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
retrieveEncrypted h (cipher, enck) _ f = withTmp enck $ \tmp ->
runHook h "retrieve" enck (Just tmp) $ liftIO $ catchBoolIO $ do
withDecryptedContent cipher (L.readFile tmp) $ L.writeFile f
return True

View file

@ -119,8 +119,8 @@ retrieveCheap o k f = do
then retrieve o k f
else return False
retrieveEncrypted :: RsyncOpts -> (Cipher, Key) -> FilePath -> Annex Bool
retrieveEncrypted o (cipher, enck) f = withTmp enck $ \tmp -> do
retrieveEncrypted :: RsyncOpts -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
retrieveEncrypted o (cipher, enck) _ f = withTmp enck $ \tmp -> do
res <- retrieve o enck tmp
if res
then liftIO $ catchBoolIO $ do

View file

@ -144,9 +144,7 @@ storeHelper (conn, bucket) r k file = do
case fromJust $ M.lookup "storageclass" $ fromJust $ config r of
"REDUCED_REDUNDANCY" -> REDUCED_REDUNDANCY
_ -> STANDARD
getsize = do
s <- liftIO $ getFileStatus file
return $ fileSize s
getsize = fileSize <$> (liftIO $ getFileStatus file)
xheaders = filter isxheader $ M.assocs $ fromJust $ config r
isxheader (h, _) = "x-amz-" `isPrefixOf` h
@ -163,8 +161,8 @@ retrieve r k f = s3Action r False $ \(conn, bucket) -> do
retrieveCheap :: Remote -> Key -> FilePath -> Annex Bool
retrieveCheap _ _ _ = return False
retrieveEncrypted :: Remote -> (Cipher, Key) -> FilePath -> Annex Bool
retrieveEncrypted r (cipher, enck) f = s3Action r False $ \(conn, bucket) -> do
retrieveEncrypted :: Remote -> (Cipher, Key) -> Key -> FilePath -> Annex Bool
retrieveEncrypted r (cipher, enck) _ f = s3Action r False $ \(conn, bucket) -> do
res <- liftIO $ getObject conn $ bucketKey r bucket enck
case res of
Right o -> liftIO $

debian/changelog
View file

@ -1,3 +1,20 @@
git-annex (3.20120230) UNRELEASED; urgency=low
* "here" can be used to refer to the current repository,
which can read better than the old "." (which still works too).
* Directory special remotes now support chunking files written to them,
avoiding writing files larger than a specified size.
* Add progress bar display to the directory special remote.
-- Joey Hess <joeyh@debian.org> Thu, 01 Mar 2012 22:34:27 -0400
git-annex (3.20120229) unstable; urgency=low
* Fix test suite to not require a unicode locale.
* Fix cabal build failure. Thanks, Sergei Trofimovich
-- Joey Hess <joeyh@debian.org> Wed, 29 Feb 2012 02:31:31 -0400
git-annex (3.20120227) unstable; urgency=low
* Modifications to support ghc 7.4's handling of filenames.

View file

@ -177,7 +177,7 @@ subdirectories).
The repository to describe can be specified by git remote name or
by uuid. To change the description of the current repository, use
"."
"here".
* initremote name [param=value ...]
@ -196,7 +196,7 @@ subdirectories).
Records that a repository is trusted to not unexpectedly lose
content. Use with care.
To trust the current repository, use "."
To trust the current repository, use "here".
* untrust [repository ...]
@ -273,7 +273,7 @@ subdirectories).
By default, only lists annexed files whose content is currently present.
This can be changed by specifying file matching options. To list all
annexed files, present or not, specify --include "*". To list all
annexed files whose content is not present, specify --not --in="."
annexed files whose content is not present, specify --not --in=here
To output filenames terminated with nulls, for use with xargs -0,
specify --print0. Or, a custom output formatting can be specified using
@ -527,7 +527,7 @@ file contents are present at either of two repositories.
The repository should be specified using the name of a configured remote,
or the UUID or description of a repository. For the current repository,
use "--in=."
use --in=here
* --copies=number

View file

@ -1,8 +0,0 @@
git-annex 3.20120113 released with [[!toggle text="these changes"]]
[[!toggleable text="""
* log: Add --gource mode, which generates output usable by gource.
* map: Fix display of remote repos
* Add annex-trustlevel configuration settings, which can be used to
override the trust level of a remote.
* git-annex, git-union-merge: Support GIT\_DIR and GIT\_WORK\_TREE.
* Add libghc-testpack-dev to build depends on all arches."""]]

View file

@ -1,8 +0,0 @@
git-annex 3.20120115 released with [[!toggle text="these changes"]]
[[!toggleable text="""
* Add a sanity check for bad StatFS results. On architectures
where StatFS does not currently work (s390, mips, powerpc, sparc),
this disables the diskreserve checking code, and attempting to
configure an annex.diskreserve will result in an error.
* Fix QuickCheck dependency in cabal file.
* Minor optimisations."""]]

View file

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="http://peter-simons.myopenid.com/"
ip="77.184.15.65"
subject="Test-suite won't compile with GHC 7.4.x"
date="2012-02-28T17:39:59Z"
content="""
The recent version requires GHC 7.4.x, but some dependencies for the test suite don't build with that compiler, e.g. the `testpack` library. Do you have any recommendation for how to deal with that situation? I would like to update, but I would very much like to run the regression test suite, too.
"""]]

View file

@ -0,0 +1,10 @@
[[!comment format=mdwn
username="http://joey.kitenet.net/"
nickname="joey"
subject="comment 2"
date="2012-02-29T04:27:47Z"
content="""
Here's the patch that was used to make testpack build with 7.4 on Debian:
<http://anonscm.debian.org/gitweb/?p=pkg-haskell/haskell-testpack.git;a=blobdiff;f=src/Test/QuickCheck/Instances.hs;h=7e920a102a50de5812af32f9a308b80f61284caf;hp=ada6674c972a6cc518f84041172ed035e36aec98;hb=ef9f6c109bd3c20f40fa25e962c928a51e1277d8;hpb=1240a417b9e970ce71757abcb01437e5eac9ee0e>
"""]]

View file

@ -0,0 +1,4 @@
git-annex 3.20120229 released with [[!toggle text="these changes"]]
[[!toggleable text="""
* Fix test suite to not require a unicode locale.
* Fix cabal build failure. Thanks, Sergei Trofimovich"""]]

View file

@ -0,0 +1,8 @@
[[!comment format=mdwn
username="http://peter-simons.myopenid.com/"
ip="77.186.152.146"
subject="How do you build the Crypto library with GHC 7.4.1?"
date="2012-02-29T19:20:20Z"
content="""
`Crypto 4.2.4` doesn't seem to compile with GHC 7.4.1. How did you build that package?
"""]]

View file

@ -0,0 +1,14 @@
[[!comment format=mdwn
username="http://joey.kitenet.net/"
nickname="joey"
subject="comment 2"
date="2012-02-29T22:54:01Z"
content="""
Probably this patch will help with Crypto:
<http://anonscm.debian.org/cgi-bin/darcsweb.cgi?r=pkg-haskell/haskell-crypto;a=filediff;h=20120213034652-b2814-0019a3f92e453e9be86166d6c1f1bc0dad6e4d12.gz;f=patches/class-constraints.diff>
Or, there's the `ghc7.0` branch of git-annex in git, which can be used to build with the older, stable ghc.
BTW, when asking for help, a log of the build failure is always a good idea.
"""]]

View file

@ -13,7 +13,15 @@ They cannot be used by other git commands though.
* [[rsync]]
* [[web]]
* [[hook]]
* [[tahoe-lafs|forum/tips:_special__95__remotes__47__hook_with_tahoe-lafs]] - limited testing
The above special remotes can be used to tie git-annex
into many cloud services. Here are specific instructions
for various cloud things:
* [[tips/using_Amazon_S3]]
* [[tips/Internet_Archive_via_S3]]
* [[tahoe-lafs|forum/tips:_special__95__remotes__47__hook_with_tahoe-lafs]]
* [[tips/using_box.com_as_a_special_remote]]
## Unused content on special remotes

View file

@ -5,6 +5,25 @@ you want to use it to sneakernet files between systems (possibly with
[[encrypted|encryption]] contents). Just set up both systems to use
the drive's mountpoint as a directory remote.
## configuration
These parameters can be passed to `git annex initremote` to configure the
remote:
* `encryption` - Required. Either "none" to disable encryption of content
stored in the directory,
or a value that can be looked up (using gpg -k) to find a gpg encryption
key that will be given access to the remote. Note that additional gpg
keys can be given access to a remote by rerunning initremote with
the new key id. See [[encryption]].
* `chunksize` - Avoid storing files larger than the specified size in the
directory. For use on directories on mount points that have file size
limitations. The default is to never chunk files.
The value can be specified using any commonly used units.
Example: `chunksize=100 megabytes`
Note that enabling chunking on an existing remote with non-chunked
files is not recommended.
Setup example:
# git annex initremote usbdrive type=directory directory=/media/usbdrive/ encryption=none
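
To make the chunksize behaviour concrete (hypothetical numbers, not output from a real run): with `chunksize=100 megabytes`, storing a roughly 250 MB file whose key is KEY leaves several pieces plus a count file where the single file would otherwise have been stored:

    KEY.chunk1       (about 100 MB)
    KEY.chunk2       (about 100 MB)
    KEY.chunk3       (the remainder)
    KEY.chunkcount   (records the number of chunks, here 3)

git-annex reassembles the chunks transparently when the content is retrieved.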

View file

@ -0,0 +1,50 @@
[Box.com](http://box.com/) is a file storage service, currently notable
for providing 50 GB of free storage if you sign up with its Android client.
(Or a few GB free otherwise.)
With a little setup, git-annex can use Box as a
[[special remote|special_remotes]].
## davfs2 setup
* First, install
the [davfs2](http://savannah.nongnu.org/projects/davfs2) program,
which can mount Box using WebDAV. On Debian, just `sudo apt-get install davfs2`
* Allow users to mount davfs filesystems, by ensuring that
`/sbin/mount.davfs` is setuid root. On Debian, just `sudo dpkg-reconfigure davfs2`
* Add yourself to the davfs2 group.
sudo adduser $(whoami) davfs2
* Edit `/etc/fstab`, and add a line to mount Box using davfs.
sudo mkdir -p /media/box.com
echo "https://www.box.com/dav/ /media/box.com davfs noauto,user 0 0" | sudo tee -a /etc/fstab
* Create `~/.davfs2/davfs2.conf`:
mkdir ~/.davfs2/
echo use_locks 0 >> ~/.davfs2/davfs2.conf
echo delay_upload 0 >> ~/.davfs2/davfs2.conf
* Create `~/.davfs2/secrets`. This file contains your Box.com login and password.
Your login is probably the email address you signed up with.
echo "/media/box.com joey@kitenet.net mypassword" > ~/.davfs2/secrets
chmod 600 ~/.davfs2/secrets
* Now you should be able to mount Box, as a non-root user:
mount /media/box.com
## git-annex setup
You need git-annex version 3.20120303 or newer, which adds support for chunking
files larger than Box's 100 MB limit.
Create the special remote in your git-annex repository.
**This example is non-encrypted; fill in your gpg key ID for a securely
encrypted special remote!**
git annex initremote box.com type=directory directory=/media/box.com chunksize=100mb encryption=none
Now git-annex can copy files to box.com, get files from it, etc, just like
with any other special remote.
% git annex copy bigfile --to box.com
bigfile (to box.com...) ok
% git annex drop bigfile
bigfile (checking box.com...) ok
% git annex get bigfile
bigfile (from box.com...) ok

View file

@ -1,5 +1,5 @@
Name: git-annex
Version: 3.20120227
Version: 3.20120229
Cabal-Version: >= 1.6
License: GPL
Maintainer: Joey Hess <joey@kitenet.net>
@ -39,6 +39,7 @@ Executable git-annex-shell
Executable git-union-merge
Main-Is: git-union-merge.hs
Build-Depends: text
source-repository head
type: git

test.hs
View file

@ -131,7 +131,7 @@ test_init = "git-annex init" ~: TestCase $ innewrepo $ do
reponame = "test repo"
test_add :: Test
test_add = "git-annex add" ~: TestList [basic, sha1dup, sha1unicode, subdirs]
test_add = "git-annex add" ~: TestList [basic, sha1dup, subdirs]
where
-- this test case runs in the main repo, to set up a basic
-- annexed file that later tests will use
@ -158,10 +158,6 @@ test_add = "git-annex add" ~: TestList [basic, sha1dup, sha1unicode, subdirs]
git_annex "add" [sha1annexedfiledup, "--backend=SHA1"] @? "add of second file with same SHA1 failed"
annexed_present sha1annexedfiledup
annexed_present sha1annexedfile
sha1unicode = TestCase $ intmpclonerepo $ do
writeFile sha1annexedfileunicode $ content sha1annexedfileunicode
git_annex "add" [sha1annexedfileunicode, "--backend=SHA1"] @? "add of unicode filename failed"
annexed_present sha1annexedfileunicode
subdirs = TestCase $ intmpclonerepo $ do
createDirectory "dir"
writeFile "dir/foo" $ content annexedfile
@ -923,9 +919,6 @@ sha1annexedfile = "sha1foo"
sha1annexedfiledup :: String
sha1annexedfiledup = "sha1foodup"
sha1annexedfileunicode :: String
sha1annexedfileunicode = "foo¡"
ingitfile :: String
ingitfile = "bar"
@ -935,7 +928,6 @@ content f
| f == ingitfile = "normal file content"
| f == sha1annexedfile ="sha1 annexed file content"
| f == sha1annexedfiledup = content sha1annexedfile
| f == sha1annexedfileunicode ="sha1 annexed file content ¡ünicodé!"
| f == wormannexedfile = "worm annexed file content"
| otherwise = "unknown file " ++ f