--- src/backends.py 2006-02-03 04:44:31.000000000 +0100
+++ src/backends.py.new 2006-10-19 18:41:24.000000000 +0200
@@ -18,8 +18,12 @@
"""Provides functions and classes for getting/sending files to destination"""
-import os, types, ftplib, tempfile
+import os, types, ftplib, tempfile, time, sys
import log, path, dup_temp, file_naming
+import socket
+
+# TODO: move into globals?
+socket.setdefaulttimeout(10)
class BackendException(Exception): pass
class ParsingException(Exception): pass
@@ -110,8 +114,6 @@
     and delete methods.
     """
-    def init(self, parsed_url): pass
-
     def put(self, source_path, remote_filename = None):
         """Transfer source_path (Path object) to remote_filename (string)
@@ -126,7 +128,7 @@
         """Retrieve remote_filename and place in local_path"""
         local_path.setdata()
         pass
-
+
     def list(self):
         """Return list of filenames (strings) present in backend"""
         pass
@@ -285,7 +287,7 @@
         local_path.setdata()
         if not local_path.exists():
             raise BackendException("File %s not found" % local_path.name)
-
+
     def list(self):
         """List files available for scp
@@ -318,21 +320,72 @@
 class ftpBackend(Backend):
     """Connect to remote store using File Transfer Protocol"""
+    RETRY_SLEEP = 10  # base delay in seconds before retrying (multiplied by the attempt number)
+    RETRIES = 15      # maximum number of retries
+
     def __init__(self, parsed_url):
         """Create a new ftp backend object, log in to host"""
+        self.parsed_url = parsed_url
+        self.connect()
+
+    def connect(self):
+        """Connect to self.parsed_url"""
         self.ftp = ftplib.FTP()
-        if parsed_url.port is None: self.error_wrap('connect', parsed_url.host)
-        else: self.error_wrap('connect', parsed_url.host, parsed_url.port)
+        self.is_connected = False
+        if self.parsed_url.port is None:
+            self.error_wrap('connect', self.parsed_url.host)
+        else: self.error_wrap('connect', self.parsed_url.host,
+                              self.parsed_url.port)
+        self.is_connected = True
-        if parsed_url.user is not None:
-            self.error_wrap('login', parsed_url.user, self.get_password())
+        if self.parsed_url.user is not None:
+            self.error_wrap('login', self.parsed_url.user, self.get_password())
         else: self.error_wrap('login')
-        self.ftp.cwd(parsed_url.path)
+        self.ftp.cwd(self.parsed_url.path)
     def error_wrap(self, command, *args):
         """Run self.ftp.command(*args), but raise BackendException on error"""
-        try: return ftplib.FTP.__dict__[command](self.ftp, *args)
-        except ftplib.all_errors, e: raise BackendException(e)
+
+        # Log the FTP command:
+        if command == 'login':
+            if log.verbosity > 8:
+                # Log full args at level 9:
+                log.Log("FTP: %s %s" % (command, args), 9)
+            else:
+                # Replace the password with stars:
+                log_args = list(args)
+                log_args[1] = '*' * len(log_args[1])
+                log.Log("FTP: %s %s" % (command, log_args), 5)
+        else:
+            log.Log("FTP: %s %s" % (command, args), 5)
+
+        # Execute, retrying on errors:
+        tries = 0
+        while True:
+            tries += 1
+            try:
+                return ftplib.FTP.__dict__[command](self.ftp, *args)
+            except ftplib.all_errors, e:
+                if "450" in str(e) and command == 'nlst':
+                    # 450 on list isn't an error, but indicates an empty dir
+                    return []
+
+                if tries > self.RETRIES:
+                    # Give up:
+                    log.FatalError("Caught exception %s%s (%d exceptions in total), giving up." % (sys.exc_info()[0], sys.exc_info()[1], tries))
+                    raise BackendException(e)
+
+                # Sleep and retry (after trying to reconnect, if possible):
+                sleep_time = self.RETRY_SLEEP * tries
+                log.Warn("Caught exception %s%s (#%d), sleeping %ds before retrying." % (sys.exc_info()[0], sys.exc_info()[1], tries, sleep_time))
+                time.sleep(sleep_time)
+                try:
+                    if self.is_connected:
+                        self.connect()
+                    return ftplib.FTP.__dict__[command](self.ftp, *args)
+                except ftplib.all_errors, e:
+                    continue
+            else: break
def get_password(self):
"""Get ftp password using environment if possible"""
@@ -364,7 +417,8 @@
         # Some ftp servers raise error 450 if the directory is empty
         try: return self.error_wrap('nlst')
         except BackendException, e:
-            if "450" in str(e): return []
+            if "450" in str(e) or "500" in str(e) or "550" in str(e):
+                return []
             raise
     def delete(self, filename_list):
@@ -375,7 +429,10 @@
     def close(self):
         """Shut down connection"""
-        self.error_wrap('quit')
+        try: self.error_wrap('quit')
+        except BackendException, e:
+            if "104" in str(e): return
+            raise
class rsyncBackend(Backend):
@@ -405,7 +462,7 @@
         local_path.setdata()
         if not local_path.exists():
             raise BackendException("File %s not found" % local_path.name)
-
+
     def list(self):
         """List files"""
         def split (str):
@@ -447,11 +504,121 @@
         for file in to_delete:
             os.unlink (file)
         os.rmdir (dir)
-
+
+
+class BitBucketBackend(Backend):
+    """Backend for accessing Amazon S3 using the bitbucket.py module.
+
+    This backend supports access to Amazon S3 (http://aws.amazon.com/s3)
+    using a mix of environment variables and URLs. The access key and
+    secret key are taken from the environment variables S3KEY and S3SECRET,
+    and the bucket name from the URL. For example (in bash):
+
+      $ export S3KEY='44CF9590006BF252F707'
+      $ export S3SECRET='OtxrzxIsfpFjA7SwPzILwy8Bw21TLhquhboDYROV'
+      $ duplicity /home/me s3+http://bucket_name
+
+    Note: / is disallowed in bucket names, in case prefix support is
+    implemented in the future.
+
+    TODO:
+      - Support bucket prefixes with URLs like s3+http://bucket_name/prefix.
+      - bitbucket and Amazon S3 are currently not very robust. We provide a
+        simplistic way of trying to re-connect and retry an operation when
+        it fails. This is just a band-aid and should be removed if bitbucket
+        becomes more robust.
+      - Logging of actions.
+      - Better error messages for failures.
+    """
+
+    def __init__(self, parsed_url):
+        import bitbucket
+        self.module = bitbucket
+        self.bucket_name = parsed_url.suffix
+        if '/' in self.bucket_name:
+            raise NotImplementedError("/ disallowed in bucket names and "
+                                      "bucket prefixes not supported.")
+        self.access_key = os.environ["S3KEY"]
+        self.secret_key = os.environ["S3SECRET"]
+        self._connect()
+
+    def _connect(self):
+        self.connection = self.module.connect(access_key=self.access_key,
+                                              secret_key=self.secret_key)
+        self.bucket = self.connection.get_bucket(self.bucket_name)
+        # Populate the bitbucket cache. We do it here to be sure that even
+        # on re-connect we have a list of all keys on the server.
+        self.bucket.fetch_all_keys()
+
+    def _logException(self, message=None):
+        # Simply dump the exception onto stderr, since formatting it
+        # ourselves looks dangerous.
+        if message is not None:
+            sys.stderr.write(message)
+        sys.excepthook(*sys.exc_info())
+
+    def put(self, source_path, remote_filename = None):
+        """Transfer source_path (Path object) to remote_filename (string)
+
+        If remote_filename is None, get the filename from the last
+        path component of pathname.
+
+        """
+        if not remote_filename:
+            remote_filename = source_path.get_filename()
+        bits = self.module.Bits(filename=source_path.name)
+        try:
+            self.bucket[remote_filename] = bits
+        except Exception:
+            self._logException("Error sending file %s, attempting to "
+                               "re-connect.\n Got this Traceback:\n"
+                               % remote_filename)
+            self._connect()
+            self.bucket[remote_filename] = bits
+
+    def get(self, remote_filename, local_path):
+        """Retrieve remote_filename and place in local_path"""
+        local_path.setdata()
+        try:
+            bits = self.bucket[remote_filename]
+            bits.to_file(local_path.name)
+        except Exception:
+            self._logException("Error getting file %s, attempting to "
+                               "re-connect.\n Got this Traceback:\n"
+                               % remote_filename)
+            self._connect()
+            bits = self.bucket[remote_filename]
+            bits.to_file(local_path.name)
+        local_path.setdata()
+
+    def list(self):
+        """Return list of filenames (strings) present in backend"""
+        try:
+            keys = self.bucket.keys()
+        except Exception:
+            self._logException("Error getting bucket keys, attempting to "
+                               "re-connect.\n Got this Traceback:\n")
+            self._connect()
+            keys = self.bucket.keys()
+        return keys
+
+    def delete(self, filename_list):
+        """Delete each filename in filename_list, in order if possible"""
+        for file in filename_list:
+            try:
+                del self.bucket[file]
+            except Exception:
+                self._logException("Error deleting file %s, attempting to "
+                                   "re-connect.\n Got this Traceback:\n"
+                                   % file)
+                self._connect()
+                del self.bucket[file]
+
# Dictionary relating protocol strings to backend_object classes.
 protocol_class_dict = {"scp": scpBackend,
                        "ssh": scpBackend,
                        "file": LocalBackend,
                        "ftp": ftpBackend,
-                       "rsync": rsyncBackend}
+                       "rsync": rsyncBackend,
+                       "s3+http": BitBucketBackend}
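
Note: the heart of the ftpBackend change above is a retry loop with a linearly growing sleep and a reconnect attempt between tries. As a rough, standalone sketch of that pattern (with_retries, operation and reconnect are placeholder names used only for illustration, not duplicity APIs):

    import time

    RETRY_SLEEP = 10   # base delay in seconds; multiplied by the attempt number
    RETRIES = 15       # maximum number of attempts

    def with_retries(operation, reconnect):
        """Run operation(); on error, back off linearly, reconnect and retry."""
        tries = 0
        while True:
            tries += 1
            try:
                return operation()
            except Exception:
                if tries > RETRIES:
                    raise                        # give up after RETRIES attempts
                time.sleep(RETRY_SLEEP * tries)  # linear back-off
                try:
                    reconnect()
                except Exception:
                    pass                         # could not reconnect; retry anyway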
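
For context, the new "s3+http" entry in protocol_class_dict is what routes s3+http:// URLs to BitBucketBackend. A hypothetical illustration of that lookup (backend_class_for is invented for this example and is not duplicity's actual URL handling):

    # Hypothetical helper, shown only to illustrate the scheme -> class mapping.
    def backend_class_for(url):
        scheme = url.split("://", 1)[0]      # e.g. "s3+http"
        try:
            return protocol_class_dict[scheme]
        except KeyError:
            raise ParsingException("unsupported scheme: %s" % scheme)

    # backend_class_for("s3+http://bucket_name") would return BitBucketBackend.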