Version Description
= 1.4.4 - 02/07/2013 =
- Amazon S3 now works for users with non-US buckets
- Further tweak to overlap detection
Release Info
Developer | DavidAnderson
Plugin | UpdraftPlus WordPress Backup Plugin
Version | 1.4.4
Comparing to | 1.4.2
Code changes from version 1.4.2 to 1.4.4
- methods/s3.php +52 -20
- readme.txt +6 -2
- updraftplus.php +16 -6
methods/s3.php
CHANGED

@@ -2,13 +2,38 @@
 
 class UpdraftPlus_BackupModule_s3 {
 
+    function getS3($key, $secret) {
+        return new S3($key, $secret);
+    }
+
+    function set_endpoint($obj, $region) {
+        switch ($region) {
+            case 'EU':
+                $endpoint = 's3-eu-west-1.amazonaws.com';
+                break;
+            case 'us-west-1':
+            case 'us-west-2':
+            case 'ap-southeast-1':
+            case 'ap-southeast-2':
+            case 'ap-northeast-1':
+            case 'sa-east-1':
+                $endpoint = 's3-'.$region.'.amazonaws.com';
+                break;
+            default:
+                break;
+        }
+        if (isset($endpoint)) {
+            $obj->setEndpoint($endpoint);
+        }
+    }
+
     function backup($backup_array) {
 
         global $updraftplus;
 
         if (!class_exists('S3')) require_once(UPDRAFTPLUS_DIR.'/includes/S3.php');
 
-        $s3 = new S3(UpdraftPlus_Options::get_updraft_option('updraft_s3_login'), UpdraftPlus_Options::get_updraft_option('updraft_s3_pass'));
+        $s3 = $this->getS3(UpdraftPlus_Options::get_updraft_option('updraft_s3_login'), UpdraftPlus_Options::get_updraft_option('updraft_s3_pass'));
 
         $bucket_name = untrailingslashit(UpdraftPlus_Options::get_updraft_option('updraft_s3_remote_path'));
         $bucket_path = "";

@@ -19,10 +44,15 @@ class UpdraftPlus_BackupModule_s3 {
             $bucket_path = $bmatches[2]."/";
         }
 
+        $region = @$s3->getBucketLocation($bucket_name);
+
         // See if we can detect the region (which implies the bucket exists and is ours), or if not create it
-        if (
+        if (!empty($region) || @$s3->putBucket($bucket_name, S3::ACL_PRIVATE)) {
+
+            if (empty($region)) $region = $s3->getBucketLocation($bucket_name);
+            $this->set_endpoint($s3, $region);
 
-        foreach($backup_array as $file) {
+            foreach($backup_array as $key => $file) {
 
             // We upload in 5Mb chunks to allow more efficient resuming and hence uploading of larger files
             // N.B.: 5Mb is Amazon's minimum. So don't go lower or you'll break it.

@@ -30,7 +60,7 @@ class UpdraftPlus_BackupModule_s3 {
             $chunks = floor(filesize($fullpath) / 5242880)+1;
             $hash = md5($file);
 
-            $updraftplus->log("S3 upload: $fullpath (chunks: $chunks) -> s3://$bucket_name/$bucket_path$file");
+            $updraftplus->log("S3 upload ($region): $fullpath (chunks: $chunks) -> s3://$bucket_name/$bucket_path$file");
 
             $filepath = $bucket_path.$file;
 

@@ -98,15 +128,15 @@ class UpdraftPlus_BackupModule_s3 {
             $s3->setExceptions(true);
             try {
                 if ($s3->completeMultipartUpload ($bucket_name, $filepath, $uploadId, $etags)) {
-                    $updraftplus->log("S3 upload: re-assembly succeeded");
+                    $updraftplus->log("S3 upload ($key): re-assembly succeeded");
                     $updraftplus->uploaded_file($file);
                 } else {
-                    $updraftplus->log("S3 upload: re-assembly failed");
-                    $updraftplus->error("S3 upload: re-assembly failed ($file)");
+                    $updraftplus->log("S3 upload ($key): re-assembly failed");
+                    $updraftplus->error("S3 upload ($key): re-assembly failed ($file)");
                 }
             } catch (Exception $e) {
-                $updraftplus->log("S3 re-assembly error: ".$e->getMessage().' (line: '.$e->getLine().', file: '.$e->getFile().')');
-                $updraftplus->error("S3 re-assembly error: ".$e->getMessage().' (see log file for more)');
+                $updraftplus->log("S3 re-assembly error ($key): ".$e->getMessage().' (line: '.$e->getLine().', file: '.$e->getFile().')');
+                $updraftplus->error("S3 re-assembly error ($key): ".$e->getMessage().' (see log file for more)');
             }
             // Remember to unset, as the deletion code later reuses the object
             $s3->setExceptions(false);
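A note on the chunk arithmetic in the hunk above: 5242880 bytes is 5 MB, which the code comment correctly identifies as Amazon's minimum size for every part of a multipart upload except the last. A standalone sketch of the calculation, for illustration only (chunk_count() is a hypothetical helper, not plugin code):

<?php
// The plugin's chunk count: floor(size / 5 MB) + 1. This yields at
// least one chunk, and the last chunk carries the remainder, which
// S3 permits to be under 5 MB only for the final part. Quirk of the
// formula: an exact multiple of 5 MB still counts one extra chunk.
function chunk_count($filesize_bytes) {
    return (int) floor($filesize_bytes / 5242880) + 1;
}

echo chunk_count(12582912); // 12 MB file -> 3 chunks (5 MB + 5 MB + 2 MB)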
@@ -138,16 +168,15 @@ class UpdraftPlus_BackupModule_s3 {
         }
         $updraftplus->log("S3: Delete remote: bucket=$s3_bucket, URI=$s3_uri");
 
-
-
-
-
-
-
-
-            $updraftplus->log("S3 Error: ".$rest->error['code'].": ".$rest->error['message']);
-            //$updraftplus->error("S3 delete error: ".$rest->error['code'].": ".$rest->error['message']);
+        $s3->setExceptions(true);
+        try {
+            if (!$s3->deleteObject($s3_bucket, $s3_uri)) {
+                $updraftplus->log("S3: Delete failed");
+            }
+        } catch (Exception $e) {
+            $updraftplus->log('S3 delete failed: '.$e->getMessage().' (line: '.$e->getLine().', file: '.$e->getFile().')');
         }
+        $s3->setExceptions(false);
 
     }

@@ -156,7 +185,8 @@ class UpdraftPlus_BackupModule_s3 {
         global $updraftplus;
         if(!class_exists('S3')) require_once(UPDRAFTPLUS_DIR.'/includes/S3.php');
 
-        $s3 = new S3(UpdraftPlus_Options::get_updraft_option('updraft_s3_login'), UpdraftPlus_Options::get_updraft_option('updraft_s3_pass'));
+        $s3 = $this->getS3(UpdraftPlus_Options::get_updraft_option('updraft_s3_login'), UpdraftPlus_Options::get_updraft_option('updraft_s3_pass'));
+
         $bucket_name = untrailingslashit(UpdraftPlus_Options::get_updraft_option('updraft_s3_remote_path'));
         $bucket_path = "";

@@ -165,7 +195,9 @@ class UpdraftPlus_BackupModule_s3 {
             $bucket_path = $bmatches[2]."/";
         }
 
-
+        $region = @$s3->getBucketLocation($bucket_name);
+        if (!empty($region)) {
+            $this->set_endpoint($s3, $region);
         $fullpath = trailingslashit(UpdraftPlus_Options::get_updraft_option('updraft_dir')).$file;
         if (!$s3->getObject($bucket_name, $bucket_path.$file, $fullpath)) {
             $updraftplus->error("S3 Error: Failed to download $file. Check your permissions and credentials.");
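Taken together, the three s3.php changes apply one pattern: ask S3 where the bucket lives, then point the client at that region's endpoint before transferring anything. Below is a minimal sketch of that flow in isolation, assuming the S3 class bundled in includes/S3.php (whose getBucketLocation() and setEndpoint() methods the diff relies on); the bucket name and credentials are placeholders:

<?php
require_once(UPDRAFTPLUS_DIR.'/includes/S3.php');

$access_key = 'AKIAEXAMPLE';        // placeholder credentials
$secret_key = 'example-secret';
$bucket     = 'my-example-bucket';  // placeholder bucket name

$s3 = new S3($access_key, $secret_key);

// getBucketLocation() returns a region string such as 'EU' or
// 'ap-southeast-1'; for US-standard buckets it comes back empty.
// Errors are suppressed because a failed lookup simply means we
// stay on the default endpoint.
$region = @$s3->getBucketLocation($bucket);

if ($region == 'EU') {
    // The 'EU' location constraint predates region-style names
    $s3->setEndpoint('s3-eu-west-1.amazonaws.com');
} elseif (!empty($region)) {
    // Other non-US regions follow the s3-<region> naming scheme
    $s3->setEndpoint('s3-'.$region.'.amazonaws.com');
}
// US-standard buckets keep the default s3.amazonaws.com endpoint,
// which is why only non-US users were affected by the old behaviour.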
readme.txt
CHANGED

@@ -3,12 +3,12 @@ Contributors: David Anderson
 Tags: backup, restore, database, cloud, amazon, s3, dropbox, google drive, ftp, cloud, back up, multisite
 Requires at least: 3.2
 Tested up to: 3.5.1
-Stable tag: 1.4.2
+Stable tag: 1.4.4
 Donate link: http://david.dw-perspective.org.uk/donate
 License: GPLv3 or later
 
 == Upgrade Notice ==
-
+Amazon S3 now working with non-US buckets
 
 == Description ==
 

@@ -145,6 +145,10 @@ Thanks for asking - yes, I have. Check out my profile page - http://profiles.wordpress.org/davidanderson/
 
 == Changelog ==
 
+= 1.4.4 - 02/07/2013 =
+* Amazon S3 now works for users with non-US buckets
+* Further tweak to overlap detection
+
 = 1.4.2 - 02/06/2013 =
 * More Amazon S3 logging which should help people with wrong details
 * More race/overlap detection, and more flexible rescheduling
updraftplus.php
CHANGED

@@ -4,7 +4,7 @@ Plugin Name: UpdraftPlus - Backup/Restore
 Plugin URI: http://wordpress.org/extend/plugins/updraftplus
 Description: Backup and restore: your content and database can be automatically backed up to Amazon S3, Dropbox, Google Drive, FTP or email, on separate schedules.
 Author: David Anderson.
-Version: 1.4.2
+Version: 1.4.4
 Donate link: http://david.dw-perspective.org.uk/donate
 License: GPLv3 or later
 Author URI: http://wordshell.net

@@ -96,7 +96,7 @@ if (!class_exists('UpdraftPlus_Options')) require_once(UPDRAFTPLUS_DIR.'/options.php');
 
 class UpdraftPlus {
 
-    var $version = '1.4.2';
+    var $version = '1.4.4';
     var $plugin_title = 'UpdraftPlus Backup/Restore';
 
     // Choices will be shown in the admin menu in the order used here

@@ -129,6 +129,7 @@ class UpdraftPlus {
     var $zipfiles_existingfiles;
     var $zipfiles_dirbatched;
     var $zipfiles_batched;
+    var $zipfiles_batched_size;
 
     function __construct() {
         // Initialisation actions - takes place on plugin load

@@ -731,7 +732,7 @@ class UpdraftPlus {
         if (!is_numeric($resume_interval) || $resume_interval<200) { $resume_interval = 200; }
         if ($this->newresumption_scheduled != false) $this->reschedule($resume_interval+$howmuch);
         $this->jobdata_set('resume_interval', $resume_interval+$howmuch);
-        $this->log("To decrease the likelihood of overlaps, increasing resumption interval to: $resume_interval");
+        $this->log("To decrease the likelihood of overlaps, increasing resumption interval to: ".($resume_interval+$howmuch));
     }
 
     function create_zip($create_from_dir, $whichone, $create_in_dir, $backup_file_basename) {
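The overlap tweak in the hunk above is easy to misread: the value saved and rescheduled was always $resume_interval+$howmuch, and the fix makes the log line report that same increased figure instead of the stale pre-increase one. A standalone sketch of the growth rule, with hypothetical inputs:

<?php
// The interval-growth rule from the diff, as a pure function. The
// 200-second floor matches the plugin; the inputs below are made up.
function increased_interval($resume_interval, $howmuch) {
    if (!is_numeric($resume_interval) || $resume_interval < 200) $resume_interval = 200;
    return $resume_interval + $howmuch;
}

// Each detected overlap pushes resumptions further apart:
$interval = increased_interval(150, 120);       // 320 (floor of 200 applied first)
$interval = increased_interval($interval, 120); // 440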
@@ -2214,6 +2215,7 @@ class UpdraftPlus {
         $this->zipfiles_added = 0;
         $this->zipfiles_dirbatched = array();
         $this->zipfiles_batched = array();
+        $this->zipfiles_batched_size = 0;
 
         $last_error = -1;
         if (is_array($source)) {

@@ -2267,10 +2269,11 @@ class UpdraftPlus {
                 $zip->addFile($file, $add_as);
             }
             $this->zipfiles_added++;
-            if ($this->zipfiles_added % 100 == 0) $this->log("Zip: ".basename($zipfile).": ".$this->zipfiles_added." files added");
+            if ($this->zipfiles_added % 100 == 0) $this->log("Zip: ".basename($zipfile).": ".$this->zipfiles_added." files added (size: ".round(filesize($zipfile)/1024,1)." Kb)");
         }
-        // Reset the array
+        // Reset the arrays
         $this->zipfiles_batched = array();
+        $this->zipfiles_batched_size = 0;
         return $zip->close();
     }

@@ -2291,6 +2294,7 @@ class UpdraftPlus {
         if (is_readable($fullpath)) {
             $key = $use_path_when_storing.'/'.basename($fullpath);
             $this->zipfiles_batched[$fullpath] = $use_path_when_storing.'/'.basename($fullpath);
+            $this->zipfiles_batched_size += filesize($fullpath);
         } else {
             $this->log("$fullpath: unreadable file");
             $this->error("$fullpath: unreadable file");

@@ -2309,6 +2313,7 @@ class UpdraftPlus {
             if (is_file($deref)) {
                 if (is_readable($deref)) {
                     $this->zipfiles_batched[$deref] = $use_path_when_storing.'/'.$e;
+                    $this->zipfiles_batched_size += filesize($deref);
                 } else {
                     $this->log("$deref: unreadable file");
                     $this->error("$deref: unreadable file");

@@ -2319,6 +2324,7 @@ class UpdraftPlus {
             } elseif (is_file($fullpath.'/'.$e)) {
                 if (is_readable($fullpath.'/'.$e)) {
                     $this->zipfiles_batched[$fullpath.'/'.$e] = $use_path_when_storing.'/'.$e;
+                    $this->zipfiles_batched_size += filesize($fullpath.'/'.$e);
                 } else {
                     $this->log("$fullpath/$e: unreadable file");
                     $this->error("$fullpath/$e: unreadable file");

@@ -2332,7 +2338,11 @@ class UpdraftPlus {
             closedir($dir_handle);
         }
 
-
+        // We don't want to touch the zip file on every single file, so we batch them up
+        // We go every 25 files, because if you wait too long, the contents may have changed from under you
+        // And we try to touch the file after 15 seconds, to help with the "recently modified" check on resumption (we saw a case where the file went for 155 seconds without being touched and so the other runner was not detected)
+        // Also write out if there's more than a megabyte of data waiting
+        if (count($this->zipfiles_batched) > 25 || $this->zipfiles_batched_size > 1048576 || (file_exists($zipfile) && ((time()-filemtime($zipfile)) > 15) )) {
             $ret = $this->makezip_addfiles($zipfile);
         } else {
             $ret = true;
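The zip changes all serve one mechanism: queued files accumulate in $this->zipfiles_batched, and the queue is flushed into the zip via makezip_addfiles() when any of three triggers fires: more than 25 queued files, more than 1 MB (1048576 bytes) of queued data (the new $zipfiles_batched_size counter), or the zip file having gone untouched for over 15 seconds. A condensed sketch of just that decision, lifted out of the class for illustration (should_flush_batch() is a hypothetical name):

<?php
// Condensed restatement of the flush condition in the final hunk.
// Thresholds (25 files, 1 MB, 15 seconds) are the ones in the diff.
function should_flush_batch(array $batched, $batched_size, $zipfile) {
    if (count($batched) > 25) return true;    // enough files queued
    if ($batched_size > 1048576) return true; // over 1 MB of data waiting
    // Flushing also refreshes the zip's mtime, which the resumption
    // logic reads as "another runner is still active".
    if (file_exists($zipfile) && (time() - filemtime($zipfile)) > 15) return true;
    return false;
}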