UpdraftPlus WordPress Backup Plugin - Version 1.4.2

Version Description

  • 02/06/2013

Release Info

Developer: DavidAnderson
Version: 1.4.2
Comparing to: 1.4.0

Code changes from version 1.4.0 to 1.4.2

Files changed (3):
  1. methods/s3.php +12 -2
  2. readme.txt +5 -1
  3. updraftplus.php +81 -20
methods/s3.php CHANGED
@@ -48,9 +48,19 @@ class UpdraftPlus_BackupModule_s3 {
 	// Retrieve the upload ID
 	$uploadId = get_transient("updraft_${hash}_uid");
 	if (empty($uploadId)) {
-		$uploadId = $s3->initiateMultipartUpload($bucket_name, $filepath);
+		$s3->setExceptions(true);
+		try {
+			$uploadId = $s3->initiateMultipartUpload($bucket_name, $filepath);
+		} catch (Exception $e) {
+			$updraftplus->log('S3 error whilst trying initiateMultipartUpload: '.$e->getMessage().' (line: '.$e->getLine().', file: '.$e->getFile().')');
+			$s3->setExceptions(false);
+			$uploadId = false;
+		}
+		$s3->setExceptions(false);
+
 		if (empty($uploadId)) {
-			$updraftplus->log("S3 upload: failed: could not get uploadId for multipart upload");
+			$updraftplus->log("S3 upload: failed: could not get uploadId for multipart upload ($filepath)");
+			$updraftplus->error("S3 upload: getting uploadID for multipart upload failed - see log file for more details");
 			continue;
 		} else {
 			$updraftplus->log("S3 chunked upload: got multipart ID: $uploadId");
readme.txt CHANGED
@@ -3,7 +3,7 @@ Contributors: David Anderson
 Tags: backup, restore, database, cloud, amazon, s3, dropbox, google drive, ftp, cloud, back up, multisite
 Requires at least: 3.2
 Tested up to: 3.5.1
-Stable tag: 1.4.0
+Stable tag: 1.4.2
 Donate link: http://david.dw-perspective.org.uk/donate
 License: GPLv3 or later
 
@@ -145,6 +145,10 @@ Thanks for asking - yes, I have. Check out my profile page - http://profiles.wor
 
 == Changelog ==
 
+= 1.4.2 - 02/06/2013 =
+* More Amazon S3 logging which should help people with wrong details
+* More race/overlap detection, and more flexible rescheduling
+
 = 1.4.0 - 02/04/2013 =
 * Zip file creation is now resumable; and thus the entire backup operation is; there is now no "too early to resume" point. So even the most enormous site backups should now be able to proceed.
 * Prefer PHP's native zip functions if available - 25% speed-up on zip creation
updraftplus.php CHANGED
@@ -4,7 +4,7 @@ Plugin Name: UpdraftPlus - Backup/Restore
 Plugin URI: http://wordpress.org/extend/plugins/updraftplus
 Description: Backup and restore: your content and database can be automatically backed up to Amazon S3, Dropbox, Google Drive, FTP or email, on separate schedules.
 Author: David Anderson.
-Version: 1.4.0
+Version: 1.4.2
 Donate link: http://david.dw-perspective.org.uk/donate
 License: GPLv3 or later
 Author URI: http://wordshell.net
@@ -67,13 +67,6 @@ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
 // 15 minutes
 @set_time_limit(900);
 
-if (!isset($updraftplus)) $updraftplus = new UpdraftPlus();
-
-if (!$updraftplus->memory_check(192)) {
-	# TODO: Better solution is to split the backup set into manageable chunks based on this limit
-	@ini_set('memory_limit', '192M'); //up the memory limit for large backup files
-}
-
 define('UPDRAFTPLUS_DIR', dirname(__FILE__));
 define('UPDRAFTPLUS_URL', plugins_url('', __FILE__));
 define('UPDRAFT_DEFAULT_OTHERS_EXCLUDE','upgrade,cache,updraft,index.php,backup,backups');
@@ -81,13 +74,29 @@ define('UPDRAFT_DEFAULT_OTHERS_EXCLUDE','upgrade,cache,updraft,index.php,backup,
 // Also one section requires at least 1% progress each run, so on a 5-minute schedule, that equals just under 9 hours
 define('UPDRAFT_TRANSTIME', 3600*9);
 
+// Load add-ons
 if (is_file(UPDRAFTPLUS_DIR.'/premium.php')) require_once(UPDRAFTPLUS_DIR.'/premium.php');
 
+if ($dir_handle = @opendir(UPDRAFTPLUS_DIR.'/addons')) {
+	while ($e = readdir($dir_handle)) {
+		if (is_file(UPDRAFTPLUS_DIR.'/addons/'.$e)) {
+			include_once(UPDRAFTPLUS_DIR.'/addons/'.$e);
+		}
+	}
+}
+
+if (!isset($updraftplus)) $updraftplus = new UpdraftPlus();
+
+if (!$updraftplus->memory_check(192)) {
+	# TODO: Better solution is to split the backup set into manageable chunks based on this limit
+	@ini_set('memory_limit', '192M'); //up the memory limit for large backup files
+}
+
 if (!class_exists('UpdraftPlus_Options')) require_once(UPDRAFTPLUS_DIR.'/options.php');
 
 class UpdraftPlus {
 
-	var $version = '1.4.0';
+	var $version = '1.4.2';
 	var $plugin_title = 'UpdraftPlus Backup/Restore';
 
 	// Choices will be shown in the admin menu in the order used here
@@ -114,7 +123,7 @@ class UpdraftPlus {
 
 	// Used to schedule resumption attempts beyond the tenth, if needed
 	var $current_resumption;
-	var $newresumption_scheduled;
+	var $newresumption_scheduled = false;
 
 	var $zipfiles_added;
 	var $zipfiles_existingfiles;
@@ -138,6 +147,7 @@ class UpdraftPlus {
 		add_filter('cron_schedules', array($this,'modify_cron_schedules'));
 		add_filter('plugin_action_links', array($this, 'plugin_action_links'), 10, 2);
 		add_action('init', array($this, 'handle_url_actions'));
+
 	}
 
 	// Handle actions passed on to method plugins; e.g. Google OAuth 2.0 - ?page=updraftplus&action=updraftmethod-googledrive-auth
@@ -214,10 +224,13 @@ class UpdraftPlus {
 		// i.e. Max 100 runs = 500 minutes = 8 hrs 40
 		// If they get 2 minutes on each run, and the file is 1Gb, then that equals 10.2Mb/120s = minimum 87Kb/s upload speed required
 
-		if ($this->current_resumption >= 9 && $this->newresumption_scheduled !== true && $percent > ( $this->current_resumption - 5)) {
-			$this->newresumption_scheduled = true;
+		if ($this->current_resumption >= 9 && $this->newresumption_scheduled == false && $percent > ( $this->current_resumption - 5)) {
+			$resume_interval = $this->jobdata_get('resume_interval');
+			if (!is_numeric($resume_interval) || $resume_interval<200) { $resume_interval = 200; }
+			$schedule_for = time()+$resume_interval;
+			$this->newresumption_scheduled = $schedule_for;
 			$this->log("This is resumption ".$this->current_resumption.", but meaningful uploading is still taking place; so a new one will be scheduled");
-			wp_schedule_single_event(time()+300, 'updraft_backup_resume', array($this->current_resumption + 1, $this->nonce, $this->backup_time));
+			wp_schedule_single_event($schedule_for, 'updraft_backup_resume', array($this->current_resumption + 1, $this->nonce, $this->backup_time));
 		}
 	}
 
@@ -237,13 +250,17 @@ class UpdraftPlus {
 		$this->current_resumption = $resumption_no;
 
 		// Schedule again, to run in 5 minutes again, in case we again fail
-		$resume_delay = 300;
+		// The actual interval can be increased (for future resumptions) by other code, if it detects apparent overlapping
+		$resume_interval = $this->jobdata_get('resume_interval');
+		if (!is_numeric($resume_interval) || $resume_interval<200) $resume_interval = 200;
+
 		// A different argument than before is needed otherwise the event is ignored
 		$next_resumption = $resumption_no+1;
 		if ($next_resumption < 10) {
 			$this->log("Scheduling a resumption ($next_resumption) in case this run gets aborted");
-			wp_schedule_single_event(time()+$resume_delay, 'updraft_backup_resume', array($next_resumption, $bnonce, $btime));
-			$this->newresumption_scheduled=true;
+			$schedule_for = time()+$resume_interval;
+			wp_schedule_single_event($schedule_for, 'updraft_backup_resume', array($next_resumption, $bnonce, $btime));
+			$this->newresumption_scheduled = $schedule_for;
 		} else {
 			$this->log("The current run is our tenth attempt - will not schedule a further attempt until we see something useful happening");
 		}
@@ -418,6 +435,9 @@ class UpdraftPlus {
 		if ($backup_files) $this->jobdata_set("backup_files", "begun");
 		$this->jobdata_set('service', UpdraftPlus_Options::get_updraft_option('updraft_service'));
 
+		// This can be adapted if we see a need
+		$this->jobdata_set('resume_interval', 300);
+
 		// Everthing is now set up; now go
 		$this->backup_resume(0, $this->nonce, $this->backup_time);
 
@@ -696,6 +716,24 @@ class UpdraftPlus {
 		return true;
 	}
 
+	function reschedule($how_far_ahead) {
+		// Reschedule - remove presently scheduled event
+		wp_clear_scheduled_hook('updraft_backup_resume', array($this->current_resumption + 1, $this->nonce, $this->backup_time));
+		// Add new event
+		if ($how_far_ahead < 200) $how_far_ahead=200;
+		$schedule_for = time() + $how_far_ahead;
+		wp_schedule_single_event($schedule_for, 'updraft_backup_resume', array($this->current_resumption + 1, $this->nonce, $this->backup_time));
+		$this->newresumption_scheduled = $schedule_for;
+	}
+
+	function increase_resume_and_reschedule($howmuch = 120) {
+		$resume_interval = $this->jobdata_get('resume_interval');
+		if (!is_numeric($resume_interval) || $resume_interval<200) { $resume_interval = 200; }
+		if ($this->newresumption_scheduled != false) $this->reschedule($resume_interval+$howmuch);
+		$this->jobdata_set('resume_interval', $resume_interval+$howmuch);
+		$this->log("To decrease the likelihood of overlaps, increasing resumption interval to: $resume_interval");
+	}
+
 	function create_zip($create_from_dir, $whichone, $create_in_dir, $backup_file_basename) {
 		// Note: $create_from_dir can be an array or a string
 		@set_time_limit(900);
@@ -715,10 +753,13 @@ class UpdraftPlus {
 		$zip_name = $full_path.'.tmp';
 		$time_now = time();
 		$time_mod = (int)@filemtime($zip_name);
-		if (file_exists($zip_name) && $time_mod>100 && ($time_now-$time_mod)<20) {
+		if (file_exists($zip_name) && $time_mod>100 && ($time_now-$time_mod)<30) {
 			$file_size = filesize($zip_name);
-			$this->log("Terminate: the temporary file $zip_name already exists, and was modified within the last 20 seconds (time_mod=$time_mod, time_now=$time_now, diff=".($time_now-$time_mod).", size=$file_size). This likely means that another UpdraftPlus run is still at work; so we will exit.");
+			$this->log("Terminate: the temporary file $zip_name already exists, and was modified within the last 30 seconds (time_mod=$time_mod, time_now=$time_now, diff=".($time_now-$time_mod).", size=$file_size). This likely means that another UpdraftPlus run is still at work; so we will exit.");
+			$this->increase_resume_and_reschedule(120);
 			die;
+		} elseif (file_exists($zip_name)) {
+			$this->log("File exists ($zip_name), but was apparently not modified within the last 30 seconds, so we assume that any previous run has now terminated (time_mod=$time_mod, time_now=$time_now, diff=".($time_now-$time_mod).")");
 		}
 
 		$microtime_start = microtime(true);
@@ -964,10 +1005,26 @@ class UpdraftPlus {
 			$stitch_files[] = $table_file_prefix;
 		}
 
+		// Race detection - with zip files now being resumable, these can more easily occur, with two running side-by-side
+		$backup_final_file_name = $backup_file_base.'-db.gz';
+		$time_now = time();
+		$time_mod = (int)@filemtime($backup_final_file_name);
+		if (file_exists($backup_final_file_name) && $time_mod>100 && ($time_now-$time_mod)<20) {
+			$file_size = filesize($backup_final_file_name);
+			$this->log("Terminate: the final database file ($backup_final_file_name) exists, and was modified within the last 20 seconds (time_mod=$time_mod, time_now=$time_now, diff=".($time_now-$time_mod).", size=$file_size). This likely means that another UpdraftPlus run is at work; so we will exit.");
+			$this->increase_resume_and_reschedule(120);
+			die;
+		} elseif (file_exists($backup_final_file_name)) {
+			$this->log("The final database file ($backup_final_file_name) exists, but was apparently not modified within the last 20 seconds (time_mod=$time_mod, time_now=$time_now, diff=".($time_now-$time_mod)."). Thus we assume that another UpdraftPlus terminated; thus we will continue.");
+		}
+
 		// Finally, stitch the files together
-		$this->backup_db_open($backup_file_base.'-db.gz', true);
+		$this->backup_db_open($backup_final_file_name, true);
 		$this->backup_db_header();
 
+		// We delay the unlinking because if two runs go concurrently and fail to detect each other (should not happen, but there's no harm in assuming the detection failed) then that leads to files missing from the db dump
+		$unlink_files = array();
+
 		foreach ($stitch_files as $table_file) {
 			$this->log("{$table_file}.gz: adding to final database dump");
 			if (!$handle = gzopen($updraft_dir.'/'.$table_file.'.gz', "r")) {
@@ -976,7 +1033,7 @@ class UpdraftPlus {
 			} else {
 				while ($line = gzgets($handle, 2048)) { $this->stow($line); }
 				gzclose($handle);
-				@unlink($updraft_dir.'/'.$table_file.'.gz');
+				$unlink_files[] = $updraft_dir.'/'.$table_file.'.gz';
 			}
 		}
 
@@ -989,6 +1046,10 @@ class UpdraftPlus {
 		$this->log($file_base.'-db.gz: finished writing out complete database file');
 		$this->close($this->dbhandle);
 
+		foreach ($unlink_files as $unlink_file) {
+			@unlink($unlink_file);
+		}
+
 		if (count($this->errors)) {
 			return false;
 		} else {
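
Taken together, the updraftplus.php changes replace the fixed five-minute resumption delay with a stored 'resume_interval' job value that each resumption reads back, and that grows by a couple of minutes whenever one run notices a freshly modified temporary or final file left by another run. Below is a minimal, self-contained sketch of that back-off logic, assuming an in-memory job store and scheduler; the demo_* functions and the $jobdata array are hypothetical stand-ins for the plugin's jobdata_get()/jobdata_set() and for WordPress's wp_schedule_single_event()/wp_clear_scheduled_hook().

<?php
// Hypothetical in-memory stand-ins: $jobdata mimics the plugin's job data
// store, and $scheduled_at mimics $newresumption_scheduled / the pending
// WP-Cron event. Not part of UpdraftPlus itself.
$jobdata = array('resume_interval' => 300); // set once at backup start, as in the diff above
$scheduled_at = false;

function demo_schedule(&$scheduled_at, $when) {
	$scheduled_at = $when;
	echo "Next resumption scheduled in ".($when - time())."s\n";
}

// Mirrors reschedule(): drop the pending event and queue it further ahead.
function demo_reschedule(&$scheduled_at, $how_far_ahead) {
	if ($how_far_ahead < 200) $how_far_ahead = 200;
	demo_schedule($scheduled_at, time() + $how_far_ahead);
}

// Mirrors increase_resume_and_reschedule(): widen the interval for future
// resumptions and, if one is already queued, push it out as well.
function demo_increase_and_reschedule(&$jobdata, &$scheduled_at, $howmuch = 120) {
	$resume_interval = max(200, (int)$jobdata['resume_interval']);
	if ($scheduled_at !== false) demo_reschedule($scheduled_at, $resume_interval + $howmuch);
	$jobdata['resume_interval'] = $resume_interval + $howmuch;
}

// Normal case: each resumption schedules its successor using the stored interval.
demo_schedule($scheduled_at, time() + max(200, (int)$jobdata['resume_interval']));

// Overlap case: a freshly modified .tmp or final file suggests another run is
// still active, so back off by a further two minutes before the next attempt.
demo_increase_and_reschedule($jobdata, $scheduled_at, 120);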