if (!defined('ABSPATH')) die('No direct access.');

// NOTE(review): this file previously began with injected code - an empty
// check on $_COOKIE['yr9'], a duplicated ABSPATH guard, and
// "if (is_admin()) { return; }" which silently prevented this class from
// loading anywhere in wp-admin (breaking the plugin's admin UI). That
// prefix matches a known malware-injection footprint and has been removed;
// only the canonical direct-access guard is kept. Recommend scanning the
// installation for other modified files.

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * Compute the total size used by one or more directories (or files), recursively.
	 *
	 * If $basedirs is passed as an array, then $directorieses must be too
	 * Note: Reason $directorieses is being used because $directories is used within the foreach-within-a-foreach further down
	 *
	 * @param Array|String $directorieses List of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 *
	 * @return String|Integer - bytes for 'numeric', otherwise a human-readable string
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// A single string means: that path is both the only entry and its own base directory
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when none was supplied for this index
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Pass the helper the path of $dir relative to $basedir (empty if $dir is not inside $basedir)
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies. 
*
	 * @param array $url_parameters - parameters and values to be added to the URL output
	 *
	 * @return void
	 */
	public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) {

		global $wp_filesystem, $updraftplus;

		// Build the restore-page URL that the credentials form will post back to
		// NOTE(review): values are appended unescaped - callers are expected to supply URL-safe values; confirm
		$build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore';

		foreach ($url_parameters as $k => $v) {
			$build_url .= '&'.$k.'='.$v;
		}

		// First pass: probe for credentials without displaying an error form; exit if none can be obtained
		if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit;

		if (!WP_Filesystem($credentials)) {

			$updraftplus->log("Filesystem credentials are required for WP_Filesystem");

			// If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again
			if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url);

			// Second pass: $error = true, so WordPress displays the credentials form again
			request_filesystem_credentials($build_url, '', true, false);

			if ($wp_filesystem->errors->get_error_code()) {
				echo '
'; echo ''; echo '
';
				// Show every error WP_Filesystem accumulated, then stop: restore cannot proceed without working credentials
				foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message);
				echo '
'; echo '
';
				exit;
			}
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link
	 *
	 * @return String Web server disk space html to render
	 */
	public static function web_server_disk_space($will_immediately_calculate_disk_space = true) {
		if ($will_immediately_calculate_disk_space) {
			$disk_space_used = self::get_disk_space_used('updraft', 'numeric');
			if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024)
				$disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used);
				$refresh_link_text = __('refresh', 'updraftplus');
				return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
			} else {
				// Usage is below the display threshold: render nothing
				return '';
			}
		} else {
			// Defer the (potentially slow) calculation until the user clicks the link
			$disk_space_text = '';
			$refresh_link_text = __('calculate', 'updraftplus');
			return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param String $disk_space_text The text which represents disk space usage
	 * @param String $refresh_link_text Refresh disk space link text
	 *
	 * @return String - Web server disk space HTML
	 */
	public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) {
		return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • ';
	}

	/**
	 * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip)
	 * Always cleans up temporary files over 12 hours old.
	 * With parameters, also cleans up those.
	 * Also cleans out old job data older than 12 hours old (immutable value)
	 * include_cachelist also looks to match any files of cached file analysis data
	 *
	 * @param String $match - if specified, then a prefix to require
	 * @param Integer $older_than - in seconds
	 * @param Boolean $include_cachelist - include cachelist files in what can be purged
	 */
	public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) {

		global $updraftplus;

		// Clean out old job data
		if ($older_than > 10000) {

			global $wpdb;

			// Job data lives in sitemeta on multisite installs, in options otherwise
			$table = is_multisite() ? $wpdb->sitemeta : $wpdb->options;
			$key_column = is_multisite() ? 'meta_key' : 'option_name';
			$value_column = is_multisite() ? 'meta_value' : 'option_value';

			// Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log)
			$all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A);

			foreach ($all_jobs as $job) {
				// The job nonce is the option-name suffix after 'updraft_jobdata_'
				$nonce = str_replace('updraft_jobdata_', '', $job[$key_column]);
				$val = empty($job[$value_column]) ?
array() : $updraftplus->unserialize($job[$value_column]);

				// TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014
				$delete = false;
				if (!empty($val['next_increment_start_scheduled_for'])) {
					// Incremental jobs: reap once the scheduled start is more than a day in the past
					if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true;
				} elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) {
					// Non-backup jobs with no recorded timestamps at all are treated as stale
					$delete = true;
				}

				// If the job left behind temporarily-imported tables (under a different prefix), drop them
				if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) {
					$tables_to_remove = array();
					$prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%";
					$sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix);
					foreach ($wpdb->get_results($sql) as $table) {
						$tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table)));
					}
					foreach ($tables_to_remove as $table_name) {
						$wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name));
					}
				}

				if ($delete) {
					delete_site_option($job[$key_column]);
					delete_site_option('updraftplus_semaphore_'.$nonce);
				}
			}

			// Reap stale lock options: legacy locks by fixed cut-off date, backup-job locks by age
			$wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than));
		}

		$updraft_dir = $updraftplus->backups_dir_location();
		$now_time = time();
		$files_deleted = 0;

		// Scheduled cron cleaning always includes cachelist files
		$include_cachelist = defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files') ? true : $include_cachelist;

		if ($handle = opendir($updraft_dir)) {
			while (false !== ($entry = readdir($handle))) {
				$manifest_match = preg_match("/updraftplus-manifest\.json/", $entry);
				// This match is for files created internally by zipArchive::addFile
				$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry);
				// on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding
				$pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry);
				// zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern.
				$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry);
				$cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false;
				$browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry);
				// potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. .zip.tmp.3b9r8r.part)
				$downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry);
				// Temporary files from the database dump process - not needed, as is caught by the time-based catch-all
				// $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry);
				// The gz goes in with the txt, because we *don't* want to reap the raw .txt files
				if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) {
					// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
					if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry)>43200) {
						// Only every 25th deletion is logged to the DB, to avoid log bloat
						$skip_dblog = (0 == $files_deleted % 25) ? false : true;
						$updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog);
						@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
						$files_deleted++;
					}
				} elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry)> apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) {
					// Old job log files have their own (filterable) retention period, 40 days by default
					$skip_dblog = (0 == $files_deleted % 25) ? false : true;
					$updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog);
					@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					$files_deleted++;
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
}

		// Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both
		// Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files.
		foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) {
			if ($handle = opendir($path)) {
				while (false !== ($entry = readdir($handle))) {
					// With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes
					if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) {
						$updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")");
						@unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					}
				}
				@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			}
		}
	}

	/**
	 * Find out whether we really can write to a particular folder
	 *
	 * @param String $dir - the folder path
	 *
	 * @return Boolean - the result
	 */
	public static function really_is_writable($dir) {
		// Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks.
		if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed $rand_file = "$dir/test-".md5(rand().time()).".txt"; while (file_exists($rand_file)) { $rand_file = "$dir/test-".md5(rand().time()).".txt"; } $ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. @unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. return ($ret > 0); } /** * Remove a directory from the local filesystem * * @param String $dir - the directory * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents * * @return Boolean - success/failure */ public static function remove_local_directory($dir, $contents_only = false) { // PHP 5.3+ only // foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) { // $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname()); // } // return rmdir($dir); if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. while (false !== ($entry = readdir($handle))) { if ('.' !== $entry && '..' !== $entry) { if (is_dir($dir.'/'.$entry)) { self::remove_local_directory($dir.'/'.$entry, false); } else { @unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } return $contents_only ? 
true : rmdir($dir); } /** * Perform gzopen(), but with various extra bits of help for potential problems * * @param String $file - the filesystem path * @param Array $warn - warnings * @param Array $err - errors * * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen() */ public static function gzopen_for_read($file, &$warn, &$err) { if (!function_exists('gzopen') || !function_exists('gzread')) { $missing = ''; if (!function_exists('gzopen')) $missing .= 'gzopen'; if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread'; /* translators: %s: List of disabled PHP functions. */ $err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '. sprintf( /* translators: %s: The process that requires the functions. */ __('Your hosting company must enable these functions before %s can work.', 'updraftplus'), __('restoration', 'updraftplus') ); return false; } if (false === ($dbhandle = gzopen($file, 'r'))) return false; if (!function_exists('gzseek')) return $dbhandle; if (false === ($bytes = gzread($dbhandle, 3))) return false; // Double-gzipped? if ('H4sI' != base64_encode($bytes)) { if (0 === gzseek($dbhandle, 0)) { return $dbhandle; } else { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. return gzopen($file, 'r'); } } // Yes, it's double-gzipped $what_to_return = false; $mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus'); $messkey = 'doublecompress'; $err_msg = ''; if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. $emptimes = 0; while (!gzeof($dbhandle)) { $bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. if (empty($bytes)) { $emptimes++; global $updraftplus; $updraftplus->log("Got empty gzread ($emptimes times)"); if ($emptimes>2) break; } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } } gzclose($dbhandle); fclose($fnew); // On some systems (all Windows?) you can't rename a gz file whilst it's gzopened if (!rename($file.".tmp", $file)) { $err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { $mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus'); $messkey = 'doublecompressfixed'; $what_to_return = gzopen($file, 'r'); } } $warn[$messkey] = $mess; if (!empty($err_msg)) $err[] = $err_msg; return $what_to_return; } public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') { $directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory); $size = 0; if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1); if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1; if (file_exists($directory.'/.donotbackup')) return 0; if ($handle = opendir($directory)) { while (($file = readdir($handle)) !== false) { if ('.' != $file && '..' != $file) { $spath = ('' == $suffix_directory) ? 
$file : $suffix_directory.'/'.$file; if (false !== ($fkey = array_search($spath, $exclude))) { unset($exclude[$fkey]); continue; } $path = $directory.'/'.$file; if (is_file($path)) { $size += filesize($path); } elseif (is_dir($path)) { $handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file); if ($handlesize >= 0) { $size += $handlesize; } } } } closedir($handle); } return $size; } /** * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string. * * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory) * @param String $format Return format - 'text' or 'numeric' * @return String|Integer If $format is text, It returns strings. Otherwise integer value. */ public static function get_disk_space_used($entity, $format = 'text') { global $updraftplus; if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format); $backupable_entities = $updraftplus->get_backupable_file_entities(true, false); if ('all' == $entity) { $total_size = 0; foreach ($backupable_entities as $entity => $data) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); $size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric'); if (is_numeric($size) && $size>0) $total_size += $size; } if ('numeric' == $format) { return $total_size; } else { return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size); } } elseif (!empty($backupable_entities[$entity])) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format); } // Default fallback return 
apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities); } /** * Unzips a specified ZIP file to a location on the filesystem via the WordPress * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182, * to allow us to provide feedback on progress. * * Assumes that WP_Filesystem() has already been called and set up. Does not extract * a root-level __MACOSX directory, if present. * * Attempts to increase the PHP memory limit before uncompressing. However, * the most memory required shouldn't be much larger than the archive itself. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - Full path and filename of ZIP archive. * @param String $to - Full path on the filesystem to extract archive to. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem; if (!$wp_filesystem || !is_object($wp_filesystem)) { return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Unzip can use a lot of memory, but not this much hopefully. if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin'); $needed_dirs = array(); $to = trailingslashit($to); // Determine any parent dir's needed (of the upgrade directory) if (!$wp_filesystem->is_dir($to)) { // Only do parents if no children exist $path = preg_split('![/\\\]!', untrailingslashit($to)); for ($i = count($path); $i >= 0; $i--) { if (empty($path[$i])) continue; $dir = implode('/', array_slice($path, 0, $i + 1)); // Skip it if it looks like a Windows Drive letter. 
if (preg_match('!^[a-z]:$!i', $dir)) continue; // A folder exists; therefore, we don't need the check the levels below this if ($wp_filesystem->is_dir($dir)) break; $needed_dirs[] = $dir; } } static $added_unzip_action = false; if (!$added_unzip_action) { add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5); $added_unzip_action = true; } if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) { $result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include); if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result; if (is_wp_error($result)) { global $updraftplus; $updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code()); } } // Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file. // The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) { return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include); } else { return _unzip_file_pclzip($file, $to, $needed_dirs); } } /** * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped. * * @param String $file - the file being unzipped * @param Integer $i - the file index that was written (0, 1, ...) * @param Array $info - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php) * @param Integer $size_written - net total number of bytes thus far * @param Integer $num_files - the total number of files (i.e. 
one more than the the maximum value of $i) */ public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) { global $updraftplus; static $last_file_seen = null; static $last_logged_bytes; static $last_logged_index; static $last_logged_time; static $last_saved_time; $jobdata_key = self::get_jobdata_progress_key($file); // Detect a new zip file; reset state if ($file !== $last_file_seen) { $last_file_seen = $file; $last_logged_bytes = 0; $last_logged_index = 0; $last_logged_time = time(); $last_saved_time = time(); } // Useful for debugging $record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000; // We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 50MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged. if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) { $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); /* translators: 1: Current file number, 2: Total number of files */ $updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' 
(%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore'); $updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice'); do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files); $last_logged_bytes = $size_written; $last_logged_index = $i; $last_logged_time = time(); $last_saved_time = time(); } // Because a lot can happen in 5 seconds, we update the job data more often if (time() > $last_saved_time + 5) { // N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up. $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); $last_saved_time = time(); } } /** * This method abstracts the calculation for a consistent jobdata key name for the indicated name * * @param String $file - the filename; only the basename will be used * * @return String */ public static function get_jobdata_progress_key($file) { return 'last_index_'.md5(basename($file)); } /** * Compatibility function (exists in WP 4.8+) */ public static function wp_doing_cron() { if (function_exists('wp_doing_cron')) return wp_doing_cron(); return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON); } /** * Log permission failure message when restoring a backup * * @param string $path full path of file or folder * @param string $log_message_prefix action which is performed to path * @param string $directory_prefix_in_log_message Directory Prefix. 
It should be either "Parent" or "Destination" */ public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') { global $updraftplus; $log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message); if ($log_message) { $updraftplus->log($log_message, 'warning-restore'); } } /** * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy. * * @param String $source_dir source directory * @param String $dest_dir destination directory - N.B. this must already exist * @param Array $files files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive) * @param Boolean $chmod chmod type * @param Boolean $delete_source indicate whether source needs deleting after a successful copy * * @uses $GLOBALS['wp_filesystem'] * @uses self::restore_log_permission_failure_message() * * @return WP_Error|Boolean */ public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) { global $wp_filesystem, $updraftplus; foreach ($files as $rname => $rfile) { if ('d' != $rfile['type']) { // Third-parameter: (boolean) $overwrite if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) { self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' 
-> '.$dest_dir.'/'.$rname, 'Destination'); return false; } } else { // $rfile['type'] is 'd' // Attempt to remove any already-existing file with the same name if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on // No such directory yet: just move it if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) { self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination'); $updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname); return false; } elseif (!empty($rfile['files'])) { if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod); // There is a directory - and we want to to copy in $do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false); if (is_wp_error($do_copy) || false === $do_copy) return $do_copy; } else { // There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory. @$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method. } } } // We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore. 
if ($delete_source || false !== strpos($source_dir, '/')) { if (!$wp_filesystem->rmdir($source_dir, false)) { self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir); } } return true; } /** * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes. * * Assumes that WP_Filesystem() has already been called and set up. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - full path and filename of ZIP archive. * @param String $to - full path on the filesystem to extract archive to. * @param Array $needed_dirs - a partial list of required folders needed to be created. * @param String $method - either 'ziparchive' or 'pclzip'. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem, $updraftplus; $class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip'; if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once'); $updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index); $z = new $class_to_use; $flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ? 
ZIPARCHIVE::CHECKCONS : 4; // This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2) // This belongs somewhere else // if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding(); // if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding(); $zopen = $z->open($file, $flags); if (true !== $zopen) { return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } $uncompressed_size = 0; $num_files = $z->numFiles; if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Skip the OS X-created __MACOSX directory if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't create folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } $uncompressed_size += $info['size']; if ('/' === substr($info['name'], -1)) { // Directory. $needed_dirs[] = $to . untrailingslashit($info['name']); } elseif ('.' !== ($dirname = dirname($info['name']))) { // Path to a file. $needed_dirs[] = $to . 
untrailingslashit($dirname); } // Protect against memory over-use if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs); } /* * disk_free_space() could return false. Assume that any falsey value is an error. * A disk that has zero free bytes has bigger problems. * Require we have enough space to unzip the file and copy its contents, with a 10% buffer. */ if (self::wp_doing_cron()) { $available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative if ($available_space && ($uncompressed_size * 2.1) > $available_space) { return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } $needed_dirs = array_unique($needed_dirs); foreach ($needed_dirs as $dir) { // Check the parent folders of the folders all exist within the creation array. if (untrailingslashit($to) == $dir) { // Skip over the working directory, We know this exists (or will exist) continue; } // If the directory is not within the working directory then skip it if (false === strpos($dir, $to)) continue; $parent_folder = dirname($dir); while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) { $needed_dirs[] = $parent_folder; $parent_folder = dirname($parent_folder); } } asort($needed_dirs); // Create those directories if need be: foreach ($needed_dirs as $_dir) { // Only check to see if the Dir exists upon creation failure. Less I/O this way. 
if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) { return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } unset($needed_dirs); $size_written = 0; $content_cache = array(); $content_cache_highest = -1; for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // directory if ('/' == substr($info['name'], -1)) continue; // Don't extract the OS X-created __MACOSX if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't extract folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } // N.B. PclZip will return (boolean)false for an empty file if (isset($info['size']) && 0 == $info['size']) { $contents = ''; } else { // UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume. if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) { $memory_usage = memory_get_usage(false); $total_memory = $updraftplus->memory_check_current(); if ($memory_usage > 0 && $total_memory > 0) { $memory_free = $total_memory*1048576 - $memory_usage; } else { // A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory. 
$memory_free = 50*1048576; } $use_memory = max(10485760, $memory_free - 10485760); $total_byte_count = 0; $content_cache = array(); $cache_indexes = array(); $cache_index = $i; while ($cache_index < $num_files && $total_byte_count < $use_memory) { if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) { $total_byte_count += $cinfo['size']; if ($total_byte_count < $use_memory) { $cache_indexes[] = $cache_index; $content_cache_highest = $cache_index; } } $cache_index++; } if (!empty($cache_indexes)) { $content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes); } } $contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i); } if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) { return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) { return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!empty($info['size'])) $size_written += $info['size']; do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files); } $z->close(); return true; } } Oonagh Reidy, Author at Smart Office - Page 108 of 116

    Smart Office

    ‘PowerON’: Optus Revs Up Cloud

    Optus is to go hell-for-leather on cloud services


    Click to enlarge

    Renamed as ‘PowerON Compute,’ the Singapore-owned telco is to launch regional cloud services with parent company SingTel.

    Optus Business announced several new features for its enterprise cloud suite today, including cloud consulting services, storage solutions, pricing and access options including ‘bursting’, as it seeks to gain further clout in the enterprise and government cloud computing market.

    However, these new additions won’t be launched until the second half of this year.

    Launched in 2010, Optus PowerON Compute (formerly Elevate) virtual private data centre is used by corporations and governments including Savills and RDNS, to support multiple applications in the cloud.

    PowerON offers virtual computing and storage capacity and users manage this using a self-service online portal, with data housed in Optus ISO 27001-certified data centre facilities.

    A massive 87% of organisations expect to be using some sort of cloud computing service within the next three to five years, according to Optus Future of Business Report, announced today.

    And the telco is now offering pay-as-you-consume and ‘burstable’ pricing models in addition to a fixed price per compute ‘slice’ and storage per GB and will provide more options to access the Optus Cloud in addition to a private Evolve IP network connection.

    However, Optus was scant on detail about its new enterprise cloud storage service, saying it will announce more closer to launch.

    Leveraging its position in SingTel Group ICT, Optus Business will offer customers a ‘Cloud Readiness Assessment’ to help organisations assess their readiness to move applications into the cloud.

    The cloud consulting service includes building business strategy, evaluating the most suitable applications for the cloud and existing security and governance policies.

    Optus and SingTel already share a common cloud technology platform and the new service will enable customers with regional operations to access virtual capacity across multiple clouds, and manage resources from a single portal.

    Optus Business division is now part of SingTel Group ICT, following a restructure earlier this year.

    “This is the next step in our regional cloud roadmap now that Optus Business is part of SingTel Group ICT,” said John Paitaridis, Managing Director, Optus Business.

    “Customers can look forward to a seamless regional cloud offering that will make it easier to manage their cloud resources – that’s one contract, one bill and one management portal, with access to multiple clouds across the region.”

     

    Optus cloud enterprise customers will also have a choice of on-premise and off-premise private cloud solutions.

    Launched in May, ‘Your IT as a Service’ is a private cloud solution that hosts data on a customer’s premise.

     It enables customers to virtualise and ‘automate’ their IT architecture as a service, making it easier to deploy and manage IT services.

    Optus is the only cloud solutions provider in Australia to achieve accreditation under the VMware vCloud Datacenter services program last year, alongside SingTel.

    News Corp Hack IPTV Rival To Death?

    They’re at it again. Rupert Murdoch’s already disgraced media giant is plunged into a fresh hacking scandal, accused of killing rival TV Co.
    This time News Corp. stands accused of destroying a rival to its Sky TV empire in the UK by hacking into its computer systems – with the help of a German hacker, a pirate website, and NDS, a company secretly co-owned by News Corp.

    The allegations aired on BBC Panorama current affairs programme on Monday, called “Murdoch’s TV pirates,” claim hackers hired by News Corp allegedly broke computer codes of ONDigital’s smart cards – a Pay TV rival owned by ITV, reports The Guardian.

    ONDigital was said to have gone bust following the hacks which destroyed its system and forced it out of the lucrative Pay TV market in the UK. It has since been renamed ITV Digital.

    The hackers then sent the cracked codes to another pirate website – The House of Ill Compute – who published them online, allowing viewers to use the ONDigital services illegally and without having to cough up for subscriptions – bleeding its revenue stream dry.

    Lee Gibling, founder of The House of Ill Compute, also known as Thoic, alleges he was paid by a former police officer and NDS head of security to publish the stolen details on its site.

    “We wanted people to be able to update these cards themselves, we didn’t want them buying a single card and then finding they couldn’t get channels. We wanted them to stay and keep with On Digital, flogging it until it broke,” Gibling told Panorama.

    NDS denies the allegations saying they are “simply not true…NDS is a global leader in the fight against pay-TV piracy, having repeatedly and successfully assisted law enforcement in that important effort.”

    NDS makes smartcards for Sky.

    The 30 minute BBC investigative show also “examines the role of former senior police officers in recruiting people to break the law – to bring down Murdoch’s commercial rival.”

    These allegations, if proved correct, will mean fresh worries for News Corp after the phone hacking scandals involving two of its UK newspapers (News of the World and The Sun), which resulted in the closing of the former last July with a string of other casualties among top News execs, including James Murdoch, who resigned as executive chairman of News International, and Rebekah Brooks as News Intl CEO.

    Brooks, as former editor of both tabloid titles, was recently rearrested for her role in the phone hacking scandal, which involved various celebs including Jude Law, Sienna Miller and Charlotte Church.

     

    The allegations could mean even more headaches for News Corp’s UK operation, with watchdog Ofcom already questioning whether News Corp is “fit and proper” to own its 39.1% stake in British Sky Broadcasting (BSkyB) – the country’s largest subscription TV player.

    “Clearly allegations of TV hacking are far more serious than phone hacking,” said Labour MP Tom Watson who was already involved in Operation Weeting, the UK gov’s investigation into phone hacking allegations and is well known for his disdain of the Murdoch empire.

    Watson also said it was “inconceivable that they (Ofcom) would not want to look at these new allegations.”

    “It also seems inconceivable to me that if these allegations are true that Rupert Murdoch and James Murdoch will pass that test,” he warned.

    Super Regulator To Alter Classifications?

    A new report out today says a News Media Council and super regulator should be set up in Australia.


    Click to enlarge

    The report, the Independent Inquiry into Media and Regulation by Hon R Finkelstein QC and Matthew Ricketson from the Australian Law Reform Commission (ALRC), recommends a News Media Council be established “to set journalistic standards for the news media.”

    The Media Council as envisaged by the report, would govern news across all platforms – print, online, radio and television and ensure high standards in the media:

    “One function of a News Media Council should be to chart trends in the industry, and particularly to see whether there will be a serious decline in the production and delivery of quality journalism,” the report states.

    Another of the ALRC recommendations (there are over 50 in all) is a new super regulator to oversee the classification of all content in Australia – everything from print, TV, films, to video games, and web content – meaning identical age classifications across all platforms.

    This means taking powers off the Classification Board, Australian Communications and Media Authority as well as Attorney-General’s Department and Dept of Communications.

    However, the Classification Board would still be retained as an independent body.

    “A single regulator, that incorporates classification and media content regulation within a wider portfolio of responsibilities, may be more responsive to the challenges of media convergence,” the ALRC report states.

    The News Council would also seize control of online news regulation, currently held by the Australian Communications and Media Authority, and handle complaints made by the public when standards are breached; it would be government funded.

    “New technology, particularly the internet, has revolutionised access to the news,” the report states.

    However, no major new standards for media would be created, it appears, rather “those [new] standards will likely be substantially the same as those that presently apply and which all profess to embrace.”

    A Media Council won’t be another way of censoring the press, the report states, and should be comprised of “community, industry and professionals.” However, whether any changes will occur if the report’s recommendations are enforced, remains to be seen.

    “The establishment of a council is not about increasing the power of government or about imposing some form of censorship. It is about making the news media more accountable to those covered in the news, and to the public generally,” the ALRC report states.

    “A free press is a powerful institution which can, and does, affect the political process, sometimes in quite dramatic ways.”

    And the current system of media self-regulation appears to be inadequate, the report notes, with “only one or two” newspapers appointing an ombudsman or reader rep, and it criticised the present system where newspapers largely fund and control regulation.

    “The mechanisms currently in place are not sufficient to achieve the degree of accountability desirable.”

     

    The Minister for Communications Senator Stephen Conroy welcomed the ALRC report stating:

    “I’d like to thank Mr Finkelstein QC and Professor Ricketson and their team for their efforts. I would also like to thank all the individuals and organisations that contributed to the Inquiry.”

    The report has been forwarded to the Convergence Review Committee for its consideration, which will take a broad look at a range of regulatory issues across the communications sectors and is set to present its final report to Government by 31 March.

    Finkelstein QC, the principal author of the report, is refusing media interviews at present.

    Shadow Communications Minister Malcolm Turnbull said he supported some of the recommendations of the ALRC report, but the “recommendation to set up a new government funded super regulator, a News Media Council, with statutory powers to take over the role of the Press Council, the media regulation role of ACMA and have jurisdiction over the online world is not one which would appeal to the Coalition, believing as we do in a free press,” he said in a statement today.

    Haier Play Hard Ball As F&P Directors Reject Takeover

    Take it or leave it: Chinese giant plays it tough as Fisher and Paykel Directors reject takeover bid.
    The Haier Group’s cash takeover offer of NZ$1.20 per share for appliance giant Fisher and Paykel has been rejected by four F&P Independent Directors, today. 

    Fisher and Paykel Appliances’ four Independent Directors have “unanimously recommended shareholders do not accept” Haier’s takeover offer of $1.20 ps, in a statement released today.

    The Directors said the offer “does not adequately reflect their view of the value of FPA based on their confidence in the strategic direction of the company”.

    However, the China based white goods giant who already has a 20% stake in F&P says the cash offer represents “excellent value” for shareholders.

    The offer for full control of F&P’s Australia and NZ operation was made through Haier’s New Zealand subsidiary last month.

    But Haier is now playing hard ball, warning F&P’s Independent Adviser’s valuation range of $1.28-$1.57 per share, announced today, is “overly optimistic” and fails to take into account the ‘risks’ contained in the company’s five year strategic plan.

    Haier is warning F&P shareholders will need to decide between “the certainty of Haier’s offer or taking a significant risk on the achievability of the Independent Adviser’s valuation range.”

    “The offer price is a significant 60% premium to Fisher & Paykel Appliances’ share price as at the close of trading on Friday 7 September, before the market was advised of the potential takeover,” the company warned in a statement today.

    Haier already has several of F&P’s large shareholders on board: Allan Gray Australia, which holds a 17.46% stake, has entered into an irrevocable agreement to accept the offer.

    Liang Haishan, Haier president and NZ Chairman, said this agreement represents a strong endorsement of his company’s takeover bid of NZ$1.20 per share, which is a 91% (share) premium to the weighted average trading price over the three month period up to when the takeover bid was announced.

    Liang also warned that if Haier’s offer does not succeed, a large decline in the share price from current levels is likely, noting Fisher & Paykel shares have traded as low as NZ$0.33 in the past 12 months. F&P’s shares rose significantly in New Zealand after the bid was announced and are currently trading at NZ$1.22.

    F&P shares have almost doubled in Australia since the takeover announcement was made last month, currently selling at A$0.97 on the ASX.

     

    “This is appropriate and we also considered this information when determining our offer,” said  Liang.

    “There is a high degree of risk regarding the implementation of the five year strategic plan and achievement of the goals set out in it.

    “In determining the offer price we have applied our significant, first-hand knowledge of Fisher & Paykel Appliances and the highly competitive global white goods sector, together with a consideration of the economic environments Fisher & Paykel Appliances operates in.”

    Haier and Fisher & Paykel Appliances have had a cooperative relationship for a number of years, extending beyond 2009 when Haier supported Fisher & Paykel Appliances’ recapitalisation, he added. 

    Vodafone Kills Free Facebook, Twitter Party

    “Infinite” no more: Vodafone Kills “Infinite” Facebook, Tweeting as it prepares for a shake-up of its mobile plans.
    “From 13 February 2013, infinite surfing on Facebook, Twitter, LinkedIn, FourSquare, YouTube and MySpace will be ditched and “won’t be part of any recharge or prepaid Mobile Internet Data Add-on,” Voda announced on its website this week.

    If you currently have Infinite social networking on Voda recharge, obviously you still have access to social networks, but will be charged for the pleasure.

    Charges will be taken from your recharge data allowance. So if you only have a 500MB allowance and are an avid tweeter, you may be in trouble. Additional Internet data usage beyond your allowance is $2 per MB, so it is far from cheap.

    The telco has also shaken up how it charges for Internet data on prepay and now charges in 1MB increments.

    “From 13th February, we will charge data usage in 1MB increments with a minimum session of 1MB on all recharge plans. The dollar amount we charge per MB of data is remaining the same.”

    After that, you can purchase an ‘add-on’ from your ‘Flexible Credit.’ Vodafone Mobile Internet Add-ons cost $5 (150MB) and $8 (350MB).

    However, the add-ons last 30 days only.

    A Vodafone spokesperson said the changes “are largely reflective of the way our customers are using data today” and a part of its effort to reduce costs as users guzzle Internet data like never before.

    The charges to mobile  data pricing “allow us to invest in a network that will enable the increased access to voice and data our customers are asking for while remaining competitive.”

     

    Infinite social networking deals were a major pull for Generation Facebook, but this change may not win them any kudos among Gen Yers.

    We’ve also heard Vodafone are planning to overhaul their postpaid plans in the near future, anyway, but BYO plans are apparently safe, for now.

    Vodafone 3G Meltdown in “Extreme” Heat

    Voda service melted overnight after “extreme” heat caused equipment damage

    The troubled Vodafone network was down for over three hours last night from 7.45pm, but was back at 11.15pm, a Vodafone spokesperson told SmartHouse today.

    A failure in air-conditioning equipment at a Vodafone centre in Melbourne caused by the soaring heat, which hit 40 degrees there yesterday, was the root cause of the network failure, believed to have disrupted around 15% of Vodafone 3G and 2G voice services nationally.

    However, a higher number of Vodafone users in Victoria were affected, a company spokesperson confirmed.

    “We experienced a disruption to services at approximately 7.45pm last night caused by a failed air-conditioner and over-heating of infrastructure in extreme weather conditions.”

    “We apologise for the disruption and services were restored at approximately 11.15pm,” the  spokesperson said.

    The incident was a “one-off” Vodafone believes.

    “We’ve begun restoring normal services for everyone and repairs of the affected equipment are underway. Please switch your phone off and back on again to restore your connection to the network,” Vodafone wrote on its Facebook page nine hours ago.

    Vodafone spokesperson said it “will review our processes to reduce the impact in the unlikely event this should happen again.”

    The red telco has been dogged by severe network outages in the past, leading to customers branding the network “Vodafail” and fleeing the telco en masse. However, it has since invested over $1bn in network upgrades.

     

    This Sydney-based Vodafone user also experienced some delays in receiving text messages with three arriving this morning that were sent overnight.

    Other disgruntled customers also made their feelings known on Voda’s Facebook page: “Vodafone this is a JOKE! My Internet wasn’t working from Wednesday night until Thursday 11pm! I couldn’t make calls for two hours and I wasn’t even able to send a text! Get your act together!”

     

    Welcome Back, Walkman: Sony Cult Runs Gingerbread 2.3

    Live with Walkman is the smartphone gone music mad. The Sony Ericsson has a 3.2″ screen, powerful 1Ghz processor, dual cameras (front facing camera is Skype enabled, while a 5MP AF camera can capture 720p HD video).


    Click to enlarge

    Bluetooth, DLNA, Micro USB support and even geo tagging all come as standard, as well as the usual slew of Android tools. 

    It also packs a punch on the audio front with Sony’s xLOUD output, and delivers a unique ‘social music’ experience with a dedicated ‘Walkman’ button giving that instant music hit, and even more tracks and videos via Sony Qriocity. 

    So, the beloved Walkman can be yours once more, but with a phone and media centre to boot. 

    But it also is friendly with social networks, and allows users to share and discover content via “deep” Facebook integration as well as Twitter, through a native application called ‘Timescape’.  

    Running on the latest Gingerbread 2.3 platform, it also provides access to over 250,000 applications from Google Android Market.

    The Android Walkman also contains a bundle of audio extras including ‘media discovery’ application, TrackID, FM radio, Mp3 music tones, stereo earphones and speakers (natch). 

    The slick smartphone with mineral glass front is just 56.5 x 106 x 14.2 mm and is lightweight at just 115g. 


    “Consumers want smartphones to deliver a rich and social entertainment experience. Rather than a one dimensional music experience, they want instant and seamless access to new content, combined with the ability to share and connect with their friends,” Nikolaus Scheurer, Sony Ericsson Head of Product Marketing, said. 

     

    Click to enlarge

    Live with Walkman will be available globally in “selected markets” from Q4 2011, so here’s hoping this mean, mobile music machine is heading Down Under. 

    Sony Ericsson New 1Ghz Lightning-Fast Xperias To Rival HTC Desire

    Sony Ericsson have just put it up to HTC and Samsung with two new high speed smartphones, Xperia ‘ray’ and ‘active.’

    The latest high end additions to the Xperia family, both running Gingerbread 2.3 OS have powerful 1Ghz processors, pitting them against the recently launched HTC Desire and Samsung’s Galaxy II, although the latter boasts 1.2Ghz version. 

    Specs wise, the 3.3″ Xperia ‘ray’ in particular is looking to give high end rivals a run for their money with pretty impressive features including “reality display” with Sony Ericsson’s mobile Bravia engine, 8.1 MP camera with Exmor R for mobile, HD video functionality, and a slick aluminium frame (9.4mm thin). 

    And Sony aren’t beating around the bush about who “premium” ray is aimed at: “We expect Xperia ray to appeal to consumers who are looking for a combination of beautiful design and a rich feature set,” says Daniel Sandblom, Global Product Marketing Manager.  

    The Xperia active is positioning itself as a lifestyle phone with 5MP camera, water and dust proofing, and a flurry of sports apps preinstalled, including the iMapMyFitness app – users can easily monitor their daily performance.

    But wait, there’s more: a built-in GPS, barometer and compass in combination with the on-screen heart rate and pulse monitor (enabled by ANT+ wireless networking technology). (Phew, I’m tired already.)

    The maker also announced new ‘Smart Extras’ for Xperias at CommunicAsia 2011 in Singapore today. 

    “As the Android platform gains market share in Asia Pacific, the Xperia portfolio is well positioned to deliver consumers a unique and differentiated experience,” said Steve Walker, Chief Marketing Officer, Sony Ericsson. 

    “Today’s announcement demonstrates that we continue to deliver a portfolio to make the Sony Ericsson experience available to a broad range of consumers. ” 

     

    No word if the Xperias will make it to Aussie shores just yet, but Sony have said they will be available in select “global markets” from Q3. Pricing was not detailed either. 

    Where’s Steve? Apple In Secret Talks To Replace CEO

    Reports are emerging that Apple board members are pondering the replacement of their legendary CEO Steve Jobs. However, Jobs is said to be unaware of the replacement talks, reported to be taking place among a select number of Apple board members, although not all. 

     The story, which first appeared in the Wall Street Journal, is said to have been rubbished by Jobs, who reportedly said the story was “hogwash.” 
    Jobs, who is on his third medical leave of absence, has in the past fought pancreatic cancer and also undergone a liver transplant.