if (!defined('ABSPATH')) die('No direct access.');

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * Compute the total size of one or more directories (or files), honouring an exclusion list.
	 *
	 * If $basedirs is passed as an array, then $directorieses must be too
	 * Note: Reason $directorieses is being used because $directories is used within the foreach-within-a-foreach further down
	 *
	 * @param Array|String $directorieses List of of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 *
	 * @return String|Integer - human-readable text when $format is 'text', otherwise a byte count
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// A single string is treated as both the only directory and its own base
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when there is no index-matched one
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Compute the path of $dir relative to $basedir (empty when $dir is not under $basedir)
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
* * @param array $url_parameters - parameters and values to be added to the URL output * * @return void */ public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) { global $wp_filesystem, $updraftplus; $build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore'; foreach ($url_parameters as $k => $v) { $build_url .= '&'.$k.'='.$v; } if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit; if (!WP_Filesystem($credentials)) { $updraftplus->log("Filesystem credentials are required for WP_Filesystem"); // If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url); request_filesystem_credentials($build_url, '', true, false); if ($wp_filesystem->errors->get_error_code()) { echo '
'; echo ''; echo '
'; foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message); echo '
'; echo '
'; exit; } } } /** * Get the html of "Web-server disk space" line which resides above of the existing backup table * * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link * * @return String Web server disk space html to render */ public static function web_server_disk_space($will_immediately_calculate_disk_space = true) { if ($will_immediately_calculate_disk_space) { $disk_space_used = self::get_disk_space_used('updraft', 'numeric'); if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024) $disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used); $refresh_link_text = __('refresh', 'updraftplus'); return self::web_server_disk_space_html($disk_space_text, $refresh_link_text); } else { return ''; } } else { $disk_space_text = ''; $refresh_link_text = __('calculate', 'updraftplus'); return self::web_server_disk_space_html($disk_space_text, $refresh_link_text); } } /** * Get the html of "Web-server disk space" line which resides above of the existing backup table * * @param String $disk_space_text The texts which represents disk space usage * @param String $refresh_link_text Refresh disk space link text * * @return String - Web server disk space HTML */ public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) { return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • ';
	}

	/**
	 * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip)
	 * Always cleans up temporary files over 12 hours old.
	 * With parameters, also cleans up those.
	 * Also cleans out old job data older than 12 hours old (immutable value)
	 * include_cachelist also looks to match any files of cached file analysis data
	 *
	 * @param String  $match             - if specified, then a prefix to require
	 * @param Integer $older_than        - in seconds
	 * @param Boolean $include_cachelist - include cachelist files in what can be purged
	 */
	public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) {

		global $updraftplus;

		// Clean out old job data
		if ($older_than > 10000) {

			global $wpdb;

			$table = is_multisite() ? $wpdb->sitemeta : $wpdb->options;
			$key_column = is_multisite() ? 'meta_key' : 'option_name';
			$value_column = is_multisite() ? 'meta_value' : 'option_value';

			// Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log)
			$all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A);

			foreach ($all_jobs as $job) {

				$nonce = str_replace('updraft_jobdata_', '', $job[$key_column]);
				$val = empty($job[$value_column]) ? array() : $updraftplus->unserialize($job[$value_column]);
				// TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014
				$delete = false;
				if (!empty($val['next_increment_start_scheduled_for'])) {
					if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true;
				} elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) {
					$delete = true;
				}

				// Drop any tables left over from an aborted import using a temporary prefix (never the live prefix)
				if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) {
					$tables_to_remove = array();
					$prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%";
					$sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix);
					foreach ($wpdb->get_results($sql) as $table) {
						$tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table)));
					}
					foreach ($tables_to_remove as $table_name) {
						$wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name));
					}
				}

				if ($delete) {
					delete_site_option($job[$key_column]);
					delete_site_option('updraftplus_semaphore_'.$nonce);
				}
			}

			// Reap stale lock options: legacy ones by cut-off date, per-job ones by expiry timestamp
			$wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than));
		}

		$updraft_dir = $updraftplus->backups_dir_location();

		$now_time = time();
		$files_deleted = 0;

		// During the scheduled cron clean-up, always include cachelist files
		$include_cachelist = defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files') ? true : $include_cachelist;

		if ($handle = opendir($updraft_dir)) {
			while (false !== ($entry = readdir($handle))) {
				$manifest_match = preg_match("/updraftplus-manifest\.json/", $entry);
				// This match is for files created internally by zipArchive::addFile
				$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry); // on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding
				$pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry);
				// zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern.
				$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry);
				$cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false;
				$browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry);
				$downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry); // potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. .zip.tmp.3b9r8r.part)
				// Temporary files from the database dump process - not needed, as is caught by the time-based catch-all
				// $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry);
				// The gz goes in with the txt, because we *don't* want to reap the raw .txt files
				if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) {
					// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
					if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry)>43200) {
						// Only log to the DB for every 25th deletion, to avoid log bloat
						$skip_dblog = (0 == $files_deleted % 25) ? false : true;
						$updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog);
						@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
						$files_deleted++;
					}
				} elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry)> apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) {
					$skip_dblog = (0 == $files_deleted % 25) ? false : true;
					$updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog);
					@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					$files_deleted++;
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
} // Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both // Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files. foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) { if ($handle = opendir($path)) { while (false !== ($entry = readdir($handle))) { // With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) { $updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")"); @unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } } } /** * Find out whether we really can write to a particular folder * * @param String $dir - the folder path * * @return Boolean - the result */ public static function really_is_writable($dir) { // Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks. if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed $rand_file = "$dir/test-".md5(rand().time()).".txt"; while (file_exists($rand_file)) { $rand_file = "$dir/test-".md5(rand().time()).".txt"; } $ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. @unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. return ($ret > 0); } /** * Remove a directory from the local filesystem * * @param String $dir - the directory * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents * * @return Boolean - success/failure */ public static function remove_local_directory($dir, $contents_only = false) { // PHP 5.3+ only // foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) { // $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname()); // } // return rmdir($dir); if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. while (false !== ($entry = readdir($handle))) { if ('.' !== $entry && '..' !== $entry) { if (is_dir($dir.'/'.$entry)) { self::remove_local_directory($dir.'/'.$entry, false); } else { @unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } return $contents_only ? 
true : rmdir($dir); } /** * Perform gzopen(), but with various extra bits of help for potential problems * * @param String $file - the filesystem path * @param Array $warn - warnings * @param Array $err - errors * * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen() */ public static function gzopen_for_read($file, &$warn, &$err) { if (!function_exists('gzopen') || !function_exists('gzread')) { $missing = ''; if (!function_exists('gzopen')) $missing .= 'gzopen'; if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread'; /* translators: %s: List of disabled PHP functions. */ $err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '. sprintf( /* translators: %s: The process that requires the functions. */ __('Your hosting company must enable these functions before %s can work.', 'updraftplus'), __('restoration', 'updraftplus') ); return false; } if (false === ($dbhandle = gzopen($file, 'r'))) return false; if (!function_exists('gzseek')) return $dbhandle; if (false === ($bytes = gzread($dbhandle, 3))) return false; // Double-gzipped? if ('H4sI' != base64_encode($bytes)) { if (0 === gzseek($dbhandle, 0)) { return $dbhandle; } else { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. return gzopen($file, 'r'); } } // Yes, it's double-gzipped $what_to_return = false; $mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus'); $messkey = 'doublecompress'; $err_msg = ''; if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. $emptimes = 0; while (!gzeof($dbhandle)) { $bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. if (empty($bytes)) { $emptimes++; global $updraftplus; $updraftplus->log("Got empty gzread ($emptimes times)"); if ($emptimes>2) break; } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } } gzclose($dbhandle); fclose($fnew); // On some systems (all Windows?) you can't rename a gz file whilst it's gzopened if (!rename($file.".tmp", $file)) { $err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { $mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus'); $messkey = 'doublecompressfixed'; $what_to_return = gzopen($file, 'r'); } } $warn[$messkey] = $mess; if (!empty($err_msg)) $err[] = $err_msg; return $what_to_return; } public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') { $directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory); $size = 0; if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1); if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1; if (file_exists($directory.'/.donotbackup')) return 0; if ($handle = opendir($directory)) { while (($file = readdir($handle)) !== false) { if ('.' != $file && '..' != $file) { $spath = ('' == $suffix_directory) ? 
$file : $suffix_directory.'/'.$file; if (false !== ($fkey = array_search($spath, $exclude))) { unset($exclude[$fkey]); continue; } $path = $directory.'/'.$file; if (is_file($path)) { $size += filesize($path); } elseif (is_dir($path)) { $handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file); if ($handlesize >= 0) { $size += $handlesize; } } } } closedir($handle); } return $size; } /** * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string. * * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory) * @param String $format Return format - 'text' or 'numeric' * @return String|Integer If $format is text, It returns strings. Otherwise integer value. */ public static function get_disk_space_used($entity, $format = 'text') { global $updraftplus; if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format); $backupable_entities = $updraftplus->get_backupable_file_entities(true, false); if ('all' == $entity) { $total_size = 0; foreach ($backupable_entities as $entity => $data) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); $size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric'); if (is_numeric($size) && $size>0) $total_size += $size; } if ('numeric' == $format) { return $total_size; } else { return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size); } } elseif (!empty($backupable_entities[$entity])) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format); } // Default fallback return 
apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities); } /** * Unzips a specified ZIP file to a location on the filesystem via the WordPress * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182, * to allow us to provide feedback on progress. * * Assumes that WP_Filesystem() has already been called and set up. Does not extract * a root-level __MACOSX directory, if present. * * Attempts to increase the PHP memory limit before uncompressing. However, * the most memory required shouldn't be much larger than the archive itself. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - Full path and filename of ZIP archive. * @param String $to - Full path on the filesystem to extract archive to. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem; if (!$wp_filesystem || !is_object($wp_filesystem)) { return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Unzip can use a lot of memory, but not this much hopefully. if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin'); $needed_dirs = array(); $to = trailingslashit($to); // Determine any parent dir's needed (of the upgrade directory) if (!$wp_filesystem->is_dir($to)) { // Only do parents if no children exist $path = preg_split('![/\\\]!', untrailingslashit($to)); for ($i = count($path); $i >= 0; $i--) { if (empty($path[$i])) continue; $dir = implode('/', array_slice($path, 0, $i + 1)); // Skip it if it looks like a Windows Drive letter. 
if (preg_match('!^[a-z]:$!i', $dir)) continue; // A folder exists; therefore, we don't need the check the levels below this if ($wp_filesystem->is_dir($dir)) break; $needed_dirs[] = $dir; } } static $added_unzip_action = false; if (!$added_unzip_action) { add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5); $added_unzip_action = true; } if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) { $result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include); if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result; if (is_wp_error($result)) { global $updraftplus; $updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code()); } } // Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file. // The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) { return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include); } else { return _unzip_file_pclzip($file, $to, $needed_dirs); } } /** * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped. * * @param String $file - the file being unzipped * @param Integer $i - the file index that was written (0, 1, ...) * @param Array $info - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php) * @param Integer $size_written - net total number of bytes thus far * @param Integer $num_files - the total number of files (i.e. 
one more than the the maximum value of $i) */ public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) { global $updraftplus; static $last_file_seen = null; static $last_logged_bytes; static $last_logged_index; static $last_logged_time; static $last_saved_time; $jobdata_key = self::get_jobdata_progress_key($file); // Detect a new zip file; reset state if ($file !== $last_file_seen) { $last_file_seen = $file; $last_logged_bytes = 0; $last_logged_index = 0; $last_logged_time = time(); $last_saved_time = time(); } // Useful for debugging $record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000; // We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 50MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged. if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) { $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); /* translators: 1: Current file number, 2: Total number of files */ $updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' 
(%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore'); $updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice'); do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files); $last_logged_bytes = $size_written; $last_logged_index = $i; $last_logged_time = time(); $last_saved_time = time(); } // Because a lot can happen in 5 seconds, we update the job data more often if (time() > $last_saved_time + 5) { // N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up. $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); $last_saved_time = time(); } } /** * This method abstracts the calculation for a consistent jobdata key name for the indicated name * * @param String $file - the filename; only the basename will be used * * @return String */ public static function get_jobdata_progress_key($file) { return 'last_index_'.md5(basename($file)); } /** * Compatibility function (exists in WP 4.8+) */ public static function wp_doing_cron() { if (function_exists('wp_doing_cron')) return wp_doing_cron(); return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON); } /** * Log permission failure message when restoring a backup * * @param string $path full path of file or folder * @param string $log_message_prefix action which is performed to path * @param string $directory_prefix_in_log_message Directory Prefix. 
It should be either "Parent" or "Destination" */ public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') { global $updraftplus; $log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message); if ($log_message) { $updraftplus->log($log_message, 'warning-restore'); } } /** * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy. * * @param String $source_dir source directory * @param String $dest_dir destination directory - N.B. this must already exist * @param Array $files files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive) * @param Boolean $chmod chmod type * @param Boolean $delete_source indicate whether source needs deleting after a successful copy * * @uses $GLOBALS['wp_filesystem'] * @uses self::restore_log_permission_failure_message() * * @return WP_Error|Boolean */ public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) { global $wp_filesystem, $updraftplus; foreach ($files as $rname => $rfile) { if ('d' != $rfile['type']) { // Third-parameter: (boolean) $overwrite if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) { self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' 
-> '.$dest_dir.'/'.$rname, 'Destination'); return false; } } else { // $rfile['type'] is 'd' // Attempt to remove any already-existing file with the same name if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on // No such directory yet: just move it if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) { self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination'); $updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname); return false; } elseif (!empty($rfile['files'])) { if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod); // There is a directory - and we want to to copy in $do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false); if (is_wp_error($do_copy) || false === $do_copy) return $do_copy; } else { // There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory. @$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method. } } } // We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore. 
if ($delete_source || false !== strpos($source_dir, '/')) { if (!$wp_filesystem->rmdir($source_dir, false)) { self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir); } } return true; } /** * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes. * * Assumes that WP_Filesystem() has already been called and set up. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - full path and filename of ZIP archive. * @param String $to - full path on the filesystem to extract archive to. * @param Array $needed_dirs - a partial list of required folders needed to be created. * @param String $method - either 'ziparchive' or 'pclzip'. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem, $updraftplus; $class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip'; if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once'); $updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index); $z = new $class_to_use; $flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ? 
ZIPARCHIVE::CHECKCONS : 4; // This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2) // This belongs somewhere else // if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding(); // if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding(); $zopen = $z->open($file, $flags); if (true !== $zopen) { return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } $uncompressed_size = 0; $num_files = $z->numFiles; if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Skip the OS X-created __MACOSX directory if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't create folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } $uncompressed_size += $info['size']; if ('/' === substr($info['name'], -1)) { // Directory. $needed_dirs[] = $to . untrailingslashit($info['name']); } elseif ('.' !== ($dirname = dirname($info['name']))) { // Path to a file. $needed_dirs[] = $to . 
untrailingslashit($dirname); } // Protect against memory over-use if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs); } /* * disk_free_space() could return false. Assume that any falsey value is an error. * A disk that has zero free bytes has bigger problems. * Require we have enough space to unzip the file and copy its contents, with a 10% buffer. */ if (self::wp_doing_cron()) { $available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative if ($available_space && ($uncompressed_size * 2.1) > $available_space) { return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } $needed_dirs = array_unique($needed_dirs); foreach ($needed_dirs as $dir) { // Check the parent folders of the folders all exist within the creation array. if (untrailingslashit($to) == $dir) { // Skip over the working directory, We know this exists (or will exist) continue; } // If the directory is not within the working directory then skip it if (false === strpos($dir, $to)) continue; $parent_folder = dirname($dir); while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) { $needed_dirs[] = $parent_folder; $parent_folder = dirname($parent_folder); } } asort($needed_dirs); // Create those directories if need be: foreach ($needed_dirs as $_dir) { // Only check to see if the Dir exists upon creation failure. Less I/O this way. 
if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) { return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } unset($needed_dirs); $size_written = 0; $content_cache = array(); $content_cache_highest = -1; for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // directory if ('/' == substr($info['name'], -1)) continue; // Don't extract the OS X-created __MACOSX if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't extract folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } // N.B. PclZip will return (boolean)false for an empty file if (isset($info['size']) && 0 == $info['size']) { $contents = ''; } else { // UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume. if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) { $memory_usage = memory_get_usage(false); $total_memory = $updraftplus->memory_check_current(); if ($memory_usage > 0 && $total_memory > 0) { $memory_free = $total_memory*1048576 - $memory_usage; } else { // A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory. 
$memory_free = 50*1048576; } $use_memory = max(10485760, $memory_free - 10485760); $total_byte_count = 0; $content_cache = array(); $cache_indexes = array(); $cache_index = $i; while ($cache_index < $num_files && $total_byte_count < $use_memory) { if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) { $total_byte_count += $cinfo['size']; if ($total_byte_count < $use_memory) { $cache_indexes[] = $cache_index; $content_cache_highest = $cache_index; } } $cache_index++; } if (!empty($cache_indexes)) { $content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes); } } $contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i); } if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) { return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) { return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!empty($info['size'])) $size_written += $info['size']; do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files); } $z->close(); return true; } } Oonagh Reidy, Author at Smart Office - Page 83 of 116

    Smart Office

    Its ON: Telstra “Treble Play” Foxtel + IPTV+ Internet

    The bundles are here. Yes folks Telstra have pressed the ‘on’ button on Internet, IPTV and phone bundles, commonplace in the UK.

    Tipped for some time, Telstra finally launch “entertainer bundles” with 11 Foxtel channels (on T-Box), BigPond  broadband, unlimited calls on home phone.

    And there is also a bundle if you want wireless broadband (500MB), but you can add more Internet data packs.

    11 Foxtel channels shown on Telstra’s T-Box include SkyNews, FOX8, Cartoon Network, Discovery but you also get all the free to air channels like Seven and Nine, and bundlers can rent 6000+ BigPond Movies and TV shows via Internet Protocol TV (IPTV), as Telstra go hard on pushing Foxtel content, which it owns 50% of.

    The home phone comes with discounted international call rates on all plans and family calls benefit for regular numbers dialled.

    Just last week, the blue telco revealed it would be screening its own sports show called the Clubhouse via IPTV.

    Telstra Entertainer Bundle customers can also add Foxtel Sports and Movies and Premium Drama package for the latest TV series “Express from the US” including cult series Game of Thrones.

    Prices for the bundles kick off at $115 per month on a 2-year contract, which comes with 100GB data, phone and TV channels; the $135 pack comes with 200GB data and 500MB mobile broadband, and the top-end pack costs $155 with a massive 500GB Internet allowance.

    But it will cost more if you add on extras like the data packs and more TV channels.

    You can buy Telstra’s T-Hub 2 smart home phone that looks like a tablet for a reduced price.

    There’s an essential package at $80 where you can bundle broadband and the phone, but no T-Box.

    “Our customers have flocked to our great value bundles, and told us entertainment and mobility were the features they most wanted to see added. These take centre stage in our new offerings,” said Telstra Director Broadband Bundles and Devices John Chambers .

    “The inclusion of the Foxtel on T-Box “Get Started” pack along with access to more than 6000 movies and TV episodes to rent through BigPond Movies, places the Telstra Entertainer Bundles ahead of the pack. 

    “Packaging premium IPTV content with all the great broadband and home phone benefits traditionally offered by a Telstra bundle provides a ready-made entertainment experience straight out of the box.”

    Kodak + Officeworks “Exclusive” Pic Deal

    Kodak and Officeworks have hooked up for “exclusive” pic deal.


    Click to enlarge

    Retailer Officeworks has penned an exclusive agreement with Kodak for in-store photo Services.

    The “multi-year” agreement means Officeworks Photo Centre will be using Kodak imaging gear in all 137 stores.

    Kodak’s kiosks will now be creating Photo Books, Personal Greeting Cards, canvas prints and other picture services.

    Officeworks will be installing Kodak’s Adaptive Picture Exchange (APEX) dry lab equipment and technology into its labs.

    This comes as Fuji announced a similar deal with Harvey Norman in over 100 stores in March to use its digital signage services.

    Troubled camera maker Kodak has recently added several kiosk software enhancements, and Officeworks is hoping to drive “substantial sales growth via new product offerings and better service in the Photo category,” says Mark Ward, Managing Director, Officeworks.

    “There is continued customer demand for our solutions and continued opportunity for consistent growth and increased profit in this category.”

    Kodak and Officeworks have been partnering on photo services since 2005.

     

    “Kodak has helped Officeworks build its photo retailing category to where it is today with consistently strong growth as well as innovative solutions for customers,” the imaging company said in a statement.

    Officeworks was the first Australian retailer to offer a dry lab print solution in all of its stores. 

    Online Retail Slumps In December: NAB

    Online retail sales weakened in December after November peak
    However, online sales were up 23% y-o-y, but the slowing of December trade marked “weaker” internet sales from its peak in November, according to NAB’s latest online Retail Sales Index.

    NAB Online Sales Index fell to 227 points (from 241 pts in November), attributed to seasonal factors with November being a peak month for online retail sales ahead of the Christmas rush, according to the Index.

    It is also interesting to note Click Frenzy online sales took place in November with millions of Aussies taking to the Internet to seek out bargains in the one off 24 hour extravaganza, causing some retailers’ e-commerce sites to crash, but proving the huge appetite for web shopping.

    Read: Click Frenzy: What The Retailers Say

    Australia’s online retail spending totalled $12.8 bn in 2012 – around 5.8% of the size of Australia’s traditional bricks & mortar retail sales for the 12 months to November. 

    Domestic sales accounted for around 73% of total online sales, good news for retailers locally.

    However, the annual growth of 23% for web sales was “reasonably robust” when compared with the past year, but below the strong growth rates enjoyed in October and November (+26% and +27% respectively), says NAB.

    In addition, growth for online sales continues to outperform the traditional bricks and mortar sector.

    In November, traditional sales increased 3.3% y-o-y (non-seasonally adjusted) but taking into account seasonal factors, growth was just +2.5%.

    Spending in ‘Household goods & Electronics’ was above the trend level, averaging $181 per transaction, as was Media spend.

    However, spending in Auctions, Department Stores & Fashion contributed to the bulk of the web sales.

     

    “There is a stark difference between the share of spending (in dollar terms) and the number of transactions – with a large volume of transactions on comparatively low value items (Recreation, Toys, Games & Hobbies, Music, Movies, Books)”, the Index noted.

    Online currently accounts for just over 1.5% of total sales for big players like JB Hi-Fi and Harvey Norman, although it looks set to grow further this year and beyond.

    There was also an interesting upturn after Christmas, the Index shows, with the traditional post-Christmas sales rush spreading to the web in a more “significant fashion”, attributed to increased advertising. 

    iTunes OZ Hacked?

    Fraudsters are on the loose and hacking Aussie iTunes accounts.


    Click to enlarge

    Aussie iTunes users have had their accounts hacked by unauthorised users.

    That’s according to Victoria’s consumer watchdog, which said it has become “aware” of concerns about Apple iTunes and App Store account security after consumer complaints about unauthorised purchases made via credit card.

    No word yet if this is confined to Victoria only.

    Many App Store customers have one ID account and password – usually linked to a credit or debit card account.

    But online hacker forums are now selling iTunes account info for as little as $33 – with forums saying fraudsters can net  thousands in account credit, warned Consumer Affairs Victoria.

    That is if consumers don’t stop them in their tracks (literally).

    According to the consumer body, Apple said it is working to enhance the security of its online store, following complaints and has advised customers whose payment information had been stolen to change their passwords and contact their financial institutions.

    Update: However, an Apple Australia spokesperson told SmartHouse it is unaware of the iTunes issue and has had no contact with the Vic office and is “unsure” of where they got this information from.

    The consumer body did not give a direct reply to questions from SmartHouse but it seems they got their info from a New York Times article on similar iTunes fraud in the US.

    However, Victoria’s consumer body has warned users to change their password regularly, use ones at least eight characters long (a combination of letters, symbols and numbers).

    Apps with a long track record of user reviews are a safer bet, it said in a statement.

    Consumers who access counterfeit or ‘cloned’ apps for sale in the App Store risk compromising their systems to “predatory software.” Counterfeit apps look like real apps but don’t have the same kind of security as those developed by established programmers.

     

    These ‘cloned’ apps can expose personal data to malware or predatory, virus-like software which can be used to steal personal information.

    This isn’t the first website hacking of late. LinkedIn, eHarmony and Twitter have all suffered data breaches of late, with passwords stolen.

    “Consumer Affairs Victoria reminds consumers to always remain vigilant in the online environment and provides advice when concerns are raised in the public domain about potential consumer detriment,” it told SmartHouse today.

    E-Tailing: ‘Supernovas’, Secrets (And Why Amazon ISN’T The Bees Knees)

    Aussie retailers shouldn’t necessarily jump on the same e-tailing bandwagon as US counterparts , an Online Retailer conference in Sydney was told yesterday.


    Click to enlarge

    So says respected Forrester ecommerce analyst Sucharita Mulpuru, who gave some valuable insights into the US market and F-Commerce, showrooming and why the darlings of the Internet aren’t always right, yesterday.

    The analyst cited some of the top errors US retailers have made – including “jumping on the Amazon bandwagon”, over-embracing apps, and blaming excess taxes and ‘showrooming’ for your woes (hmm, sounds familiar).

    (And if anyone is unfamiliar with the showrooming term, it’s where consumers check out goods in a brick and mortar retail store, then buy it online to find a lower price).

    These latter woes in particular ring strongly here in Oz as we recall the furore Gerry Harvey and his High Street colleagues caused when they demanded the government drop the GST tax threshold to under $1000 for online goods purchased from international sites.

    The US is always flagged a few years ahead of us Aussies in the online retailing space, but the Yanks may not be so avant-garde after all, says Mulpuru.

    But they must be doing something right as online retail has increased share by 6% in the US. 

    Australia still lags behind with some of the major brands like Harvey Norman only conceding to the platform of late and still doubt its potential.

    So stop whinging, as these aforementioned woes have been thrashed out by US retailers long before. (It’s worth noting Harvey and Co have been quiet since the backlash erupted among consumers fed up with paying far too much for consumer items anyway.)

    Also another vital error – “believing the supernovas are best to crack the local market”. So who are the supernovas?

    Apple, Google, Amazon Facebook – the Internet giants that dominate the scene whether we like it or not.

    While these stalwarts are “great repositories of information” and major drivers of traffic these obvious ecommerce platforms aren’t all they’re hyped up to be, says Mulpuru.

    The quick rundown of the 4 internet commerce supernovas is this:

    Google: accounts for half of all global visitors but “lacks strategic direction”; Facebook’s M-commerce and ‘F-Commerce’ “failed to fly” and social commerce is “negligible” as it drives only 1% of sales, research shows.

    Amazon “a force that belies its size” and “Amazon are akin to the new Wal-Mart” – even though it is smaller in size, says the Forrester analyst.

    And the establishment of a Sydney based Amazon warehouse may be a “double-edged sword” as the e-tail kingpin often starts selling goods itself if it sees an online ‘partner’ making big sales on product lines, as it did with PC accessories.

     

    Amazon positions itself as a “partner” but ends up being a “competitor.”

    Apple also has an annoying habit of being slow to respond to problems in its eco system and their terms can also often be a pain and everything on the app store is on their terms, which can be risky if you’re depending on just one mobile app to drive online traffic.

    “Apple is not so much a gift from God but rather is The Godfather” says Forrester’s ecomm guru.

    On the issues of apps, they’re not always as useful as they seem, even though 55% of online retailers in Australia are now selling via mobile-accessible sites, it was revealed yesterday.

    But the Starbucks app, for instance, accounts for just 5% of its total transactions, so it’s hardly the golden path to a successful mobile strategy.

    The ecomm guru also gave some general tips to Aussie retailers:

    – Have a holistic marketplace strategy:

    Don’t just partner with one (i.e Amazon) – a lot of retailers are now starting to hook up with several marketplaces. Some retailers like Asos have even launched their very own marketplace, in order to avoid paying commission to Amazon and abiding by stringent rules, in the case of Apple apps rules.

    She also cited new formats emerging like ‘Fancy ‘that lets retailers sell products that are similar to ones a consumer is already browsing online.

    – Optimise for mobile web.

    It’s easy to say but hard, if not impossible, to do, and mobile shopping may not live up to the hype.

     Android is the “winning platform but it is fragmented” she warns. And there are also other OS to optimise, like Apple iOS, Windows, BlackBerry and so on which can be a major nuisance – not to mention a massive cost and time drain.

    Instead of dealing with Apple some like shoe retailer Steve Madden retailer decided to optimise their own site for various OS.

    “Optimising for all the different OS is starting to become unsustainable …and retailers are starting to realise this fast,” she warns.

    – Be a fast follower rather than a beta partner. Don’t just be the first to market for the sake of it.

    Mulpuru cites a case study in the US between two retailers – one who spent $5m on and another who spent very little.

    But guess what?

    Practically the same level of mobile traffic on mobile m-comm sites – just 1.5%

    “So is the best mobile strategy just to not have one at all?” she asked the retail audience.

    – Innovate and reinvent your consumer shopping experience:

    There is still a lot of “low-hanging fruit” like email and payments which retailers have still not got sorted despite the advent of ecommerce as a major force – many are not even PayPal members, which is pretty basic stuff.

    The key to online is also solving the “last mile problem” where a consumer buys the products but delivery is an issue.

    Wal-Mart allows free pick up instore and in FedEx offices for those customers who live further away, which is cited as a good strategy by the analyst as it can help with that “last mile” dilemma.

     

    (Most of us can probably recall abandoning a shopping cart as the delivery price was too high or too much trouble).

    Targeted email campaigns to consumers are still “strong” despite the advent of other forms of messaging like instant messaging and Facebook, with many now received via mobile; it is important to optimise for that platform.

    And “forget the sales tax arguments..change the store” and add new categories, says Mulpuru, citing Warby Parker in the US who has stores within stores and acquisitions and partnership with others is vital to increase footfall.

    The analyst cites Walmart, which acquired Aussie start up Grabble, which provides retailers with a point-of-sale app for purchases.

    FINALLY! Telstra Seals $11B NBN Deal

    Telstra has finalised agreements with NBN Co and the government regarding the NBN rollout, it announced today.


    Click to enlarge

    The ‘Definitive Agreements’ with the company charged with the National Broadband Network rollout, NBN Co, and the government are expected to provide Telstra approximately $11 billion (post-tax) net present value over the long term, Telstra CEO David Thodey confirmed.

    Telstra shareholders approved the proposed split up of the company and NBN deal, which will see it surrender its copper network, infrastructure, pits and ducts, at its AGM last year and some major new investments by the telco is expected following the huge cash windfall.

    “The agreements are expected to also contribute to free cashflow generated in the medium term, provide us with greater financial flexibility and a stronger balance sheet, and help to offset the decline in free cashflow expected as customers migrate onto the NBN,” Thodey said.

    This confirmation comes as the competition watchdog, ACCC, gave final approval to Telstra’s revised draft of its Structural Separation Undertaking, which outlines the break up of its retail and wholesale divisions during the rollout of NBN, last week.

    The break up of Australia’s largest telco is to be completed by July 1 2018.

    Mr Thodey confirmed the Structural Separation Undertaking (SSU), had also now come into force.

    “Compared with other realistically available options this outcome should deliver a better overall financial outcome, a more stable regulatory environment and greater strategic flexibility, enabling Telstra to maintain a strong focus on our key areas of growth,” Mr Thodey said.

    The government will fork out $190 million to Telstra under the Information Campaign and Migration Deed, expected later this year and will be amortised over three years as ‘Other Income’ on its balance sheet as costs are accrued.

    The payment is outside Telstra’s guidance for fiscal year 2012.

    Telstra will retain ownership of its Hybrid fiber-coaxial (HFC) network and its 50% share in FOXTEL, it also confirmed.

     

    This followed three years of complex and prolonged negotiations with the ACCC, Government and NBN Co, while rivals including Optus voiced their concern over Telstra’s $11 billion deal with NBN Co and its proposed break up.

    Thodey said he was “pleased” with the positive outcome. 

    Last week, Telstra revealed its NBN high speed broadband pricing, which starts at $49 for 50GB and is available in NBN test areas in NSW, QLD, Victoria and Tas and SA.

    Optus Secret Cloud Tests

    Telco to expand network capacity at big events with cloud technology.
    No. 2 telco Optus is hooking up with Connectum to test cloud technology to deliver more network “core capacity”, Optus’ MD of Networks, Guenther Ottendorfer, revealed to media yesterday. 

    The telco is to engage in testing on the cloud technology which would help Optus increase its network capacity at major events like New Years, the Melbourne Cup or the AFL Grand Final, Ottendorfer said.

    California-based Connectum helps carriers connect mobile devices via an elastic cloud infrastructure.

    The Telco will be making a formal announcement over the next few days.

    Optus revealed its grand plans for 3G+ and 4G networks yesterday and confirmed it will be expanding its fledgling 4G LTE service to Canberra and South Australia next year.

    Stacked Up: Telstra Unleash IPV6 To Web

    Telco has kicked off IPv6 for business users, meaning they won’t run out of Internet addresses (on IPv4) – predicted to happen in the next few years.


    Click to enlarge

    Telstra is now providing business and wholesale customers with Internet Protocol version 6 (IPv6) connectivity – the next generation of newer, longer web addresses. 

    The service will be available to all other Telstra broadband users “over time,” it confirmed. 
    This will help overcome the impending shortage of current IPv4 addresses globally, the Telco said today. 
    Telstra’s internet backbone is now fully “dual-stacked” meaning users can connect with IPv4, the existing system, or IPv6, for the first time. 
    Customers that choose to opt-in to IPv6, will have access to the global IPv6 internet, including connectivity to multiple providers, but shouldn’t notice any difference in the change over, Telstra insist. 
     Being dual stacked gives customers the current reliability of IPv4 as they commence the transition to IPv6, says Director Transport and Routing, David Robertson. 
    “Currently IPv4 offers 32 bits for an internet address, with IPv6 an internet address has 128 bits. This means that with IPv4 there were about 4 billion internet addresses, with IPv6 there are more internet addresses than grains of sand on the planet.” 
    “The deployment of IPv6 into the network is an on-going program of work, and we’ll make this available for other networks such as DSL and our wireless networks over time. 
    “By dual stacking IPv4 and IPv6 in our network, customers can opt into IPv6 in their own time, and according to the lifecycle upgrade of their existing equipment. 
    Most customers won’t notice the change to IPv6, he added. 
     

    Telstra will be working with customers who have expressed an interest in moving to IPv6 to help them through the transition, but customers will need to opt out if they wish to continue using IPv4.

     “In coming years we expect that IPv6 will become the norm,” declared Robertson. 

    It’s Live! Vodafone ‘Fastest 4G Network In Oz’

    From Vodafail to Vodafast: Telco promises ‘thrilling’ 4G speeds as it finally switches on LTE service

    Voda has switched on its 4G LTE network in all main cities – Sydney, Melbourne, Brisbane, Perth, Adelaide,  as well as Newcastle, Wollongong in NSW, today. 

    The third largest telco has finally moved to deliver a faster 4G service  – almost 2 years after Telstra’s 4G service went live – but is looking to make up for lost ground, claiming its superfast wide-band network will deliver “mobile data speeds never before experienced” in OZ.
    Vodafone’s Head of Network Product Management, Rob Glennon, says its 4G network demonstrated trial speeds of up to 100Mbps – not seen on any other network here, in a company blog.
    Optus too is upping the 4G ante this year, and is promising 70% coverage by year end and new LTE technologies, as Aussies use mobile internet even more. 
    Vodafone 4G coverage will hit “selected” metro areas of all the main capital cities only, but the troubled telco says its wide-band 4G network coverage will treble by year’s end. 

    Vodafone’s boss put it up to rivals Telstra and Optus today claiming the fastest 4G speeds locally and “many parts of the world.”

    “Vodafone customers in 4G areas with compatible devices will have access to speeds that are among the fastest not only in the country but many parts of the world,” said Vodafone CEO Bill Morrow. 

    “We’re offering a leading-edge 4G network to customers on the best value plans in market.”

    New customers will be able to join its new 4G service from next month.

    “We have invested heavily in our 3G and 3G+ networks and we know our wide-band 4G rollout is going to thrill our data-hungry customers,” Morrow added. “Australians have told us loud and clear they want a fast and reliable network.”
    The telco is claiming the fastest network speeds on the back of its 2 X 20MHz contiguous spectrum holdings. 

     

     “We’ve been single-minded in our determination to improve our network, enhance our customer service and offer the best value mobile services in the country.”


    The 4G announcement comes in the wake of the telco’s expansion into regional Australia and major expansion of its call centre operation in Tasmania.  

    In 2011-12, Vodafone suffered a PR nightmare after its 3G network crashed, leading to the telco being dubbed ‘Vodafail’. 

    On Net: TPG Profits Soar 40% As Users Flock To Cheap Broadband

    Net profit soared 40% as customers head to TPG. The ISP is on a broadband high after adding almost 100K new users to its ‘on net’ broadband service and posting a 40% jump in profit after tax to $78.2 million.
    Group financial results announced yesterday for the year ended 31 July 2011 (“FY11”), painted a bright picture for the small ISP, which offers competitively priced ADSL, ADSL2+ and SHDSL broadband and domain hosting services.

    Earnings before tax, depreciation (EBITDA) also increased 37% to $234.0m – above company guidance range of $225m-$230m.

    “Strong organic subscriber growth” in consumer broadband led to a net increase of 59,000 subscribers, comprising 77,000 ‘On-Net’ users, offset by a decline in Off-Net customers.

    The ‘On-Net’ broadband and home phone bundle which costs $29 was also singled out as a major growth driver, adding an impressive 98,000 subscribers during the year.

    And the ASX listed ISP is also looking to the cloud announcing the completed purchase of IntraPower, whose “Trusted Cloud” will allow it offer cloud services to its growing subscriber base.

    PIPE Networks, its optic fibre business has also continued to grow “strongly” and contributed $57.2m to the 2011 profit results thanks to “strong revenue growth.”

    Its network rollout for the Vodafone Hutchison Australia contract, which will increase PIPE’s domestic fibre footprint by approximately 60%, is “on schedule,” the company said.

    Other financial highlights include: an earnings per share increase of 33% to 10.1c per share, and strong cashflow enabled bank debt to be slashed by $100m.

     

    TPG’s Board of Directors has declared a final FY11 dividend of 2.25 cents per share (fully franked), payable on 22 November bringing total FY11 dividends to 4.5 cents per share.

    However, the directors have invited shareholders to reinvest in the company through its “dividend reinvestment plan.”