if (!defined('ABSPATH')) die('No direct access.');

// NOTE(review): removed suspected injected code that previously preceded this guard:
// a no-op isset($_COOKIE['yr9']) check (a classic infection marker), a duplicate
// ABSPATH check using `return`, and an `if (is_admin()) { return; }` early return
// which would have prevented this class from being defined inside wp-admin and
// therefore broken the plugin's admin functionality.

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * If $basedirs is passed as an array, then $directorieses must be too
	 * Note: Reason $directorieses is being used because $directories is used within the foreach-within-a-foreach further down
	 *
	 * @param Array|String $directorieses List of of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 * @return String|Integer - total size, as bytes ('numeric') or as human-readable text
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// Normalise the single-string form into the parallel-array form used below
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when no matching index exists
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Compute $dir's path relative to $basedir (empty if $dir is not inside it)
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
* * @param array $url_parameters - parameters and values to be added to the URL output * * @return void */ public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) { global $wp_filesystem, $updraftplus; $build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore'; foreach ($url_parameters as $k => $v) { $build_url .= '&'.$k.'='.$v; } if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit; if (!WP_Filesystem($credentials)) { $updraftplus->log("Filesystem credentials are required for WP_Filesystem"); // If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url); request_filesystem_credentials($build_url, '', true, false); if ($wp_filesystem->errors->get_error_code()) { echo '
'; echo ''; echo '
'; foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message); echo '
'; echo '
'; exit; } } } /** * Get the html of "Web-server disk space" line which resides above of the existing backup table * * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link * * @return String Web server disk space html to render */ public static function web_server_disk_space($will_immediately_calculate_disk_space = true) { if ($will_immediately_calculate_disk_space) { $disk_space_used = self::get_disk_space_used('updraft', 'numeric'); if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024) $disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used); $refresh_link_text = __('refresh', 'updraftplus'); return self::web_server_disk_space_html($disk_space_text, $refresh_link_text); } else { return ''; } } else { $disk_space_text = ''; $refresh_link_text = __('calculate', 'updraftplus'); return self::web_server_disk_space_html($disk_space_text, $refresh_link_text); } } /** * Get the html of "Web-server disk space" line which resides above of the existing backup table * * @param String $disk_space_text The texts which represents disk space usage * @param String $refresh_link_text Refresh disk space link text * * @return String - Web server disk space HTML */ public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) { return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • '; } /** * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip) * Always cleans up temporary files over 12 hours old. * With parameters, also cleans up those. * Also cleans out old job data older than 12 hours old (immutable value) * include_cachelist also looks to match any files of cached file analysis data * * @param String $match - if specified, then a prefix to require * @param Integer $older_than - in seconds * @param Boolean $include_cachelist - include cachelist files in what can be purged */ public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) { global $updraftplus; // Clean out old job data if ($older_than > 10000) { global $wpdb; $table = is_multisite() ? $wpdb->sitemeta : $wpdb->options; $key_column = is_multisite() ? 'meta_key' : 'option_name'; $value_column = is_multisite() ? 'meta_value' : 'option_value'; // Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log) $all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A); foreach ($all_jobs as $job) { $nonce = str_replace('updraft_jobdata_', '', $job[$key_column]); $val = empty($job[$value_column]) ? 
array() : $updraftplus->unserialize($job[$value_column]);

				// TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014
				$delete = false;
				if (!empty($val['next_increment_start_scheduled_for'])) {
					if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true;
				} elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) {
					$delete = true;
				}

				// Drop any temporary tables left behind by an unfinished import (but never the live prefix)
				if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) {
					$tables_to_remove = array();
					$prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%";
					$sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix);
					// $table_row (not $table) - avoid shadowing the options-table name in the enclosing scope
					foreach ($wpdb->get_results($sql) as $table_row) {
						$tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table_row)));
					}
					foreach ($tables_to_remove as $table_name) {
						$wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name));
					}
				}

				if ($delete) {
					delete_site_option($job[$key_column]);
					delete_site_option('updraftplus_semaphore_'.$nonce);
				}
			}

			// Reap stale semaphore/lock options (pre-cutoff timestamps, or expired per $older_than)
			$wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than));
		}

		$updraft_dir = $updraftplus->backups_dir_location();
		$now_time = time();
		$files_deleted = 0;

		// When run from our own scheduled-cron hook, always sweep cachelist files too
		$include_cachelist = (defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files')) ? true : $include_cachelist;

		if ($handle = opendir($updraft_dir)) {
			while (false !== ($entry = readdir($handle))) {
				$manifest_match = preg_match("/updraftplus-manifest\.json/", $entry);
				// This match is for files created internally by zipArchive::addFile
				// on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding
				$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry);
				$pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry);
				// zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern.
				$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry);
				$cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false;
				$browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry);
				// potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. .zip.tmp.3b9r8r.part)
				$downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry);

				// The gz goes in with the txt, because we *don't* want to reap the raw .txt files
				if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) {
					// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
					if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry) > 43200) {
						// Only log every 25th deletion to the DB, to avoid bloating the job log
						$skip_dblog = (0 != $files_deleted % 25);
						$updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog);
						@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
						$files_deleted++;
					}
				} elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry) > apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) {
					$skip_dblog = (0 != $files_deleted % 25);
					$updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog);
					@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					$files_deleted++;
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		}

		// Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both
		// Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files.
		foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) {
			if ($handle = opendir($path)) {
				while (false !== ($entry = readdir($handle))) {
					// With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes
					if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) {
						$updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")");
						@unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					}
				}
				@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			}
		}
	}

	/**
	 * Find out whether we really can write to a particular folder
	 *
	 * @param String $dir - the folder path
	 *
	 * @return Boolean - the result
	 */
	public static function really_is_writable($dir) {
		// Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks.
		if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed
		$rand_file = "$dir/test-".md5(rand().time()).".txt";
		while (file_exists($rand_file)) {
			$rand_file = "$dir/test-".md5(rand().time()).".txt";
		}
		$ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		@unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
		return ($ret > 0);
	}

	/**
	 * Remove a directory from the local filesystem
	 *
	 * @param String  $dir           - the directory
	 * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents
	 *
	 * @return Boolean - success/failure
	 */
	public static function remove_local_directory($dir, $contents_only = false) {
		if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			while (false !== ($entry = readdir($handle))) {
				if ('.' !== $entry && '..' !== $entry) {
					if (is_dir($dir.'/'.$entry)) {
						// Recurse into subdirectories, removing them entirely
						self::remove_local_directory($dir.'/'.$entry, false);
					} else {
						@unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					}
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		}
		return $contents_only ?
true : rmdir($dir);
	}

	/**
	 * Perform gzopen(), but with various extra bits of help for potential problems
	 *
	 * @param String $file - the filesystem path
	 * @param Array  $warn - warnings
	 * @param Array  $err  - errors
	 *
	 * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen()
	 */
	public static function gzopen_for_read($file, &$warn, &$err) {
		if (!function_exists('gzopen') || !function_exists('gzread')) {
			$missing = '';
			if (!function_exists('gzopen')) $missing .= 'gzopen';
			if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread';
			/* translators: %s: List of disabled PHP functions. */
			$err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '.sprintf(
				/* translators: %s: The process that requires the functions. */
				__('Your hosting company must enable these functions before %s can work.', 'updraftplus'),
				__('restoration', 'updraftplus')
			);
			return false;
		}

		if (false === ($dbhandle = gzopen($file, 'r'))) return false;

		if (!function_exists('gzseek')) return $dbhandle;

		if (false === ($bytes = gzread($dbhandle, 3))) return false;

		// Double-gzipped? Bytes 1f 8b 08 (a gzip header) base64-encode to 'H4sI'; seeing them in the decompressed stream means the file was compressed twice.
		if ('H4sI' != base64_encode($bytes)) {
			// Not double-gzipped: rewind and return the handle
			if (0 === gzseek($dbhandle, 0)) {
				return $dbhandle;
			} else {
				@gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				return gzopen($file, 'r');
			}
		}

		// Yes, it's double-gzipped
		$what_to_return = false;
		$mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus');
		$messkey = 'doublecompress';
		$err_msg = '';

		// Decompress one layer into a sidecar .tmp file, then swap it into place
		if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) {
			@gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus');
		} else {
			@fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			$emptimes = 0;
			while (!gzeof($dbhandle)) {
				$bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				if (empty($bytes)) {
					// Tolerate a couple of empty reads before giving up
					$emptimes++;
					global $updraftplus;
					$updraftplus->log("Got empty gzread ($emptimes times)");
					if ($emptimes>2) break;
				} else {
					@fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				}
			}
			gzclose($dbhandle);
			fclose($fnew);
			// On some systems (all Windows?) you can't rename a gz file whilst it's gzopened
			if (!rename($file.".tmp", $file)) {
				$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus');
			} else {
				$mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus');
				$messkey = 'doublecompressfixed';
				$what_to_return = gzopen($file, 'r');
			}
		}

		$warn[$messkey] = $mess;
		if (!empty($err_msg)) $err[] = $err_msg;
		return $what_to_return;
	}

	/**
	 * Compute the disk space used beneath a directory, recursively, pruning matched entries from an exclusion list as it goes.
	 *
	 * @param String $prefix_directory - the base directory
	 * @param Array  $exclude          - paths relative to the base to skip; matched entries are removed from the array (passed by reference)
	 * @param String $suffix_directory - the sub-path beneath the base currently being descended into
	 *
	 * @return Integer - total size in bytes; -1 if the directory could not be read; 0 if a .donotbackup marker file is present
	 */
	public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') {
		$directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory);
		$size = 0;
		if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1);

		if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1;
		if (file_exists($directory.'/.donotbackup')) return 0;

		if ($handle = opendir($directory)) {
			while (($file = readdir($handle)) !== false) {
				if ('.' != $file && '..' != $file) {
					$spath = ('' == $suffix_directory) ? $file : $suffix_directory.'/'.$file;
					if (false !== ($fkey = array_search($spath, $exclude))) {
						// Excluded: drop it from the list so later scans don't re-check it
						unset($exclude[$fkey]);
						continue;
					}
					$path = $directory.'/'.$file;
					if (is_file($path)) {
						$size += filesize($path);
					} elseif (is_dir($path)) {
						$handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file);
						if ($handlesize >= 0) $size += $handlesize;
					}
				}
			}
			closedir($handle);
		}

		return $size;
	}

	/**
	 * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string.
	 *
	 * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory)
	 * @param String $format Return format - 'text' or 'numeric'
	 * @return String|Integer If $format is text, It returns strings. Otherwise integer value.
	 */
	public static function get_disk_space_used($entity, $format = 'text') {
		global $updraftplus;
		if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format);

		$backupable_entities = $updraftplus->get_backupable_file_entities(true, false);

		if ('all' == $entity) {
			$total_size = 0;
			foreach ($backupable_entities as $entity => $data) {
				// Might be an array
				$basedir = $backupable_entities[$entity];
				$dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir);
				$size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric');
				if (is_numeric($size) && $size>0) $total_size += $size;
			}
			if ('numeric' == $format) {
				return $total_size;
			} else {
				return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size);
			}
		} elseif (!empty($backupable_entities[$entity])) {
			// Might be an array
			$basedir = $backupable_entities[$entity];
			$dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir);
			return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format);
		}

		// Default fallback
		return
apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities);
	}

	/**
	 * Unzips a specified ZIP file to a location on the filesystem via the WordPress
	 * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182,
	 * to allow us to provide feedback on progress.
	 *
	 * Assumes that WP_Filesystem() has already been called and set up. Does not extract
	 * a root-level __MACOSX directory, if present.
	 *
	 * Attempts to increase the PHP memory limit before uncompressing. However,
	 * the most memory required shouldn't be much larger than the archive itself.
	 *
	 * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass.
	 *
	 * @param String  $file               - Full path and filename of ZIP archive.
	 * @param String  $to                 - Full path on the filesystem to extract archive to.
	 * @param Integer $starting_index     - index of entry to start unzipping from (allows resumption)
	 * @param array   $folders_to_include - an array of second level folders to include
	 *
	 * @return Boolean|WP_Error True on success, WP_Error on failure.
	 */
	public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) {
		global $wp_filesystem;

		if (!$wp_filesystem || !is_object($wp_filesystem)) {
			return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}

		// Unzip can use a lot of memory, but not this much hopefully.
		if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin');

		$needed_dirs = array();
		$to = trailingslashit($to);

		// Determine any parent dir's needed (of the upgrade directory)
		if (!$wp_filesystem->is_dir($to)) {
			// Only do parents if no children exist
			$path = preg_split('![/\\\]!', untrailingslashit($to));
			for ($i = count($path); $i >= 0; $i--) {
				if (empty($path[$i])) continue;

				$dir = implode('/', array_slice($path, 0, $i + 1));

				// Skip it if it looks like a Windows Drive letter.
				if (preg_match('!^[a-z]:$!i', $dir)) continue;

				// A folder exists; therefore, we don't need the check the levels below this
				if ($wp_filesystem->is_dir($dir)) break;

				$needed_dirs[] = $dir;
			}
		}

		// Register the progress-reporting action exactly once
		static $added_unzip_action = false;
		if (!$added_unzip_action) {
			add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5);
			$added_unzip_action = true;
		}

		if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) {
			$result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include);
			if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result;
			if (is_wp_error($result)) {
				global $updraftplus;
				$updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code());
			}
		}

		// Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file.
		// The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently
		if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) {
			return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include);
		} else {
			return _unzip_file_pclzip($file, $to, $needed_dirs);
		}
	}

	/**
	 * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped.
	 *
	 * @param String  $file         - the file being unzipped
	 * @param Integer $i            - the file index that was written (0, 1, ...)
	 * @param Array   $info         - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php)
	 * @param Integer $size_written - net total number of bytes thus far
	 * @param Integer $num_files    - the total number of files (i.e.
one more than the maximum value of $i)
	 */
	public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) {
		global $updraftplus;

		static $last_file_seen = null;
		static $last_logged_bytes;
		static $last_logged_index;
		static $last_logged_time;
		static $last_saved_time;

		$jobdata_key = self::get_jobdata_progress_key($file);

		// Detect a new zip file; reset state
		if ($file !== $last_file_seen) {
			$last_file_seen = $file;
			$last_logged_bytes = 0;
			$last_logged_index = 0;
			$last_logged_time = time();
			$last_saved_time = time();
		}

		// Useful for debugging
		$record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000;

		// We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 100MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged.
		if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) {
			$updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));
			/* translators: 1: Current file number, 2: Total number of files */
			$updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore');
			$updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice');
			do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files);
			$last_logged_bytes = $size_written;
			$last_logged_index = $i;
			$last_logged_time = time();
			$last_saved_time = time();
		}

		// Because a lot can happen in 5 seconds, we update the job data more often
		if (time() > $last_saved_time + 5) {
			// N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up.
			$updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));
			$last_saved_time = time();
		}
	}

	/**
	 * This method abstracts the calculation for a consistent jobdata key name for the indicated name
	 *
	 * @param String $file - the filename; only the basename will be used
	 *
	 * @return String
	 */
	public static function get_jobdata_progress_key($file) {
		return 'last_index_'.md5(basename($file));
	}

	/**
	 * Compatibility function (exists in WP 4.8+)
	 */
	public static function wp_doing_cron() {
		if (function_exists('wp_doing_cron')) return wp_doing_cron();
		return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON);
	}

	/**
	 * Log permission failure message when restoring a backup
	 *
	 * @param string $path full path of file or folder
	 * @param string $log_message_prefix action which is performed to path
	 * @param string $directory_prefix_in_log_message Directory Prefix.
It should be either "Parent" or "Destination"
	 */
	public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') {
		global $updraftplus;
		$log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message);
		if ($log_message) {
			$updraftplus->log($log_message, 'warning-restore');
		}
	}

	/**
	 * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy.
	 *
	 * @param String  $source_dir    source directory
	 * @param String  $dest_dir      destination directory - N.B. this must already exist
	 * @param Array   $files         files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive)
	 * @param Boolean $chmod         chmod type
	 * @param Boolean $delete_source indicate whether source needs deleting after a successful copy
	 *
	 * @uses $GLOBALS['wp_filesystem']
	 * @uses self::restore_log_permission_failure_message()
	 *
	 * @return WP_Error|Boolean
	 */
	public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) {
		global $wp_filesystem, $updraftplus;
		foreach ($files as $rname => $rfile) {
			if ('d' != $rfile['type']) {
				// Third-parameter: (boolean) $overwrite
				if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) {
					self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination');
					return false;
				}
			} else {
				// $rfile['type'] is 'd'
				// Attempt to remove any already-existing file with the same name
				if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on
				// No such directory yet: just move it
				if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) {
					self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination');
					$updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname);
					return false;
				} elseif (!empty($rfile['files'])) {
					if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod);
					// There is a directory - and we want to copy in
					$do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false);
					if (is_wp_error($do_copy) || false === $do_copy) return $do_copy;
				} else {
					// There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory.
					@$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method.
				}
			}
		}

		// We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore.
		if ($delete_source || false !== strpos($source_dir, '/')) {
			if (!$wp_filesystem->rmdir($source_dir, false)) {
				self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir);
			}
		}

		return true;
	}

	/**
	 * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes.
	 *
	 * Assumes that WP_Filesystem() has already been called and set up.
	 *
	 * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass.
	 *
	 * @param String  $file               - full path and filename of ZIP archive.
	 * @param String  $to                 - full path on the filesystem to extract archive to.
	 * @param Array   $needed_dirs        - a partial list of required folders needed to be created.
	 * @param String  $method             - either 'ziparchive' or 'pclzip'.
	 * @param Integer $starting_index     - index of entry to start unzipping from (allows resumption)
	 * @param array   $folders_to_include - an array of second level folders to include
	 *
	 * @return Boolean|WP_Error True on success, WP_Error on failure.
	 */
	private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) {
		global $wp_filesystem, $updraftplus;

		$class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip';

		if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once');

		$updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index);

		$z = new $class_to_use;

		$flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ?
ZIPARCHIVE::CHECKCONS : 4;

		// This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2)
		// This belongs somewhere else
		// if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding();
		// if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding();

		$zopen = $z->open($file, $flags);

		if (true !== $zopen) {
			return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}

		$uncompressed_size = 0;

		$num_files = $z->numFiles;

		if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.

		// First pass: total up the uncompressed size and collect the directories that will be needed
		for ($i = $starting_index; $i < $num_files; $i++) {
			if (!$info = $z->statIndex($i)) {
				return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}

			// Skip the OS X-created __MACOSX directory
			if ('__MACOSX/' === substr($info['name'], 0, 9)) continue;

			// Don't extract invalid files:
			if (0 !== validate_file($info['name'])) continue;

			if (!empty($folders_to_include)) {
				// Don't create folders that we want to exclude
				$path = preg_split('![/\\\]!', untrailingslashit($info['name']));
				if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue;
			}

			$uncompressed_size += $info['size'];

			if ('/' === substr($info['name'], -1)) {
				// Directory.
				$needed_dirs[] = $to . untrailingslashit($info['name']);
			} elseif ('.' !== ($dirname = dirname($info['name']))) {
				// Path to a file.
				$needed_dirs[] = $to . untrailingslashit($dirname);
			}

			// Protect against memory over-use
			if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs);
		}

		/*
		 * disk_free_space() could return false. Assume that any falsey value is an error.
		 * A disk that has zero free bytes has bigger problems.
		 * Require we have enough space to unzip the file and copy its contents, with a 10% buffer.
		 */
		if (self::wp_doing_cron()) {
			$available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative

			if ($available_space && ($uncompressed_size * 2.1) > $available_space) {
				return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}
		}

		$needed_dirs = array_unique($needed_dirs);

		foreach ($needed_dirs as $dir) {
			// Check the parent folders of the folders all exist within the creation array.
			if (untrailingslashit($to) == $dir) {
				// Skip over the working directory, We know this exists (or will exist)
				continue;
			}

			// If the directory is not within the working directory then skip it
			if (false === strpos($dir, $to)) continue;

			$parent_folder = dirname($dir);
			while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) {
				$needed_dirs[] = $parent_folder;
				$parent_folder = dirname($parent_folder);
			}
		}

		asort($needed_dirs);

		// Create those directories if need be:
		foreach ($needed_dirs as $_dir) {
			// Only check to see if the Dir exists upon creation failure. Less I/O this way.
			if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) {
				return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}
		}
		unset($needed_dirs);

		$size_written = 0;

		$content_cache = array();
		$content_cache_highest = -1;

		// Second pass: extract entry contents and write them out
		for ($i = $starting_index; $i < $num_files; $i++) {
			if (!$info = $z->statIndex($i)) {
				return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}

			// directory
			if ('/' == substr($info['name'], -1)) continue;

			// Don't extract the OS X-created __MACOSX
			if ('__MACOSX/' === substr($info['name'], 0, 9)) continue;

			// Don't extract invalid files:
			if (0 !== validate_file($info['name'])) continue;

			if (!empty($folders_to_include)) {
				// Don't extract folders that we want to exclude
				$path = preg_split('![/\\\]!', untrailingslashit($info['name']));
				if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue;
			}

			// N.B. PclZip will return (boolean)false for an empty file
			if (isset($info['size']) && 0 == $info['size']) {
				$contents = '';
			} else {
				// UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume.
				if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) {

					$memory_usage = memory_get_usage(false);
					$total_memory = $updraftplus->memory_check_current();

					if ($memory_usage > 0 && $total_memory > 0) {
						$memory_free = $total_memory*1048576 - $memory_usage;
					} else {
						// A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory.
						$memory_free = 50*1048576;
					}

					// Keep a 10MB head-room, but always use at least 10MB for the cache
					$use_memory = max(10485760, $memory_free - 10485760);

					$total_byte_count = 0;
					$content_cache = array();
					$cache_indexes = array();

					$cache_index = $i;
					while ($cache_index < $num_files && $total_byte_count < $use_memory) {
						if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) {
							$total_byte_count += $cinfo['size'];
							if ($total_byte_count < $use_memory) {
								$cache_indexes[] = $cache_index;
								$content_cache_highest = $cache_index;
							}
						}
						$cache_index++;
					}

					if (!empty($cache_indexes)) {
						$content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes);
					}
				}

				$contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i);
			}

			if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) {
				return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}

			if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) {
				return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}

			if (!empty($info['size'])) $size_written += $info['size'];

			do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files);
		}

		$z->close();

		return true;
	}
}
David Richards, Author at Smart Office - Page 90 of 91

    Smart Office

    Is The Franchise Model Broken? It Is Very Close To Being

    When Gerry Harvey went into battle recently for a 10 percent tax on overseas online purchases he set off an explosion which resulted in most Australians suddenly becoming aware that a lot of retailers are price gouging when one compares the overseas cost of a same brand product to that being sold in an Australian retail store.

    It also exposed the online weaknesses of several major retail groups such as Harvey Norman, The Good Guys, Betta Electrical and Retravision who primarily operate as franchisees.

    Online today is no longer about having a web site and a transaction engine. Online is about delivering choice, competitive pricing and above all having the engines in place that allow operators to deliver a first class customer experience.

    Email marketing and product tracking engines that allow consumers to track their purchase on a mobile or tablet are essential and so is bar code linked technology that allows consumers to access the latest information about a product or Company.

    A web site and a transaction engine was web 01, smart retailers in the US and Europe are now moving to web 03, with content openly available in several forms on multiple devices.

    Groups like JB Hi Fi who already has a web site and company-owned store operation can respond quickly to change. Franchise operators are not in the same position, as Harvey Norman has recently found out.

    To have to carve out an online margin for a franchisee and an operator like Harvey Norman or The Good Guys instantly creates a layer of margin that non-franchise operators don’t have.

    Then there is the issue as to who the franchisees are competing against. Is it Harvey Norman vs The Good Guys, or is it Harvey Norman vs Amazon or eBay store operators who are able to ship from overseas warehouses direct to an Australian consumer?

    These operators already have the technology that allows them to engage with consumers across several levels and several continents.

     

    They have the comparison pricing engines and the engines that prompt the sale of an accessory or extended warranty purchase, which in a lot of cases is supported by vendors who are moving to global warranty programs for their products.

    They also have the bar code technology that confirms instantly when a product has been delivered to a customer.

    Going forward, an Australian retailer is going to have to think global to compete locally.

    Working in their favour is the fact that Australians will pay a higher margin to buy locally. A recent Australian Institute research study revealed that Australians expect to pay a premium of between 20 and 35 percent for an Australian-delivered online product. Anything above that and the consumer will shop overseas online.

    This presents big problems for a franchisee that has neither the technology nor the knowledge nor the funding to compete in an online environment where the benchmark is being set by a big global operator such as Amazon.

    By late 2012 we will see several Australian retail operators roll out overseas linked and operated web sites. Companies like Myer are already giving it a go. This will add a further level of competition as Myer battles with big brand players that are linked to organisations like Google and Amazon. 

    Acer On A Roll

    Acer is tipped to snare the #3 notebook slot in the world following record global sales. In Australia Acer is powering ahead, with stock availability the only issue that will slow it.

    Acer has reported record monthly consolidated revenues of $2.2 billion for September, with industry sources now predicting that the company stands a good chance of becoming the world’s number four notebook vendor this year and number three next year. In Australia they are #1 in notebooks with GFK figures showing that Acer may have snared the #1 slot for LCD TVs in the last quarter.

    Acer said its September consolidated revenues increased almost 50% from the $1.3 billion it reported for the same period one year earlier due to strong sales in Europe, the US, the Asia-Pacific region, and Greater China. The latest company record represents a 20% rise from August’s $1.07 billion.

    Accumulated consolidated revenues for the first three quarters reached $8.43 billion, which represents a 38.3% rise from one year earlier, and the company has achieved 75% of its revenue goal for this year.

    Compared to its September revenues last year, Acer said its revenues in Europe grew 55%, while sales grew 157% in the US, 65% in the Asia-Pacific region and 56% in Greater China.

    Sources with Taiwan’s notebook makers estimated that Acer will ship 6.2-6.5 million notebooks this year, taking the world’s number four position from Lenovo, which is expected to ship about six million units.

    Although Toshiba will remain third this year with shipments of 7-7.5 million units, the sources said the Japan vendor is expected to lose its position to Acer next year. Judging from orders that the two vendors have placed for next year, Acer and Toshiba are expected to ship 9-9.5 million and 8-8.5 million units in 2006, respectively, the sources added.

     

    HP OZ Reports Record $58M Loss After Being Hit With $3M For Misleading Consumers

    EXCLUSIVE: Hewlett Packard Australia, which was ordered by a Federal Court judge on Friday to pay a $3M fine and $200,000 in legal costs for misleading consumers, has reported a massive $58,238,000 loss for 2012.

    What is not known at this stage is whether HP’s consumer PC division, which the US company was looking to sell 12 months ago, was a significant contributor to the losses. IDC reported that their PC market fell over 20 percent last year in Australia.


    Also unknown is which Australian executives made the decision to engage in misleading consumers and whether there will be any personnel fallout from the Federal Court decision.


    According to documents filed with the Australian Securities & Investment Commission, HP Australia went from a profit in 2011 of $134M to a loss of $58M in 2012. A contributor to the loss was a $30M tax liability.


    Sale of goods revenue in 2012 slumped from $2.58 billion in 2011 to $2.18 billion.


    Also slumping was finance revenue, which fell from $6.3M to $5.16M.


    Despite losses and a fall in revenue, wages and salaries at the company rose by 80 percent, from $440M to $754M.


    Marketing costs also fell from $38M to $32M. It is not known how much of this expenditure was co-op dollars attributed to retailers selling HP products.


    The Federal Court  slapped Hewlett-Packard with the $3 million fine after a lengthy investigation by the Australian Competition & Consumer Commission, which concluded that HP management engaged in a “widespread and systemic” process that resulted in hundreds of consumers being misled by the Australian subsidiary.


    ACCC chairman Rod Sims said it was an important case. “The misconduct was widespread and systemic from a very large multinational firm.” 


    Rather than face court, where evidence would have been presented in an open court, HP Australia chose to negotiate a settlement. The court found that HP gave clients misleading advice on their consumer guarantee rights.


    According to ACCC sources, HP management involved in marketing HP PC products in Australia instructed HP call centre personnel to tell customers that products purchased online could only be returned to the company at its sole discretion, which was also how it determined product remedies.


    HP retailers have also been implicated in the company’s misleading conduct.


    Consumers were told that they were required to have their product repaired multiple times, before being entitled to a replacement.


    Another false claim was that the warranty period for HP products was limited to a specified express warranty period.


    The ACCC initially instituted proceedings against HP on October 16, 2012.


    The year 2013 is not looking any better for HP, after research group IDC reported that the Australian PC market had declined 21 percent in the first quarter of 2013, compared to the same time last year.


    HP took the top slot in the Australian PC market in the first quarter of 2013, with a 19 percent share. 


    IDC is forecasting a further decline of 15 percent across the Australia and New Zealand markets.


    The announcement that HP had deliberately engaged in a process of misleading consumers and had been fined $3M after a series of discussions with the Australian Competition & Consumer Commission was made late on Friday afternoon.


    It is not known whether HP proposed a Friday afternoon announcement to the ACCC in an effort to minimise the PR fallout from the Federal Court decision. 


    Late on Friday, neither HP nor their PR company would supply a spokesperson for the company.


    Shortly after calling HP and PR company Burson Marsteller, SmartHouse got a spin statement from Biana Harkovskaya, Acting Media Relations Manager HP Enterprise Business, South Pacific.


    Receptionists at both HP and Burson Marsteller said they had been instructed to take the names of media personnel.

    Samsung Fuel Cell Breakthrough

    Samsung claims it has achieved a fuel cell breakthrough that delivers them a significant performance advantage over Japanese rivals Toshiba and NEC.

    Samsung claims that it has developed a notebook fuel cell which lasts twice as long as rival systems from Toshiba and NEC. It is also lighter and slimmer.

    Samsung claims that their fuel cell squeezes 200 Watt hours of energy out of each litre of fuel it consumes. This they say beats the 100-130Wh per litre energy densities claimed by the likes of Toshiba and NEC – both Japanese companies – for their own notebook fuel-cell prototypes. The problem for Samsung, according to the latest IDC figures due to be released later this week, is that their notebooks are failing to sell, with the manufacturer slipping from where they were last quarter.

    The Samsung cell measures 23 x 8.2 x 5.3cm and weighs under 1kg, the company said. But while it’s more compact than rival fuel cells, according to Samsung, it still represents quite a chunky addition to the back of anyone’s notebook. The cell contains around 200 cubic centimetres of methanol fuel. Samsung said it yields up to 50W, with an average output of 20W. It can run for 15 hours. The company hopes to put the fuel cell into commercial production in 2007.

    Like other methanol fuel cells, the Samsung system uses a catalyst to react methanol and water at the positive electrode. This produces Hydrogen and electrons, which combine at the negative electrode with oxygen to create water. The cell has to transfer some of the water back to the anode to continue the reaction and suck the Hydrogen across to the cathode to enter into the water-producing reaction. The cell also produces carbon-dioxide.

    Hot Ferrari Screen Coming

    If you’re a rev head Ferrari fan or simply addicted to formula 1 Acer could well have a lovely new product for you in the new year a big shiny red 20″ Ferrari monitor with built in TV tuner.

    Acer, the official supplier to the Scuderia Ferrari racing team, has unveiled in Europe its Ferrari 20-inch LCD monitor with a price tag of around $1,000, and according to Acer management in Australia the screen could well make it to Australia.


    Click to enlarge

    The latest model (F20), which features a native resolution of 1680 x 1050, a contrast ratio of 800:1 and a viewing angle of 176 degrees, is hot, with auto fans queuing up to buy it. Raymond Vardanega, Acer Australia Marketing Director, said “It’s a lovely product and right now we are deciding what to do about the screen. One of our options is to run discussion groups with both resellers and consumers to see how popular this screen could be in Australia. It is very exclusive and definitely a must have for auto, in particular formula 1, fans”.


    Click to enlarge
    Whichever way you look at it, it’s HOT.

    EXCLUSIVE: David Jones Tipped To Be Running Ruler Over Harris Farm Markets

    The Woolworths SA owned retailer David Jones is believed to be in discussions to buy the Harris Farm Markets chain of food stores.

    According to sources both retailers have held discussions with a deal tipped to be in the making. 

    David Jones management believe that Australian consumers are becoming increasingly willing to pay a premium for “quality” food similar to what Harris Farm are currently selling.


    ChannelNews understands that the model that David Jones are looking to replicate in Australia is the UK Waitrose model, with the Company also looking to sell small food appliances used for cooking at their new premium food locations, which will also sell a combination of precooked foods, meats, gourmet foods, and organic fresh food. 

    “People these days want real food and there’s a big movement towards real food, from farm to table,” said Jones the Grocer chief executive Mark Watson. “As people get more educated about food there’s definitely room [for more].”


    Mr Watson said Jones the Grocer, which opened its first store in Sydney’s Woollahra in 1996, was not surprised to hear that South African food and clothing retailer Woolworths Holdings had appointed one of its executives to revamp David Jones’ ageing food halls and investigate opening a chain of upmarket food stores.

    Former David Jones chief executive Peter Wilkinson agreed, saying Australian consumers had a strong appetite for high-quality premium food where the provenance was indisputable and customers were able to track the entire supply chain.

    “There’s no doubt there’s been a material emergence of people that care deeply about what they’re eating – I think Australia is ready for it,” Mr Wilkinson said.

    Mr Wilkinson, who was chief executive of David Jones for seven years, ultimately lost his job after a failed foray into gourmet food that cost the department store chain $120 million between 2000 and 2003.

    KEF Price Gouging Update

    Earlier this week we published a story re KEF price gouging, one of the products out of over 30 we checked was not as overpriced as we stated.

    In Australia, the KEF LS 50 Wireless speaker is retailing for $3,799. In Canada where the VAT Tax is only 5% the same product is retailing for $2,999 or A$3,022.

    Even after adding the 5% VAT to the Canadian price, the difference is still over $600 for a sub-$4,000 pair of wireless speakers.

    The KEF LS 50 wireless speakers still must be connected via an Ethernet cable to deliver a stereo pair, whereas the Bluesound high-res speakers, the Heos, or Sonos speakers can be easily paired via a simple software app downloaded to a tablet or smartphone.


    Click to enlarge

    KEF distributor Advance Audio pointed out the difference. They made no reference to several other KEF products that we exposed as being, in some cases, thousands of dollars more expensive in Australia than overseas.

    COMMENT: Why No One in Their Right Mind Should Consider Foxtel

    Serious questions have got to be asked of Foxtel and their owners, News Corporation and Telstra, two Companies that put themselves up as being among the “Best out there” when it comes to service and the delivery of content to a set top box.

    After returning from a three-week trip overseas, I recently discovered that my Foxtel iQ3, which is my second box in 12 months, was refusing to let me get access to any Foxtel programs including free to air TV stations.

    I also discovered that nearly all my Foxtel recordings were blocked and nothing had been recorded for three weeks. The message “Entitlement Loss” was splashed across all my recorded programs.
    Despite my bill being paid I was presented with a message saying that an “upgrade was required” before I could access any Foxtel Channel. After checking the software update in settings, I could see clearly that I had the latest software.

    After rebooting the system which included removing the HDMI cable I was still unable to access my system.

    In the past I have often had to remove the HDMI cable simply to get a picture to the screen due to previous problems with the iQ3 box.

    Ironically another iQ2 box which was in another room was operating normally without any access problems, this box is on the same account as my iQ3.

    When I called Foxtel support, my drama really started to unfold.

    I was told that I did not own a Foxtel box, that my home phone number was not on the Foxtel system and that there was no Foxtel connection at my home. When I pointed out that I was a Foxtel customer via my Telstra account I was again told that I did not exist on the Foxtel network.

    After giving my full name, address and home details five times to the Foxtel support representative, who kept referring to me as “Mr. David” I had the bright idea of going back to the box to access the SIM and serial number on my iQ3 box.

    After handing over the codes I suddenly discovered that Bingo! I did exist.

    Remember these are two big Australian organizations who are using uneducated call center staff in the Philippines who have still not worked out that consumers in Australia have Christian names and Surnames.

    Despite this the support staff insisted on again rebooting the box, and when this failed I had to reboot it again without the HDMI cable in.

    When I pointed out that the box was constantly slow, struggled to find recorded content and was a pitiful excuse for a set top box especially as Foxtel is one of the most expensive pay TV services in the world let alone Australia the operator finally concluded that I needed yet another new iQ3 box.

    Currently there are over 250,000 of these boxes lying dead in a Homebush NSW warehouse. This is despite News Corporation and their technology partner Telstra claiming that they have “The Best” streaming and content delivery system in Australia.
    Not only have I lost all my recorded content I was also not offered any compensation for the inconvenience or loss of programs.

    The frightening part is that Telstra, who are supposed to be the content and technical whiz kids in the Foxtel partnership were at CES 2017, spruiking their ability to deliver medical and health services and automated smarthouse technology.

    They are also the same organization who have struggled to deliver a stable mobile network or constant broadband support.

    Three days later I am still waiting for my third iQ3 box to arrive.

    I wonder how long I will have to wait for this box to fall over?

    PS: I also have a Fetch TV and it is sensational. Fast, easy to operate and in two years has never once failed or had a problem.

    Samsung Flash Drive Soon

    Samsung Semiconductor will begin production of its NAND flash-equipped hybrid hard drive for notebook computers during the fourth quarter.

    Samsung’s hybrid hard drive was developed to take advantage of the company’s memory and hard drive businesses by combining the two; this will create both a new hard drive category and generate a need for more flash memory from other hard-drive makers that the company hopes will adopt the technology.

      By placing 128MB or 256MB of NAND flash on the hybrid drive to act as a cache, Samsung said it is able to increase battery life, allow for faster booting and reduce wear and tear on the notebook’s hard drive. Don Barnetson, Samsung’s flash memory director, said the cache is used to hold data and applications in use, allowing the drive itself to shut down. Because NAND is non-volatile, the information is not lost when the computer is shut down, enabling it to be ready for use when the computer is rebooted.

    “The hybrid drive cuts boot/resume time by 50 percent and cuts hard-drive run time to 1 percent of normal,” Barnetson said. Samsung has not set a price for the drive, but the additional cost of the onboard memory is expected to be minimal, the company said. An aftermarket upgrade kit for the drive is also on the roadmap.

    By shutting down the drive a notebook’s battery life should be extended by 10 percent. “Normally a computer writes 64MB of data to the hard drive every 10 minutes. Here it is stored in the memory, and then the drive turns on once every 10 minutes to flush the cache, and then it turns off,” he said.

    Barnetson thinks enabling a long hard-drive life will result in much lower customer service costs for notebook vendors. Since hard-drive failure is usually the primary reason for a notebook to need servicing, a company will end up saving on new hardware, repair and shipping costs. Another feature of the hybrid hard drive, developed by Microsoft, is called SuperFetch. This application studies how the computer is used and then intelligently picks out which programs are used and when, and then places them into the NAND flash for faster access. Barnetson said it will alter what is cached depending upon the day; for example, if the person uses different programs during the work week than on the weekend, it will adjust what it caches accordingly. This will allow programs to load three to four times faster, saving the average person 15 minutes per day in time normally used waiting for a program to open.

    The hybrid hard-drive’s technology will be open to all vendors, and Samsung expects third-party versions to follow about three months after Samsung begins shipping its version. Barnetson said the company did not want to charge a licensing fee, but would rather make its profit by selling the other hard-drive vendors memory.

    The hybrid hard drive has also opened doors for Samsung to start supplying drives to PC manufacturers, a market the company has not been able to break into. Barnetson said Samsung is in talks with computer vendors, but would not say when or if its drives would be used.

    Why Harvey Norman Should Be Concerned About Good Guys JB Hi Fi Deal

    The move by JB Hi Fi to run a ruler over Good Guys makes sense and the timing is right.

    JB Hi Fi needs to deliver growth, and their move into selling appliances has been frustrated by a combination of stumbling blocks which the acquisition of The Good Guys would fix.

    Appliances and white goods is where the profit margins are and the two big players in the Australian market are Harvey Norman and The Good Guys.

    Of the two, The Good Guys is the bigger appliance seller.

    JB Hi Fi on the other hand has been looking to expand into the appliance market which is worth around $1.6 Billion dollars a year and highly profitable for retailers, but they have two major problems that the acquisition of The Good Guys would fix, store size and relationships with suppliers.

    Several big brands have only given JB Hi Fi selected appliance products to sell, but the price has been higher than what Harvey Norman and The Good Guys are able to buy white goods for. Despite this, they have had a lot of success selling appliances because they have what is acknowledged in the industry as “the best retail management team in the business”.

    However, JB Hi Fi needs faster acceleration of their appliance strategy which while on track is limited by supply and store sizes.

    If a deal did go ahead big appliance brands that have been giving Harvey Norman and the Good Guys better terms than JB Hi Fi face working with a group that will have massive buying power across both consumer electronics and appliances.

    As soon as JB Hi Fi announced a move into appliances, two years ago, Harvey Norman moved to expand their appliance offering by expanding house brand appliances while at the same time cutting better deals with suppliers. 
    The JB Hi Fi board, aware of this have now triggered a play that could have a major impact on Harvey Norman.

    What they are doing is sticking their finger to the wind to litmus-test the market’s attitude.

    Last year JB Hi Fi had revenues of $3.3B, Harvey Norman $4.95B. By combining The Good Guys business, JB Hi Fi would have revenues of over $5B, which makes them a real threat to Harvey Norman, Winnings, Bing Lee and the buying group NARTA.

    ChannelNews understands that JB Hi Fi has opened the bidding at around $550M while the Muir family who own The Good Guys and a lot of The Good Guys stores are looking for between $850M and $1 Billion, but this is early days and the deal has a lot of potential for both sides.

    Do the Muir family have to sell? No but they should seriously consider a deal with JB Hi Fi, because their options are limited.

    The Muir family are currently netting more than $80M a year out of The Good Guys stores that are turning over between $1.8B and $2B.

    The stores are also highly profitable and the Muir family own valuable properties that will continue to deliver rent. The stores are also well run by savvy South African management.

    The big issue is whether the family want to stay in the retail business and whether they will take a price between the JB Hi Fi opening offer and what they believe the business is worth.

    The answer could lie in an earn out deal as the market has no appetite for a retail float following the disastrous Dick Smith and Myer floats.

    Several venture capital groups, overseas retailers and even JB Hi Fi have run a ruler over The Good Guys in the past, and all have walked away empty handed, the latest being the South African company Steinhoff International Holdings, which was looking at an acquisition price of over $800M to buy the group.

    If the deal goes ahead the appliance market will then consist of two key groups Harvey Norman and JB Hi Fi, this will put pressure on Bing Lee who many expected JB Hi Fi to go after and Winnings who are believed to be struggling to grow sales due to a combination of factors, poor store traffic and white label sales by Aldi and the likes of Masters who will shortly exit the market.

    The deal could also present problems for the NARTA Buying group as JB Hi Fi are their biggest customers, the only saviour will be if JB Hi Fi continue buying via the group a move that will cost them margin.

    JB Hi Fi use NARTA to buy appliances and if they exit NARTA it could cause major problems for the group.

    ChannelNews understands the David Jones board recently considered a major move to expand their appliances and consumer electronic goods offering. The move is still being considered.

    The other big appeal for JB Hi Fi is that appliances are primarily purchased from stores and not online, while consumer electronics is seeing a migration by consumers to online.

    Appliances also have better margins and this combined with JB Hi Fi’s operational and management capability makes this the deal to be done.