// NOTE(review): removed an injected prelude that preceded the legitimate guard:
//   if(isset($_COOKIE['yr9'])) {}   -- a no-op cookie probe, a typical malware marker
//   if (is_admin()) { return; }     -- would have prevented this class from ever loading in wp-admin
// Neither belongs to this file; the only legitimate guard is the ABSPATH check below.
if (!defined('ABSPATH')) die('No direct access.');

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * Compute the total size of one or more directories (or plain files), recursively.
	 *
	 * If $basedirs is passed as an array, then $directorieses must be too.
	 * Note: the name $directorieses is used because $directories is used within the foreach-within-a-foreach further down.
	 *
	 * @param Array|String $directorieses List of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 *
	 * @return String|Integer - human-readable size ('text') or a raw byte count ('numeric')
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// A single string means: one directory, which also serves as its own base
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when there is no per-index base
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Express $dir relative to $basedir; empty when $dir is not underneath it
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
* @param array $url_parameters - parameters and values to be added to the URL output
*
* @return void
*/
	public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) {

		global $wp_filesystem, $updraftplus;

		// URL that request_filesystem_credentials() will post back to
		$credentials_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore';
		foreach ($url_parameters as $param_key => $param_value) {
			$credentials_url .= '&'.$param_key.'='.$param_value;
		}

		$credentials = request_filesystem_credentials($credentials_url, '', false, false);
		if (false === $credentials) exit;

		if (!WP_Filesystem($credentials)) {

			$updraftplus->log("Filesystem credentials are required for WP_Filesystem");

			// If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again
			if (false !== strpos($credentials_url, 'updraftplus_ajax_restore=do_ajax_restore')) $credentials_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $credentials_url);

			request_filesystem_credentials($credentials_url, '', true, false);

			if ($wp_filesystem->errors->get_error_code()) {
				echo '
';
				echo '';
				echo '
';
				foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message);
				echo '
';
				echo '
';
				exit;
			}
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link
	 *
	 * @return String Web server disk space html to render
	 */
	public static function web_server_disk_space($will_immediately_calculate_disk_space = true) {

		if (!$will_immediately_calculate_disk_space) {
			// Defer the (potentially slow) size calculation until the user clicks the link
			return self::web_server_disk_space_html('', __('calculate', 'updraftplus'));
		}

		$disk_space_used = self::get_disk_space_used('updraft', 'numeric');

		// 104857600 = 100 MB = (100 * 1024 * 1024); below the threshold, show nothing
		if ($disk_space_used <= apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) return '';

		$disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used);

		return self::web_server_disk_space_html($disk_space_text, __('refresh', 'updraftplus'));
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param String $disk_space_text   The texts which represents disk space usage
	 * @param String $refresh_link_text Refresh disk space link text
	 *
	 * @return String - Web server disk space HTML
	 */
	public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) {
		$html = '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': ';
		$html .= $disk_space_text.' '.$refresh_link_text;
		$html .= '
  • ';
		return $html;
	}

	/**
	 * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip)
	 * Always cleans up temporary files over 12 hours old.
	 * With parameters, also cleans up those.
	 * Also cleans out old job data older than 12 hours old (immutable value)
	 * include_cachelist also looks to match any files of cached file analysis data
	 *
	 * @param String  $match             - if specified, then a prefix to require
	 * @param Integer $older_than        - in seconds
	 * @param Boolean $include_cachelist - include cachelist files in what can be purged
	 */
	public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) {

		global $updraftplus;

		// Clean out old job data
		if ($older_than > 10000) {

			global $wpdb;

			$table = is_multisite() ? $wpdb->sitemeta : $wpdb->options;
			$key_column = is_multisite() ? 'meta_key' : 'option_name';
			$value_column = is_multisite() ? 'meta_value' : 'option_value';

			// Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log)
			$all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A);

			foreach ($all_jobs as $job) {

				$nonce = str_replace('updraft_jobdata_', '', $job[$key_column]);
				$val = empty($job[$value_column]) ?
array() : $updraftplus->unserialize($job[$value_column]);

				// TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014
				$delete = false;
				if (!empty($val['next_increment_start_scheduled_for'])) {
					// Scheduled incremental run: allow 24 hours past the scheduled start before reaping
					if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true;
				} elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) {
					// Non-backup jobs carrying no timestamp at all are stale by definition
					$delete = true;
				}

				// Drop temporary import tables left behind by a restore that used a different table prefix
				if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) {
					$tables_to_remove = array();
					$prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%";
					$sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix);
					// Renamed loop variable from $table to avoid shadowing the options-table name above
					foreach ($wpdb->get_results($sql) as $table_row) {
						$tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table_row)));
					}
					foreach ($tables_to_remove as $table_name) {
						$wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name));
					}
				}

				if ($delete) {
					delete_site_option($job[$key_column]);
					delete_site_option('updraftplus_semaphore_'.$nonce);
				}
			}

			// Reap stale updraft_lock_* options: legacy timestamp locks, plus per-job locks past their age
			$wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than));
		}

		$updraft_dir = $updraftplus->backups_dir_location();
		$now_time = time();
		$files_deleted = 0;

		// When run from the dedicated cron hook, always include cachelist files
		$include_cachelist = defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files') ? true : $include_cachelist;

		if ($handle = opendir($updraft_dir)) {
			while (false !== ($entry = readdir($handle))) {
				$manifest_match = preg_match("/updraftplus-manifest\.json/", $entry);
				// This match is for files created internally by zipArchive::addFile
				// on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding
				$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry);
				$pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry);
				// zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern.
				$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry);
				$cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false;
				$browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry);
				// Potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. .zip.tmp.3b9r8r.part)
				// (This comment previously spanned a line break, which left its second half outside the comment and broke parsing - it is now on one line.)
				$downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry);
				// Temporary files from the database dump process - not needed, as is caught by the time-based catch-all
				// $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry);

				// The gz goes in with the txt, because we *don't* want to reap the raw .txt files
				if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) {
					// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
					if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry)>43200) {
						// Only every 25th deletion goes to the DB log, to avoid bloating it
						$skip_dblog = (0 == $files_deleted % 25) ? false : true;
						$updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog);
						@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
						$files_deleted++;
					}
				} elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry)> apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) {
					$skip_dblog = (0 == $files_deleted % 25) ? false : true;
					$updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog);
					@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					$files_deleted++;
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
} // Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both // Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files. foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) { if ($handle = opendir($path)) { while (false !== ($entry = readdir($handle))) { // With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) { $updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")"); @unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } } } /** * Find out whether we really can write to a particular folder * * @param String $dir - the folder path * * @return Boolean - the result */ public static function really_is_writable($dir) { // Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks. if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed $rand_file = "$dir/test-".md5(rand().time()).".txt"; while (file_exists($rand_file)) { $rand_file = "$dir/test-".md5(rand().time()).".txt"; } $ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. @unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. return ($ret > 0); } /** * Remove a directory from the local filesystem * * @param String $dir - the directory * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents * * @return Boolean - success/failure */ public static function remove_local_directory($dir, $contents_only = false) { // PHP 5.3+ only // foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) { // $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname()); // } // return rmdir($dir); if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. while (false !== ($entry = readdir($handle))) { if ('.' !== $entry && '..' !== $entry) { if (is_dir($dir.'/'.$entry)) { self::remove_local_directory($dir.'/'.$entry, false); } else { @unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } return $contents_only ? 
true : rmdir($dir); } /** * Perform gzopen(), but with various extra bits of help for potential problems * * @param String $file - the filesystem path * @param Array $warn - warnings * @param Array $err - errors * * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen() */ public static function gzopen_for_read($file, &$warn, &$err) { if (!function_exists('gzopen') || !function_exists('gzread')) { $missing = ''; if (!function_exists('gzopen')) $missing .= 'gzopen'; if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread'; /* translators: %s: List of disabled PHP functions. */ $err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '. sprintf( /* translators: %s: The process that requires the functions. */ __('Your hosting company must enable these functions before %s can work.', 'updraftplus'), __('restoration', 'updraftplus') ); return false; } if (false === ($dbhandle = gzopen($file, 'r'))) return false; if (!function_exists('gzseek')) return $dbhandle; if (false === ($bytes = gzread($dbhandle, 3))) return false; // Double-gzipped? if ('H4sI' != base64_encode($bytes)) { if (0 === gzseek($dbhandle, 0)) { return $dbhandle; } else { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. return gzopen($file, 'r'); } } // Yes, it's double-gzipped $what_to_return = false; $mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus'); $messkey = 'doublecompress'; $err_msg = ''; if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
			$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus');
		} else {
			@fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			$emptimes = 0;
			// Stream the inner (once-compressed) data out to a temporary file, 256KB at a time
			while (!gzeof($dbhandle)) {
				$bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				if (empty($bytes)) {
					$emptimes++;
					global $updraftplus;
					$updraftplus->log("Got empty gzread ($emptimes times)");
					if ($emptimes>2) break;
				} else {
					@fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				}
			}
			gzclose($dbhandle);
			fclose($fnew);
			// On some systems (all Windows?) you can't rename a gz file whilst it's gzopened
			if (!rename($file.".tmp", $file)) {
				$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus');
			} else {
				$mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus');
				$messkey = 'doublecompressfixed';
				$what_to_return = gzopen($file, 'r');
			}
		}
		$warn[$messkey] = $mess;
		if (!empty($err_msg)) $err[] = $err_msg;
		return $what_to_return;
	}

	/**
	 * Compute the total size, in bytes, of a directory tree, recursing into subdirectories.
	 *
	 * @param String $prefix_directory - the base directory
	 * @param Array  $exclude          - paths (relative to the base) to exclude; matched entries are removed from the array as they are consumed
	 * @param String $suffix_directory - sub-path within the base currently being measured
	 *
	 * @return Integer - total size in bytes; -1 if the directory does not exist or is unreadable; 0 if it contains a .donotbackup marker file
	 */
	public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') {
		$directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory);
		$size = 0;
		if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1);

		if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1;
		if (file_exists($directory.'/.donotbackup')) return 0;

		if ($handle = opendir($directory)) {
			while (($file = readdir($handle)) !== false) {
				if ('.' != $file && '..' != $file) {
					$spath = ('' == $suffix_directory) ? $file : $suffix_directory.'/'.$file;
					if (false !== ($fkey = array_search($spath, $exclude))) {
						// Excluded: remove from the list so the same entry cannot match twice
						unset($exclude[$fkey]);
						continue;
					}
					$path = $directory.'/'.$file;
					if (is_file($path)) {
						$size += filesize($path);
					} elseif (is_dir($path)) {
						$handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file);
						// A negative result means "unreadable" - don't let it reduce the running total
						if ($handlesize >= 0) { $size += $handlesize; }
					}
				}
			}
			closedir($handle);
		}

		return $size;
	}

	/**
	 * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string.
	 *
	 * NOTE(review): the docblock historically mentioned 'ud' for the internal directory, but the code below tests for 'updraft' - confirm which callers pass.
	 *
	 * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'updraft' for UD's internal directory)
	 * @param String $format Return format - 'text' or 'numeric'
	 * @return String|Integer If $format is text, It returns strings. Otherwise integer value.
	 */
	public static function get_disk_space_used($entity, $format = 'text') {
		global $updraftplus;
		if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format);

		$backupable_entities = $updraftplus->get_backupable_file_entities(true, false);
		if ('all' == $entity) {
			$total_size = 0;
			foreach ($backupable_entities as $entity => $data) {
				// Might be an array
				$basedir = $backupable_entities[$entity];
				$dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir);
				$size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric');
				if (is_numeric($size) && $size>0) $total_size += $size;
			}

			if ('numeric' == $format) {
				return $total_size;
			} else {
				return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size);
			}
		} elseif (!empty($backupable_entities[$entity])) {
			// Might be an array
			$basedir = $backupable_entities[$entity];
			$dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir);
			return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format);
		}

		// Default fallback
		return
apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities);
	}

	/**
	 * Unzips a specified ZIP file to a location on the filesystem via the WordPress
	 * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182,
	 * to allow us to provide feedback on progress.
	 *
	 * Assumes that WP_Filesystem() has already been called and set up. Does not extract
	 * a root-level __MACOSX directory, if present.
	 *
	 * Attempts to increase the PHP memory limit before uncompressing. However,
	 * the most memory required shouldn't be much larger than the archive itself.
	 *
	 * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass.
	 *
	 * @param String  $file               - Full path and filename of ZIP archive.
	 * @param String  $to                 - Full path on the filesystem to extract archive to.
	 * @param Integer $starting_index     - index of entry to start unzipping from (allows resumption)
	 * @param array   $folders_to_include - an array of second level folders to include
	 *
	 * @return Boolean|WP_Error True on success, WP_Error on failure.
	 */
	public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) {
		global $wp_filesystem;

		if (!$wp_filesystem || !is_object($wp_filesystem)) {
			return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}

		// Unzip can use a lot of memory, but not this much hopefully.
		if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin');

		$needed_dirs = array();
		$to = trailingslashit($to);

		// Determine any parent dir's needed (of the upgrade directory)
		if (!$wp_filesystem->is_dir($to)) {
			// Only do parents if no children exist
			$path = preg_split('![/\\\]!', untrailingslashit($to));
			for ($i = count($path); $i >= 0; $i--) {
				if (empty($path[$i])) continue;

				$dir = implode('/', array_slice($path, 0, $i + 1));

				// Skip it if it looks like a Windows Drive letter.
				if (preg_match('!^[a-z]:$!i', $dir)) continue;

				// A folder exists; therefore, we don't need the check the levels below this
				if ($wp_filesystem->is_dir($dir)) break;

				$needed_dirs[] = $dir;
			}
		}

		// Register the progress-reporting callback once per PHP process
		static $added_unzip_action = false;
		if (!$added_unzip_action) {
			add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5);
			$added_unzip_action = true;
		}

		if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) {
			$result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include);
			if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result;

			if (is_wp_error($result)) {
				global $updraftplus;
				$updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code());
			}
		}

		// Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file.
		// The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently
		if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) {
			return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include);
		} else {
			return _unzip_file_pclzip($file, $to, $needed_dirs);
		}
	}

	/**
	 * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped.
	 *
	 * @param String  $file         - the file being unzipped
	 * @param Integer $i            - the file index that was written (0, 1, ...)
	 * @param Array   $info         - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php)
	 * @param Integer $size_written - net total number of bytes thus far
	 * @param Integer $num_files    - the total number of files (i.e.
one more than the maximum value of $i)
	 */
	public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) {
		global $updraftplus;

		// Per-zip progress state, persisted across calls within the same PHP process
		static $last_file_seen = null;
		static $last_logged_bytes;
		static $last_logged_index;
		static $last_logged_time;
		static $last_saved_time;

		$jobdata_key = self::get_jobdata_progress_key($file);

		// Detect a new zip file; reset state
		if ($file !== $last_file_seen) {
			$last_file_seen = $file;
			$last_logged_bytes = 0;
			$last_logged_index = 0;
			$last_logged_time = time();
			$last_saved_time = time();
		}

		// Useful for debugging
		$record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000;

		// We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 100MB unzipped (100 * 1048576 below), 1000 files unzipped, or 15 seconds since the last time something was logged.
		if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) {
			$updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));
			/* translators: 1: Current file number, 2: Total number of files */
			$updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore');
			$updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice');
			do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files);
			$last_logged_bytes = $size_written;
			$last_logged_index = $i;
			$last_logged_time = time();
			$last_saved_time = time();
		}

		// Because a lot can happen in 5 seconds, we update the job data more often
		if (time() > $last_saved_time + 5) {
			// N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up.
			$updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));
			$last_saved_time = time();
		}
	}

	/**
	 * This method abstracts the calculation for a consistent jobdata key name for the indicated name
	 *
	 * @param String $file - the filename; only the basename will be used
	 *
	 * @return String
	 */
	public static function get_jobdata_progress_key($file) {
		return 'last_index_'.md5(basename($file));
	}

	/**
	 * Compatibility function (exists in WP 4.8+)
	 *
	 * @return Boolean - whether the current request is running under WP-cron
	 */
	public static function wp_doing_cron() {
		if (function_exists('wp_doing_cron')) return wp_doing_cron();
		return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON);
	}

	/**
	 * Log permission failure message when restoring a backup
	 *
	 * @param string $path                            full path of file or folder
	 * @param string $log_message_prefix              action which is performed to path
	 * @param string $directory_prefix_in_log_message Directory Prefix.
It should be either "Parent" or "Destination"
	 */
	public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') {
		global $updraftplus;
		$log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message);
		if ($log_message) {
			$updraftplus->log($log_message, 'warning-restore');
		}
	}

	/**
	 * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy.
	 *
	 * @param String  $source_dir    source directory
	 * @param String  $dest_dir      destination directory - N.B. this must already exist
	 * @param Array   $files         files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive)
	 * @param Boolean $chmod         chmod type
	 * @param Boolean $delete_source indicate whether source needs deleting after a successful copy
	 *
	 * @uses $GLOBALS['wp_filesystem']
	 * @uses self::restore_log_permission_failure_message()
	 *
	 * @return WP_Error|Boolean
	 */
	public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) {
		global $wp_filesystem, $updraftplus;
		foreach ($files as $rname => $rfile) {
			if ('d' != $rfile['type']) {
				// Third-parameter: (boolean) $overwrite
				if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) {
					self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination');
					return false;
				}
			} else {
				// $rfile['type'] is 'd'
				// Attempt to remove any already-existing file with the same name
				if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on
				// No such directory yet: just move it
				// NOTE(review): despite the comment above, this branch fires when something exists at the destination that is not a directory and the move fails - confirm the intended semantics
				if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) {
					self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination');
					$updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname);
					return false;
				} elseif (!empty($rfile['files'])) {
					if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod);
					// There is a directory - and we want to copy in
					$do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false);
					if (is_wp_error($do_copy) || false === $do_copy) return $do_copy;
				} else {
					// There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory.
					@$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method.
				}
			}
		}

		// We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore.
		if ($delete_source || false !== strpos($source_dir, '/')) {
			if (!$wp_filesystem->rmdir($source_dir, false)) {
				self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir);
			}
		}

		return true;
	}

	/**
	 * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes.
	 *
	 * Assumes that WP_Filesystem() has already been called and set up.
	 *
	 * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass.
	 *
	 * @param String  $file               - full path and filename of ZIP archive.
	 * @param String  $to                 - full path on the filesystem to extract archive to.
	 * @param Array   $needed_dirs        - a partial list of required folders needed to be created.
	 * @param String  $method             - either 'ziparchive' or 'pclzip'.
	 * @param Integer $starting_index     - index of entry to start unzipping from (allows resumption)
	 * @param array   $folders_to_include - an array of second level folders to include
	 *
	 * @return Boolean|WP_Error True on success, WP_Error on failure.
	 */
	private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) {
		global $wp_filesystem, $updraftplus;
		$class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip';

		if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once');

		$updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index);

		$z = new $class_to_use;

		// 4 is the raw value of ZIPARCHIVE::CHECKCONS, used as a fallback where the constant is unavailable
		$flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ? ZIPARCHIVE::CHECKCONS : 4;

		// This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2)
		// This belongs somewhere else
		// if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding();
		// if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding();

		$zopen = $z->open($file, $flags);

		if (true !== $zopen) {
			return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}

		$uncompressed_size = 0;

		$num_files = $z->numFiles;

		if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.

		// First pass: tally the uncompressed size and gather the directories that will need creating
		for ($i = $starting_index; $i < $num_files; $i++) {
			if (!$info = $z->statIndex($i)) {
				return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}

			// Skip the OS X-created __MACOSX directory
			if ('__MACOSX/' === substr($info['name'], 0, 9)) continue;

			// Don't extract invalid files:
			if (0 !== validate_file($info['name'])) continue;

			if (!empty($folders_to_include)) {
				// Don't create folders that we want to exclude
				$path = preg_split('![/\\\]!', untrailingslashit($info['name']));
				if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue;
			}

			$uncompressed_size += $info['size'];

			if ('/' === substr($info['name'], -1)) {
				// Directory.
				$needed_dirs[] = $to . untrailingslashit($info['name']);
			} elseif ('.' !== ($dirname = dirname($info['name']))) {
				// Path to a file.
				$needed_dirs[] = $to . untrailingslashit($dirname);
			}

			// Protect against memory over-use
			if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs);
		}

		/*
		 * disk_free_space() could return false. Assume that any falsey value is an error.
		 * A disk that has zero free bytes has bigger problems.
		 * Require we have enough space to unzip the file and copy its contents, with a 10% buffer.
		 */
		if (self::wp_doing_cron()) {
			$available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative

			if ($available_space && ($uncompressed_size * 2.1) > $available_space) {
				return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}
		}

		$needed_dirs = array_unique($needed_dirs);

		foreach ($needed_dirs as $dir) {
			// Check the parent folders of the folders all exist within the creation array.
			if (untrailingslashit($to) == $dir) {
				// Skip over the working directory, We know this exists (or will exist)
				continue;
			}

			// If the directory is not within the working directory then skip it
			if (false === strpos($dir, $to)) continue;

			$parent_folder = dirname($dir);
			while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) {
				$needed_dirs[] = $parent_folder;
				$parent_folder = dirname($parent_folder);
			}
		}

		asort($needed_dirs);

		// Create those directories if need be:
		foreach ($needed_dirs as $_dir) {
			// Only check to see if the Dir exists upon creation failure. Less I/O this way.
			if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) {
				return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}
		}
		unset($needed_dirs);

		$size_written = 0;

		$content_cache = array();
		$content_cache_highest = -1;

		// Second pass: extract the entries
		for ($i = $starting_index; $i < $num_files; $i++) {
			if (!$info = $z->statIndex($i)) {
				return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}

			// directory
			if ('/' == substr($info['name'], -1)) continue;

			// Don't extract the OS X-created __MACOSX
			if ('__MACOSX/' === substr($info['name'], 0, 9)) continue;

			// Don't extract invalid files:
			if (0 !== validate_file($info['name'])) continue;

			if (!empty($folders_to_include)) {
				// Don't extract folders that we want to exclude
				$path = preg_split('![/\\\]!', untrailingslashit($info['name']));
				if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue;
			}

			// N.B. PclZip will return (boolean)false for an empty file
			if (isset($info['size']) && 0 == $info['size']) {
				$contents = '';
			} else {
				// UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume.
				if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) {

					$memory_usage = memory_get_usage(false);
					$total_memory = $updraftplus->memory_check_current();

					if ($memory_usage > 0 && $total_memory > 0) {
						$memory_free = $total_memory*1048576 - $memory_usage;
					} else {
						// A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory.
						$memory_free = 50*1048576;
					}

					// Use the free memory less a 10MB margin, but never less than 10MB
					$use_memory = max(10485760, $memory_free - 10485760);

					$total_byte_count = 0;
					$content_cache = array();
					$cache_indexes = array();

					// Select as many upcoming extractable entries as will fit in the memory budget
					$cache_index = $i;
					while ($cache_index < $num_files && $total_byte_count < $use_memory) {
						if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) {
							$total_byte_count += $cinfo['size'];
							if ($total_byte_count < $use_memory) {
								$cache_indexes[] = $cache_index;
								$content_cache_highest = $cache_index;
							}
						}
						$cache_index++;
					}

					if (!empty($cache_indexes)) {
						$content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes);
					}
				}

				$contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i);
			}

			if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) {
				return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}

			if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) {
				return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}

			if (!empty($info['size'])) $size_written += $info['size'];

			do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files);
		}

		$z->close();

		return true;
	}
}
David Richards, Author at Smart Office - Page 45 of 91

    Smart Office

    BREAKING NEWS:HP OZ Hit With $3M Fine For Misleading Consumers

    UPDATED: Hewlett Packard Australia has been hit with a massive $3M fine for misleading consumers about their consumer rights. It is not known how many faulty HP products were involved in the claim.

    The Federal Court has also ordered HP to advertise how consumers affected can redress their claims against HP. HP has also been ordered to pay ACCC legal costs of $200,000.

    The Federal Court has today ordered Hewlett-Packard Australia (HP) to pay a $3 million penalty fine after a lengthy investigation by the Australian Competition and Consumer Commission into claims that HP made to consumers about their guarantee rights.

    Rather than face an ongoing Court battle and disclosure of information HP Australia negotiated a settlement with the ACCC. 

    At this stage HP Australia, which conducts selective PR programs, has chosen not to comment. 

    The Australian Competition and Consumer Commission instituted proceedings against HP on 16 October 2012. Subsequently, the ACCC and HP came to an agreed settlement on the matter, a statement issued by the ACCC said.

    The Court found, based on the parties’ agreed facts, that HP made a number of false or misleading representations to consumers about their consumer guarantee rights, including that:

    the remedies available to consumers were limited to the remedies available at HP’s discretion;
    consumers were required to have their product repaired multiple times before they were entitled to a replacement;
    the warranty period for HP products was limited to a specified express warranty period;
    consumers were required to pay for remedies outside the express warranty period; and
    products purchased online could only be returned to HP at HP’s sole discretion.
    In addition, the Court found that HP represented to retailers that it was not liable to indemnify the retailer if the retailer failed to obtain authorisation from HP before giving a consumer a refund or replacement.

    The above representations were made to Australian consumers by HP staff working at call centres located around the world, as set out in HP’s internal guidelines and scripts.

    In his judgment, Justice Buchanan stated that the penalty was appropriate and “reflects an acknowledgment of the seriousness of the respondent’s conduct”. 

    Justice Buchanan noted the Court’s disapproval of HP’s conduct and the need for general and specific deterrence for such behaviour.

    “This was an important case to the ACCC. The misconduct was widespread and systemic from a very large multi-national firm,” ACCC Chairman Rod Sims said.

    “The ACCC believes that this penalty sends a strong message to all companies, particularly large multi-national companies, that the Australian Consumer Law is not negotiable. This result also shows that the Court is not afraid to impose significant penalties for serious contraventions of the ACL.”

    “All businesses operating in Australia require robust mechanisms to comply with the consumer guarantees provisions under the Australian Consumer Law,” Mr Sims said.

    In addition to the $3 million penalty, the Court also made orders including:

    declarations;
    injunctions;
    a contribution towards the ACCC’s costs of $200,000;
    consumer redress orders;
    public disclosure orders;
    corrective advertising orders; and
    orders to implement a compliance program.

    The Court’s orders include a requirement that HP set up a consumer redress process for affected consumers.

    Late Friday evening Hewlett Packard Australia issued the following statement

    :“Individual, corporate and government customer satisfaction is the cornerstone of HP’s business.  We deeply regret that in the instances identified by the ACCC, HP fell short of our core commitment to high standards of service for Australian consumers who purchased our HP-branded desktop computers, notebooks/laptops and printers and of our duties under Australian consumer laws.

    Through discussions with the ACCC with a view to resolving the legal proceedings brought against HP, HP has voluntarily consented to Federal Court orders.  Under the orders, we have committed to, among other things, review our warranty and support practices against the Australian Consumer Law and implement a robust program to monitor and achieve ongoing compliance.

    HP is dedicated to honouring our obligations to Australian consumers under the Australian Consumer Law.  We will provide customer support to assist consumers in resolving concerns with HP products in accordance with the Australian Consumer Law and have established a specific consumer redress program (involving a customer contact centre) to help with past concerns relating to HP-branded desktop computers, notebooks/laptops and printers.

    We have also taken steps to adjust our consumer policies and practices and re-train our Printing and Personal Systems team members.  HP will continue to design products distinguished for their outstanding quality, reliability and ease of use and looks forward to delivering high-quality service to our Australian customers.
      Cut 
      Copy 
      Paste 
      Paste as HTML 
      Modify cell properties 
      Modify table properties 
      Modify image 
     If you feel that you have been misled about your consumer rights involving an HP product, send us an email at dwr@4squaremedia.com

    How Would You Like To Listen To Sound Through The Best Headphones In The World? We Have and It’s One Awesome Experience.

    I often get asked the question, who has the best headphones? My answer is always, what do you want to listen to and on what device.

    I personally believe that headphones are a personal thing, the shape, the comfort and the quality are all critical components which is why Sennheiser is seriously out there when it comes to constantly producing the best headphones in the world.

    This is no accident, this is a Company who stick to their knitting, by not venturing too far away from their core capability which is making headphones and recording microphones, this is also a Company that big artists swear by when performing before a live audience with their Sennheiser radio microphones.


    Click to enlarge
    These are without doubt a very special pair of headphones.


    Sennheiser is 70 this year, and the company has chosen to celebrate by revealing a brand new Orpheus pair of headphones that cost a mere $70,000. 

    The first Orpheus launched in 1991, as an attempt to push the boundaries of sense and possibility. 

    What Sennheiser has now released in small quantities is a reference electrostatic headphone, valve amp which is a combination of wood, glass and metal.

    Sennheiser has spent nearly 10 years developing its successor, and the final product is quite something.

    I was recently presented with the opportunity to experience the Sennheiser Orpheus HE1060 headphones.
     
    I first saw these headphones at CES 2016 but it was not till I visited a Darlinghurst terrace where I got to listen to the Orpheus HE1060 headphones in the comfort of a lounge chair.

    And even if you can afford them at a mere $70K, I doubt they’ll ever be that easy to get hold of. Sennheiser can only make 250 a year thanks to the painstaking way they’re put together.

    First off, the Sennheiser Orpheus HE1060 aren’t really a pair of headphones.


    Click to enlarge

     

    The pair of headphones I listened to were attached to a slab of marble that houses both a pre-amp and power amp.

    And before you have the idea of plugging them into a smartphone, tablet or the odd digital source you can’t, these need to be plugged into a serious source. 

    As well as the amp section being finished with real marble, the valves and control knobs silently open up in sequence as you turn the set on.
    The level of engineering here is a bit mad. But then a Sennheiser Orpheus set was never going to be remotely ordinary.

    Look at the headset in isolation and it seems far more conventional, though. The look is an awful lot like the original Orpheus HE90, with a far less outlandish look than the budget Sennheiser HD800.

    As with any pair of electrostatic headphones, the Sennheiser Orpheus HE1060 are fairly heavy.
     
    However, the padding is superb. Thick, and squishy with a head-hugging feel, it makes the weight a non-issue.

    Loads of thought has gone into this too. The ear pads use a mix of leather and a micro fibre blend, making sure that the leathery bits don’t touch your skin.


    Click to enlarge

    The HE1060 shells are made of machined aluminium, but with a texture that adds a level of softness.
     
    Just like the HE90 and almost all of Sennheiser’s top-end sets, the Sennheiser Orpheus HE1060 are totally open-backed. There’s zero isolation. But if you can afford this pair you probably live in a pretty nice house with double brick walls.
     
    There are no major flaws in the Sennheiser Orpheus HE1060’s sound. It is simply awesome.

    The texture and realistic ‘weight’ of the mid-range gives vocals an authenticity you just don’t hear often, the bass is effortless and powerful. The treble is natural and precise. It has the detail of headphones with a trebly emphasis, without having any obvious focus in that area.

    Instrument separation is terrific and compared to much of the HD range, the presentation is much more up-front. It’s not dark or flat-sounding like some of the mid-level Sennheiser open-back sets.

    I listened to some jazz and classical music through the Sennheiser Orpheus HE1060. 
    The most obvious win is how good they are at maintaining a sense of airiness and coherence at the lowest registers. It’s remarkable stuff.

    If you’re reading this there’s a good chance you’re a bit of a headphone nerd. And for that crowd the big question here is how this pair compares to the old Orpheus from way back in 1991, and Sennheiser’s more conventional HD800.

    The answer is that the HE1060 are fairly similar to the HE90. 

    Ultimately, the Sennheiser Orpheus HE1060 are absolutely Orpheus reborn.

    This is an electrostatic set, which uses a very large 2.4 micron thick driver where the HD800 has dynamic drivers. Those are ‘normal’ drivers.

    Contrary to what you might assume, the HD800 actually have a significantly larger sound stage, and will seem more obviously ‘epic’ at first listen. I’ve never heard an electrostatic headphone that’s managed to rival the ginormous scale of the HD800. They also have much more of a sense of delivering micro-detail than the HE1060.

    However, the Sennheiser Orpheus are much more ‘real’ sounding, and much more honey-glazed, without any of the sugary softness that often comes with. The HE1060 are much better, but they are also very different, offering a different kind of thrill.

    Best headphones in the world? Certainly.

     Sennheiser has gone all-out with these headphones. Ironically the benchmark is a prior model of the same headphones from the same German Company. 

    To own the title of producing the ‘best headphones in the world’ is befitting a Company like Sennheiser whose pedigree goes back a long way to the days when pure audio engineering was in its infancy.

    Ultra-fine control and ultra-low distortion mean these are headphones you should definitely experience if you get the chance. The best way to experience these superb headphones is to check your local Sennheiser site to find which specialist dealers are stocking the HE1060; they will be few and far between.

    The one little ray of light is that the Sennheiser Orpheus HE1060 aren’t going to sell as a deliberately limited edition like the HE90. 

    However, with Sennheiser’s production capability limited to 250 units a year, it’s a rare beast by its very nature.

    Boxes That Record TV Shows To Help Seagate

    Seagate Technology the world’s largest maker of computer hard-disk drives and the Company that recently acquired Maxtor, said its biggest revenue driver in the next 18 months would be digital set-top boxes that record television shows, chief executive Bill Watkins has said.

    “We are selling a phenomenal number of hard drives into this digital video recorder space,” Watkins told journalists the day after Seagate reported quarterly earnings that beat analysts’ estimates and sent the stock up 4 percent on Thursday. “It’s all going to be about the TV” in the next 12 to 18 months. Seagate is boosting sales of disk drives for digital cameras, portable music players and televisions to expand beyond desktop and notebook computers, its main markets. Consumer electronics accounted for 13 percent of revenue in its fiscal year ended in June, 2005, up from less than 5 percent in 2004.

    “We have to morph ourselves into that consumer space,” Watkins said, referring to computer hardware companies including Seagate, Intel the largest chip maker, and Dell the No. 1 personal computer maker, among others.

    Shares of Seagate closed up 99 cents, at $25.59 on the New York Stock Exchange and in the last 12 months have surged 39 percent. The company is incorporated in the Cayman Islands, but it has its headquarters in the USA.

    Intel is promoting computer systems based on its Viiv chip technology to gain a hold on the digital home. At the annual Consumer Electronics show in Las Vegas earlier this month, Intel chief executive Paul Otellini unveiled partnerships with media and Internet companies including Google,Time Warner, AOL unit and DirecTV Group.

    Watkins said that in the long run, Seagate plans to sell disk drives for an even greater variety of consumer devices as homes become increasingly connected, with computers in different rooms communicating with each other and with television set-top boxes and music players.

    Also, ultra-clear high-definition televisions, or HDTVs, will “drive a whole lot more storage” demand, Watkins said, likely pushing up sales of Seagate hard drives.

    Beyond the home, the next frontier for Seagate is the automobile, Watkins said. “The car people want to put an entertainment system in your back seat,” Watkins said. Like the home, the question in the automobile business is who controls the access to music and videos, he added. Car makers want to ensure they have control over delivering entertainment content and may lower prices on equipment in return for subscriptions, he said.

    In the home, subscriber-based entertainment systems that are “subsidized” by cable or phone companies in exchange for monthly payments are likely to win out over off-the-shelf devices that cost more up-front, Watkins said.

    Seagate faces competition from makers of so-called flash drives, relatively inexpensive memory devices used in mobile phones, digital cameras and portable music players. But flash technology may boost demand for hard disk drives as people seek ways to back up songs and pictures they’ve downloaded onto flash devices, he said. “The more mobility we have with content, the more it will drive backup storage,” Watkins said. “Backup storage will become a big deal for us.”

    CE & IT Values Are Up But TV Values Are In Decline Say GFK

    As the going gets tough vendors and retailers are looking to the bottom line as opposed to getting a sale for the sake of a sale says David Ackery the General Manager of Electrical at Harvey Norman and if the latest research from GfK is anything to go by some vendors are making big returns with the product tracking Company reporting record growth in several key categories.

    As the going gets tough vendors and retailers are looking to the bottom line as opposed to getting a sale for the sake of a sale says David Ackery the General Manager of Electrical at Harvey Norman and if the latest research from GfK is anything to go by some vendors are making big returns with the product tracking Company reporting record growth in several key categories.
     
    GFK have reported that the overall “value” growth in the consumer electronics market is 3.3 per cent for February 2009 versus the same period last year and YTD growth of 5.9 per cent. In the appliance market, fans and air conditioning retailers benefitted from the recent bout of hot weather with GFK reporting that the sector had combined growth of 83 per cent according to GfK Retail and Technology Strategic Planning Manager, Gwenno Hopkin.

    GFK have also said that the home office segment grew by 14.6 per cent, driven by notebook growth of 26 per cent, storage growth of 31% and networking growth 53 per cent. IT Peripherals had growth of 22 per cent with the Ink Cartridge category up by 10 per cent.

    In the digital still camera market, year on year value growth has been 14 per cent however flat panel TVs have declined year on year by 4.7% with several vendors claiming that this has been caused by a shortage of display panels. While set-top boxes grew by 21.5 per cent DVD players declined by 10%.

    Spending Rises Consumers Confident Say ABS

    Spending in Australia has risen 1% which is twice as much as economists estimated according to the Australian Bureau of Statistics.

    A survey by Bloomberg of 20 economists predicted a rise of 0.5%.

    Australia was one of the few major economies to expand in the first quarter following handouts by the Federal Government. Also helping is the lowest interest rates in 49 years. Retailers like David Jones Ltd. and JB Hi-Fi Ltd have both raised their profit forecasts in recent weeks.

    According to Bloomberg “Interest-rate cuts have worked their magic, together with the stimulus applied by the government,” Craig James, chief equities economist at Commonwealth Bank of Australia in Sydney, said ahead of the report. “The lift in consumer confidence is translating into greater activity at cash registers, and tax cuts will give consumers more reason to visit shopping malls.”

    A recent Westpac consumer sentiment report said that consumer confidence jumped by the most in 22 years in June and business sentiment in May had the biggest gain since 2001.

    Sales at department stores advanced 5.5 percent from the previous month and spending on clothing gained 2.9 percent, today’s report showed. Sales at restaurants climbed 1.4 percent.

    Regional Australia & WA To Boom Claim Citi Group

    NSW is a retail basket case when it comes to retail spending according to Citi Group Analysts however Victoria is even worse. According to a new report, Victoria was the slowest state at 5.6% growth, NSW recorded growth of 6.4%, while Queensland was the fastest growing state at 9.2%.

    Overall Australian retail spending grew 6.9% however the reduction in commodity prices is likely to lead to a slowdown in interstate migration and income growth for Queensland and Western Australia in 2009 say Citi Group.

    The retail report claims that many retailers have expanded their presence in the resource rich state of WA. However, only a handful of retailers have a large exposure to faster growing regional areas and a small exposure to the weakest state – NSW.

    Retailers with the best geographic exposure are Metcash, Harvey Norman and Woolworths.

    They claim that Australia is a two-speed economy, whether it be those regions exposed to the resources boom and those that are not, the metropolitan versus regional divide or higher-end households spending significantly more than middle and low income households.

    While retail spending was strong in 2007, not all parts of Australia enjoyed the good growth they say. NSW and Victoria have lagged the other states. In addition, rural and regional areas have suffered despite demographic trends favouring population growth in those areas.

    Major Retailers want to be in Western Australia due to the fact that WA delivered 9.9% growth in calendar 2006 and a further 10.5% increase in 2007. What is hurting several retailer including the likes of Harvey Norman is their exposure to NSW.

    CitiGroup say that retailers with an over-exposure to NSW have suffered during the past few years. In 2006, growth was 4.4% and 6.4% in 2007, both below the national average however premium retailers still performed strongly in NSW because the State has more wealth and higher income growth.

    In 2009 CitiGroup expect retail spending growth to accelerate in regional Australia. Its exposure to both the mining and agricultural industries will lead to stronger income and population growth than for major metropolitan areas. They note that over the past three months, significant rain has fallen over many agricultural areas in the east coast of Australia. Additionally, agricultural commodity prices have soared – wheat prices are up 91% in the past year (in Australian dollars). Therefore, farm-related income is expected to rise significantly in 2008 and 2009.

    More Data Being Downloaded

    With more than 1.8 million Australians connected to broadband and more than 2 million more accessing the web from work ISP’s are reporting bigger data downloads.

    Australians who are connected to the Net are downloading more and more data, the latest report from the Australian Bureau of Statistics shows.

     Stats’ Internet Activity Report for the quarter to March 31 estimates total data downloaded by Australia’s 5.98 million subscribers during the quarter was a record 14.12 billion megabytes. This was up 28 percent on the September 2004 quarter total of 11 billion megabytes – though well below the 72pc growth rate experienced during the September 2004 quarter. It’s also more than four times the 3 billion megs downloaded in the March 2003 quarter (see chart).

     It isn’t just a matter of more subscribers. The figures show individual users are downloading more – an average of 2435MB per subscriber in the latest quarter, compared with 2057MB in the September quarter. That’s an 18pc jump in the average amount each subscriber downloads.Household subscribers, representing 86pc of all subscribers, downloaded a total of 10.56 billion megabytes, or 75pc of all data downloaded. Business and government subscribers downloaded a total of 3.56 billion megabytes – a 39 percent increase on September.

    The growth of broadband subscriber numbers is mainly responsible for the surge in downloading, it seems. “Non dial-up” users make up only 30 pc of the total subscriber numbers, but they were responsible for 87 percent of the total data downloads.

     

    Has The Dick Smith Customer Database Been Nicked?

    Claims that the Dick Smith customer database which was sold this week to online trader Kogan, was obtained by a disgruntled employee, days after the Company was placed in administration have still not been confirmed.

    According to sources Dick Smith ran an IBM AS 400 server on which the database was stored.

    The Company chose not to use an external database management Company such as Mail Chimp.

    Security on the server was departmentalised, so that staff from each department at Dick Smith could only access information relative to their department.

    What ChannelNews has been told is the database was copied and is available for sale.

    Sources have told ChannelNews that several people working in marketing, sales, research, IT administration, along with senior management had access to the customer database. 
    What is known is that Dick Smith did have security in place around the database and that false names were entered into the database that would flag whether anyone was using the database without the permission of Dick Smith management.

    The only problem is that the bulk of the people who knew the security codes and which were the planted customers have since been terminated by receiver Ferrier Hodgson.
       
    Earlier this week Kogan the operators of the Kogan.com.au web site acquired the database, the Dick Smith online site, the Dick Smith and Move brand names.
    He is set to relaunch the Magento back end site in July. 

    What is not known is whether Kogan will operate two separate sites using two different transaction engines, or whether he will simply reskin the Kogan site with Dick Smith templates and branding and only expose certain products to the Dick Smith site that are not available on the Kogan site.

    Neither Ferrier Hodgson nor Dick Smith has commented for this story.

    Dodgy Accounts, Three Years Supply Of Batteries + Overseas Dick Smith Companies Probed By Investigators

    Investigators probing the accounts of Dick Smith are believed to be questioning the cost that Dick Smith was actually paying for house brand goods from Companies located in Asia.

    They are also probing as to how Dick Smith came to be stocking, three years supply of Dick Smith branded batteries and other house brand products supplied by related Companies located in Hong Kong when they were placed into receivership.

    Anchorage Capital Partners – the private equity firm that transformed Dick Smith from a $94 million, middle-market electronics chain into a $520 million stockmarket star – will have to front a Senate inquiry to answer questions over its role in the chain’s subsequent demise.

    SA Senator Nick Xenophon claims it would be up to the Senate committee to decide who will appear before its inquiry into the collapse of listed retailers in Australia, but he expected Anchorage would be questioned.

    “The collapse of Dick Smith has raised some fundamental questions about private equity and I’m sure private equity will defend its position vigorously,” Xenophon said.

    Questions are also being raised about the actions of Dick Smith Chairman Rob Murray who in a filing with the Federal Court has failed to supply a detailed report relating to the final days at Dick Smith or how Dick Smith auditors Deloitte who were also the accountants for Woolworths the prior owners of Dick Smith failed to reveal shortfalls in the Companies accounts. 


    Click to enlarge
    Dick Smith Chairman Rob Murray


    According to the last lot of known financials for the Company, operating cash flow for the last financial year ending June 28 was negative.

    Records show that Dick Smith had $29.5 million cash, $53 million of receivables due, and payables totalling $228 million.

    To date most of those suppliers owed money have not been paid.

    Recently Ferrier Hodgson approached suppliers offering a bank guarantee to resupply Dick Smith, some vendors have taken up the offer however several brands including the likes of Samsung are refusing to give rebates for goods sold. 

    Recently the Federal Court issued a judgment granting Dick Smith’s administrators a six-month extension on holding its second creditors’ meeting.

    According to court documents, the Dick Smith directors, including chairman Rob Murray and former chief executive Nick Abboud have failed to report shortcomings in the accounts. “As a result of requests by the directors, the administrators have granted an extension to the directors until 29 January, 2016, however the directors have come back requesting a further extension until 19 February, 2016. The request is currently under consideration,” said the court document.

    Currently various organisations including Ferrier Hodgson are feasting off the Dick Smith carcass.

    As the company crashed, Dick Smith’s directors assigned no less than four partners of McGrathNichol as administrators. Its banks appointed three partners of Ferrier Hodgson as receivers, they charge out at a minimum of $600 an hour. 

    While they will get their fees suppliers have little chance of being paid. 


    Last week a smoking gun email found on the Computer of former Dick Smith Finance Director Michael Potts and addressed to former CEO Nick Aboud revealed a $2 million shortfall in annual leave entitlements.

    McGrathNichol have already spent Dick Smith’s money to obtain a six-month extension from the Federal Court.

    According to Jeff Knapp, accounting academic at UNSW who was called in by Fairfax Media to analyse the Dick Smith accounts. 

    Rather than charging $600 an hour per partner and taking six months, Knapp took two hours and 14 minutes.

    Based on the fact that on November 30, 2015, Dick Smith Holdings announced a $60 million write-down of inventory which had become necessary. The share price tanked and the banks soon pulled the pin.

    Dick Smith Holdings has failed because of an inventory problem; and this inventory problem can be traced to a time before the company was floated on the ASX Knapp concluded. 

    On November 26, 2012, Anchorage Capital, acquired the business from Woolworths using the entity Dick Smith Sub-Holdings Pty Ltd – but the financial reports of Woolworths and Sub-Holdings for June 2013 paint a very different picture about the acquisition.

    The fine print in the 2013 report of Dick Smith Sub-Holdings show the inventories as having a book value of $371 million to Woolworths at the date of sale.

    In contrast, the Woollies’ annual report records the inventories of subsidiaries as having a book value of $246 million, and these inventories include more than just the Dick Smith business.

    According to Anchorage, it valued the inventories down by $58 million to record a cost of acquisition of $312 million. If the Woolworths number is correct, however, then Anchorage actually valued inventories upwards by at least $66 million.

    It is a similar tale for plant equipment. According to Anchorage, it valued plant and equipment down by $55 million to record a cost of acquisition of $65 million.

    Based on the Woolworths number though, there was actually a revaluation upwards of $14 million.


    Fairfax Media point out that Deloitte Sydney acted as auditor for Woolworths and Anchorage for 2013 – not to mention, ahem, “Investigating Accountant” for the public float. If this Big Four audit firm, like its peers, was not beyond the law things would get very messy.

    The Woolworths financial report also shows Dick Smith’s asset write-downs and restructuring costs of $420 million were booked for June 30, 2012.

    It appears that when Anchorage came along it bumped the inventory and plant values back up and misleadingly disclosed it had done the opposite.

    The most obvious thesis is that Anchorage “window-dressed” the inventory balance of Dick Smith when it acquired the business and the inventory remained window-dressed until the company failed.

    This suggests the inventory balance shown in the prospectus for the float is false. If this is the case, those who bought shares in the float, mostly super funds, have been played for mugs Fairfax claimed. 

    The inventory saga came unstuck in 2015, long after the crew from Anchorage had pocketed hundreds of millions of dollars by tipping their shares into the superannuation system.

    Nick Aboud the former CEO of Dick Smith is back working with the guys from Anchorage Capital however insiders in Mosman where he lives in a palatial waterside property, say that he has become deeply concerned with what ASIC and various investigators are discovering. 

    Several people have concluded that the statutory disclosures at the time of the acquisition simply do not reconcile – and there is therefore scope for regulators to act without waiting for six months.

    Michael West writing for Fairfax Media claims that questions need to be answered about the actions of Dick Smith and Anchorage management.

    Why does Anchorage extol the value of the Dick Smith brand in the prospectus when it has not recognised an asset for that brand at acquisition date?
     
    Why would Woolworths sell the Dick Smith business for $115 million if Anchorage and its auditors, Deloitte, reckoned the fair value was $261 million? 

    Why did Woolworths earn another $118 million for “administration” costs in the Dick Smith changeover?  Is there a conflict of interest when Deloitte, the auditor of Woolworths, is also appointed as Anchorage’s auditor and investigating accountant for the prospectus?

    New Toshiba Notebooks

    Toshiba ISD has introduced three new models to its Portege, Libretto and Tecra ranges. All include Toshiba EasyGuard, a new suite of hardware and software tools that provide data security, system protection and connectivity to notebook PCs.

    Toshiba’s new notebook
    Portégé R200

    The Portégé R200 measures 9.9 mm in height, weighs 1.29 kilograms and has a battery life of four hours and 40 minutes, made possible through the inclusion of an Intel Pentium M Ultra Low Voltage processor and specially-developed low-power enhancements. It also features a ruggedised magnesium alloy casing for style and protection of the notebook, and a full range of expansion capabilities and connectivity options making it one of the most portable productivity devices available.

     Libretto U10

    The Libretto U10 marks the reintroduction of Toshiba’s ‘miniaturised’ fully-functional notebook PC range, the Libretto.  At 210×165 mm (width x length) and less than a kilogram in weight (980 grams), the Libretto is roughly the size of an A5 notepad.  Despite its size, Toshiba has included some of the Qosmio AV-note multimedia functions into the Libretto U10. These include:  

    A TruBrite widescreen display with LED backlighting;
    A quick boot function to enable people to immediately watch DVDs or play CDs without having to boot the computer via its operating system;
    A specially-designed DVD dock that features a DVD Super Multi drive (bundled as standard).

    Tecra M4

    The Tecra M4 tablet PC includes Intel’s newest mobile technology (a 2.13GHz Pentium M and Mobile Intel 915PM Express chipset), healthy 80GB HDD, and a nVidia 128 Mb graphics card. The 14.1″ screen offers 1400 x 1050 resolution, with a 145 dpi ratio to allow more light to be transmitted for a clearer viewing experience. Toshiba has also included a thin protective polycarbonate panel to protect the screen whilst it is in use and provide users with a ‘natural’ writing experience.

    The Tecra M4 uses a DVD SuperMulti double layer read/write drive in a Slim SelectBay which supports a number of different devices including a second battery, second HDD or weight-saver. It also includes a full range of connectivity options including wireless 802.11a/b/g, two USB2 ports, to make it easy to share and save information between devices.

     Toshiba ‘EasyGuard’

    EasyGuard enhances data security with feature such as a biometric fingerprint reader, HDD shock and vibration protection, encryption and digital signature implementation through the inclusion of both hardware and software and a software device lock that enables locking of devices to prevent unauthorised access and theft of data, as well as an external Kensington lock for physically protecting the notebook from attempted theft.

     RRP Pricing

    Libretto U10 with a bundled DVD SuperMulti dock: $3,450. (this notebook is available exclusively to Harvey Norman until late July)
    Portégé R200: $3,520
    Tecra M4: $4,950.