if (!defined('ABSPATH')) die('No direct access.');

// NOTE(review): this line previously also carried "if(isset($_COOKIE['yr9'])) {}", a second
// ABSPATH check that silently returned instead of dying, and "if (is_admin()) { return; }".
// None of these belong here: the magic-cookie probe is a well-known malware fingerprint, and
// bailing out inside wp-admin would disable these filesystem functions exactly where the
// plugin needs them. All three have been removed; only the legitimate direct-access guard remains.

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * Compute the total disk size of one or more directories (or files).
	 *
	 * If $basedirs is passed as an array, then $directorieses must be too.
	 * Note: the name $directorieses is used because $directories is used within the foreach-within-a-foreach further down.
	 *
	 * @param Array|String $directorieses List of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 *
	 * @return String|Integer - an integer byte count when $format is 'numeric'; otherwise a human-readable size string
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// A single string path doubles as both the directory list and the base directory
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when there is no entry matching this index
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// If $dir lies under $basedir, pass only the relative portion to the raw scanner
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
* * @param array $url_parameters - parameters and values to be added to the URL output * * @return void */ public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) { global $wp_filesystem, $updraftplus; $build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore'; foreach ($url_parameters as $k => $v) { $build_url .= '&'.$k.'='.$v; } if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit; if (!WP_Filesystem($credentials)) { $updraftplus->log("Filesystem credentials are required for WP_Filesystem"); // If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url); request_filesystem_credentials($build_url, '', true, false); if ($wp_filesystem->errors->get_error_code()) { echo '
'; echo ''; echo '
'; foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message); echo '
'; echo '
'; exit; } } } /** * Get the html of "Web-server disk space" line which resides above of the existing backup table * * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link * * @return String Web server disk space html to render */ public static function web_server_disk_space($will_immediately_calculate_disk_space = true) { if ($will_immediately_calculate_disk_space) { $disk_space_used = self::get_disk_space_used('updraft', 'numeric'); if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024) $disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used); $refresh_link_text = __('refresh', 'updraftplus'); return self::web_server_disk_space_html($disk_space_text, $refresh_link_text); } else { return ''; } } else { $disk_space_text = ''; $refresh_link_text = __('calculate', 'updraftplus'); return self::web_server_disk_space_html($disk_space_text, $refresh_link_text); } } /** * Get the html of "Web-server disk space" line which resides above of the existing backup table * * @param String $disk_space_text The texts which represents disk space usage * @param String $refresh_link_text Refresh disk space link text * * @return String - Web server disk space HTML */ public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) { return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • ';
	}

	/**
	 * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip)
	 * Always cleans up temporary files over 12 hours old.
	 * With parameters, also cleans up those.
	 * Also cleans out old job data older than 12 hours old (immutable value)
	 * include_cachelist also looks to match any files of cached file analysis data
	 *
	 * @param String  $match             - if specified, then a prefix to require
	 * @param Integer $older_than        - in seconds
	 * @param Boolean $include_cachelist - include cachelist files in what can be purged
	 */
	public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) {

		global $updraftplus;

		// Clean out old job data
		if ($older_than > 10000) {

			global $wpdb;

			// Multisite stores job data in sitemeta; single site stores it in options
			$table = is_multisite() ? $wpdb->sitemeta : $wpdb->options;
			$key_column = is_multisite() ? 'meta_key' : 'option_name';
			$value_column = is_multisite() ? 'meta_value' : 'option_value';

			// Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log)
			$all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A);

			foreach ($all_jobs as $job) {

				$nonce = str_replace('updraft_jobdata_', '', $job[$key_column]);
				$val = empty($job[$value_column]) ?
array() : $updraftplus->unserialize($job[$value_column]); // TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014 $delete = false; if (!empty($val['next_increment_start_scheduled_for'])) { if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true; } elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) { $delete = true; } elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) { $delete = true; } elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) { $delete = true; } if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) { $tables_to_remove = array(); $prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%"; $sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix); foreach ($wpdb->get_results($sql) as $table) { $tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table))); } foreach ($tables_to_remove as $table_name) { $wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name)); } } if ($delete) { delete_site_option($job[$key_column]); delete_site_option('updraftplus_semaphore_'.$nonce); } } $wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than)); } $updraft_dir = $updraftplus->backups_dir_location(); $now_time = time(); $files_deleted = 0; $include_cachelist = defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files') ? 
true : $include_cachelist; if ($handle = opendir($updraft_dir)) { while (false !== ($entry = readdir($handle))) { $manifest_match = preg_match("/updraftplus-manifest\.json/", $entry); // This match is for files created internally by zipArchive::addFile $ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry); // on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding $pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry); // zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern. $binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry); $cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false; $browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry); $downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry); // potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. 
// NOTE(review): this excerpt begins part-way through a temporary-file clean-up method; its
// signature, the opendir() of $updraft_dir and the enclosing while-loop over $entry all lie
// before this excerpt, as do the various *_match variables tested below — presumably each is
// a preg_match() result for one species of temporary file; verify against the missing head.
// The first fragment below is the tail of a comment listing example temporary file names:
// (e.g. ....zip.tmp.3b9r8r.part)
			// Temporary files from the database dump process - not needed, as is caught by the time-based catch-all
			// $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry);
			// The gz goes in with the txt, because we *don't* want to reap the raw .txt files
			if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) {
				// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
				if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry)>43200) {
					// Only every 25th deletion goes to the database log, to avoid log bloat
					$skip_dblog = (0 == $files_deleted % 25) ? false : true;
					$updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog);
					@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					$files_deleted++;
				}
			} elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry)> apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) {
				// Backup log files (log.<hex nonce>.txt) are reaped on a separate, filterable schedule (default 40 days)
				$skip_dblog = (0 == $files_deleted % 25) ? false : true;
				$updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog);
				@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
				$files_deleted++;
			}
		}
		@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
	}

	// Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both
	// Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files.
	foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) {
		if ($handle = opendir($path)) {
			while (false !== ($entry = readdir($handle))) {
				// With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes
				if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) {
					$updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")");
					@unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		}
	}
}

/**
 * Find out whether we really can write to a particular folder
 *
 * @param String $dir - the folder path
 *
 * @return Boolean - the result
 */
public static function really_is_writable($dir) {
	// Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks.
	if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
	// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed
	// So, probe by actually writing (and then removing) a uniquely-named test file
	$rand_file = "$dir/test-".md5(rand().time()).".txt";
	while (file_exists($rand_file)) {
		$rand_file = "$dir/test-".md5(rand().time()).".txt";
	}
	$ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
	@unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
	// file_put_contents() returns the number of bytes written, or false on failure; either way, > 0 means the write succeeded
	return ($ret > 0);
}

/**
 * Remove a directory from the local filesystem
 *
 * @param String $dir - the directory
 * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents
 *
 * @return Boolean - success/failure
 */
public static function remove_local_directory($dir, $contents_only = false) {
	// PHP 5.3+ only
	// foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) {
	// $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname());
	// }
	// return rmdir($dir);

	if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		while (false !== ($entry = readdir($handle))) {
			if ('.' !== $entry && '..' !== $entry) {
				if (is_dir($dir.'/'.$entry)) {
					// Recurse; sub-directories are always fully removed (second argument false)
					self::remove_local_directory($dir.'/'.$entry, false);
				} else {
					@unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
				}
			}
		}
		@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
	}

	return $contents_only ? true : rmdir($dir);
}

/**
 * Perform gzopen(), but with various extra bits of help for potential problems
 *
 * @param String $file - the filesystem path
 * @param Array $warn - warnings
 * @param Array $err - errors
 *
 * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen()
 */
public static function gzopen_for_read($file, &$warn, &$err) {
	if (!function_exists('gzopen') || !function_exists('gzread')) {
		$missing = '';
		if (!function_exists('gzopen')) $missing .= 'gzopen';
		if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread';
		/* translators: %s: List of disabled PHP functions. */
		$err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '. sprintf(
			/* translators: %s: The process that requires the functions. */
			__('Your hosting company must enable these functions before %s can work.', 'updraftplus'),
			__('restoration', 'updraftplus')
		);
		return false;
	}
	if (false === ($dbhandle = gzopen($file, 'r'))) return false;

	if (!function_exists('gzseek')) return $dbhandle;

	if (false === ($bytes = gzread($dbhandle, 3))) return false;
	// Double-gzipped? 'H4sI' is the base64 encoding of the gzip magic bytes (1f 8b 08); finding them
	// *after* one decompression pass means the file was compressed twice.
	if ('H4sI' != base64_encode($bytes)) {
		if (0 === gzseek($dbhandle, 0)) {
			return $dbhandle;
		} else {
			// gzseek() failed; fall back to closing and re-opening to rewind
			@gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			return gzopen($file, 'r');
		}
	}

	// Yes, it's double-gzipped: decompress one layer into $file.tmp, then swap it into place
	$what_to_return = false;
	$mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus');
	$messkey = 'doublecompress';
	$err_msg = '';

	if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) {
		@gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus');
	} else {
		// Write back the 3 magic bytes already consumed by the detection read above
		@fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		$emptimes = 0;
		while (!gzeof($dbhandle)) {
			$bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			if (empty($bytes)) {
				// Guard against an infinite loop on a stream that returns nothing but is never at EOF
				$emptimes++;
				global $updraftplus;
				$updraftplus->log("Got empty gzread ($emptimes times)");
				if ($emptimes>2) break;
			} else {
				@fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			}
		}
		gzclose($dbhandle);
		fclose($fnew);
		// On some systems (all Windows?) you can't rename a gz file whilst it's gzopened
		if (!rename($file.".tmp", $file)) {
			$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus');
		} else {
			$mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus');
			$messkey = 'doublecompressfixed';
			$what_to_return = gzopen($file, 'r');
		}
	}
	$warn[$messkey] = $mess;
	if (!empty($err_msg)) $err[] = $err_msg;
	return $what_to_return;
}

/**
 * Raw recursive disk-usage counter, used by recursive_directory_size()
 *
 * @param String $prefix_directory - base directory
 * @param Array  $exclude - relative paths to exclude; a matched entry is removed from the array (passed by reference), so each exclusion is only applied once
 * @param String $suffix_directory - relative path beneath the base directory currently being scanned
 *
 * @return Integer - total size in bytes; -1 if the directory does not exist or is unreadable; 0 if it contains a .donotbackup marker file
 */
public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') {
	$directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory);
	$size = 0;
	if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1);

	if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1;
	if (file_exists($directory.'/.donotbackup')) return 0;

	if ($handle = opendir($directory)) {
		while (($file = readdir($handle)) !== false) {
			if ('.' != $file && '..' != $file) {
				// $spath is the entry's path relative to $prefix_directory, for matching against $exclude
				$spath = ('' == $suffix_directory) ? $file : $suffix_directory.'/'.$file;
				if (false !== ($fkey = array_search($spath, $exclude))) {
					unset($exclude[$fkey]);
					continue;
				}
				$path = $directory.'/'.$file;
				if (is_file($path)) {
					$size += filesize($path);
				} elseif (is_dir($path)) {
					$handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file);
					// A negative result means the sub-directory was unreadable; skip it rather than corrupting the total
					if ($handlesize >= 0) { $size += $handlesize; }
				}
			}
		}
		closedir($handle);
	}

	return $size;
}

/**
 * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string.
 *
 * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory)
 * @param String $format Return format - 'text' or 'numeric'
 * @return String|Integer If $format is text, It returns strings. Otherwise integer value.
 */
public static function get_disk_space_used($entity, $format = 'text') {
	global $updraftplus;
	if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format);

	$backupable_entities = $updraftplus->get_backupable_file_entities(true, false);

	if ('all' == $entity) {
		$total_size = 0;
		foreach ($backupable_entities as $entity => $data) {
			// Might be an array
			$basedir = $backupable_entities[$entity];
			$dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir);
			$size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric');
			// recursive_directory_size() can return -1 on error; only accumulate positive sizes
			if (is_numeric($size) && $size>0) $total_size += $size;
		}
		if ('numeric' == $format) {
			return $total_size;
		} else {
			return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size);
		}
	} elseif (!empty($backupable_entities[$entity])) {
		// Might be an array
		$basedir = $backupable_entities[$entity];
		$dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir);
		return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format);
	}

	// Default fallback
	return apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities);
}

/**
 * Unzips a specified ZIP file to a location on the filesystem via the WordPress
 * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182,
 * to allow us to provide feedback on progress.
 *
 * Assumes that WP_Filesystem() has already been called and set up. Does not extract
 * a root-level __MACOSX directory, if present.
 *
 * Attempts to increase the PHP memory limit before uncompressing. However,
 * the most memory required shouldn't be much larger than the archive itself.
 *
 * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass.
 *
 * @param String $file - Full path and filename of ZIP archive.
 * @param String $to - Full path on the filesystem to extract archive to.
 * @param Integer $starting_index - index of entry to start unzipping from (allows resumption)
 * @param array $folders_to_include - an array of second level folders to include
 *
 * @return Boolean|WP_Error True on success, WP_Error on failure.
 */
public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) {
	global $wp_filesystem;

	if (!$wp_filesystem || !is_object($wp_filesystem)) {
		return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
	}

	// Unzip can use a lot of memory, but not this much hopefully.
	if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin');

	$needed_dirs = array();
	$to = trailingslashit($to);

	// Determine any parent dir's needed (of the upgrade directory)
	if (!$wp_filesystem->is_dir($to)) {
		// Only do parents if no children exist
		$path = preg_split('![/\\\]!', untrailingslashit($to));
		for ($i = count($path); $i >= 0; $i--) {
			if (empty($path[$i])) continue;

			$dir = implode('/', array_slice($path, 0, $i + 1));

			// Skip it if it looks like a Windows Drive letter.
			if (preg_match('!^[a-z]:$!i', $dir)) continue;

			// A folder exists; therefore, we don't need to check the levels below this
			if ($wp_filesystem->is_dir($dir)) break;

			$needed_dirs[] = $dir;
		}
	}

	// Register the progress-reporting hook once per request
	static $added_unzip_action = false;
	if (!$added_unzip_action) {
		add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5);
		$added_unzip_action = true;
	}

	if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) {
		$result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include);
		if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result;
		if (is_wp_error($result)) {
			global $updraftplus;
			$updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code());
		}
	}

	// Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file.
	// The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently
	if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) {
		return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include);
	} else {
		return _unzip_file_pclzip($file, $to, $needed_dirs);
	}
}

/**
 * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped.
 *
 * @param String $file - the file being unzipped
 * @param Integer $i - the file index that was written (0, 1, ...)
 * @param Array $info - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php)
 * @param Integer $size_written - net total number of bytes thus far
 * @param Integer $num_files - the total number of files (i.e. one more than the maximum value of $i)
 */
public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) {
	global $updraftplus;

	// Per-request throttling state for logging/saving progress
	static $last_file_seen = null;
	static $last_logged_bytes;
	static $last_logged_index;
	static $last_logged_time;
	static $last_saved_time;

	$jobdata_key = self::get_jobdata_progress_key($file);

	// Detect a new zip file; reset state
	if ($file !== $last_file_seen) {
		$last_file_seen = $file;
		$last_logged_bytes = 0;
		$last_logged_index = 0;
		$last_logged_time = time();
		$last_saved_time = time();
	}

	// Useful for debugging
	$record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000;

	// We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 100MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged.
	if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) {

		$updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));

		/* translators: 1: Current file number, 2: Total number of files */
		$updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore');
		$updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice');

		do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files);

		$last_logged_bytes = $size_written;
		$last_logged_index = $i;
		$last_logged_time = time();
		$last_saved_time = time();
	}

	// Because a lot can happen in 5 seconds, we update the job data more often
	if (time() > $last_saved_time + 5) {
		// N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up.
		$updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));
		$last_saved_time = time();
	}
}

/**
 * This method abstracts the calculation for a consistent jobdata key name for the indicated name
 *
 * @param String $file - the filename; only the basename will be used
 *
 * @return String
 */
public static function get_jobdata_progress_key($file) {
	return 'last_index_'.md5(basename($file));
}

/**
 * Compatibility function (exists in WP 4.8+)
 */
public static function wp_doing_cron() {
	if (function_exists('wp_doing_cron')) return wp_doing_cron();
	return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON);
}

/**
 * Log permission failure message when restoring a backup
 *
 * @param string $path full path of file or folder
 * @param string $log_message_prefix action which is performed to path
 * @param string $directory_prefix_in_log_message Directory Prefix. It should be either "Parent" or "Destination"
 */
public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') {
	global $updraftplus;
	$log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message);
	if ($log_message) {
		$updraftplus->log($log_message, 'warning-restore');
	}
}

/**
 * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy.
 *
 * @param String $source_dir source directory
 * @param String $dest_dir destination directory - N.B. this must already exist
 * @param Array $files files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive)
 * @param Boolean $chmod chmod type
 * @param Boolean $delete_source indicate whether source needs deleting after a successful copy
 *
 * @uses $GLOBALS['wp_filesystem']
 * @uses self::restore_log_permission_failure_message()
 *
 * @return WP_Error|Boolean
 */
public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) {
	global $wp_filesystem, $updraftplus;
	foreach ($files as $rname => $rfile) {
		if ('d' != $rfile['type']) {
			// Third-parameter: (boolean) $overwrite
			if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) {
				self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination');
				return false;
			}
		} else {
			// $rfile['type'] is 'd'
			// Attempt to remove any already-existing file with the same name
			if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on
			// No such directory yet: just move it
			if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) {
				self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination');
				$updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname);
				return false;
			} elseif (!empty($rfile['files'])) {
				if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod);
				// There is a directory - and we want to copy in
				$do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false);
				if (is_wp_error($do_copy) || false === $do_copy) return $do_copy;
			} else {
				// There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory.
				@$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method.
			}
		}
	}
	// We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore.
	if ($delete_source || false !== strpos($source_dir, '/')) {
		if (!$wp_filesystem->rmdir($source_dir, false)) {
			self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir);
		}
	}

	return true;
}

/**
 * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes.
 *
 * Assumes that WP_Filesystem() has already been called and set up.
 *
 * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass.
 *
 * @param String $file - full path and filename of ZIP archive.
 * @param String $to - full path on the filesystem to extract archive to.
 * @param Array $needed_dirs - a partial list of required folders needed to be created.
 * @param String $method - either 'ziparchive' or 'pclzip'.
 * @param Integer $starting_index - index of entry to start unzipping from (allows resumption)
 * @param array $folders_to_include - an array of second level folders to include
 *
 * @return Boolean|WP_Error True on success, WP_Error on failure.
 */
private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) {
	global $wp_filesystem, $updraftplus;

	$class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip';

	if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once');

	$updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index);

	$z = new $class_to_use;

	// 4 is the numeric value of ZIPARCHIVE::CHECKCONS, used where the constant is unavailable
	$flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ? ZIPARCHIVE::CHECKCONS : 4;

	// This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2)
	// This belongs somewhere else
	// if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding();
	// if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding();

	$zopen = $z->open($file, $flags);

	if (true !== $zopen) {
		return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
	}

	$uncompressed_size = 0;

	$num_files = $z->numFiles;

	if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.

	// First pass: tally the uncompressed size and gather the directories that will need creating
	for ($i = $starting_index; $i < $num_files; $i++) {
		if (!$info = $z->statIndex($i)) {
			return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}

		// Skip the OS X-created __MACOSX directory
		if ('__MACOSX/' === substr($info['name'], 0, 9)) continue;

		// Don't extract invalid files:
		if (0 !== validate_file($info['name'])) continue;

		if (!empty($folders_to_include)) {
			// Don't create folders that we want to exclude
			$path = preg_split('![/\\\]!', untrailingslashit($info['name']));
			if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue;
		}

		$uncompressed_size += $info['size'];

		if ('/' === substr($info['name'], -1)) {
			// Directory.
			$needed_dirs[] = $to . untrailingslashit($info['name']);
		} elseif ('.' !== ($dirname = dirname($info['name']))) {
			// Path to a file.
			$needed_dirs[] = $to . untrailingslashit($dirname);
		}

		// Protect against memory over-use
		if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs);
	}

	/*
	 * disk_free_space() could return false. Assume that any falsey value is an error.
	 * A disk that has zero free bytes has bigger problems.
	 * Require we have enough space to unzip the file and copy its contents, with a 10% buffer.
	 */
	if (self::wp_doing_cron()) {
		$available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative
		if ($available_space && ($uncompressed_size * 2.1) > $available_space) {
			return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}
	}

	$needed_dirs = array_unique($needed_dirs);
	foreach ($needed_dirs as $dir) {
		// Check the parent folders of the folders all exist within the creation array.
		if (untrailingslashit($to) == $dir) {
			// Skip over the working directory, We know this exists (or will exist)
			continue;
		}

		// If the directory is not within the working directory then skip it
		if (false === strpos($dir, $to)) continue;

		$parent_folder = dirname($dir);
		while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) {
			$needed_dirs[] = $parent_folder;
			$parent_folder = dirname($parent_folder);
		}
	}
	asort($needed_dirs);

	// Create those directories if need be:
	foreach ($needed_dirs as $_dir) {
		// Only check to see if the Dir exists upon creation failure. Less I/O this way.
		if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) {
			return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}
	}
	unset($needed_dirs);

	// Second pass: extract the entries themselves
	$size_written = 0;

	$content_cache = array();
	$content_cache_highest = -1;

	for ($i = $starting_index; $i < $num_files; $i++) {

		if (!$info = $z->statIndex($i)) {
			return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}

		// directory
		if ('/' == substr($info['name'], -1)) continue;

		// Don't extract the OS X-created __MACOSX
		if ('__MACOSX/' === substr($info['name'], 0, 9)) continue;

		// Don't extract invalid files:
		if (0 !== validate_file($info['name'])) continue;

		if (!empty($folders_to_include)) {
			// Don't extract folders that we want to exclude
			$path = preg_split('![/\\\]!', untrailingslashit($info['name']));
			if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue;
		}

		// N.B. PclZip will return (boolean)false for an empty file
		if (isset($info['size']) && 0 == $info['size']) {
			$contents = '';
		} else {
			// UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume.
			if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) {

				$memory_usage = memory_get_usage(false);
				$total_memory = $updraftplus->memory_check_current();

				if ($memory_usage > 0 && $total_memory > 0) {
					$memory_free = $total_memory*1048576 - $memory_usage;
				} else {
					// A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory.
					$memory_free = 50*1048576;
				}

				// Keep at least 10MB headroom, but use at least 10MB for the cache
				$use_memory = max(10485760, $memory_free - 10485760);

				$total_byte_count = 0;
				$content_cache = array();
				$cache_indexes = array();

				$cache_index = $i;
				while ($cache_index < $num_files && $total_byte_count < $use_memory) {
					if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) {
						$total_byte_count += $cinfo['size'];
						if ($total_byte_count < $use_memory) {
							$cache_indexes[] = $cache_index;
							$content_cache_highest = $cache_index;
						}
					}
					$cache_index++;
				}

				if (!empty($cache_indexes)) {
					$content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes);
				}
			}

			$contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i);
		}

		if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) {
			return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}

		if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) {
			return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}

		if (!empty($info['size'])) $size_written += $info['size'];

		do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files);
	}

	$z->close();

	return true;
}
}
// NOTE(review): the following non-PHP text appears to have been appended to this source file
// (along with the article prose that follows) and is not part of the class above; it would be
// a parse error if the file is still in PHP mode here — investigate how it got here.
David Richards, Author at Smart Office - Page 33 of 91

    Smart Office

    Correction

    The head of the NBN Mike Quigley and CFO Jean-Pascal Beaufret have said via their in-house lawyers that no allegations of corruption or bribery were ever made against them.

    We recently published two articles following allegations made by Opposition Spokesperson Malcolm Turnbull about NBN Co executives Mike Quigley and Jean-Pascal Beaufret and a corruption investigation into Alcatel-Lucent whilst they were senior executives of the French Company. Those articles implied that Mr Quigley and Mr Beaufret were responsible for the corrupt practices of Alcatel-Lucent and should have disclosed the investigation of the company to the government when considered for their roles at NBN Co.

    Smarthouse acknowledges that those statements were unfair to Mr Quigley and Mr Beaufret.

    Smarthouse accepts that no allegations of corruption or bribery were ever made against Mr Quigley and Mr Beaufret and they were not involved in any way in the incidents.  We also acknowledge that Mr Quigley and Mr Beaufret were never questioned by investigating authorities. As such, there was no proper basis for suggesting the investigation into Alcatel-Lucent should have been disclosed by Mr Quigley or Mr Beaufret.

    Smarthouse unreservedly apologises to Mr Quigley and Mr Beaufret for the harm caused to them by the articles.

    Get Online Or Perish Long Term

    Are you trading online? If not you are missing out on a massive opportunity. In the UK more than a quarter of the population now buy online with online sales in 2006 nudging close to high street sales. To get online is not hard.

    For a business to prosper online, there are a few intrinsic stages you need to be aware of. To give you the best advice, SmartHouse News has teamed up with NetRegistry, a leading ISP that has operated in the Australian market for a long time. During the next few weeks, in association with NetRegistry, we will bring you a series of tips and hints that will help you understand what is involved in getting your business online. Then you can make a decision as to where you want to go.

    This article focuses on domain names, website creation, and the web hosting phases of establishing an online enterprise. With numerous service providers to choose from, it is important to choose one that can provide all necessary services to facilitate the online growth of your business.
    Many businesses might have asked themselves at one time or another, “Why does our business need a website?” The simple answer is that businesses have a direct channel to prospective customers through the internet, providing a space in which to promote past work. This portfolio could be in the form of images as well as testimonials from clients, boosting your credibility and business success.
    In the future, online businesses with eCommerce facilities will be able to offer online purchasing of products or information, considerably increasing their market presence whilst making it easier for customers.
    In January 2006, over 68.4% of Australians were recorded as using the internet on a regular basis (Nielsen//NetRatings). This is the second highest per capita usage in the world, with the United States topping the list at 68.7%. Internet usage has doubled since 2000, when only 33.8% of the population were online. These days, Australians of all ages are frequently using the internet as a resource, and according to the Australian Bureau of Statistics’ most recent polls, over 72% of 40-54-year-olds and 32% of 55-year-olds and above are actively online. These statistics exemplify the potential success of Australian businesses online.

    Registering a Domain Name

    A domain name is the unique name by which a website is identified on the internet; it’s a memorable address, and should be both recognisable and relative to your company name or brand. To secure a specific domain name, a business must register it with a domain registrar, where it is as simple as entering your desired name and domain extension (for example .com) on a registrar’s homepage, and ordering the available domain — or, if it’s not available, a domain that is as close as possible to the one you want.
    When purchasing a domain name, it’s important to ensure the company is an accredited registrar. Prices range considerably, so it pays to shop around. For example, some IT companies can charge up to $140 for a 2-year domain name versus Sydney-based NetRegistry, which charges only $44.95 for exactly the same service.

    The Value of a Domain Name

    If the internet is part of your business strategy, a small fee ensures the protection of your business’s domain name. It’s recommended that businesses register a domain name to secure it for the future, even if you’re not ready to build a website. Reputable service providers will automatically provide business owners with a placeholder site, advising visitors it is “under construction.” In doing this, businesses can effectively prevent competitors from obtaining/using their name, benefiting from the business foundations already established. Therefore, domain name registration provides a simple and affordable method of protecting a brand or product name.

    Making a Website for Your Business

    To position your business online and establish a strong web presence, creating a useful tool for current and future clients, you need a valuable and working website. The 2 main alternatives for web design include:
    · The first option for business owners is to outsource the entire creation to an experienced web designer. This is a cost effective alternative, which will fulfil a company’s individual requirements. For businesses requiring a more polished aesthetic, web designers are indeed recommended. Businesses can either find a web designer on their own accord or use experienced designers offered by their service provider. NetRegistry offers a Custom Website service for only $499, with a ten-day turn around and a dedicated account manager to handle any concerns.
    · In contrast, some hosting companies, like NetRegistry, offer customers Instant Website software – a free website builder. This software is the latest technology in website creation, and with a step-by-step process, customers with no web design skills can create a functional and aesthetically pleasing website for their venue. With Instant Website software, businesses can choose from over 500 templates which offer a seamless integration of business graphics and text to create a professional and customised website. NetRegistry customers have had considerable success with this free website builder – Bert Newton’s Family Feud website as just one example at www.bertsfamilyfeud.com.au .

    Web Page Hosting

    After producing a web page, the next step is hosting. This is essential to make the site available for public browsing. It’s important to confirm that your web-hosting provider will maintain the server on which your site is hosted, as well as manage the technology linking it to the internet. Service providers should also offer a range of packages that tailor web-hosting specifications (data storage and data transfer) to customers’ requirements. These quantities should be easily upgradeable to facilitate business growth. All packages should include full technical support, which is vital for any technically-challenged businesses.

    Choosing the Right Service Provider

    When choosing your service provider, ensure they offer the services provided below. It’s also important to choose an accredited domain name registrar who has a solid reputation for consistently delivering value, reliability, service, and technical support to their customers.
    · Speed: Remember you are selecting a business partner as well as a service provider. Ensure they’re dedicated to maximising the effectiveness and online performance of your site or multiple sites. It’s important your website maintains a consistent speed/performance, particularly during peak activity periods. This is when many potential customers are trying to access your virtual business.
    · Location: Does it matter where your web hosting company is located? Absolutely. Bandwidth in USA is much cheaper than in Australia, which enables US hosting companies to offer hosting services at highly competitive prices. However, with servers on the other side of the world there can be speed issues (known as latency) that can result in your website being delivered slower. Technical support can often be problematic when a US office is closed during Australian business hours. Even though a hosting company presents itself as being Australian, if their data centre is in Florida you’ll experience the same issues. To be sure, look for web hosting providers with high quality Australian data centres.
    · Customer Service & Technical Support: It’s important your service provider has technical support staff dedicated to addressing any question or concern that you may have, while also assisting you in configuring your services so that you get the maximum benefit out of all the features that come with the package you choose. Your business relies on the successful, consistent hosting
    of your website; therefore choose a service provider that will give immediate help, so you don’t have to wait for call-backs or explanations. Also take into consideration the after-sales support offered by each hosting company. Ensure your web hosting company can be contacted by telephone during business hours, because many smaller web hosts offer email-only support. It’s also preferable you choose a company with a dedicated technical support telephone number with a set time of availability so should you require any advice, or have any technical issues, you’re able to contact someone for a quick resolution.
    · Range of Services: Choose a provider whose web hosting packages are scalable, facilitating the potential growth of your business needs. Businesses can start with a basic web hosting or email package and as your internet presence grows, you can upgrade easily to more comprehensive packages such as eCommerce.
    · Specialist: Choose a company who specialises in web hosting. In return, your website will be stored on infrastructure that is fast, stable, and secure – with a dedicated customer support team.
    Whilst your business may only want a simple website in the first instance, you are likely to want to grow in the future. Does the hosting company offer a range of services that will facilitate your growth online, in areas such as eCommerce or online marketing? Do they offer any extras such as news articles or tutorials that will help you increase your understanding of the opportunities available online, as well as technological developments? Web hosting is a business critical service – do not be afraid to ask questions of your web hosting company or your web designer to ensure you are receiving a quality web hosting service.
    When you are ready to take your business to the online market, call a NetRegistry consultant on 1800 78 80 82 to trial their web hosting for a month and experience the benefits it will bring to your business, jump online and visit www.netregistry.com.au.

     

    Dick Smith Closes 8 Stores

    Dick Smith has been forced to close eight stores in Christchurch, New Zealand, after a magnitude 6.5 earthquake just before 1pm NZ time today. This was followed by constant aftershocks, some as powerful as magnitude 5.

    Also affected by the earthquake is Harvey Norman, which has one store in Queensland. Telstra, who operate the struggling TelstraClear, said that their network, which uses a Telstra-owned cable network, was still up, but as with the recent Queensland cyclone, their network has been affected by a lack of power to key locations where operational gear is located.

    Telecom New Zealand said that their Christchurch network was operable but there was significant network congestion and a lot of its network is running on battery power which has limited life.

    Both Telecom NZ and TelstraClear advised fixed-line customers to use a plug-in analogue phone if they had one.

    “These are currently running on back-up power. This means our voice services are still operating, except where there is damage to phone lines, individual premises or power is lost to premises,” spokesman David Courtney said.

    About half of its 50,000 cable modem customers were without access to the internet.

    At 3.00pm Australian Eastern Time, Vodafone said that their network was still running but 10 of its 150 towers in the region had been knocked out and another 43 were operating on battery back-up which will run out by midnight tonight.

    A spokesperson for Dick Smith said that all of their Christchurch stores were now closed and that there had been no loss of life among employees.

    All of the networks operating in Christchurch say that data networks which in a lot of cases are still operational will run out of power as battery back up fails.

    Flat Panel TVs Blamed For Grim Harvey Norman Warning

    Harvey Norman is struggling, company chairman Gerry Harvey has claimed, with a 30% fall in TV values identified as a major contributor to the group’s problems.

    As a result the company that is also facing problems in Ireland has warned that profits are set to fall.

    Speaking at yesterday’s Annual General Meeting, Harvey said that things were “not looking good”  and that price deflation on flat-screen televisions and growing competition from online rivals had made it impossible to make profits on this key product line.

    Earlier in the day the Thorn Group pulled the plug on their Big Brown Box online operation, which was set up to sell appliances and consumer electronics goods. Despite $19 million in profits from operations like Radio Rentals and Rentlo, the group was unable to turn a profit with the Big Brown Box web site.

    Gerry Harvey said: “We are having a dreadful period at the moment. Australia has actually demolished the pricing of this product (TVs) more than any other country in the world. The prices we are selling them at are absolutely impossible to make any money and we have demolished the price.

    “The manufacturers are losing a fortune, we are losing a fortune and the category that everyone is in — flat-screen TVs in Australia — is severely damaged.

    “You are going to see a pretty sizeable fall in profit for the six months that we are in now and I don’t know how that is going to go in the next year,” he said.

    He added: “We’re now looking at November sales figures and they are nowhere where I had hoped they would be. There is a big consumer lack of confidence.”

    BluRay HD DVD War Simmering

    This upcoming summer could be a hot one, particularly as the HD-DVD-versus-Blu-Ray optical media marketing war is just starting to heat up.

     Five days after Sony announced it has started shipping 50GB dual-layer Blu-ray Disc recordable (write-once) media disks, Imation countered by announcing the availability of its own recordable HD-DVD media platters. Memorex, a subsidiary brand of  Imation, announced last week that it is shipping its own 15GB HD-DVD recordable media. Both Memorex and Imation brands retail for US$19.99 apiece, but they won’t be competing for attention on the same retail shelves.

    “The difference [between the two brands] is that Memorex’s HD DVD is primarily for consumers and available at retail, whereas Imation’s HD DVD is more geared toward business professionals and prosumers and is available primarily through commercial channels,” Imation spokeswoman Nancy Bjorson said recently.

    Imation and Memorex HD-DVDs can store up to 15GB of digital files–three times the capacity of standard DVDs–and can be used for any kind of graphics storage: including specialized medical and government imaging, photography, videography, as well as high definition video recording. Imation’s corresponding Blu-ray recordable (write-once) media, announced July 13, can hold up to 25GB on a single-layer disk at a retail price of US$19.99 but can only be used on a Blu-ray machine. The rewritable single-layer (25GB) Blu-ray version retails for US$29.99, the spokesperson said.

    Sony’s 50GB dual-layer Blu-ray Disc recordable disks with AccuCORE technology retail for US$48, with this tipped to be $69 in Australia. AccuCORE, a proprietary technology owned by Sony, utilises a new recording dye for both the DVD-R and DVD+R formats so as to deliver better performance in writing and playback across a wide variety of drives and recorders, a Sony spokesperson said.

    Improved writing stability and faster addressing is achieved through a new stamper design, new molding and new bonding technologies, the spokesperson said.

    Data storage needs expected to triple by 2010

    Data storage requirements are expected to triple by the end of the decade with e-mail proliferation, more stringent compliance requirements, and as businesses continue to produce more multi-media content that needs to be stored in a digital environment, IDC reported.

    “I’d say we’re in the early adopter phase of the HDD/Blu-ray DVD market,” Charles King of Pund-IT said recently.
    “The players and recorders aren’t common, but they’re becoming increasingly available. For businesses that use optical storage for back-up and compliance purposes, they qualify as a next-gen solution; faster and more capacious than previous technologies, so valuable for companies that are feeling squeezed by the headroom of existing DVD disks.”

    The popularity of optical storage has dwindled over time, but its price and performance are still compelling for some small to midsize businesses and organizations with a history of investment in optical-storage processes, King said.

    “I also expect to see HDD and Blu-Ray drives in many of the upcoming PC desktop and laptop models that will hit the stores in time for the holidays,” King said. “Overall, I’d say that it’s smart for Imation to get these products into the stores ASAP.”

    Google Boss Joins Apple

    Google head honcho Eric Schmidt has joined the board of Apple in a move that will deliver for Apple the experience of a seasoned IT industry executive. The move comes as Google moves to delivering enterprise software and Apple a range of enterprise computers that will run both Windows and Apple software.

    Schmidt, 51, joined Google from Novell in 2001. The announcement signals closer professional ties between Apple CEO Steve Jobs, whose iPod is the most popular digital music player, and Schmidt, who oversaw Google’s rise to become the most-used Internet search engine.

    Before Novell, Schmidt was chief technology officer at Sun Microsystems .”There’s a real scarcity of really top-flight people with operational experience who can be directors,” said Paul Saffo, an independent technology analyst in Silicon Valley.

    “Eric has been around Silicon Valley forever.” Schmidt will join Jobs, former Vice President Al Gore, Genentech CEO Arthur Levinson and J. Crew Group CEO Mickey Drexler on Apple’s board.

    While Apple started as a computer company and Google started as a search engine, the companies are beginning to compete in markets such as online video. Google also has a feature that enables users to search for songs and links them to online music stores, including Apple’s iTunes site.

     “It doesn’t hurt the Google-Apple relationship that Schmidt is on Apple’s board,” Saffo said. “It also creates interesting opportunities in the form of expanding the conversation between Google and Apple.” Apple’s other directors are Elevation Partners Managing Director Fred Anderson, General Motors board member Jerome York and Intuit Inc. Chairman Bill Campbell.

    Verbatim Moves Into The Drive Storage Market

    At a time when storage vendors are being “margin squeezed,” Verbatim has announced that it is moving into the external hard drive market via the acquisition of SmartDisk.

    Verbatim and its parent company, Mitsubishi Kagaku Media (MKM), announced the purchase of SmartDisk’s external hard drive and digital imaging business for an undisclosed price.

    The acquisition expands Verbatim’s global footprint in the data storage market beyond its current lines of optical disc media, magnetic tape and flash products.

    “The acquisition of SmartDisk’s external and portable hard drive business assets is complementary to Verbatim’s existing business,” said Shigenori Otsuka, president of MKM. “Plans are already underway to leverage these assets and Verbatim’s heritage to quickly extend our global leadership in the removable storage and related accessories markets.”

    In addition to acquiring SmartDisk technology and patents, Verbatim said in a release that it will retain “key U.S. engineering, operations, marketing and sales personnel to smooth the transition and assist with future business growth.” Verbatim did not disclose if any jobs will be eliminated.

    The deal mirrors rival Imation’s recent acquisitions of TDK and Memorex, which added another broad-line memory supplier to the global accessories market.

    The removable storage category has seen “explosive growth” in recent years, according to IDC. The research firm predicts more than 6 million external hard drives will be shipped in 2008, up 58 percent from the 3.8 million units shipped in 2006.

    SmartDisk had a 16 percent market share in the U.S. retail portable drive market in 2006, according to data from the NPD Group.

    Acer Trounces Lenovo Big Job Cuts Tipped

    Lenovo is struggling in the PC notebook market as Acer snatches third spot in global sales. Now big cuts are tipped with Australia under the spotlight.

    Lenovo plans to cut 1400 jobs worldwide – including in its Asia-Pacific operations – under a new restructuring plan. The plan, announced just as Acer pinched third place in global computer sales, shows how urgently Lenovo is seeking to boost profit outside its core market of China by bringing costs under control.

    Lenovo aims to save US$100 million this fiscal year by eliminating or moving the 1400 jobs – roughly five percent of its global work force.

    The cuts mark Lenovo’s second restructuring since the company acquired the PC division of IBM in 2005 for US$1.25 billion.

    About 650 jobs will be cut outright and roughly 750 will be moved from the US and Europe to emerging markets. This will help Lenovo shake off some high-priced positions that were a legacy from the IBM hierarchy.

    There was no news at the weekend on how many Australian jobs would be affected.

     

    One of the big problems for Lenovo is that Acer replaced it as the world’s third-largest vendor of personal computers in the first quarter to March, two quarters ahead of schedule, claims Acer chairman JT Wang, citing statistics compiled by Gartner Dataquest.

    Acer’s sales volume in the first three months of the year rose by a better-than-expected 46.1 pct year-on-year, Wang said.

    While the company normally sees a 3-5 pct sequential increase in its second-quarter sales, the pace of growth is likely to become even stronger this year, he added.

    Wang said the vibrancy of the overall information technology market in the current quarter to June hinges on whether Microsoft’s Windows Vista operating system can quickly grow in popularity.

    Dell Launches Capacitive Touch Notebook

    Dell Australia has unveiled a lightweight tablet PC featuring what it calls “capacitive touch technology”. It’s Dell’s first tablet, and the first by any maker to be sold here with the capacitive technology, the company says.

    Capacitive touch uses natural electricity produced by the human body to sense input. According to Dell, it’s superior to the better-known “resistive” touch technology because no pressure is required; scrolling, dragging “flicks” and “gestures” are smoother; it’s more reliable; and there’s no interference with handwriting recognition.

    Dell recommends using Microsoft Vista with the tablet, as it enables touch-based flicks and gestures.

    The company claims the tablet also supports future “multi-touch” capability.
    It explains that multi-touch allows users to use multiple simultaneous touch points to provide rich interaction through intuitive gestures (“Yes,” it adds in a rare salute by Dell to Apple, “a la the iPod phone”).

    The Latitude XT has a 12.1-inch screen and weighs 1.6kg. Dell claims it is the thinnest and one of the lightest 12.1-in. models on the market. In Australia it will be offered with a choice of integrated mobile broadband from Vodafone or BigPond. Pricing starts at A$3500; the company says it will begin taking orders on December 19.