if (!defined('ABSPATH')) die('No direct access.');

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * If $basedirs is passed as an array, then $directorieses must be too
	 * Note: Reason $directorieses is being used because $directories is used within the foreach-within-a-foreach further down
	 *
	 * @param Array|String $directorieses List of of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 *
	 * @return String|Integer - a human-readable size string ('text') or a raw byte count ('numeric')
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// A single string means: one directory, which is also its own base
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when there is no matching index
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Derive the path of $dir relative to $basedir (empty string if $dir is not under $basedir)
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
	 *
	 * @param array $url_parameters - parameters and values to be added to the URL output
	 *
	 * @return void
	 */
	public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) {

		global $wp_filesystem, $updraftplus;

		$build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore';

		foreach ($url_parameters as $k => $v) {
			$build_url .= '&'.$k.'='.$v;
		}

		// request_filesystem_credentials() returns false when it has output a credentials form instead of proceeding
		if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit;

		if (!WP_Filesystem($credentials)) {

			$updraftplus->log("Filesystem credentials are required for WP_Filesystem");

			// If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again
			if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url);

			request_filesystem_credentials($build_url, '', true, false);

			if ($wp_filesystem->errors->get_error_code()) {
				// NOTE(review): the echoed markup appears to have been stripped to bare newlines in this copy of the file - confirm against the upstream original
				echo "\n";
				echo '';
				echo "\n";
				foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message);
				echo "\n";
				echo "\n";
				exit;
			}
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link
	 *
	 * @return String Web server disk space html to render
	 */
	public static function web_server_disk_space($will_immediately_calculate_disk_space = true) {
		if ($will_immediately_calculate_disk_space) {
			$disk_space_used = self::get_disk_space_used('updraft', 'numeric');
			// Only display the usage line when it exceeds the (filterable) threshold
			if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024)
				$disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used);
				$refresh_link_text = __('refresh', 'updraftplus');
				return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
			} else {
				return '';
			}
		} else {
			// Deferred mode: render no figure, and a 'calculate' link instead of 'refresh'
			$disk_space_text = '';
			$refresh_link_text = __('calculate', 'updraftplus');
			return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param String $disk_space_text   The texts which represents disk space usage
	 * @param String $refresh_link_text Refresh disk space link text
	 *
	 * @return String - Web server disk space HTML
	 */
	public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) {
		return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • ';
	}

	/**
	 * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip)
	 * Always cleans up temporary files over 12 hours old.
	 * With parameters, also cleans up those.
	 * Also cleans out old job data older than 12 hours old (immutable value)
	 * include_cachelist also looks to match any files of cached file analysis data
	 *
	 * @param String  $match             - if specified, then a prefix to require
	 * @param Integer $older_than        - in seconds
	 * @param Boolean $include_cachelist - include cachelist files in what can be purged
	 */
	public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) {
		global $updraftplus;
		// Clean out old job data
		if ($older_than > 10000) {

			global $wpdb;
			// Job data lives in sitemeta on multisite, in options otherwise
			$table = is_multisite() ? $wpdb->sitemeta : $wpdb->options;
			$key_column = is_multisite() ? 'meta_key' : 'option_name';
			$value_column = is_multisite() ? 'meta_value' : 'option_value';

			// Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log)
			$all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A);

			foreach ($all_jobs as $job) {
				$nonce = str_replace('updraft_jobdata_', '', $job[$key_column]);
				$val = empty($job[$value_column]) ? array() : $updraftplus->unserialize($job[$value_column]);
				// TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014
				$delete = false;
				// A job is considered stale once its recorded (scheduled or actual) time is more than a day (86400s) in the past
				if (!empty($val['next_increment_start_scheduled_for'])) {
					if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true;
				} elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) {
					// Non-backup jobs with no recorded time at all are also removed
					$delete = true;
				}
				// Drop any temporary import tables left over from a restore performed under a different table prefix
				if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) {
					$tables_to_remove = array();
					$prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%";
					$sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix);
					foreach ($wpdb->get_results($sql) as $table) {
						$tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table)));
					}
					foreach ($tables_to_remove as $table_name) {
						$wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name));
					}
				}
				if ($delete) {
					delete_site_option($job[$key_column]);
					delete_site_option('updraftplus_semaphore_'.$nonce);
				}
			}

			// Purge expired updraft_lock_* options: old-format locks with values before a fixed cut-off date, and per-job locks older than $older_than
			$wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than));
		}

		$updraft_dir = $updraftplus->backups_dir_location();

		$now_time = time();
		$files_deleted = 0;

		// When run from the scheduled cron cleanup event, always include cachelist files
		$include_cachelist = defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files') ? true : $include_cachelist;

		if ($handle = opendir($updraft_dir)) {
			while (false !== ($entry = readdir($handle))) {
				$manifest_match = preg_match("/updraftplus-manifest\.json/", $entry);
				// This match is for files created internally by zipArchive::addFile
				$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry); // on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding
				$pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry);
				// zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern.
				$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry);
				$cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false;
				$browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry);
				$downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry); // potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. .zip.tmp.3b9r8r.part)
				// Temporary files from the database dump process - not needed, as is caught by the time-based catch-all
				// $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry);
				// The gz goes in with the txt, because we *don't* want to reap the raw .txt files
				if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) {
					// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
					if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry)>43200) {
						// Only every 25th deletion goes into the DB log, to avoid bloating it
						$skip_dblog = (0 == $files_deleted % 25) ? false : true;
						$updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog);
						@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
						$files_deleted++;
					}
				} elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry)> apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) {
					// Plain log files are reaped after a (filterable) 40 days
					$skip_dblog = (0 == $files_deleted % 25) ? false : true;
					$updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog);
					@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					$files_deleted++;
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		}

		// Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both
		// Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files.
		foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) {
			if ($handle = opendir($path)) {
				while (false !== ($entry = readdir($handle))) {
					// With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes
					if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) {
						$updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")");
						@unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					}
				}
				@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			}
		}
	}

	/**
	 * Find out whether we really can write to a particular folder
	 *
	 * @param String $dir - the folder path
	 *
	 * @return Boolean - the result
	 */
	public static function really_is_writable($dir) {
		// Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks.
		if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed $rand_file = "$dir/test-".md5(rand().time()).".txt"; while (file_exists($rand_file)) { $rand_file = "$dir/test-".md5(rand().time()).".txt"; } $ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. @unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. return ($ret > 0); } /** * Remove a directory from the local filesystem * * @param String $dir - the directory * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents * * @return Boolean - success/failure */ public static function remove_local_directory($dir, $contents_only = false) { // PHP 5.3+ only // foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) { // $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname()); // } // return rmdir($dir); if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. while (false !== ($entry = readdir($handle))) { if ('.' !== $entry && '..' !== $entry) { if (is_dir($dir.'/'.$entry)) { self::remove_local_directory($dir.'/'.$entry, false); } else { @unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } return $contents_only ? 
true : rmdir($dir); } /** * Perform gzopen(), but with various extra bits of help for potential problems * * @param String $file - the filesystem path * @param Array $warn - warnings * @param Array $err - errors * * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen() */ public static function gzopen_for_read($file, &$warn, &$err) { if (!function_exists('gzopen') || !function_exists('gzread')) { $missing = ''; if (!function_exists('gzopen')) $missing .= 'gzopen'; if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread'; /* translators: %s: List of disabled PHP functions. */ $err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '. sprintf( /* translators: %s: The process that requires the functions. */ __('Your hosting company must enable these functions before %s can work.', 'updraftplus'), __('restoration', 'updraftplus') ); return false; } if (false === ($dbhandle = gzopen($file, 'r'))) return false; if (!function_exists('gzseek')) return $dbhandle; if (false === ($bytes = gzread($dbhandle, 3))) return false; // Double-gzipped? if ('H4sI' != base64_encode($bytes)) { if (0 === gzseek($dbhandle, 0)) { return $dbhandle; } else { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. return gzopen($file, 'r'); } } // Yes, it's double-gzipped $what_to_return = false; $mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus'); $messkey = 'doublecompress'; $err_msg = ''; if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. $emptimes = 0; while (!gzeof($dbhandle)) { $bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. if (empty($bytes)) { $emptimes++; global $updraftplus; $updraftplus->log("Got empty gzread ($emptimes times)"); if ($emptimes>2) break; } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } } gzclose($dbhandle); fclose($fnew); // On some systems (all Windows?) you can't rename a gz file whilst it's gzopened if (!rename($file.".tmp", $file)) { $err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { $mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus'); $messkey = 'doublecompressfixed'; $what_to_return = gzopen($file, 'r'); } } $warn[$messkey] = $mess; if (!empty($err_msg)) $err[] = $err_msg; return $what_to_return; } public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') { $directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory); $size = 0; if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1); if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1; if (file_exists($directory.'/.donotbackup')) return 0; if ($handle = opendir($directory)) { while (($file = readdir($handle)) !== false) { if ('.' != $file && '..' != $file) { $spath = ('' == $suffix_directory) ? 
$file : $suffix_directory.'/'.$file; if (false !== ($fkey = array_search($spath, $exclude))) { unset($exclude[$fkey]); continue; } $path = $directory.'/'.$file; if (is_file($path)) { $size += filesize($path); } elseif (is_dir($path)) { $handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file); if ($handlesize >= 0) { $size += $handlesize; } } } } closedir($handle); } return $size; } /** * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string. * * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory) * @param String $format Return format - 'text' or 'numeric' * @return String|Integer If $format is text, It returns strings. Otherwise integer value. */ public static function get_disk_space_used($entity, $format = 'text') { global $updraftplus; if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format); $backupable_entities = $updraftplus->get_backupable_file_entities(true, false); if ('all' == $entity) { $total_size = 0; foreach ($backupable_entities as $entity => $data) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); $size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric'); if (is_numeric($size) && $size>0) $total_size += $size; } if ('numeric' == $format) { return $total_size; } else { return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size); } } elseif (!empty($backupable_entities[$entity])) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format); } // Default fallback return 
apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities); } /** * Unzips a specified ZIP file to a location on the filesystem via the WordPress * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182, * to allow us to provide feedback on progress. * * Assumes that WP_Filesystem() has already been called and set up. Does not extract * a root-level __MACOSX directory, if present. * * Attempts to increase the PHP memory limit before uncompressing. However, * the most memory required shouldn't be much larger than the archive itself. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - Full path and filename of ZIP archive. * @param String $to - Full path on the filesystem to extract archive to. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem; if (!$wp_filesystem || !is_object($wp_filesystem)) { return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Unzip can use a lot of memory, but not this much hopefully. if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin'); $needed_dirs = array(); $to = trailingslashit($to); // Determine any parent dir's needed (of the upgrade directory) if (!$wp_filesystem->is_dir($to)) { // Only do parents if no children exist $path = preg_split('![/\\\]!', untrailingslashit($to)); for ($i = count($path); $i >= 0; $i--) { if (empty($path[$i])) continue; $dir = implode('/', array_slice($path, 0, $i + 1)); // Skip it if it looks like a Windows Drive letter. 
if (preg_match('!^[a-z]:$!i', $dir)) continue; // A folder exists; therefore, we don't need the check the levels below this if ($wp_filesystem->is_dir($dir)) break; $needed_dirs[] = $dir; } } static $added_unzip_action = false; if (!$added_unzip_action) { add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5); $added_unzip_action = true; } if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) { $result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include); if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result; if (is_wp_error($result)) { global $updraftplus; $updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code()); } } // Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file. // The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) { return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include); } else { return _unzip_file_pclzip($file, $to, $needed_dirs); } } /** * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped. * * @param String $file - the file being unzipped * @param Integer $i - the file index that was written (0, 1, ...) * @param Array $info - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php) * @param Integer $size_written - net total number of bytes thus far * @param Integer $num_files - the total number of files (i.e. 
one more than the the maximum value of $i) */ public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) { global $updraftplus; static $last_file_seen = null; static $last_logged_bytes; static $last_logged_index; static $last_logged_time; static $last_saved_time; $jobdata_key = self::get_jobdata_progress_key($file); // Detect a new zip file; reset state if ($file !== $last_file_seen) { $last_file_seen = $file; $last_logged_bytes = 0; $last_logged_index = 0; $last_logged_time = time(); $last_saved_time = time(); } // Useful for debugging $record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000; // We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 50MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged. if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) { $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); /* translators: 1: Current file number, 2: Total number of files */ $updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' 
(%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore'); $updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice'); do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files); $last_logged_bytes = $size_written; $last_logged_index = $i; $last_logged_time = time(); $last_saved_time = time(); } // Because a lot can happen in 5 seconds, we update the job data more often if (time() > $last_saved_time + 5) { // N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up. $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); $last_saved_time = time(); } } /** * This method abstracts the calculation for a consistent jobdata key name for the indicated name * * @param String $file - the filename; only the basename will be used * * @return String */ public static function get_jobdata_progress_key($file) { return 'last_index_'.md5(basename($file)); } /** * Compatibility function (exists in WP 4.8+) */ public static function wp_doing_cron() { if (function_exists('wp_doing_cron')) return wp_doing_cron(); return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON); } /** * Log permission failure message when restoring a backup * * @param string $path full path of file or folder * @param string $log_message_prefix action which is performed to path * @param string $directory_prefix_in_log_message Directory Prefix. 
It should be either "Parent" or "Destination" */ public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') { global $updraftplus; $log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message); if ($log_message) { $updraftplus->log($log_message, 'warning-restore'); } } /** * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy. * * @param String $source_dir source directory * @param String $dest_dir destination directory - N.B. this must already exist * @param Array $files files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive) * @param Boolean $chmod chmod type * @param Boolean $delete_source indicate whether source needs deleting after a successful copy * * @uses $GLOBALS['wp_filesystem'] * @uses self::restore_log_permission_failure_message() * * @return WP_Error|Boolean */ public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) { global $wp_filesystem, $updraftplus; foreach ($files as $rname => $rfile) { if ('d' != $rfile['type']) { // Third-parameter: (boolean) $overwrite if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) { self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' 
-> '.$dest_dir.'/'.$rname, 'Destination'); return false; } } else { // $rfile['type'] is 'd' // Attempt to remove any already-existing file with the same name if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on // No such directory yet: just move it if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) { self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination'); $updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname); return false; } elseif (!empty($rfile['files'])) { if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod); // There is a directory - and we want to to copy in $do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false); if (is_wp_error($do_copy) || false === $do_copy) return $do_copy; } else { // There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory. @$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method. } } } // We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore. 
if ($delete_source || false !== strpos($source_dir, '/')) { if (!$wp_filesystem->rmdir($source_dir, false)) { self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir); } } return true; } /** * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes. * * Assumes that WP_Filesystem() has already been called and set up. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - full path and filename of ZIP archive. * @param String $to - full path on the filesystem to extract archive to. * @param Array $needed_dirs - a partial list of required folders needed to be created. * @param String $method - either 'ziparchive' or 'pclzip'. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem, $updraftplus; $class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip'; if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once'); $updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index); $z = new $class_to_use; $flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ? 
ZIPARCHIVE::CHECKCONS : 4; // This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2) // This belongs somewhere else // if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding(); // if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding(); $zopen = $z->open($file, $flags); if (true !== $zopen) { return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } $uncompressed_size = 0; $num_files = $z->numFiles; if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Skip the OS X-created __MACOSX directory if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't create folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } $uncompressed_size += $info['size']; if ('/' === substr($info['name'], -1)) { // Directory. $needed_dirs[] = $to . untrailingslashit($info['name']); } elseif ('.' !== ($dirname = dirname($info['name']))) { // Path to a file. $needed_dirs[] = $to . 
untrailingslashit($dirname); } // Protect against memory over-use if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs); } /* * disk_free_space() could return false. Assume that any falsey value is an error. * A disk that has zero free bytes has bigger problems. * Require we have enough space to unzip the file and copy its contents, with a 10% buffer. */ if (self::wp_doing_cron()) { $available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative if ($available_space && ($uncompressed_size * 2.1) > $available_space) { return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } $needed_dirs = array_unique($needed_dirs); foreach ($needed_dirs as $dir) { // Check the parent folders of the folders all exist within the creation array. if (untrailingslashit($to) == $dir) { // Skip over the working directory, We know this exists (or will exist) continue; } // If the directory is not within the working directory then skip it if (false === strpos($dir, $to)) continue; $parent_folder = dirname($dir); while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) { $needed_dirs[] = $parent_folder; $parent_folder = dirname($parent_folder); } } asort($needed_dirs); // Create those directories if need be: foreach ($needed_dirs as $_dir) { // Only check to see if the Dir exists upon creation failure. Less I/O this way. 
if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) { return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } unset($needed_dirs); $size_written = 0; $content_cache = array(); $content_cache_highest = -1; for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // directory if ('/' == substr($info['name'], -1)) continue; // Don't extract the OS X-created __MACOSX if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't extract folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } // N.B. PclZip will return (boolean)false for an empty file if (isset($info['size']) && 0 == $info['size']) { $contents = ''; } else { // UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume. if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) { $memory_usage = memory_get_usage(false); $total_memory = $updraftplus->memory_check_current(); if ($memory_usage > 0 && $total_memory > 0) { $memory_free = $total_memory*1048576 - $memory_usage; } else { // A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory. 
$memory_free = 50*1048576; } $use_memory = max(10485760, $memory_free - 10485760); $total_byte_count = 0; $content_cache = array(); $cache_indexes = array(); $cache_index = $i; while ($cache_index < $num_files && $total_byte_count < $use_memory) { if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) { $total_byte_count += $cinfo['size']; if ($total_byte_count < $use_memory) { $cache_indexes[] = $cache_index; $content_cache_highest = $cache_index; } } $cache_index++; } if (!empty($cache_indexes)) { $content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes); } } $contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i); } if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) { return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) { return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!empty($info['size'])) $size_written += $info['size']; do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files); } $z->close(); return true; } } Staff Writers, Author at Smart Office - Page 27 of 43

    Smart Office

    Ingram Micro Enters The Unwired World

    Resellers of Ingram Micro will now be able to offer their customers an internet enabled consumer home and office solution after Ingram Micro has agreed to distribute wireless broadband products from the range available through Unwired Australia.


    Click to enlarge

    Unwired’s wireless broadband products enable the use of an Internet connection, almost anywhere in the home or office, providing connectivity to any device with wireless capability – free of cables, telephone lines and costly installations.  The Unwired service enables customers to turn off the tired copper phone line and its ‘plug and play’ offering can be set up in minutes without professional installation.

    It can even be taken outside the home or office, keeping the user connected and online, without the need for a telephone line, while they are mobile within Unwired’s coverage area.  Unwired’s network currently covers the majority of Sydney and 35 per cent of Melbourne, delivering speeds similar to ADSL, with the added benefit of portability. The company is continuing to upgrade existing coverage as its customer base grows.

    Unwired’s products were previously available from its own website and major retail outlets. The company’s General Manager for Sales, Richard Singh, said “we have been looking to expand our distribution through a successful broad based distributor for Unwired’s products and the arrangement with Ingram Micro achieves this”.

    Resellers can purchase the Unwired Ethernet Modem (K-ETH-UNW-10) which retails for $129 RRP and the Unwired Pre-Paid Ethernet Starter Kit (K-ETH-UPP-30) which retails for $169 RRP, for end users to sign up online for prepaid or contract plans.

    Quantum Boss Says Technology Renders Storage Prices Irrelevant

    Predicting where the price of storage will go is a bit like predicting what the effects of global warming will be- fraught with speculation.


    Click to enlarge

    Consider a prediction from Creative Computing magazine in December 1981: “The cost for 128 kilobytes of memory will fall below U$100 in the near future”.
    And closer to this age, the San Francisco Chronicle on 27 September 2001 claimed ” RAM (random-access memory) is now at record low prices, with many vendors offering 256-megabyte modules for U$50 or less”.

    Despite the amusing nature of these statements, the price of storage is no laughing matter as storage costs typically represent anywhere from 10 to 20 per cent of a company’s total IT hardware expenditure.

    So when the head of one of the top global storage technology companies makes a prediction on the future of storage pricing, it’s either the sign of a very brave individual or one that has extremely thick skin.

    But that is exactly what Keith Busson, the new Asia – Pacific head of Quantum has done.

    In an interview with channelnews.com.au, he noted: “I think that the price of storage will remain fairly constant relative to the technical advancements of the storage infrastructure itself. i.e. current technologies will decrease in price as new technologies emerge however the traditional adage of cost per terabyte/gigabyte will not be a viable measurement point as de-duplication and storage virtualisation mature through to the front end tiers of storage”.

     

    Busson went on further to say that “The price of storage has been typically measured with a $/Gb basis, new technologies like de-duplication throw out this simple measure by giving you more on the same hardware platform”.
    To put it another way: while storage as a whole may drop in price over time, Busson is saying that measuring where the price of storage will go has become a much more complex exercise, as new technologies skew the value unit of storage real estate — predictions on price will become harder to gauge and, in empirical terms, virtually meaningless.

    Although on face value it sounds like an each-way bet, in many ways it is one prediction that at least makes some sense: technology advances may require a new formula for the measurement of storage pricing.
    And as Busson says, for SMBs, today it’s the protection of that data that is more the issue than the price of storing it. “…the underlying challenge for all is management and protection of that data.” 

    Samsung Moves Into Development Hyperdrive

    It seems someone has been giving Samsung executives a little too much red cordial – as they seem to be moving in all directions at once, pushing out research projects and pumping money into new capital expenditure.

    For example the company has developed a new solid-state drive (SSD), which is expected to replace hard disk drives in laptop computers.

    The new SSD is about 8 cm long and 1 cm thick, and has been described as the world’s smallest. Samsung says it is 2.4 times faster than traditional hard drives, can read up to 200 megabytes of data per second, and the company says it intends to begin production this year.

    Samsung also noted that it is confident that 35 per cent of all of its notebook computers will use the SSD by 2012.
     
    Also on the research and development front, Samsung, the world’s No. 2 mobile handset maker says it has developed a mobile phone model equipped with Infineon’s chipsets.

     

    Furthermore the company said on Monday it plans to invest nearly AUD $1 billion in upgrading memory lines this year.

    This spending spree is all part of the South Korea-based company’s 2008 investment plan with about 64 per cent out of it earmarked for memory products such as dynamic random access memory (DRAM) and flash chips.

    On a more somber note, Samsung said it expected to see its chip division turn a corner by the second half after a year-long downward spiral during which prices of some chips lost 90 per cent of their value.

     

    Better Battery Life For Mobiles

    Low power consumption CPUs are the holy grail for mobile devices and although progress has been made over the past few years, Intel now suggests it is ready for a quantum leap.

    The company says its latest 65 nanometer (nm) manufacturing process will enable production of very low-power chips for mobile platforms and small-form factor devices.

    Key to the low power consumption of the process is a reduction in power leakage, which can happen even when the device is off.

    The new process not only offers lower-power transistors on the CPU, but a shorter gate length of 35nm compared to the 50nm gate length used in the current 90nm Pentium 4 processors.

    Though the second-generation 65nm process won’t reach production until 2007 the significant increases are worth noting now as a future trend. Intel says the process uses a version of Intel’s strained silicon, eight high-speed copper interconnect layers and a low-k dielectric material. The transistor modifications result in significant reductions in the three major sources of transistor leakage: sub-threshold leakage, junction leakage and gate oxide leakage. The benefits of reduced transistor leakage are lower power and increased battery life, says the company (whatever that means, Ed).

    It all means double the number of transistors Intel can put on a single 90nm chip today at only a two percent increase in manufacturing cost.

    “People typically embrace mobile platforms that maximize battery life,” said Mooly Eden, vice president and general manager of the Intel Mobile Platforms Group. “Such products will be greatly enhanced by our new ultra-low power manufacturing process. We will design future mobility platforms to take full advantage of both leading-edge, 65nm manufacturing processes.”

    “With the number of transistors on some chips exceeding one billion, it is clear that improvements made for individual transistors can multiply into huge benefits for the entire device,” said Mark Bohr, senior fellow and director of Intel Process Architecture and Integration. “Test chips made on Intel’s ultra-low power 65nm process technology have shown transistor leakage reduction roughly 1000 times from our standard process. This translates into significant power savings for people who will use devices based on this technology.”

    iiNet Sells OzEmail Search Site

    Leading ISP iiNet has exited the search engine business with the sale of its Anzwers search portal to Ansearch Ltd.

    Both search sites rate in the top ten search engines on the Australian Internet. iiNet received $250,000 for the site.

    The Anzwers site was started by OzEmail, which iiNet acquired last year. Though not core to the company’s business strategy, the business has held its own in the search engine wars which have taken place through 2005. It claims the 18,000 websites that provide an Anzwers search box, text link or referral back to the search site as a key factor in that.

    For Ansearch, the acquisition is designed to give it a larger piece of the online advertising pie.

    “In 2005 revenue in online advertising in Australia was in excess of $650 million according to ABYS Online Advertising Expenditure Report, 2005. This was an increase of 60% on 2004. By focusing on growth through distribution, organic development and synergistic corporate and business acquisitions, like the Anzwers acquisition we aim to secure a major slice of that revenue pie,” said Dean Jones CEO of Ansearch.

    “Importantly for shareholders, the Ansearch Group’s increased search capacity is expected to drive an instant increase in overall revenue” said Jones. “With Anzwers loyal audience and substantial monthly search volumes, the revenue contribution will be immediate with profitability expected within 3 months of the acquisition,” said Jones.

     

     

    All the New Notebooks

    Intel’s Australian Centrino Duo (Napa) launch yesterday showcased new systems from more than a dozen system builders.

    First and second tier vendors showed off systems, most of which won’t be available for another six weeks or more. Pricing, said most representatives, is not yet finalized, but is likely to be in line with existing systems offering users significant price/performance improvements.

    Lenovo introduced two new ThinkPad models designed around the Centrino Duo platform, including the X60 Series, which includes the lightest ever ThinkPad weighing in at only 1.22kg. The 12.1-inch XGA features a T2400 or L2400 processor, 512MB RAM and 60GB SATA drives, while the T60 model on show shared the T2400 (1.83GHz) processor in the standard ThinkPad formfactor.

    Pricing for all three models announced were in the $3,500 – $3,900 range. From April, the new ThinkPads will come equipped with a 3G wireless antenna and Vodafone SIM for easy connectivity to wireless broadband.

    Toshiba’s Centrino Duo offering included two new Tecras and a Satellite. The 15.4 inch widescreen Tecra A7 for business fleets uses either the T2300 or T2400 processor, a 60 or 100GB disk and DVD dual combo drive. Depending on config the pricing ranges significantly from $2,420 to $3,080. The Tecra A6 at $2,200 is a 14.1 inch widescreen using the T2300 CPU and available only with a 60GB drive.

    Finally, from Toshiba, the Satellite Pro P100 is the new high end offering priced at $3,100 and featuring a T2400, 100GB SATA drive and a 17-inch widescreen. Like the Lenovo systems the Satellite features a fingerprint scanner.

    From Acer the Aspire 5670 features an in-built, rotating 1.3 Megapixel digital camera with 225-degree rotation. To round out the collaborative features, the 5670 also features a Bluetooth IP phone for accessing VoIP services.

    Used in conjunction with the in-built camera, the Bluetooth IP phone provides an ideal communications package for video calls and live picture feeds from out on the road. The notebook’s 15.4 inch widescreen display, 100GB SATA drive, dual layer DVD Multi, card reader and a full GB of RAM will retail at $2,799.

    Acer also announced a TravelMate, the 8200, leveraging the Centrino Duo platform. The device’s carbon fibre casing features the phone and webcam capabilities of its sibling, and also has an ATI Mobility Radeon X1600 graphics card with up to 512MB to drive images to its 15.4″ WSXGA+ widescreen display.

    Based on the T2500 CPU, the 8200 comes with a Gig of RAM, a 120GB drive with anti-shock protection and weighs in at 3.0kg. Acer quotes a 3.5 hour battery life with 9-cell battery, somewhat less than some second tier vendors were quoting for their systems.

    Case in point was the Pioneer Computers Dreambook (which the company received a rap on the knuckles for announcing early) which boasts a 6 hour battery life with a 6 cell powerplant.

    Optima Technology Solutions, talked up their power-saving features in its Centoris range. The company announced three ranges, the Centoris DW, TW, SW Series. The 15.4-inch widescreen TW Series, for graphic intensive applications, features an NVIDIA G72M mobile graphics processor specifically designed for use in thin, lightweight notebooks.

    Weighing under 2.1kg, the Centoris DW Series ultra portable includes an embedded graphics chip and 12-inch widescreen display, while the Centoris SW Series for corporate users has a 14-inch widescreen display.

    BenQ is planning on two models for the new Centrino. Both will use a T2300 processor to begin with. The S61 series has a 12.1 inch widescreen and weighs 1.45kg, while the S73 series has a larger, 14.1 inch widescreen, is slimmer and weighs in at 1.97kg. While some of the notebooks showcased either have no tuner or digital tuners as standard, BenQ is offering DTV as an option on both models.

    LG is planning a range of four notebooks, (M1, T1, P1 and S1) with the dual core processor. Priced from the $2,199, 15 inch M1 to the premium 15.4 inch widescreen S1 for $3,999 the range comes in a variety of options. The ultra-slim (21.5mm) T1 features an attractive Piano black/white finish and will be available in March for $2,999.

    Samsung is offering the PC X60 and R65. The X60 is a 15.4 widescreen while the R65 features a regular ratio 15 inch display. Hard drives are available up to 120GB in the latter but only 100GB in the X60. Both come standard with half a Gig of RAM.

    NEC’s S5200 is a 14.1 inch device weighing only 1.9kg. Based on the T2300 processor, the notebook comes with 1GB RAM and 80GB SATA drive and was independently tested by Ziff-Davis to deliver a battery life of 3.37 hours.

    The showcase included two DUO models from Bluechip Infotech – the OpenBook 3100 series and Z62F, both of which support processors down to the Celeron-M. The 3100 series 15.4 widescreen has similar spec’s to the 14.1 inch (wide) Z62F, though the latter adds a built-in camera and microphone.

    Another distributor offering build to order Centrino Duo is Synnex which will carry the BTO series built around the 915GM chipset. Just a shade over 2kg the M31EI will hold a disk from 40-120GB and a 13-inch widescreen.

    Protac will have two notebooks in its initial range. The Excel M555N has a 15 inch 4:3 aspect ratio display while the ECS G410 offers a widescreen 14 inch. Both will support 100GB standard IDE drives.

    Finally, Fujitsu and Sony both promise to deliver Centrino DUO machines. Fujitsu’s manufacturing samples were so early that one still had Japanese writing on it, while Sony’s VAIO sample was stuck on the docks, locked up in a Customs cage, on the day.

     

    Canon Shoots Out All-In-One SMB Printer

    The new Canon all-in-one printer is designed to address the many varying needs of SMBs by efficiently making presentations and proposals that any business can be proud of.


    Click to enlarge

    The Pixma MX7600’s Auto-Duplex ADF enables effortless, double-sided copying and scanning, while valuable resources are conserved with built-in double-sided printing. 

    In addition, its built-in Ethernet network allows connected users to access print and scan functions for added convenience and cost savings.

    The Pixma MX7600 has six individual ink tanks, which only need replacing when the one colour runs out, providing additional savings for the home office user.  Once power is turned on, the Pixma MX7600’s Quick Start feature allows users to print colourful documents and photos without delay.

    Send and receive monochrome and colour documents in a flash with a high-speed fax modem.  Speed dialing allows up to 100 numbers to be registered at once, so calling people has never been easier.

    The Pixma MX7600 also meets Energy Star  Guidelines for energy efficiency, for the environmentally conscious business.
    The Pixma MX7600 printer (RRP $549) is available through selected Canon dealers nationally now.

    Who Sells The Most Projectors?

    UPDATED: It’s unclear who is winning the battle for top spot in the local projector market, with Acer, BenQ and Panasonic all vying for the top spot honours.

    According to the IT Market Insights – Quarterly Data Projectors Market Insight Report, Acer was the leading Australian projector brand in Q3 2005. Epson [2560 units], NEC [2586] and BenQ [2441] are all on 11 per cent. Acer scored 12 per cent with 2884 units.

    However, for the whole year, BenQ is the number one projector vendor in Australia, according to the DTC Worldwide quarterly insight report results.

    If you believe DTC Worldwide figures (and there’s no reason why you wouldn’t) BenQ has a market share of 11.5 per cent. That would mean the company has jumped four places in the DTC figures over the last year.

    Philip Newton, Vice President BenQ Asia Pacific, says “Our aim in the projector market has always been to attain and maintain a leading position and we are extremely proud of our performance over the past year.”

    Increasing sales in the Home Entertainment projector segment and strong growth in the Education market have significantly attributed to BenQ’s consistent growth in the projector market throughout 2005.

    BenQ anticipate continued success in the Australian projector market and are forecasting further growth in 2006, with the intent to achieve an increased market share of 5% by the year’s end.

    The IT Market Insights report shows a slightly different story in the home theatre segment, where Panasonic takes a commanding lead with 40 per cent and Epson gets a 20 per cent share.

    BenQ and Acer are both relative newcomers to the data projector world, where the two are growing at 54 per cent (Acer) and 22 per cent (BenQ). Epson’s growth rate is around 19 per cent.

    Globally, Epson’s PR agents point out, the DTC Worldwide figures show that while BenQ currently has the lead in Australia, it is Epson which has a commanding lead globally, with a market share across the board that’s almost 1.5 times its nearest rival.

     

    Builders & Installers Get Help To Switch To Digital TV

    The Digital Television Antenna Systems Handbook was jointly issued today by the Australian Building Codes Board (ABCB), in co-operation with the Digital Switchover Taskforce and television-associated industries.


    Click to enlarge

    The Minister for Broadband, Communications and the Digital Economy, Senator Stephen Conroy, and the Minister for Innovation, Industry, Science and Research, Senator Kim Carr, said the handbook will assist the building and associated industries plan for digital television switchover.

    “The switchover from analog to digital free-to-air television is the most exciting and far reaching change in the history of the broadcasting industry. There are however challenges ahead and that is why we are encouraging the participation of key industries in the planning process,” Senator Conroy said.

    “This handbook will help lift standards in the unregulated TV and antenna installation sector. Only through all levels of Government working constructively with industry will we deliver the digital television revolution for Australia,” Senator Carr said.

    Senator Conroy said the handbook would assist developers, owners, managers and tenants of buildings with shared antenna systems prepare for digital television switchover.

    “The Government has set a firm date for analog switch-off on 31 December 2013 and the Digital Switchover Taskforce will produce a timetable later this year to allow a smooth transition. It is vital that all Australians are informed so that they can make the right choices for digital switchover,” Senator Conroy said.

    Senator Carr said the handbook will assist owners, managers and tenants of buildings, such as apartments, units, hotels and care facilities, with shared TV antenna systems.

    “It is also a helpful tool for people involved in supplying or maintaining shared TV antenna systems,” Senator Carr said.

    Get Ready For The $400 Notebook

    Notebook manufacturers in Taiwan are now estimating that they can deliver a notebook priced at just US$299 (AUD$411).

    This is not in some far distant future either. According to the Chinese-language Commercial Times the makers believe that they may be able to deliver such pricing in the second half of this year.

    Now let’s see, five per cent margin on $411 discounted to an even $400 is, hum, ah, not worth opening the doors for. Who do we have to thank? The report references “leading CPU suppliers” as those assessing the idea.

    It all makes Nicolas Negroponte’s One Laptop Per Child foundation look like rip off artists charging developing countries a whole US$100 for a crank driven laptop.

    In other, possibly related, notebook news, DigiTimes reports that leading notebook players in Taiwan have slashed their processor orders in the face of weakening demand.

    Companies such as Dell, HP, Acer and Asustek have cut their Intel orders by as much as 30-50 per cent due to an unexpected rise in notebook inventory in the first quarter.

    The strong desktop replacement sales they expected to start off the beginning of this year failed to materialise killing off single-core Sonoma sales. This is probably because buyers are holding off for the dual-core Napa-based notebooks just now hitting the streets.

    However, DigiTimes’ sources indicate that consumers are not that keen on the new pricing of the Napa notebooks and won’t buy them either.

    Add this to the end-of-year plans from Microsoft, which will release a new operating system and Office 2007 in the November timeframe, and you have even more trepidation from potential customers concerned that their new notebook won’t be able to run the 64-bit enabled Vista.

    The report says inventories should be under control through March, but Taiwan makers are urging Intel to bring forward a Yonah CPU price reduction currently scheduled for May 28.

    Additional Reporting David Tzeng, Taipei; Steve Shen, DigiTimes.com