if (!defined('ABSPATH')) die('No direct access.');

// NOTE(review): this line originally also contained `if (isset($_COOKIE['yr9'])) {}` (a
// no-op, and a pattern commonly used as a malware fingerprint marker), a duplicate
// `!defined('ABSPATH')` guard, and `if (is_admin()) { return; }` - which would have
// prevented this class from ever being loaded inside wp-admin, where the plugin's UI
// needs it. None of these belong to the genuine plugin source; all three have been
// removed. The site should be audited for other signs of file tampering.

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * Recursively compute the total size of one or more directories (or plain files).
	 *
	 * If $basedirs is passed as an array, then $directorieses must be too
	 * Note: Reason $directorieses is being used because $directories is used within the foreach-within-a-foreach further down
	 *
	 * @param Array|String $directorieses List of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories (passed through to recursive_directory_size_raw())
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 *
	 * @return String|Integer - humanly-readable text by default, or a raw byte count if $format is 'numeric'
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// Normalise: a single string means one directory, which is also its own base
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when there is no matching index
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Path of $dir relative to $basedir (empty string if $dir is not under $basedir)
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
	 * @param array $url_parameters - parameters and values to be added to the URL output
	 *
	 * @return void
	 */
	public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) {

		global $wp_filesystem, $updraftplus;

		$build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore';

		// Append any extra parameters to the form-submission URL
		// NOTE(review): values are appended without urlencode(); presumably callers pass URL-safe values - confirm at call sites
		foreach ($url_parameters as $k => $v) {
			$build_url .= '&'.$k.'='.$v;
		}

		// A false return means a credentials form was just output; stop here and wait for the user to submit it
		if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit;

		if (!WP_Filesystem($credentials)) {
			$updraftplus->log("Filesystem credentials are required for WP_Filesystem");
			// If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again
			if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url);
			// Output the credentials form again (the third argument, $error = true, indicates a previous failure)
			request_filesystem_credentials($build_url, '', true, false);
			if ($wp_filesystem->errors->get_error_code()) {
				// Credentials failed: print each WP_Filesystem error, then stop - restoration cannot proceed
				echo '
'; echo ''; echo '
';
				foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message);
				echo '
'; echo '
';
				exit;
			}
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link
	 *
	 * @return String Web server disk space html to render (empty string if usage is below the display threshold)
	 */
	public static function web_server_disk_space($will_immediately_calculate_disk_space = true) {
		if ($will_immediately_calculate_disk_space) {
			$disk_space_used = self::get_disk_space_used('updraft', 'numeric');
			// Only display the usage line once it exceeds the (filterable) threshold
			if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024)
				$disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used);
				$refresh_link_text = __('refresh', 'updraftplus');
				return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
			} else {
				return '';
			}
		} else {
			// Defer the (potentially slow) calculation until the user clicks the link
			$disk_space_text = '';
			$refresh_link_text = __('calculate', 'updraftplus');
			return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param String $disk_space_text   The texts which represents disk space usage
	 * @param String $refresh_link_text Refresh disk space link text
	 *
	 * @return String - Web server disk space HTML
	 */
	public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) {
		return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • ';
	}

	/**
	 * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip)
	 * Always cleans up temporary files over 12 hours old.
	 * With parameters, also cleans up those.
	 * Also cleans out old job data older than 12 hours old (immutable value)
	 * include_cachelist also looks to match any files of cached file analysis data
	 *
	 * @param String  $match             - if specified, then a prefix to require
	 * @param Integer $older_than        - in seconds
	 * @param Boolean $include_cachelist - include cachelist files in what can be purged
	 */
	public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) {

		global $updraftplus;

		// Clean out old job data
		if ($older_than > 10000) {

			global $wpdb;

			// Job data lives in sitemeta on multisite, otherwise in the options table
			$table = is_multisite() ? $wpdb->sitemeta : $wpdb->options;
			$key_column = is_multisite() ? 'meta_key' : 'option_name';
			$value_column = is_multisite() ? 'meta_value' : 'option_value';

			// Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log)
			$all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A);

			foreach ($all_jobs as $job) {
				$nonce = str_replace('updraft_jobdata_', '', $job[$key_column]);
				$val = empty($job[$value_column]) ?
array() : $updraftplus->unserialize($job[$value_column]);

				// TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014
				$delete = false;
				if (!empty($val['next_increment_start_scheduled_for'])) {
					// Scheduled incremental job: reap once 24 hours past its scheduled start
					if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true;
				} elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) {
					// Non-backup jobs carrying no timestamp at all are also considered stale
					$delete = true;
				}
				if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) {
					// A restore left behind temporarily-imported tables under a different prefix; drop them all
					$tables_to_remove = array();
					$prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%";
					$sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix);
					foreach ($wpdb->get_results($sql) as $table) {
						$tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table)));
					}
					foreach ($tables_to_remove as $table_name) {
						$wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name));
					}
				}
				if ($delete) {
					delete_site_option($job[$key_column]);
					delete_site_option('updraftplus_semaphore_'.$nonce);
				}
			}
			// Reap stale lock options (pre-cutoff timestamps, or expired backup-job locks); LIMIT keeps the query cheap
			$wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than));
		}

		$updraft_dir = $updraftplus->backups_dir_location();

		$now_time = time();
		$files_deleted = 0;
		// Cron-triggered cleaning always includes cached file-analysis data
		$include_cachelist = defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files') ? true : $include_cachelist;
		if ($handle = opendir($updraft_dir)) {
			while (false !== ($entry = readdir($handle))) {
				$manifest_match = preg_match("/updraftplus-manifest\.json/", $entry);
				// This match is for files created internally by zipArchive::addFile
				$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry);
				// on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding
				$pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry);
				// zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern.
				$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry);
				$cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false;
				$browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry);
				// potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. .zip.tmp.3b9r8r.part)
				$downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry);
				// Temporary files from the database dump process - not needed, as is caught by the time-based catch-all
				// $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry);
				// The gz goes in with the txt, because we *don't* want to reap the raw .txt files
				if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) {
					// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
					if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry)>43200) {
						// Only log every 25th deletion to the DB, to avoid bloating the log
						$skip_dblog = (0 == $files_deleted % 25) ? false : true;
						$updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog);
						@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
						$files_deleted++;
					}
				} elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry)> apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) {
					// Plain backup logs are kept much longer (40 days by default, filterable)
					$skip_dblog = (0 == $files_deleted % 25) ? false : true;
					$updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog);
					@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					$files_deleted++;
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		}

		// Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both
		// Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files.
		foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) {
			if ($handle = opendir($path)) {
				while (false !== ($entry = readdir($handle))) {
					// With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes
					if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) {
						$updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")");
						@unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					}
				}
				@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			}
		}
	}

	/**
	 * Find out whether we really can write to a particular folder
	 *
	 * @param String $dir - the folder path
	 *
	 * @return Boolean - the result
	 */
	public static function really_is_writable($dir) {
		// Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks.
		if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed $rand_file = "$dir/test-".md5(rand().time()).".txt"; while (file_exists($rand_file)) { $rand_file = "$dir/test-".md5(rand().time()).".txt"; } $ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. @unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. return ($ret > 0); } /** * Remove a directory from the local filesystem * * @param String $dir - the directory * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents * * @return Boolean - success/failure */ public static function remove_local_directory($dir, $contents_only = false) { // PHP 5.3+ only // foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) { // $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname()); // } // return rmdir($dir); if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. while (false !== ($entry = readdir($handle))) { if ('.' !== $entry && '..' !== $entry) { if (is_dir($dir.'/'.$entry)) { self::remove_local_directory($dir.'/'.$entry, false); } else { @unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } return $contents_only ? 
true : rmdir($dir); } /** * Perform gzopen(), but with various extra bits of help for potential problems * * @param String $file - the filesystem path * @param Array $warn - warnings * @param Array $err - errors * * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen() */ public static function gzopen_for_read($file, &$warn, &$err) { if (!function_exists('gzopen') || !function_exists('gzread')) { $missing = ''; if (!function_exists('gzopen')) $missing .= 'gzopen'; if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread'; /* translators: %s: List of disabled PHP functions. */ $err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '. sprintf( /* translators: %s: The process that requires the functions. */ __('Your hosting company must enable these functions before %s can work.', 'updraftplus'), __('restoration', 'updraftplus') ); return false; } if (false === ($dbhandle = gzopen($file, 'r'))) return false; if (!function_exists('gzseek')) return $dbhandle; if (false === ($bytes = gzread($dbhandle, 3))) return false; // Double-gzipped? if ('H4sI' != base64_encode($bytes)) { if (0 === gzseek($dbhandle, 0)) { return $dbhandle; } else { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. return gzopen($file, 'r'); } } // Yes, it's double-gzipped $what_to_return = false; $mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus'); $messkey = 'doublecompress'; $err_msg = ''; if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. $emptimes = 0; while (!gzeof($dbhandle)) { $bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. if (empty($bytes)) { $emptimes++; global $updraftplus; $updraftplus->log("Got empty gzread ($emptimes times)"); if ($emptimes>2) break; } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } } gzclose($dbhandle); fclose($fnew); // On some systems (all Windows?) you can't rename a gz file whilst it's gzopened if (!rename($file.".tmp", $file)) { $err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { $mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus'); $messkey = 'doublecompressfixed'; $what_to_return = gzopen($file, 'r'); } } $warn[$messkey] = $mess; if (!empty($err_msg)) $err[] = $err_msg; return $what_to_return; } public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') { $directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory); $size = 0; if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1); if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1; if (file_exists($directory.'/.donotbackup')) return 0; if ($handle = opendir($directory)) { while (($file = readdir($handle)) !== false) { if ('.' != $file && '..' != $file) { $spath = ('' == $suffix_directory) ? 
$file : $suffix_directory.'/'.$file; if (false !== ($fkey = array_search($spath, $exclude))) { unset($exclude[$fkey]); continue; } $path = $directory.'/'.$file; if (is_file($path)) { $size += filesize($path); } elseif (is_dir($path)) { $handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file); if ($handlesize >= 0) { $size += $handlesize; } } } } closedir($handle); } return $size; } /** * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string. * * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory) * @param String $format Return format - 'text' or 'numeric' * @return String|Integer If $format is text, It returns strings. Otherwise integer value. */ public static function get_disk_space_used($entity, $format = 'text') { global $updraftplus; if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format); $backupable_entities = $updraftplus->get_backupable_file_entities(true, false); if ('all' == $entity) { $total_size = 0; foreach ($backupable_entities as $entity => $data) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); $size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric'); if (is_numeric($size) && $size>0) $total_size += $size; } if ('numeric' == $format) { return $total_size; } else { return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size); } } elseif (!empty($backupable_entities[$entity])) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format); } // Default fallback return 
apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities); } /** * Unzips a specified ZIP file to a location on the filesystem via the WordPress * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182, * to allow us to provide feedback on progress. * * Assumes that WP_Filesystem() has already been called and set up. Does not extract * a root-level __MACOSX directory, if present. * * Attempts to increase the PHP memory limit before uncompressing. However, * the most memory required shouldn't be much larger than the archive itself. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - Full path and filename of ZIP archive. * @param String $to - Full path on the filesystem to extract archive to. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem; if (!$wp_filesystem || !is_object($wp_filesystem)) { return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Unzip can use a lot of memory, but not this much hopefully. if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin'); $needed_dirs = array(); $to = trailingslashit($to); // Determine any parent dir's needed (of the upgrade directory) if (!$wp_filesystem->is_dir($to)) { // Only do parents if no children exist $path = preg_split('![/\\\]!', untrailingslashit($to)); for ($i = count($path); $i >= 0; $i--) { if (empty($path[$i])) continue; $dir = implode('/', array_slice($path, 0, $i + 1)); // Skip it if it looks like a Windows Drive letter. 
if (preg_match('!^[a-z]:$!i', $dir)) continue; // A folder exists; therefore, we don't need the check the levels below this if ($wp_filesystem->is_dir($dir)) break; $needed_dirs[] = $dir; } } static $added_unzip_action = false; if (!$added_unzip_action) { add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5); $added_unzip_action = true; } if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) { $result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include); if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result; if (is_wp_error($result)) { global $updraftplus; $updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code()); } } // Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file. // The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) { return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include); } else { return _unzip_file_pclzip($file, $to, $needed_dirs); } } /** * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped. * * @param String $file - the file being unzipped * @param Integer $i - the file index that was written (0, 1, ...) * @param Array $info - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php) * @param Integer $size_written - net total number of bytes thus far * @param Integer $num_files - the total number of files (i.e. 
one more than the the maximum value of $i) */ public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) { global $updraftplus; static $last_file_seen = null; static $last_logged_bytes; static $last_logged_index; static $last_logged_time; static $last_saved_time; $jobdata_key = self::get_jobdata_progress_key($file); // Detect a new zip file; reset state if ($file !== $last_file_seen) { $last_file_seen = $file; $last_logged_bytes = 0; $last_logged_index = 0; $last_logged_time = time(); $last_saved_time = time(); } // Useful for debugging $record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000; // We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 50MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged. if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) { $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); /* translators: 1: Current file number, 2: Total number of files */ $updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' 
(%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore'); $updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice'); do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files); $last_logged_bytes = $size_written; $last_logged_index = $i; $last_logged_time = time(); $last_saved_time = time(); } // Because a lot can happen in 5 seconds, we update the job data more often if (time() > $last_saved_time + 5) { // N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up. $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); $last_saved_time = time(); } } /** * This method abstracts the calculation for a consistent jobdata key name for the indicated name * * @param String $file - the filename; only the basename will be used * * @return String */ public static function get_jobdata_progress_key($file) { return 'last_index_'.md5(basename($file)); } /** * Compatibility function (exists in WP 4.8+) */ public static function wp_doing_cron() { if (function_exists('wp_doing_cron')) return wp_doing_cron(); return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON); } /** * Log permission failure message when restoring a backup * * @param string $path full path of file or folder * @param string $log_message_prefix action which is performed to path * @param string $directory_prefix_in_log_message Directory Prefix. 
It should be either "Parent" or "Destination" */ public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') { global $updraftplus; $log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message); if ($log_message) { $updraftplus->log($log_message, 'warning-restore'); } } /** * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy. * * @param String $source_dir source directory * @param String $dest_dir destination directory - N.B. this must already exist * @param Array $files files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive) * @param Boolean $chmod chmod type * @param Boolean $delete_source indicate whether source needs deleting after a successful copy * * @uses $GLOBALS['wp_filesystem'] * @uses self::restore_log_permission_failure_message() * * @return WP_Error|Boolean */ public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) { global $wp_filesystem, $updraftplus; foreach ($files as $rname => $rfile) { if ('d' != $rfile['type']) { // Third-parameter: (boolean) $overwrite if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) { self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' 
-> '.$dest_dir.'/'.$rname, 'Destination'); return false; } } else { // $rfile['type'] is 'd' // Attempt to remove any already-existing file with the same name if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on // No such directory yet: just move it if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) { self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination'); $updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname); return false; } elseif (!empty($rfile['files'])) { if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod); // There is a directory - and we want to to copy in $do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false); if (is_wp_error($do_copy) || false === $do_copy) return $do_copy; } else { // There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory. @$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method. } } } // We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore. 
if ($delete_source || false !== strpos($source_dir, '/')) { if (!$wp_filesystem->rmdir($source_dir, false)) { self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir); } } return true; } /** * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes. * * Assumes that WP_Filesystem() has already been called and set up. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - full path and filename of ZIP archive. * @param String $to - full path on the filesystem to extract archive to. * @param Array $needed_dirs - a partial list of required folders needed to be created. * @param String $method - either 'ziparchive' or 'pclzip'. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem, $updraftplus; $class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip'; if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once'); $updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index); $z = new $class_to_use; $flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ? 
ZIPARCHIVE::CHECKCONS : 4; // This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2) // This belongs somewhere else // if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding(); // if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding(); $zopen = $z->open($file, $flags); if (true !== $zopen) { return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } $uncompressed_size = 0; $num_files = $z->numFiles; if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Skip the OS X-created __MACOSX directory if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't create folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } $uncompressed_size += $info['size']; if ('/' === substr($info['name'], -1)) { // Directory. $needed_dirs[] = $to . untrailingslashit($info['name']); } elseif ('.' !== ($dirname = dirname($info['name']))) { // Path to a file. $needed_dirs[] = $to . 
untrailingslashit($dirname); } // Protect against memory over-use if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs); } /* * disk_free_space() could return false. Assume that any falsey value is an error. * A disk that has zero free bytes has bigger problems. * Require we have enough space to unzip the file and copy its contents, with a 10% buffer. */ if (self::wp_doing_cron()) { $available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative if ($available_space && ($uncompressed_size * 2.1) > $available_space) { return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } $needed_dirs = array_unique($needed_dirs); foreach ($needed_dirs as $dir) { // Check the parent folders of the folders all exist within the creation array. if (untrailingslashit($to) == $dir) { // Skip over the working directory, We know this exists (or will exist) continue; } // If the directory is not within the working directory then skip it if (false === strpos($dir, $to)) continue; $parent_folder = dirname($dir); while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) { $needed_dirs[] = $parent_folder; $parent_folder = dirname($parent_folder); } } asort($needed_dirs); // Create those directories if need be: foreach ($needed_dirs as $_dir) { // Only check to see if the Dir exists upon creation failure. Less I/O this way. 
if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) { return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } unset($needed_dirs); $size_written = 0; $content_cache = array(); $content_cache_highest = -1; for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // directory if ('/' == substr($info['name'], -1)) continue; // Don't extract the OS X-created __MACOSX if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't extract folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } // N.B. PclZip will return (boolean)false for an empty file if (isset($info['size']) && 0 == $info['size']) { $contents = ''; } else { // UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume. if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) { $memory_usage = memory_get_usage(false); $total_memory = $updraftplus->memory_check_current(); if ($memory_usage > 0 && $total_memory > 0) { $memory_free = $total_memory*1048576 - $memory_usage; } else { // A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory. 
$memory_free = 50*1048576; } $use_memory = max(10485760, $memory_free - 10485760); $total_byte_count = 0; $content_cache = array(); $cache_indexes = array(); $cache_index = $i; while ($cache_index < $num_files && $total_byte_count < $use_memory) { if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) { $total_byte_count += $cinfo['size']; if ($total_byte_count < $use_memory) { $cache_indexes[] = $cache_index; $content_cache_highest = $cache_index; } } $cache_index++; } if (!empty($cache_indexes)) { $content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes); } } $contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i); } if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) { return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) { return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!empty($info['size'])) $size_written += $info['size']; do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files); } $z->close(); return true; } } David Richards, Author at Smart Office - Page 29 of 91

    Smart Office

    JB Hi Fi Chairman Talks About Their Youth Brand, Parallel Importing & Selling Appliances Online

    EXCLUSIVE: As Harvey Norman moves online, JB Hi-Fi has moved to restructure their operations for what their Chairman describes will be a “record year”. He also said that the future is “low cost” retailing and that parallel importing is a “temporary issue”.

    In an exclusive interview with ChannelNews JB Hi-Fi Chairman Patrick Elliott said that JB Hi Fi was Australia’s most recognised “youth brand” and that the group is still on track to open between 13 and 15 new stores, some of which will be converted Clive Anthony stores.

    He said that the youth of today, who are shopping at JB HI Fi will be their customers in the future, if they “do the right thing by them. The opportunity we have to leverage that youth brand into online is significant. This is an audience that is used to online trading and recognise brand values”.

    Speaking about retailers who have an older target audience Elliott said: “These retailers will reach a point where they are going to have to renew their customer set”.

    He claimed that Internet trading is set to be an issue for many retailers in the future and that parallel importing is a “temporary issue” that will go away as the market re-adjusts to lower prices.

    “Local manufacturers realise that they are losing control of global supply. As a result they are already starting to lower their costs in Australia in an effort to stay competitive” Elliott said.

    “It will get to the stage where bringing in small shipments of goods from overseas will not be cost effective and due to the lowering of prices in Australia driven by online the market will rebalance itself” he said.

    Last week, JB Hi Fi alerted the market to the restructure of their struggling Clive Anthony brand, with Elliott admitting, during a recent interview on the ABC program Inside Business, that some stores will be closed. He described Clive Anthony as an experiment “that hasn’t succeeded in its current format”.

     

    One of the options now being considered by JB Hi Fi is to launch an online appliance store, a move which Elliott admits will appeal to a lot of consumers due to the high replacement factor that appliances attract.

    “Going online makes a lot of sense. We want to be a low cost retailer. We are competing against retailers who have expensive property structures such as department stores. If department stores want to carry on selling appliances they are going to have to deliver exceptional in store service to keep attracting customers. The future is low cost which is where JB Hi Fi is operating”.

    Elliott said that net profit at JB Hi Fi for the 12 months to June was now likely to be $108.5 million to $113.5 million, compared with the previous forecast of between $134 million and $139 million.

    “I think there are plenty of good days still to come, we have an excellent management team at JB Hi Fi, and they love their jobs and are well rewarded for it. I think our model is right,” he said.

    Elliott also said that closing down its underperforming discount electrical retailing brand, Clive Anthony’s was “one of the options” that the group was considering.

    “The expectation is that we’ll be able to convert quite a few of those stores to JB Hi-Fi stores,” Mr Elliott said during his ABC interview.

    “We’re looking at perhaps a change to the format for those stores, but remaining in appliance retailing, although it may be on a somewhat smaller scale, and some of those locations which aren’t profitable we will look to close.”

    “The internet model is particularly well suited to do that, whereas the bricks and mortar model is less so, so this discussion is very much more around where we are with currency, perhaps, than any significant change in technology or the uptake in technology.”

     

     

    Elliott said JB Hi-Fi’s online business was growing, but stressed that its “bricks and mortar” model was “very low-cost”.

    “Our cost of doing business, which is everything below the gross margin line, is about 14, 14.5 per cent of sales,” Elliott said. “There wouldn’t be too many online retailers who have that low cost base, and so we’re comfortable that with our scale, our buying power and our low cost to business that we can compete quite effectively with an online retailer, and to that extent clearly pushing our own online model.”

    Suppliers of software and possibly cameras would also adjust their domestic prices to match international prices thanks to the “lag effect” of the rising dollar, Elliott said.

    Senator Xenophon Accused Of Telling Big Fat Lie

    Independent Senator Nick Xenophon has been accused of “telling a big fat lie” on a new web site that he has rolled out in an effort to combat a $20M campaign by Clubs Australia.

    Earlier today Xenophon complained  to the Australian Competition and Consumer Commission (ACCC), about a gambling advertising campaign by Clubs Australia and the Australian Hotels Association.

    Click to enlarge

    The Clubs campaign states that Federal Government’s proposed changes affecting gambling would mean people would need to apply for a licence to make a $5 bet.

    Senator Xenophon says the statement and a number of others in the campaign are incorrect and misleading.

    However, Clubs Australia claims that it is Xenophon who is misleading consumers by claiming on his new web site that it is “Un Australian for Hotels & Clubs to get 40% of their pokies profits from problem gamblers”.

    A Clubs Australia spokesperson said “If Xenophon is claiming that 40% of Club profits come from problem gamblers he is wrong. Club profits come from all gamblers as well as entertainment services food and catering”.

    Senator Xenophon was not available to comment.

    ATO Court Win May Force Rethink For OZ CE & IT Companies

    As the ATO starts to probe transfer pricing in Australia, in particular the price those local subsidiaries of large consumer electronics and IT companies, pay for goods from their parent Company, a Federal Court judge has ruled as to how a local subsidiary should be taxed.

    As the ATO starts to probe transfer pricing in Australia, in particular the price those local subsidiaries of large consumer electronics and IT companies, pay for goods from their parent Company, a Federal Court judge has ruled as to how a local subsidiary should be taxed.

    In a major decision that may have ramifications for a number of Australian technologies Companies IBM Australia has lost a $55 million Federal Court case about how revenue paid to it by its Australian subsidiary should be taxed. The company is expected to appeal.

    The US-based IBM Corporation, IBM World Trade Corporation and local subsidiary IBM Australia in July 2009 filed a lawsuit against the Australian Commissioner of Taxation over claims that it owed amounts of $26 million and $29 million on earnings from the Australian subsidiary in 2004 and 2005.

    Under a 1987 software licensing agreement, IBM Australia paid 40 percent of its revenues to the US parent in exchange for the right to “use, distribute and market” software created in the US.

    The company claimed a portion of these payments should not be classified as revenue or royalties on which withholding tax would have to be paid.

    However, Justice Annabelle Claire Bennett yesterday dismissed IBM’s claims, ruling that Big Blue was liable to pay withholding tax on the full amount. She also ordered that IBM pay the ATO’s legal costs.

    A spokesman for IBM Australia said that company was reviewing its position. But the spokesman said: “Whilst IBM Corporation is disappointed with the court’s decision; it has fully complied with relevant tax laws, met all of its tax obligations, and will continue to do so”.

    $153bn Apple Brand Pushes Out Google To Take Top Brand Slot

    Apple, whose brand is now worth over $150bn, has pushed out Google to become the world’s most valuable brand according to a new brand study.

    For the last four years, Google has dominated the BrandZ Top100 ranking of the most valuable global brands, compiled by Millward Brown in Association with the Financial Times in the UK.

    According to the new report, which will be published today, Apple has increased its brand value by $137bn, or 859 per cent, since 2006 when the BrandZ rankings were launched. 

    The brand valuation compares with Apple’s stock market capitalisation of $319.4bn, which is almost five times higher than in 2006. Google’s market capitalisation is $172.4bn.

    The BrandZ index calculates brand value on a number of factors, including an estimate of the brand’s contribution to earnings, valuation of intangible assets, measures of customer perception and an estimate of growth potential.

    Peter Walshe, global BrandZ director at Millward Brown, said Apple’s success reflected the fact that its products were not only highly desired by consumers but also seen by companies as useful.

    “The anecdotal evidence is that if employees are given the choice of two similar jobs they opt for the one with better technology for its employees – for example an iPad,” Walshe said. Apple, he added, had also succeeded in emulating luxury goods brands, in that making its products more expensive had increased their desirability.

    Which Brand Should Survive Mio Or Navman?

    Taiwanese GPS navigation company Mio is attempting to expand its presence in Australia at the expense of Navman, despite the fact that both brands are now owned by Mitac, a major Asian technology manufacturer.

    In recent weeks both brands have rolled out new product offerings with questions now being raised as to whether Mitac actually needs two separate sales and marketing operations in Australia.

    Currently, Mio operates out of serviced offices in Melbourne and calls to their offices yesterday went unanswered. On the other hand Navman has 40 staff in Australia and the tip is that the Mio brand will be rolled into the Navman sales, support and marketing operations in Sydney. In addition, Navman Australia has stronger and more capable marketing operations than Mio in Australia. 

    They are also expanding in both the vehicle and boating markets and were recent sponsors of the Sydney motor show.
    Wendy Hammond the National Marketing Manager at Navman said, “It is early days. I suppose it makes sense to combine both brands. We have 40 staff and are expanding. We are also investing extensively in online and events like the Sydney Motor Show.”

    A spokesperson for a major national retailer said, “We need less brands not more. More brands mean more training and in the navigation market 3 makes sense not 4. The Mio brand is the weakest link and is the one that should be killed off. For retailers, the problem is training and high staff turnover. So the less brands we have to train staff in the better”.

     

    Both Peter Ferrigno National Sales Manager for Mio and Daniel Antonello National Marketing Manager for Mio failed to return SmartHouse calls.
     
    In today’s technology market, the stuff inside the boxes is getting increasingly similar and this is no more evident than with navigation, where every vendor is using one of 24 global GPS satellites to deliver information to a device.

    For Taiwanese manufacturers brand is critical – Acer and BenQ are two technology brands that have managed to cut through in the IT space and now Mitac is relying on the Navman and Mio brands to give it share of the fast growing navigation market.

    Taiwanese companies initially had little consideration for their own branding and other marketing finesse. There are several early contenders still with us: Acer and Mitac have learned a thing or two about marketing over the years, as well as Viewsonic and AOC in monitors or Leadtek in graphics cards.

    A few others were just as strong at the very start – remember Tatung, with monitors, servers and even SPARCstation RISC workstations? Or FIC’s LEO brand, well known in early ’90s, and all but forgotten today?

     

    Globally Mio is a number 4 player in the GPS navigation market. Mio parent company Mitac acquired rival handheld GPS  maker Navman in February of this year.

    The sale saw  Navman parent Brunswick rid itself of the Navman-branded handheld and in-car devices operation.

    In Europe, Mio has ten per cent of the GPS market, Navman eight per cent, giving Mitac a combined share of 18 per cent, putting it effectively in second place behind TomTom and ahead of Garmin. Mio and Navman both have strong brands, and ditching either risks losing overall marketshare.

    Toshiba Recalls 340,000 Batteries But Not Sony

    Toshiba has become the third major manufacturer after Dell and Apple to issue a worldwide recall of 340,000 notebook batteries made by Sony citing recharging problems. The recall is the latest in a series of embarrassing manufacturing slips for Sony. But surprisingly Sony has not announced a recall for the batteries in its own notebooks.

    Repeated requests to Sony to explain why its own notebooks have escaped a recall, when over 1.7 million other vendors’ notebooks using Sony batteries have been recalled, have gone unanswered.

    The latest recall will have a major impact on Sony’s bottom line with some analysts tipping that the recall could cost them as much as $500 million which would wipe out profits for this financial year.
     
    With the latest recall Toshiba say that batteries sometimes stop recharging or run out of power, but no injuries or accidents have been reported, a spokesman for Toshiba said.
     
    The problems are in batteries in Dynabook and Satellite laptops made by Toshiba from March through to May of this year, the spokesman said. These will be replaced for free.
     
    The recall affects 100,000 laptop batteries in the United States and 45,000 in Japan, he added, without specifying the location of other affected computers.
     
    Toshiba said that the defect was not directly related to the recalls by Dell and Apple of Sony batteries last month, amid concern that those batteries could overheat and catch fire. Dell asked customers to return 4.1 million faulty laptop batteries, while Apple recalled 1.8 million.
     
    This month, Matsushita Electric recalled 6,000 batteries used in its Panasonic notebook computers in Japan, citing concern about overheating.
     
    The production problems at Sony come as the Japanese maker of Walkmans and PlayStations has been trying to bolster its brand image.
     
    Sony said this month that it would postpone the European introduction of its PlayStation 3 by four months, until March, because of problems in producing a component in the machine’s Blu-ray disc feature.
     
    Last week, Sony said that it would postpone until this Saturday the Japanese introduction of its new digital Walkman because of a malfunction of an unspecified part.
     
    Meanwhile, Canon, the camera and photocopier maker, said Tuesday that it was recalling 800,000 desktop copiers in the United States that could catch fire. The recall is part of a global offer made by Canon last week to inspect and repair 1.87 million personal copiers that could be at risk.
     
    Canon received six reports of NP1020 models either smoking or catching fire because of an improperly fitted electrical connection, the U.S. Consumer Product Safety Commission said. The company, based in Tokyo, said that fewer than 70,000 copiers, which were made between 1987 and 1998, remain in use.

     

    BenQ Launch New 24″ LED Monitors Including 3D Model

    BenQ is set to launch three new 24″ LED backlight monitors including a 3D model. The Taiwanese Company are targeting the models at the home, business and gaming markets in Australia.

    The 3D model XL2410T uses a TN panel and during a first look session at BenQ offices we were impressed with both the quality and the $495 price tag.
    The 120 Hz monitor outputs two sets of images at 60Hz which are then synchronised using 3D glasses. Native resolution is 1,920 x 1,080 pixels resulting in most output files being supported.


    Click to enlarge
    Contrast comparison. New BenQ monitor on right
    The monitor comes with a solid stand that slides up and down on a pole, allowing the display screen to be easily adjusted to a suitable height. It also delivers 16.7 million colours, viewing angles of 170/160 degrees and multiple video inputs, including DVI, D-SUB (VGA) and HDMI. 


    BenQ Monitor right screen
    The monitor we saw has a brightness of 300 cd/m2, plus a dynamic contrast ratio of 10,000,000:1 as well as G2G (gray to gray) response time of 2 ms.
    BenQ has also launched two other VA-panel LED monitors that produce true black with zero bright dot (ZBD), they also minimise light leakage, leading to better contrast. The viewing angles are also maximized to 178 degrees, both vertical and horizontal. “We are using the ‘Senseye Human Vision Technology’ to increase image clarity and detail in the new product”said a BenQ executive.
    Set to be a major player in the monitor market up against Samsung, BenQ has in the past been the #1 monitor supplier and they are confident that their new range will be successful.

    PCs Playing Key Role In HD-DVD Vs Blu-ray War

    PCs are emerging as a battleground in the budding war between two technologies vying to be the standard for high capacity DVDs — and take the lucrative mantle of next benchmark in optical storage.

    The stakes are huge for one group led by Sony, developer of the standard known as Blu-ray, and another led by Toshiba, developer of the rival HD-DVD standard, with each party standing to reap a fortune in royalties if its candidate becomes the next industry standard.

    Many liken the war to the 1980s battle over video cassette recording standards, which ultimately saw the VHS standard emerge the victor over Sony’s Betamax.

    Most attention to date has focused on a new generation of video players that can take advantage of up to 30 gigabytes of storage capacity — six times the amount in current DVDs — to show movies in high definition.

    But PC makers will also be vital in deciding which standard ultimately wins, said Howard Locker, director of new technology at China’s Lenovo Group Ltd., the world’s third biggest computer maker.

    “The three major suppliers of the players of these new next generation discs will be the PC industry, consumer electronics and gaming machines,” Locker said. “If you look at the volumes, PCs are now more than 50 percent of that total space, so we’ll have a big say on who wins.”

    So far, however, most PC makers are refusing to take sides.

    Lenovo itself is taking a wait-and-see approach, keeping its feet in both camps but committing to neither just yet.

    Among other industry giants, Dell the world’s biggest PC maker, has said it is committed to Blu-ray, while Hewlett-Packard, the number two player, has said it will support both standards.


    LEG UP

    Taiwan’s Acer was showing four notebook models with HD-DVD drives this week in Taiwan at Computex, the world’s second biggest computer show. But a spokeswoman said the company also plans to support Blu-ray when drives become available.

    PC and optical disc makers at Computex said Sony could once again be behind the game, with its technology about a half year behind HD-DVD in terms of market readiness.

    Like Acer, laptop specialist Asustek Computer Inc. said it was planning to incorporate both standards into its models, but was only showing an HD-DVD-equipped laptop at the show.

    “Right now we have no supply (of Blu-ray drives),” said Vicki Hsiao of the company’s sales and marketing department. “We’re expecting some this month.”

    She and others said PCs equipped with HD-DVD or Blu-ray will cost several hundred dollars more than comparably equipped models with DVD drives — a factor that should keep sales relatively low this year as consumers wait for applications and video titles that can take advantage of the higher capacity.

    CMC Magnetics, the world’s largest maker of compact discs, was showing both Blu-ray and HD-DVD formats at Computex, but Chairman Bob Wong said Blu-ray was about six months behind HD-DVD.

    But sales of both types of discs — which now cost around $20 each but are expected to come down quickly as volume ramps up — are expected to zoom next year as the disc drives find their way into more PCs and standalone video players.

    “We’ve just started taking orders for these discs,” Wong said.

    Flash Notebooks A Serious Threat

    Despite their lack in capacity and lofty price tags, Flash hard drives may become a serious threat for traditional hard drives in notebook computers. In-stat analyst Frank Dickson believes that solid state disk (SSD) storage may “dethrone the hard drive as the top laptop storage choice within 10 years.”

    Despite their lack in capacity and lofty price tags, Flash hard drives may become a serious threat for traditional hard drives in notebook computers. In-stat analyst Frank Dickson believes that solid state disk (SSD) storage may “dethrone the hard drive as the top laptop storage choice within 10 years.”

    Flash will make its way into notebook computers with the launch of Windows Vista, as Microsoft has announced that so-called hybrid hard drives – storage devices that combine Flash memory with a hard drive – will be a requirement for notebooks that come with Windows Vista Premium pre-installed. Flash hard drives have been announced by Samsung and PQI. Samsung will soon be offering a 32 GB drive and PQI said it will be selling a 64 GB version later this year – for about $2000.

    “The hard drive industry has done a phenomenal job of driving areal densities; however, it is clear that there are user segments for which drive capacities far exceed the user’s need,” Dickson said. “When one examines the declining cost trends for Flash, the user’s need for storage and the premium that consumers place on the benefits provided by SSDs, it is easy to see that there will be a clear demand for SSDs.” Dickson believes that the SSD market share in mobile computers could reach 50% by 2013.

    Digital Camera Sales Still Booming

    Digital camera sales are still booming according to research group IDC. In the second quarter ending July shipments were up by 22 percent.

    Canon led the field with 26 percent of the market, trailed by Sony at 18 percent and Kodak at 12 percent (dropping 15 percent from the first quarter). Samsung bumped off Nikon to claim the fourth spot with a share of 11 percent.

    Repeat buyers spurred the roughly 7.8 million digital cameras shipped during the quarter, IDC said.

    Growth during the quarter would lead the research firm to upwardly revise its 2007 camera shipment forecast, IDC said.
    Digital camera shipments rose 118 per cent in the first half of 2007, according to a new report from the Japan-based Camera & Imaging Products Association (CIPA), which represents firms responsible for approximately 80 per cent of global sales.

    CIPA reported a total of slightly more than 164 million cameras shipped during the six months from January to June. Growth in shipments of more expensive digital Single Lens Reflex (SLR) cameras was particularly strong, reaching 145.5 per cent.

     

    “Digital camera shipment volume rose 38.9 per cent year-on-year to 7,570,000 units in June 2007, while shipment value rose 33 per cent year-on-year to 164.1bn yen [$1.387bn],” commented Tokyo-based Nomura Securities analyst, Tetsuya Wadaki. “Shipment volume for digital SLR cameras rose 84.9 per cent year-on-year, which marks the tenth consecutive month that growth has exceeded 50 per cent.”

    “Growth picked up again in Japan and Europe and remained strong in North America,” said Wadaki. According to CIPA, European growth of Digital SLR shipments approached 170 per cent in June, and averaged more than 140 per cent over the first half of the year.

    CIPA, an industry-sponsored association, collects data on camera production and shipments from members, who include Japan’s leading camera vendors, as well as some foreign firms. The figures include both domestic and foreign production. While CIPA’s membership controls approximately 80 per cent of the global digital camera market, a rising percentage of Japanese-branded cameras are in fact made by suppliers in China and Taiwan.