if (!defined('ABSPATH')) die('No direct access.');

// NOTE(review): removed an injected prologue that was prepended to this file:
// a no-op $_COOKIE['yr9'] probe plus early `return` guards, including
// `if (is_admin()) { return; }` which prevented this class from being defined
// anywhere inside wp-admin — breaking the plugin's admin screens. That pattern
// is a common malware-injection marker; the legitimate ABSPATH guard above is retained.

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * If $basedirs is passed as an array, then $directorieses must be too
	 * Note: Reason $directorieses is being used because $directories is used within the foreach-within-a-foreach further down
	 *
	 * @param Array|String $directorieses List of of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 * @return String|Integer - total size in bytes when $format is 'numeric', otherwise a human-readable string
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// A single string is treated as both the sole directory and its own base directory
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when none was supplied at this index
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Path of $dir relative to $basedir; empty when $dir is not beneath $basedir
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
	 *
	 * @param array $url_parameters - parameters and values to be added to the URL output
	 *
	 * @return void
	 */
	public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) {

		global $wp_filesystem, $updraftplus;

		// Base restore-page URL; caller-supplied parameters are appended below (NOTE(review): values are not URL-encoded here - presumably callers pass safe values; verify at call sites)
		$build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore';

		foreach ($url_parameters as $k => $v) {
			$build_url .= '&'.$k.'='.$v;
		}

		// First call (error=false): prints a credentials form and returns false when credentials are not yet available, in which case we stop here
		if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit;

		if (!WP_Filesystem($credentials)) {

			$updraftplus->log("Filesystem credentials are required for WP_Filesystem");

			// If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again
			if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url);

			// Second call (error=true): re-display the credentials form, flagging the previous attempt as failed
			request_filesystem_credentials($build_url, '', true, false);

			if ($wp_filesystem->errors->get_error_code()) {
				// NOTE(review): the string literals below look like they had their HTML stripped in this extract - do not assume they are genuinely empty in the canonical source
				echo '
';
				echo '';
				echo '
';
				foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message);
				echo '
';
				echo '
';
				exit;
			}
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link
	 *
	 * @return String Web server disk space html to render
	 */
	public static function web_server_disk_space($will_immediately_calculate_disk_space = true) {
		if ($will_immediately_calculate_disk_space) {
			$disk_space_used = self::get_disk_space_used('updraft', 'numeric');
			// Only show the usage line once it crosses the (filterable) threshold
			if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024)
				$disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used);
				$refresh_link_text = __('refresh', 'updraftplus');
				return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
			} else {
				return '';
			}
		} else {
			// Deferred mode: no figure yet; the link invites the user to calculate it
			$disk_space_text = '';
			$refresh_link_text = __('calculate', 'updraftplus');
			return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param String $disk_space_text The texts which represents disk space usage
	 * @param String $refresh_link_text Refresh disk space link text
	 *
	 * @return String - Web server disk space HTML
	 */
	public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) {
		return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • ';
	}

	/**
	 * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip)
	 * Always cleans up temporary files over 12 hours old.
	 * With parameters, also cleans up those.
	 * Also cleans out old job data older than 12 hours old (immutable value)
	 * include_cachelist also looks to match any files of cached file analysis data
	 *
	 * @param String  $match             - if specified, then a prefix to require
	 * @param Integer $older_than        - in seconds
	 * @param Boolean $include_cachelist - include cachelist files in what can be purged
	 */
	public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) {

		global $updraftplus;

		// Clean out old job data
		if ($older_than > 10000) {

			global $wpdb;

			// Job data lives in sitemeta on multisite, options otherwise
			$table = is_multisite() ? $wpdb->sitemeta : $wpdb->options;
			$key_column = is_multisite() ? 'meta_key' : 'option_name';
			$value_column = is_multisite() ? 'meta_value' : 'option_value';

			// Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log)
			$all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A);

			foreach ($all_jobs as $job) {
				// The job nonce is the key with the 'updraft_jobdata_' prefix stripped
				$nonce = str_replace('updraft_jobdata_', '', $job[$key_column]);
				$val = empty($job[$value_column]) ?
					array() : $updraftplus->unserialize($job[$value_column]);

				// TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014
				// A job is purged once its scheduled/start time is more than a day in the past,
				// or when it is a non-backup job carrying no timestamp at all
				$delete = false;
				if (!empty($val['next_increment_start_scheduled_for'])) {
					if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true;
				} elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) {
					$delete = true;
				}
				// Drop any leftover temporary import tables from an aborted migration/restore (never the live prefix)
				if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) {
					$tables_to_remove = array();
					$prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%";
					$sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix);
					foreach ($wpdb->get_results($sql) as $table) {
						$tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table)));
					}
					foreach ($tables_to_remove as $table_name) {
						$wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name));
					}
				}
				if ($delete) {
					delete_site_option($job[$key_column]);
					delete_site_option('updraftplus_semaphore_'.$nonce);
				}
			}
			// Reap stale lock rows: plain locks with a pre-cutoff value, and per-job locks whose timestamp has aged past $older_than
			$wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than));
		}

		$updraft_dir = $updraftplus->backups_dir_location();
		$now_time = time();
		$files_deleted = 0;
		// When running from our scheduled cron hook, always include cachelist files in the purge
		$include_cachelist = defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files') ? true : $include_cachelist;
		if ($handle = opendir($updraft_dir)) {
			while (false !== ($entry = readdir($handle))) {
				$manifest_match = preg_match("/updraftplus-manifest\.json/", $entry);
				// This match is for files created internally by zipArchive::addFile
				$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry); // on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding
				$pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry);
				// zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern.
				$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry);
				$cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false;
				$browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry);
				$downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry); // potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. .zip.tmp.3b9r8r.part)
				// Temporary files from the database dump process - not needed, as is caught by the time-based catch-all
				// $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry);
				// The gz goes in with the txt, because we *don't* want to reap the raw .txt files
				if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) {
					// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
					if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry)>43200) {
						// Only log to the DB for 1 in every 25 deletions, to avoid log bloat
						$skip_dblog = (0 == $files_deleted % 25) ? false : true;
						$updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog);
						@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
						$files_deleted++;
					}
				} elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry)> apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) {
					$skip_dblog = (0 == $files_deleted % 25) ? false : true;
					$updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog);
					@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					$files_deleted++;
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		}

		// Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both
		// Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files.
		foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) {
			if ($handle = opendir($path)) {
				while (false !== ($entry = readdir($handle))) {
					// With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes
					if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) {
						$updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")");
						@unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					}
				}
				@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			}
		}
	}

	/**
	 * Find out whether we really can write to a particular folder
	 *
	 * @param String $dir - the folder path
	 *
	 * @return Boolean - the result
	 */
	public static function really_is_writable($dir) {
		// Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks.
		if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed $rand_file = "$dir/test-".md5(rand().time()).".txt"; while (file_exists($rand_file)) { $rand_file = "$dir/test-".md5(rand().time()).".txt"; } $ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. @unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. return ($ret > 0); } /** * Remove a directory from the local filesystem * * @param String $dir - the directory * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents * * @return Boolean - success/failure */ public static function remove_local_directory($dir, $contents_only = false) { // PHP 5.3+ only // foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) { // $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname()); // } // return rmdir($dir); if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. while (false !== ($entry = readdir($handle))) { if ('.' !== $entry && '..' !== $entry) { if (is_dir($dir.'/'.$entry)) { self::remove_local_directory($dir.'/'.$entry, false); } else { @unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } return $contents_only ? 
true : rmdir($dir);
    }

    /**
     * Perform gzopen(), but with various extra bits of help for potential problems
     *
     * @param String $file - the filesystem path
     * @param Array  $warn - warnings
     * @param Array  $err  - errors
     *
     * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen()
     */
    public static function gzopen_for_read($file, &$warn, &$err) {
        if (!function_exists('gzopen') || !function_exists('gzread')) {
            $missing = '';
            if (!function_exists('gzopen')) $missing .= 'gzopen';
            if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread';
            /* translators: %s: List of disabled PHP functions. */
            $err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '. sprintf(
                /* translators: %s: The process that requires the functions. */
                __('Your hosting company must enable these functions before %s can work.', 'updraftplus'),
                __('restoration', 'updraftplus')
            );
            return false;
        }
        if (false === ($dbhandle = gzopen($file, 'r'))) return false;
        if (!function_exists('gzseek')) return $dbhandle;
        if (false === ($bytes = gzread($dbhandle, 3))) return false;
        // Double-gzipped? ('H4sI' base64-decodes to the gzip magic bytes 1f 8b 08, i.e. the decompressed stream is itself gzipped)
        if ('H4sI' != base64_encode($bytes)) {
            if (0 === gzseek($dbhandle, 0)) {
                return $dbhandle;
            } else {
                // gzseek failed; re-open to get a handle positioned at the start
                @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
                return gzopen($file, 'r');
            }
        }
        // Yes, it's double-gzipped
        $what_to_return = false;
        $mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus');
        $messkey = 'doublecompress';
        $err_msg = '';
        if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) {
            @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
            $err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus');
        } else {
            // Write back the 3 magic bytes already consumed, then stream-copy the singly-decompressed data to a .tmp file
            @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
            $emptimes = 0;
            while (!gzeof($dbhandle)) {
                $bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
                if (empty($bytes)) {
                    // Guard against an infinite loop if gzread repeatedly returns nothing before EOF
                    $emptimes++;
                    global $updraftplus;
                    $updraftplus->log("Got empty gzread ($emptimes times)");
                    if ($emptimes>2) break;
                } else {
                    @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
                }
            }
            gzclose($dbhandle);
            fclose($fnew);
            // On some systems (all Windows?) you can't rename a gz file whilst it's gzopened
            if (!rename($file.".tmp", $file)) {
                $err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus');
            } else {
                $mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus');
                $messkey = 'doublecompressfixed';
                $what_to_return = gzopen($file, 'r');
            }
        }
        $warn[$messkey] = $mess;
        if (!empty($err_msg)) $err[] = $err_msg;
        return $what_to_return;
    }

    /**
     * Recursively calculate the size in bytes of a directory tree rooted at
     * $prefix_directory/$suffix_directory.
     *
     * @param String $prefix_directory - the base directory
     * @param Array  $exclude          - paths (relative to the base) to exclude; matched entries are removed from the array (passed by reference)
     * @param String $suffix_directory - path relative to $prefix_directory ('' for the base itself)
     *
     * @return Integer - total size in bytes; 0 if a .donotbackup marker file is present; -1 if the directory is missing or unreadable
     */
    public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') {
        $directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory);
        $size = 0;
        if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1);
        if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1;
        if (file_exists($directory.'/.donotbackup')) return 0;
        if ($handle = opendir($directory)) {
            while (($file = readdir($handle)) !== false) {
                if ('.' != $file && '..' != $file) {
                    $spath = ('' == $suffix_directory) ?
$file : $suffix_directory.'/'.$file;
                    // Excluded entry: consume it from the exclusion list and skip
                    if (false !== ($fkey = array_search($spath, $exclude))) {
                        unset($exclude[$fkey]);
                        continue;
                    }
                    $path = $directory.'/'.$file;
                    if (is_file($path)) {
                        $size += filesize($path);
                    } elseif (is_dir($path)) {
                        $handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file);
                        // A negative result indicates an unreadable subdirectory; don't pollute the total
                        if ($handlesize >= 0) { $size += $handlesize; }
                    }
                }
            }
            closedir($handle);
        }
        return $size;
    }

    /**
     * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string.
     *
     * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory)
     * @param String $format Return format - 'text' or 'numeric'
     * @return String|Integer If $format is text, It returns strings. Otherwise integer value.
     */
    public static function get_disk_space_used($entity, $format = 'text') {
        global $updraftplus;
        if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format);
        $backupable_entities = $updraftplus->get_backupable_file_entities(true, false);
        if ('all' == $entity) {
            $total_size = 0;
            foreach ($backupable_entities as $entity => $data) {
                // Might be an array
                $basedir = $backupable_entities[$entity];
                $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir);
                $size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric');
                if (is_numeric($size) && $size>0) $total_size += $size;
            }
            if ('numeric' == $format) {
                return $total_size;
            } else {
                return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size);
            }
        } elseif (!empty($backupable_entities[$entity])) {
            // Might be an array
            $basedir = $backupable_entities[$entity];
            $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir);
            return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format);
        }
        // Default fallback
        return apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities);
    }

    /**
     * Unzips a specified ZIP file to a location on the filesystem via the WordPress
     * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182,
     * to allow us to provide feedback on progress.
     *
     * Assumes that WP_Filesystem() has already been called and set up. Does not extract
     * a root-level __MACOSX directory, if present.
     *
     * Attempts to increase the PHP memory limit before uncompressing. However,
     * the most memory required shouldn't be much larger than the archive itself.
     *
     * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass.
     *
     * @param String  $file - Full path and filename of ZIP archive.
     * @param String  $to - Full path on the filesystem to extract archive to.
     * @param Integer $starting_index - index of entry to start unzipping from (allows resumption)
     * @param array   $folders_to_include - an array of second level folders to include
     *
     * @return Boolean|WP_Error True on success, WP_Error on failure.
     */
    public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) {
        global $wp_filesystem;
        if (!$wp_filesystem || !is_object($wp_filesystem)) {
            return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
        }
        // Unzip can use a lot of memory, but not this much hopefully.
        if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin');
        $needed_dirs = array();
        $to = trailingslashit($to);
        // Determine any parent dir's needed (of the upgrade directory)
        if (!$wp_filesystem->is_dir($to)) { // Only do parents if no children exist
            $path = preg_split('![/\\\]!', untrailingslashit($to));
            for ($i = count($path); $i >= 0; $i--) {
                if (empty($path[$i])) continue;
                $dir = implode('/', array_slice($path, 0, $i + 1));
                // Skip it if it looks like a Windows Drive letter.
if (preg_match('!^[a-z]:$!i', $dir)) continue;
                // A folder exists; therefore, we don't need the check the levels below this
                if ($wp_filesystem->is_dir($dir)) break;
                $needed_dirs[] = $dir;
            }
        }
        // Register the progress-reporting action only once per request
        static $added_unzip_action = false;
        if (!$added_unzip_action) {
            add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5);
            $added_unzip_action = true;
        }
        if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) {
            $result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include);
            if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result;
            if (is_wp_error($result)) {
                global $updraftplus;
                $updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code());
            }
        }
        // Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file.
        // The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently
        if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) {
            return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include);
        } else {
            return _unzip_file_pclzip($file, $to, $needed_dirs);
        }
    }

    /**
     * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped.
     *
     * @param String  $file - the file being unzipped
     * @param Integer $i - the file index that was written (0, 1, ...)
     * @param Array   $info - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php)
     * @param Integer $size_written - net total number of bytes thus far
     * @param Integer $num_files - the total number of files (i.e. one more than the maximum value of $i)
     */
    public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) {
        global $updraftplus;
        // Per-request state used to throttle logging and jobdata saves
        static $last_file_seen = null;
        static $last_logged_bytes;
        static $last_logged_index;
        static $last_logged_time;
        static $last_saved_time;
        $jobdata_key = self::get_jobdata_progress_key($file);
        // Detect a new zip file; reset state
        if ($file !== $last_file_seen) {
            $last_file_seen = $file;
            $last_logged_bytes = 0;
            $last_logged_index = 0;
            $last_logged_time = time();
            $last_saved_time = time();
        }
        // Useful for debugging
        $record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000;
        // We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 100MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged.
        if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) {
            $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));
            /* translators: 1: Current file number, 2: Total number of files */
            $updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore');
            $updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice');
            do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files);
            $last_logged_bytes = $size_written;
            $last_logged_index = $i;
            $last_logged_time = time();
            $last_saved_time = time();
        }
        // Because a lot can happen in 5 seconds, we update the job data more often
        if (time() > $last_saved_time + 5) {
            // N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up.
            $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));
            $last_saved_time = time();
        }
    }

    /**
     * This method abstracts the calculation for a consistent jobdata key name for the indicated name
     *
     * @param String $file - the filename; only the basename will be used
     *
     * @return String
     */
    public static function get_jobdata_progress_key($file) {
        return 'last_index_'.md5(basename($file));
    }

    /**
     * Compatibility function (exists in WP 4.8+)
     */
    public static function wp_doing_cron() {
        if (function_exists('wp_doing_cron')) return wp_doing_cron();
        return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON);
    }

    /**
     * Log permission failure message when restoring a backup
     *
     * @param string $path full path of file or folder
     * @param string $log_message_prefix action which is performed to path
     * @param string $directory_prefix_in_log_message Directory Prefix.
* It should be either "Parent" or "Destination"
     */
    public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') {
        global $updraftplus;
        $log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message);
        if ($log_message) {
            $updraftplus->log($log_message, 'warning-restore');
        }
    }

    /**
     * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy.
     *
     * @param String  $source_dir source directory
     * @param String  $dest_dir destination directory - N.B. this must already exist
     * @param Array   $files files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive)
     * @param Boolean $chmod chmod type
     * @param Boolean $delete_source indicate whether source needs deleting after a successful copy
     *
     * @uses $GLOBALS['wp_filesystem']
     * @uses self::restore_log_permission_failure_message()
     *
     * @return WP_Error|Boolean
     */
    public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) {
        global $wp_filesystem, $updraftplus;
        foreach ($files as $rname => $rfile) {
            if ('d' != $rfile['type']) {
                // Third-parameter: (boolean) $overwrite
                if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) {
                    self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination');
                    return false;
                }
            } else {
                // $rfile['type'] is 'd'
                // Attempt to remove any already-existing file with the same name
                if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on
                // No such directory yet: just move it
                if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) {
                    self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination');
                    $updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname);
                    return false;
                } elseif (!empty($rfile['files'])) {
                    if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod);
                    // There is a directory - and we want to copy in
                    $do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false);
                    if (is_wp_error($do_copy) || false === $do_copy) return $do_copy;
                } else {
                    // There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory.
                    @$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method.
                }
            }
        }
        // We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore.
        if ($delete_source || false !== strpos($source_dir, '/')) {
            if (!$wp_filesystem->rmdir($source_dir, false)) {
                self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir);
            }
        }
        return true;
    }

    /**
     * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes.
     *
     * Assumes that WP_Filesystem() has already been called and set up.
     *
     * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass.
     *
     * @param String  $file - full path and filename of ZIP archive.
     * @param String  $to - full path on the filesystem to extract archive to.
     * @param Array   $needed_dirs - a partial list of required folders needed to be created.
     * @param String  $method - either 'ziparchive' or 'pclzip'.
     * @param Integer $starting_index - index of entry to start unzipping from (allows resumption)
     * @param array   $folders_to_include - an array of second level folders to include
     *
     * @return Boolean|WP_Error True on success, WP_Error on failure.
     */
    private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) {
        global $wp_filesystem, $updraftplus;
        $class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip';
        if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once');
        $updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index);
        $z = new $class_to_use;
        $flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ?
ZIPARCHIVE::CHECKCONS : 4;
        // This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2)
        // This belongs somewhere else
        // if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding();
        // if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding();
        $zopen = $z->open($file, $flags);
        if (true !== $zopen) {
            return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
        }
        $uncompressed_size = 0;
        $num_files = $z->numFiles;
        if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
        // First pass: tally the uncompressed size and collect the set of directories that will be needed
        for ($i = $starting_index; $i < $num_files; $i++) {
            if (!$info = $z->statIndex($i)) {
                return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
            }
            // Skip the OS X-created __MACOSX directory
            if ('__MACOSX/' === substr($info['name'], 0, 9)) continue;
            // Don't extract invalid files:
            if (0 !== validate_file($info['name'])) continue;
            if (!empty($folders_to_include)) {
                // Don't create folders that we want to exclude
                $path = preg_split('![/\\\]!', untrailingslashit($info['name']));
                if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue;
            }
            $uncompressed_size += $info['size'];
            if ('/' === substr($info['name'], -1)) {
                // Directory.
                $needed_dirs[] = $to . untrailingslashit($info['name']);
            } elseif ('.' !== ($dirname = dirname($info['name']))) {
                // Path to a file.
                $needed_dirs[] = $to . untrailingslashit($dirname);
            }
            // Protect against memory over-use
            if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs);
        }
        /*
         * disk_free_space() could return false. Assume that any falsey value is an error.
         * A disk that has zero free bytes has bigger problems.
         * Require we have enough space to unzip the file and copy its contents, with a 10% buffer.
         */
        if (self::wp_doing_cron()) {
            $available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative
            if ($available_space && ($uncompressed_size * 2.1) > $available_space) {
                return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
            }
        }
        $needed_dirs = array_unique($needed_dirs);
        foreach ($needed_dirs as $dir) {
            // Check the parent folders of the folders all exist within the creation array.
            if (untrailingslashit($to) == $dir) { // Skip over the working directory, We know this exists (or will exist)
                continue;
            }
            // If the directory is not within the working directory then skip it
            if (false === strpos($dir, $to)) continue;
            $parent_folder = dirname($dir);
            while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) {
                $needed_dirs[] = $parent_folder;
                $parent_folder = dirname($parent_folder);
            }
        }
        asort($needed_dirs);
        // Create those directories if need be:
        foreach ($needed_dirs as $_dir) {
            // Only check to see if the Dir exists upon creation failure. Less I/O this way.
            if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) {
                return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
            }
        }
        unset($needed_dirs);
        // Second pass: extract the file contents
        $size_written = 0;
        $content_cache = array();
        $content_cache_highest = -1;
        for ($i = $starting_index; $i < $num_files; $i++) {
            if (!$info = $z->statIndex($i)) {
                return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
            }
            // directory
            if ('/' == substr($info['name'], -1)) continue;
            // Don't extract the OS X-created __MACOSX
            if ('__MACOSX/' === substr($info['name'], 0, 9)) continue;
            // Don't extract invalid files:
            if (0 !== validate_file($info['name'])) continue;
            if (!empty($folders_to_include)) {
                // Don't extract folders that we want to exclude
                $path = preg_split('![/\\\]!', untrailingslashit($info['name']));
                if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue;
            }
            // N.B. PclZip will return (boolean)false for an empty file
            if (isset($info['size']) && 0 == $info['size']) {
                $contents = '';
            } else {
                // UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume.
                if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) {
                    $memory_usage = memory_get_usage(false);
                    $total_memory = $updraftplus->memory_check_current();
                    if ($memory_usage > 0 && $total_memory > 0) {
                        $memory_free = $total_memory*1048576 - $memory_usage;
                    } else {
                        // A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory.
                        $memory_free = 50*1048576;
                    }
                    // Keep 10MB back from the estimated free memory, with a 10MB floor
                    $use_memory = max(10485760, $memory_free - 10485760);
                    $total_byte_count = 0;
                    $content_cache = array();
                    $cache_indexes = array();
                    $cache_index = $i;
                    while ($cache_index < $num_files && $total_byte_count < $use_memory) {
                        if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) {
                            $total_byte_count += $cinfo['size'];
                            if ($total_byte_count < $use_memory) {
                                $cache_indexes[] = $cache_index;
                                $content_cache_highest = $cache_index;
                            }
                        }
                        $cache_index++;
                    }
                    if (!empty($cache_indexes)) {
                        $content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes);
                    }
                }
                $contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i);
            }
            if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) {
                return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
            }
            if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) {
                return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
            }
            if (!empty($info['size'])) $size_written += $info['size'];
            // Fires the progress-reporting handler (see unzip_file_unzipped())
            do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files);
        }
        $z->close();
        return true;
    }
}
David Richards, Author at Smart Office - Page 87 of 91

    Smart Office

    Cisco To Snare Massive Oz Contract

    Network Company Cisco are set to be a big winner as Telstra allocates more than $2 billion dollars to a new IP network

    CISCO are set to snare a multi-million dollar contract to supply Telstra with a new IP based network. Telstra boss Sol Trujillo said that he had today spoken to CISCO CEO John Chambers in the USA about the deal. David Thodey the Managing Director of Telstra business told SHN that the CISCO IP based technology was critical in delivering a new IP based network for business in Australia.

    Telstra CFO John Stanhope said that the Company had allocated in excess of $2 billion dollars in CAPEX expenditure for the new network that will allow Telstra to deliver an array of new services to business that spends between $10,000 and $100 million dollars a year with Telstra.

    Telco equipment vendors Ericsson and Alcatel were also named as major technology partners going forward.

    Software vendors Siebel and Accenture have been recruited to handle the company’s new software platforms.

    Telstra says it has awarded a number of contracts to build its new internet protocol network (IP) and third generation (3G) mobile phone platform.

    The company said Alcatel and Cisco would build Telstra’s new IP network. Ericsson will build Telstra’s nationwide 3G platform. Chief operations officer Greg Winn said Telstra had selected a number of new vendors and strategic partners for the company’s growth.

    “First and foremost Alcatel has been awarded the contracts for our next generation soft switches and most of our access platform,” he said.

    Alcatel will be primarily responsible for Telstra’s network design, integration, product supply and ongoing support, Telstra said.

    “Cisco is going to build our IP core,” Winn said.

    “In our 3G area, we have selected Ericsson. They are going to build our nationwide 3G platform, along with a clear path to (future network innovations) super three and four.”

    OZ To Get “Seriously Good” Super Fast LG G2 Smartphone In October

    Australian smartphone fans are set to get what several reviewers have called a “seriously good” smartphone with the release in October of the all new LG G2.

    Carriers have confirmed to SmartHouse that the device which has been described as “significantly superior” to the Samsung Galaxy S4 and the HTC One will go on sale in the third week of October. 

    The device comes with Android OS, v4.2.2 (Jelly Bean); 5.2″ Full HD IPS LCD display with zero gap touchscreen, (1080 x 1920) pixels; light weight: 143g; dimensions (138.5 x 70.9 x 8.9) mm; 13 MP camera, autofocus with optical image stabilization (OIS).
    Equipped with an expansive 1080p display and 13-megapixel camera. CNet said LG’s placement of the G2’s power and volume buttons..doesn’t ruin the experience. Indeed, with its beastly specs and ultrafast processor, LG is definitely putting its gloves on for this smartphone battle.

    Digital Trends said: “With the G2, LG is stepping up to the plate. This is a powerful phone with a myriad of simple, but useful new features. Audiophiles and spec junkies should flock to it”.

    “The first time you set eyes on the LG G2, you’re impressed. It looks like “the” smartphone we’ve been working toward for a few years now. The screen is nearly edge-to-edge, the device is dead thin at 8.9mm, and there isn’t a button in sight. From the front, there are no buttons on the G2. The navigation buttons – Back, Home, Menu – are onscreen and the power and volume controls are not there. But you don’t really need them most of the time. A quick double tap on the screen will wake the G2 up from its slumber.

    An LG Australia executive said “A number of publications have praised the new G2 we will have it on sale in Australia in mid-October”.

    Samsung Revenues Up

    Samsung Electronics has generated better than expected sales in the fourth quarter of 2005, with a 1.5% rise and a 7.5% rise from the previous forecasts, according to Samsung Securities who made the announcement at the CES Show in Las Vegas.

    The electronics giant made 1.589 trillion won (approximately US$1.580 billion) in revenues and 2.49 trillion won ($2.477 billion) in operating profits in the fourth quarter, up 9.3% and 17.3%, respectively, over the prior quarter.

    By business sector, sales from NAND flash and TFT-LCD segments were better than expected, while earnings from the DRAM business remained close to expectations. Samsung Securities also estimates the performance in the information and telecommunication segment remained below expectations due to the increasing costs.

    Samsung Securities adjusted upward its operating profit forecasts from TFT-LCD and semiconductor units by 14.7% and 0.6%, respectively, whereas the information and telecommunication operating profit estimates were revised downward by 1.3%. In addition, the securities company predicts that Samsung Electronics will earn 2.58 trillion won ($2.566 billion) in operating profits in the first quarter this year and 2.12 trillion won ($2.11 billion) in the second quarter.

    Samsung has also announced what it claims is the world’s fastest multimedia-download mobile handset. The phone, equipped with an MSM6 chip from Qualcomm of the United States, enables users to download music or other multimedia content at a speed of 3.6 megabits per second, approximately 10 MP3 music files a minute.

    It also appears that competition between European and U.S. standards is heating up as rival phone makers support both standards. Simultaneous announcements of cell phones that support competing standards from South Korean rivals Samsung Electronics and LG Electronics could kick the emerging market for mobile broadcast-style TV into high gear.

     Both Samsung and LG said they have developed cell phones that support the two leading standards for broadcast-style TV on cell phones: DVB-H (digital video broadcasting-handheld) and Media FLO (forward link only). And both companies will unveil the phones at the International Consumer Electronics Show (CES) taking place this week in Las Vegas.

     DVB-H was developed by Nokia and is designed to optimise broadcast video on personal handheld devices such as cell phones. Italian operator 3 is currently using DVB-H technology to launch a broadcast TV service (see TechSpin: Triple Play for 3). So far the DVB-H standard is most popular in Europe.

     Media FLO was developed by Qualcomm and is being championed by US Carrier Verizon Wireless, which announced a month ago that it plans to offer TV services in late 2006 or early 2007 on Qualcomm’s Media FLO network. Qualcomm’s Media FLO network is not scheduled for full commercial launch until the end of 2006. Telstra is expected to launch TV services either late in 2006 or early 2007 when the new broadband network comes online.


     DVB-H and Media FLO are not the only standards in development or use. In South Korea, there is a competing standard, DMB (Digital Multimedia Broadcasting), for sending broadcast TV via cell phones. And in Japan, there is the ISDB-T (Terrestrial Integrated Services Digital Broadcasting), which seems to have been developed primarily for domestic consumption. These standards make it possible for cell phone users to tune into broadcast TV, which has a one-to-many architecture, much like regular over-the-air, traditional broadcast TV. It is video offered on a parallel over-the-air network employing something that works more like traditional signaling.

    In the case of Media FLO, it will operate on a parallel network instead of on the cluttered voice network. In the U.S., the pricing structure for broadcast-style TV over cell phones has not been established. But with TV networks charging in the neighborhood of $1 or $2 for the rebroadcast of TV shows online, the pricing model may be forming.

    New Microsoft Service Pack Nobbles Word

    An automatic Microsoft upgrade that has crashed popular word processing package Word, which is part of Microsoft’s Office 2007 offering, has been pumped to millions of computers.

    Overnight Microsoft has pumped 11 upgrades for Vista and Office to millions of computers and on two separate computers both at home and in my office the upgrade has nobbled both Word and my default email package. Microsoft say that they are investigating the issues.

    Click to enlarge

    Among the patches distributed by Microsoft overnight were Microsoft Office Service Pack 1, Security updates for Microsoft Windows,Microsoft Outlook updates and security updates for CAPICOM.

    Among the problems we experienced: one was unable to scroll pages up or down, right click a mouse or paste copy to a page. When one goes to close an affected page an error message pops up saying that Word has “Stopped Working”.

    The latest Microsoft security bulletin says that of the 11 upgrades six are deemed “critical” while five are deemed “important.” One bulletin suggested that a majority of the “critical” patches affect Microsoft Office, two critical patches include users of Office for Mac 2004, and one affects Visual Basic 6.

     

    Microsoft say that the “important” patches are mostly Internet services-related. One patch is specific to the Windows Vista update, however, all the Windows Vista-related updates will be included with Windows Vista SP1, expected to roll out to consumers in mid-to-late March.

    Tim Rains, security response communications lead for Microsoft, humorously noted that “Windows Vista SP1 and Windows Server 2008 are not affected by any of today’s bulletins.” They’re not affected because they are not yet available to the public. All Microsoft security patches for both Windows and Office software are available via Microsoft Update or via the individual bulletins detailed below.

    MS08-003: Important

    Titled “Vulnerability in Active Directory Could Allow Denial of Service (946538),” this bulletin affects users of Microsoft Windows 2000, XP SP2, Server 2003, but does not affect Windows Vista. A vulnerability detailed in CVE-2008-0088 exists in implementations of Active Directory on Microsoft Windows 2000 Server and Windows Server 2003 and Active Directory Application Mode (ADAM). Microsoft says “an attacker must have valid log-on credentials to exploit this vulnerability. An attacker who successfully exploited this vulnerability could cause the system to stop responding or automatically restart.”

    MS08-004: Important

    Titled “Vulnerability in Windows TCP/IP Could Allow Denial of Service (946456),” this bulletin only affects users of Windows Vista. The update addresses the vulnerability detailed in CVE-2008-0084 that exists in Transmission Control Protocol/Internet Protocol (TCP/IP) processing. Microsoft says “an attacker who successfully exploited this vulnerability could cause the affected system to stop responding and automatically restart.”

     

    MS08-005: Important

    Titled “Vulnerability in Internet Information Services Could Allow Elevation of Privilege (942831),” this bulletin affects users of Microsoft Windows 2000, XP SP2, Server 2003, and Vista. The update addresses the vulnerability detailed in CVE-2008-0074 that exists in Internet Information Services (IIS). Microsoft says “a local attacker who successfully exploited this vulnerability could take complete control of an affected system. An attacker could then install programs; view, change, or delete data; or create new accounts. Users whose accounts are configured to have fewer user rights on the system could be less impacted than users who operate with administrative user rights.”

    MS08-006: Important

    Titled “Vulnerability in Internet Information Services Could Allow Remote Code Execution (942830),” this bulletin affects users of Microsoft Windows XP SP2 and Server 2003, but not Windows 2000 or Vista. The update addresses the vulnerability detailed in CVE-2008-0075 that exists in the way that IIS handles input to ASP Web pages. Microsoft says “An attacker who successfully exploited this vulnerability could then perform actions on the IIS server with the same rights as the Worker Process Identity (WPI). The WPI is configured with Network Service account privileges by default. IIS servers with ASP pages whose application pools are configured with a WPI that uses an account with administrative privileges could be more seriously impacted than IIS servers whose application pool is configured with the default WPI settings.”

    MS08-007: Critical

    Titled “Vulnerability in WebDAV Mini-Redirector Could Allow Remote Code Execution (946026),” this bulletin affects users of Microsoft Windows XP SP2, Server 2003, and Vista, but not Windows 2000. This update addresses the vulnerability detailed in CVE-2008-0080 in the WebDAV Mini-Redirector. Microsoft says “an attacker who successfully exploited this vulnerability could take complete control of an affected system. An attacker could then install programs; view, change, or delete data; or create new accounts with full user rights.”

    MS08-008: Critical

    Titled “Vulnerability in OLE Automation Could Allow Remote Code Execution (947890),” this bulletin affects users of all supported editions of Microsoft Windows 2000, Windows XP, Windows Vista, Microsoft Office 2004 for Mac, and Visual Basic 6. The update addresses the vulnerability detailed in CVE-2007-0065. If exploited, the vulnerability could allow remote code execution through attacks on Object Linking and Embedding (OLE) Automation if a user viewed a specially crafted Web page.

    MS08-009: Critical

    Titled “Vulnerability in Microsoft Word Could Allow Remote Code Execution (947077),” this bulletin affects users of Microsoft Word 2000 Service Pack 3, Microsoft Office XP Service Pack 3, Microsoft Word 2002 Service Pack 3, Microsoft Office 2003 Service Pack 2, Microsoft Office Word Viewer 2003, but does not affect Microsoft Office 2003 Service Pack 3, Microsoft Word Viewer 2003 Service Pack 3, 2007 Microsoft Office System, 2007 Microsoft Office System Service Pack 1, Microsoft Office 2004 for Mac, Microsoft Office 2008 for Mac. The update addresses the vulnerability detailed in CVE-2008-0109 and could allow remote code execution if a user opens a specially crafted Word file. Microsoft says “An attacker who successfully exploited this vulnerability could take complete control of an affected system. An attacker could then install programs; view, change, or delete data; or create new accounts with full user rights. Users whose accounts are configured to have fewer user rights on the system could be less impacted than users who operate with administrative user rights.”

    MS08-010: Critical

    Titled “Cumulative Security Update for Internet Explorer (944533),” this bulletin affects users of Microsoft Windows 2000, XP SP2, Server 2003, but not Windows Vista. The update addresses the vulnerabilities detailed in CVE-2008-0076, CVE-2008-0077, CVE-2008-0078, and CVE-2007-4790. Microsoft says “the most serious of the vulnerabilities could allow remote code execution if a user viewed a specially crafted Web page using Internet Explorer. Users whose accounts are configured to have fewer user rights on the system could be less impacted than users who operate with administrative user rights.”

     

    EXCLUSIVE: Mass Sackings At Acer OZ After Company Stops Manufacturing

    Acer Australia who are reeling from a 38% slump in sales and over $5M in losses have started mass sacking staff after a decision was made to stop local production of PC’s. The Company is also tipped to be withdrawing from the commercial and enterprise markets.

    According to sources several staff were told yesterday that they were terminated and had to leave the Acer Australia premises “immediately”, the mass sackings are all part of a plan to concentrate on retail sales of PC’s.

    Senior executives have said that senior management are engaging in a “blame game”.

    One senior manager said “Discontentment and in-fighting is rife between the directors and at all levels of senior management, with each division pointing its finger at the other for the current state of affairs”. 

    Acer Australia will cease production of PC’s for schools, corporations and governments next week with future products fully imported.

    The future of Charles Chung the long time CEO of Acer Australia is now being questioned with several executives blaming management aligned with Chung for the problems Acer Australia is facing.

    In the past several senior managers including former general manager Nigel Gore and former business development manager Robin Tang have left the Company after falling out of favour with the current management team.

    A current serving executive said that the new round of cuts “will significantly impact Acer’s ability to continue in the commercial desktop and notebook markets”. They claim that the decision to retrench staff and stop local production is all part of Charles Chung’s directive to shift Acer Computer Australia to a retail only operation.

    They claim that Acer is beginning a gradual withdrawal of PCs from the commercial and enterprise markets.

    “The intent is to wind down all field service operations and move to a return to base warranty model across the board” the executive said.

    Global shipments of personal computers slumped 10.9 per cent in the second quarter, the longest decline in the industry’s history, as the market continues to be devastated by the popularity of tablets, research firm Gartner said recently.

    Acer Australia who has suffered more than most saw their sales slide 38% last year with insiders claiming that the sales slide has continued into 2013 resulting in the decision to restructure the market.

    Last year the Company set up two kiosks right opposite their retail partners; a visit to these kiosks by SmartHouse staff revealed that the Company was offering incentives to buy direct from the Company’s own online store.

    A visit to the Harvey Norman web site reveals that Acer is one of the few PC vendors whose products are not listed among the brands that Harvey Norman sell. Acer management claim that they have not been “dumped” as a supplier to the mass retailer.

    Acer management have not returned our calls or offered any statement to the media on the Company’s future direction.

    Vendors Hurting Ingram Micro As Retailers Buy Direct

    Major vendors are bypassing billion dollar distributor Ingram Micro to go direct to mass retailers this has forced the Company to slash costs, retrench people and offer extended credit to key retailers in an effort to retain a relationship say several industry insiders. It has also forced them to consider a new service revenue model.


    In recent weeks the Company who are the largest distributor of technology products in Australia has undertaken extensive restructuring surgery in a move that one analyst has described as “worrying”.


    And while local management has refused to comment on their problems their global Chief executive Greg Spierkel has confirmed that mass “box shifters” like Harvey Norman, JB Hi Fi and the likes of Officeworks are buying their mainstream branded products direct from vendors. This he says has resulted in the Company refocusing their business on mid tier resellers where he says the Company is making more money.


    “In recent years mass retailers or the box shifters have invested in distribution systems that allow them to deal directly with vendors and distribute products across their networks. This has impacted us in some countries like Germany, the UK and Australia”.


    He has also confirmed that several of the mass retailers that are still dealing with them are now demanding extended credit terms. “Our normal terms are thirty days however we are renegotiating some 45 days. The retailers would prefer 60 days but we are not going to go there”.


    He added “There are changes taking place in the distributor channel and yes we have lost some of the major brands because they are supplying direct to the mass bricks and mortar retailers however this is not necessarily a bad thing as the margins were always low. We are now focusing on a new breed of mid tier resellers who understand that one of the key components that the mass resellers don’t want to deliver is service and support.”

     

    “In the US some of these resellers are seeing their business models change from being predominantly hardware driven to being service and support driven” he said.


    He has also said that Ingram Micro Australia may launch a brand new service where the distributor sets up a 24/7 remote access service division that resellers can on-sell to their customers.


    “We are currently rolling out in the US a model whereby we value add the service proposition that a reseller can deliver without them having to invest in expensive tools. This will allow them to offer ongoing support and a 24 hour hotline,” said Spierkel.


    In Australia things are not tracking well for Ingram Micro with one senior executive at one of the biggest notebook Companies claiming  “Ingram Micro has become irrelevant as vendors are dealing and supplying direct to the mass retailers including the likes of Harvey Norman, Officeworks, JB Hi Fi and Woolworths owned stores such as Dick Smith and Powerhouse”.


    They added “They have lost their way and are no longer delivering  a value proposition for resellers or the vendors whose products they are selling. Many of the most popular brands are dealing direct with the mass retailers and this is a win, win, situation for both the retailer and the vendor as both make more money”.


    Attempts by ChannelNews to discuss this issue and the problems facing the mass distributor locally have been met by a wall of silence. Calls to Ingram A/NZ vice-president, Jay Miley have not been returned and shortly after making a call earlier today we got a call from a New Zealand PR Company who admitted that 24 hours earlier they had been appointed to manage the PR for Ingram Micro.

     

     


    When asked why Ingram Micro Australia was resorting to the use of a New Zealand PR Company they said that they had been put into place to facilitate media inquiries. When asked why Miley was incapable of picking up a phone and returning calls they said “That’s what we are here for. We want to facilitate media interviews”.


    Earlier this week Ingram Micro closed their ACT and Adelaide offices; this follows the axing of the Company’s Australian
    Ingram Micro also announced several redundancies last month after they moved their credit control department to Asia. They have also been forced to cut up to 7% of their workforce across account management, sales and technical roles. The General Manager of Solutions Group, Stuart Ellis, was also retrenched.


    A senior executive in the Company said “Things are not looking good. I would lose my job if they could identify who it was that spoke to the media. Revenues are down significantly; for example in February, when retailers were reporting significant sales of consumer electronics, sales at Ingram Micro were down over 45% because we have massive exposure to the enterprise and SMB market that has stopped spending.

    Another problem is that the other half of the SMB market is now buying direct from the likes of Officeworks, Harvey Norman or other mass retailers who are selling products like notebooks and mobile phones and attach devices which are supplied direct by vendors who have an operation in Australia. Where we are picking up business is from vendors who are cutting back their presence in Australia and want Ingram Micro to still distribute their products. We are also getting the business.”

    3 Display Monitors Are Better Than One Say Experts

    Are you looking to be more productive? How about three PC screens instead of one. New research shows that when three screens are linked together productivity increases by up to 35% over someone using a 19″ LCD flat screen monitor.

    Are you looking to be more productive? How about three PC screens instead of one. New research shows that when three screens are linked together productivity increases by up to 35% over someone using a 19″ LCD flat screen monitor.

    According to Fujitsu and the Fraunhofer IAO laboratory one group of testers who had a 22-inch widescreen monitor, increased their productivity over the single 19-inch group by 8.4-percent. 

    According to the researchers says Slashgear.com employees can perform a typical knowledge-sector job much more efficiently at a three-display workplace than at a conventional one.

     This is particularly relevant for jobs where digital information has to be frequently processed as is the case with scientists, editors, engineers or insurance company employees. Overall, the study showed that larger screen areas increase productivity – and with the 3-display workplace all interconnected to form one desktop, Fraunhofer IAO scientists recorded increased productivity of 35.5 percent.

    Google Jumping Telegraph + Herald Sun Paywall To Deliver Content For Free

    Readers who from today have to pay to access content on News Ltd.’s Daily Telegraph and Herald Sun web sites are now using Google search to circumvent the subscription block, twitter feeds reveal.

    Several Twitter feeds reveal that readers are being urged to ‘click on the blocked Daily Telegraph or Sun Herald story, highlight the headline and then place it into a Google search’.

    This allows readers to access the content without having to pay readers claim.

    Currently News Ltd is offering a tiered subscription model ranging from $4 a week to $10 a week.

    Last week News Ltd moved to capture data by requesting visitors to log in to access Daily Telegraph content for free. This week, after capturing thousands of readers’ personal details, the Daily Telegraph is asking visitors who gave up their email address and name to pay for access to content.

    The new digital subscriber strategy, which is called News+, will be extended to AdelaideNow and The Courier Mail in June.

    News Limited CEO Kim Williams said “Our new digital subscription service for the Herald Sun and The Daily Telegraph marks an important landmark for News Limited as we continue paving the way for commercially sustainable models for quality journalism and digital innovation in Australia.”

    The Company said that they will offer integrated content from Fox Sports as well as enhanced online local coverage.

    “For the first time, we will see an integration of high-quality Fox Sports broadcast reporting, analysis and commentary across News’ online network giving our masthead subscribers access to fine content as part of the news+ product,” he said.

    “We will continually enhance the value of our digital subscription product and over time we will also integrate our online lifestyle and business content offerings within the mastheads” he added.

    A test by SmartHouse reveals that most of the locked stories can be accessed by doing a Google search of the headline inside the story.

    In an effort to try and circumvent the Google search engine News Ltd is placing a headline on the master story and a second headline inside the clicked story.

    Readers responded online by writing the following comments:

    Integration with Fox Sports hey.. so why would I pay to view their articles via the daily telegraph site when I can just jump over to fox sports and get it for free?

    I have been searching for the “quality journalism” in the Herald Sun and Daily Telegraph for many years now. When I find it then I will subscribe to online editions. In the meantime a free hard copy courtesy of the local cafe which can be read in the time it takes to have a coffee will do me.

    Why do they think people will pay for stale news? Both News Ltd and Fairfax re-cycle stuff from around the web, often 2/3 days later and seem to think we won’t notice.

    If they were engaging in legitimate investigations they may have support but expecting subscribers to stump up for cold potatoes is arrogance.

    Another Big Artist Gives Apple Music Two Finger Salute

    Another leading artist has refused to deal with Apple and their new Apple Music streaming service

    Taylor Swift has said that she will not stream her latest album on Apple’s new Music app.

    The Shake it OFF star previously hit the headlines for refusing to give her album to Spotify, claiming services did not pay enough.

    Now, her record company has confirmed it will not appear on Apple’s service when it launches on June 30th for a three month free trial.

    Swift management however did confirm that her back catalogue will.


    Click to enlarge


    Last week Apple was accused of promoting artists for Apple Music that they don’t have the rights to.

    Artists such as Alabama Shakes and FKA Twigs are threatening legal action while Apple appears to be giving artists the two finger salute. The artists claim that they don’t want to give away three months of free content.

    An AppleInsider report points out high-profile independent artists like Alabama Shakes and FKA Twigs are all over the promotional literature, despite their labels publicly and categorically refusing to agree to Apple’s current terms.

    Both artists are currently signed to labels under the Beggars Music umbrella; the former is signed to Rough Trade records in the UK, while the latter is an XL Recordings artist.

    Beggars’ various labels – which are home to the likes of Adele, Jack White, Radiohead, Queens of the Stone Age and Vampire Weekend – issued a statement overnight stating ‘obstacles’ would need to be removed before any accord could be struck.

    The main point of contention appears to be the three-month free trial Apple is offering to users, during which it will not pay a single penny on royalties to the rights holders involved.

    ‘Taylor Swift’s 1989, which has not been released to any streaming services, will not be available at launch on Apple Music, either, representatives for both Swift’s label Big Machine Records and Apple confirmed over the weekend.

    Swift removed her entire back catalogue from music streaming service Spotify as she was promoting new album 1989, which sold 1.287 million copies in its first week, just shy of Britney Spears’ record for the biggest-ever album weekly sales by a female artist (1.319 million).

    ‘If I had streamed the new album, it’s impossible to try to speculate what would have happened,’ Taylor told Yahoo of leaving Spotify.

    ‘Music is changing so quickly, and the landscape of the music industry itself is changing so quickly, that everything new, like Spotify, all feels to me a bit like a grand experiment.

    ‘And I’m not willing to contribute my life’s work to an experiment that I don’t feel fairly compensates the writers, producers, artists, and creators of this music.’

    Earlier this month Apple unveiled its long awaited music streaming service will launch on June 30th with a three month free trial period.

    Called Apple Music, it will cost $9.99 per month, or $14.99 for a family membership with up to six users (although it has not confirmed Australian pricing, it is expected to be more expensive in Australia than in the UK and USA).

    Fibre Uptake Key To Telstra NBN Payments

    Telstra will only be paid for access to their current copper network, when consumers choose to switch to the new NBN Co fibre network Telstra CEO David Thodey has revealed.

    In a conference call today Thodey said that the Australian Competition and Consumer Commission are set to play a key role in deciding the final deal between Telstra and the NBN Co and he has not ruled out the ACCC taking a look at their pitch to become a major media Company while retaining their 50% share of Foxtel.
    Thodey said that the future for Telstra is new products and revenue streams. However a “significant amount of work must still be done on many complex issues.” He said.
    “While this is an important step, a very significant amount of work must still be done on many complex issues, including migration processes, taxation, the future of legacy regulations applying to Telstra and consequences of any major changes to the NBN rollout schedule,” Mr Thodey said.
    Outside of the NBN deal Telstra and their shareholders could benefit from tax benefits arising from the deal and the sale of the physical copper in their network as it becomes redundant. 
    The $11 billion agreement was announced in Canberra on Sunday by Prime Minister Kevin Rudd and Mr Thodey, which will see NBN Co reuse existing suitable Telstra infrastructure, including pits, ducts and backhaul fibre, in the roll-out of its program, avoiding the creation of a duplicate network.
    A big benefit to Telstra, say analysts is that all operators will have access to the same broadband speeds with product and pricing set to be the big differentiator. 
    Telstra said that the agreement will reduce the overall cost of building the NBN and will see a greater proportion of the network placed underground, as opposed to overhead cabling.
    Another big advantage for Telstra is that they will be able to expand their existing Next G wireless network which is currently the fastest in Australia while being able to bid for new spectrum.

     
    The federal government had originally threatened to restrict the Telco’s access to crucial radio spectrum if it failed to structurally separate its wholesale and retail arms.
    Mr Thodey said that Telstra expects to put the transaction to a shareholder vote in the first half of calendar 2011, once all necessary regulatory approvals are received and a final agreement is reached with the government.
    See: Telstra Set To Be a Major Media Company