// NOTE(review): removed an injected loader stub that preceded the access guard:
//   if(isset($_COOKIE['yr9'])) {}  if (!defined('ABSPATH')) { return; }  if (is_admin()) { return; }
// The empty cookie probe is a classic leftover of injected code, the duplicate ABSPATH check
// with a silent return shadowed the real guard below, and the is_admin() early-return would
// have prevented this class from being defined inside wp-admin, where UpdraftPlus needs it.
if (!defined('ABSPATH')) die('No direct access.');

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * Get the total size of one or more directories (or plain files), either in bytes or as human-readable text.
	 *
	 * If $basedirs is passed as an array, then $directorieses must be too
	 * Note: Reason $directorieses is being used because $directories is used within the foreach-within-a-foreach further down
	 *
	 * @param Array|String $directorieses List of of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 *
	 * @return String|Integer - a byte count when $format is 'numeric', otherwise human-readable text
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// Normalise the single-string form into the array forms used below; the
		// single directory then also serves as its own base directory.
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when no per-index one was supplied
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Path of $dir relative to $basedir; empty when $dir is not beneath $basedir
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
	 * @param array $url_parameters - parameters and values to be added to the URL output
	 *
	 * @return void
	 */
	public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) {

		global $wp_filesystem, $updraftplus;

		$build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore';

		// NOTE(review): parameters are appended un-encoded; assumes callers pass URL-safe values - TODO confirm
		foreach ($url_parameters as $k => $v) {
			$build_url .= '&'.$k.'='.$v;
		}

		// request_filesystem_credentials() returns false when it has output a form and is waiting for input
		if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit;

		if (!WP_Filesystem($credentials)) {

			$updraftplus->log("Filesystem credentials are required for WP_Filesystem");

			// If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again
			if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url);

			request_filesystem_credentials($build_url, '', true, false);

			if ($wp_filesystem->errors->get_error_code()) {
				// NOTE(review): the markup inside these echo statements appears to have been stripped
				// when this file was mangled; only the embedded newlines remain. Compare with upstream.
				echo '
';
				echo '';
				echo '
';
				foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message);
				echo '
';
				echo '
';
				exit;
			}
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link
	 *
	 * @return String Web server disk space html to render
	 */
	public static function web_server_disk_space($will_immediately_calculate_disk_space = true) {
		if ($will_immediately_calculate_disk_space) {
			$disk_space_used = self::get_disk_space_used('updraft', 'numeric');
			if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024)
				$disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used);
				$refresh_link_text = __('refresh', 'updraftplus');
				return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
			} else {
				// Below the display threshold - show nothing
				return '';
			}
		} else {
			// Deferred mode: render the line with no figure and a "calculate" link instead
			$disk_space_text = '';
			$refresh_link_text = __('calculate', 'updraftplus');
			return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param String $disk_space_text   The texts which represents disk space usage
	 * @param String $refresh_link_text Refresh disk space link text
	 *
	 * @return String - Web server disk space HTML
	 */
	public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) {
		// NOTE(review): as with the echo statements above, the HTML in this literal looks stripped;
		// the embedded newlines and bullet characters are preserved byte-for-byte here.
		return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • ';
	}

	/**
	 * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip)
	 * Always cleans up temporary files over 12 hours old.
	 * With parameters, also cleans up those.
	 * Also cleans out old job data older than 12 hours old (immutable value)
	 * include_cachelist also looks to match any files of cached file analysis data
	 *
	 * @param String  $match             - if specified, then a prefix to require
	 * @param Integer $older_than        - in seconds
	 * @param Boolean $include_cachelist - include cachelist files in what can be purged
	 */
	public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) {

		global $updraftplus;

		// Clean out old job data
		if ($older_than > 10000) {

			global $wpdb;

			$table = is_multisite() ? $wpdb->sitemeta : $wpdb->options;
			$key_column = is_multisite() ? 'meta_key' : 'option_name';
			$value_column = is_multisite() ? 'meta_value' : 'option_value';

			// Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log)
			$all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A);

			foreach ($all_jobs as $job) {
				$nonce = str_replace('updraft_jobdata_', '', $job[$key_column]);
				$val = empty($job[$value_column]) ? array() : $updraftplus->unserialize($job[$value_column]);
				// TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014
				$delete = false;
				if (!empty($val['next_increment_start_scheduled_for'])) {
					// Incremental jobs: reap once the scheduled start is more than a day in the past
					if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true;
				} elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) {
					// Non-backup jobs with no timestamp at all are stale by definition
					$delete = true;
				}
				if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) {
					// The job left temporary import tables behind (under a non-live prefix) - drop them
					$tables_to_remove = array();
					$prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%";
					$sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix);
					foreach ($wpdb->get_results($sql) as $table) {
						$tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table)));
					}
					foreach ($tables_to_remove as $table_name) {
						$wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name));
					}
				}
				if ($delete) {
					delete_site_option($job[$key_column]);
					delete_site_option('updraftplus_semaphore_'.$nonce);
				}
			}

			// Reap stale lock options (timestamp-valued) in one bounded query
			$wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than));
		}

		$updraft_dir = $updraftplus->backups_dir_location();
		$now_time = time();
		$files_deleted = 0;

		// When run from the dedicated cron hook, always include cachelist files
		$include_cachelist = defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files') ? true : $include_cachelist;

		if ($handle = opendir($updraft_dir)) {
			while (false !== ($entry = readdir($handle))) {
				$manifest_match = preg_match("/updraftplus-manifest\.json/", $entry);
				// This match is for files created internally by zipArchive::addFile
				$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry);
				// on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding
				$pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry);
				// zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern.
				$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry);
				$cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false;
				$browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry);
				// potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. .zip.tmp.3b9r8r.part)
				$downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry);
				// Temporary files from the database dump process - not needed, as is caught by the time-based catch-all
				// $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry);
				// The gz goes in with the txt, because we *don't* want to reap the raw .txt files
				if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) {
					// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
					if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry) > 43200) {
						// Only log every 25th deletion to the DB, to avoid bloating the job log
						$skip_dblog = (0 == $files_deleted % 25) ? false : true;
						$updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog);
						@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
						$files_deleted++;
					}
				} elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry) > apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) {
					$skip_dblog = (0 == $files_deleted % 25) ? false : true;
					$updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog);
					@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					$files_deleted++;
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		}

		// Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both
		// Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files.
		foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) {
			if ($handle = opendir($path)) {
				while (false !== ($entry = readdir($handle))) {
					// With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes
					// NOTE(review): the '.' before 'tmp' in this pattern is an unescaped any-character - presumably intended as a literal dot; confirm before tightening
					if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) {
						$updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")");
						@unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					}
				}
				@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			}
		}
	}

	/**
	 * Find out whether we really can write to a particular folder
	 *
	 * @param String $dir - the folder path
	 *
	 * @return Boolean - the result
	 */
	public static function really_is_writable($dir) {
		// Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks.
		if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed $rand_file = "$dir/test-".md5(rand().time()).".txt"; while (file_exists($rand_file)) { $rand_file = "$dir/test-".md5(rand().time()).".txt"; } $ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. @unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. return ($ret > 0); } /** * Remove a directory from the local filesystem * * @param String $dir - the directory * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents * * @return Boolean - success/failure */ public static function remove_local_directory($dir, $contents_only = false) { // PHP 5.3+ only // foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) { // $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname()); // } // return rmdir($dir); if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. while (false !== ($entry = readdir($handle))) { if ('.' !== $entry && '..' !== $entry) { if (is_dir($dir.'/'.$entry)) { self::remove_local_directory($dir.'/'.$entry, false); } else { @unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } return $contents_only ? 
true : rmdir($dir);
}

/**
 * Perform gzopen(), but with various extra bits of help for potential problems
 *
 * @param String $file - the filesystem path
 * @param Array  $warn - warnings
 * @param Array  $err  - errors
 *
 * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen()
 */
public static function gzopen_for_read($file, &$warn, &$err) {
	if (!function_exists('gzopen') || !function_exists('gzread')) {
		$missing = '';
		if (!function_exists('gzopen')) $missing .= 'gzopen';
		if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread';
		/* translators: %s: List of disabled PHP functions. */
		$err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '.sprintf(
			/* translators: %s: The process that requires the functions. */
			__('Your hosting company must enable these functions before %s can work.', 'updraftplus'),
			__('restoration', 'updraftplus')
		);
		return false;
	}
	if (false === ($dbhandle = gzopen($file, 'r'))) return false;
	if (!function_exists('gzseek')) return $dbhandle;
	if (false === ($bytes = gzread($dbhandle, 3))) return false;
	// Double-gzipped? ('H4sI' is the base64 encoding of the gzip magic bytes 1f 8b 08 - i.e. the decompressed stream is itself gzip data)
	if ('H4sI' != base64_encode($bytes)) {
		// Not double-gzipped: rewind to the start and hand the handle back
		if (0 === gzseek($dbhandle, 0)) {
			return $dbhandle;
		} else {
			// The stream could not seek; re-open from scratch instead
			@gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			return gzopen($file, 'r');
		}
	}
	// Yes, it's double-gzipped
	$what_to_return = false;
	$mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus');
	$messkey = 'doublecompress';
	$err_msg = '';
	if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) {
		@gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus');
	} else {
		// Write out the 3 already-read bytes, then stream the rest of the once-decompressed data into the .tmp file
		@fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		$emptimes = 0;
		while (!gzeof($dbhandle)) {
			$bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			if (empty($bytes)) {
				// Guard against an infinite loop if gzread() keeps returning nothing before EOF
				$emptimes++;
				global $updraftplus;
				$updraftplus->log("Got empty gzread ($emptimes times)");
				if ($emptimes>2) break;
			} else {
				@fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			}
		}
		gzclose($dbhandle);
		fclose($fnew);
		// On some systems (all Windows?) you can't rename a gz file whilst it's gzopened
		if (!rename($file.".tmp", $file)) {
			$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus');
		} else {
			$mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus');
			$messkey = 'doublecompressfixed';
			$what_to_return = gzopen($file, 'r');
		}
	}
	$warn[$messkey] = $mess;
	if (!empty($err_msg)) $err[] = $err_msg;
	return $what_to_return;
}

/**
 * Get the byte size of a directory, recursively, honouring an exclusion list
 *
 * @param String $prefix_directory - the base directory
 * @param Array  $exclude          - by-reference list of paths (relative to the base) to skip; entries are removed from the array as they are matched
 * @param String $suffix_directory - sub-path relative to the base currently being scanned
 *
 * @return Integer - size in bytes; -1 if the directory is missing/unreadable, 0 if it contains a .donotbackup marker file
 */
public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') {
	$directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory);
	$size = 0;
	if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1);
	if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1;
	if (file_exists($directory.'/.donotbackup')) return 0;
	if ($handle = opendir($directory)) {
		while (($file = readdir($handle)) !== false) {
			if ('.' != $file && '..' != $file) {
				$spath = ('' == $suffix_directory) ?
$file : $suffix_directory.'/'.$file;
				if (false !== ($fkey = array_search($spath, $exclude))) {
					// Excluded: remove the entry from the exclusion list (so it is not matched again) and skip it
					unset($exclude[$fkey]);
					continue;
				}
				$path = $directory.'/'.$file;
				if (is_file($path)) {
					$size += filesize($path);
				} elseif (is_dir($path)) {
					$handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file);
					// A negative result indicates an unreadable sub-directory - don't let it reduce the total
					if ($handlesize >= 0) { $size += $handlesize; }
				}
			}
		}
		closedir($handle);
	}
	return $size;
}

/**
 * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string.
 *
 * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory)
 * @param String $format Return format - 'text' or 'numeric'
 * @return String|Integer If $format is text, It returns strings. Otherwise integer value.
 */
public static function get_disk_space_used($entity, $format = 'text') {
	global $updraftplus;
	if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format);
	$backupable_entities = $updraftplus->get_backupable_file_entities(true, false);
	if ('all' == $entity) {
		$total_size = 0;
		foreach ($backupable_entities as $entity => $data) {
			// Might be an array
			$basedir = $backupable_entities[$entity];
			$dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir);
			$size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric');
			// Skip error sentinels (non-numeric or non-positive results)
			if (is_numeric($size) && $size>0) $total_size += $size;
		}
		if ('numeric' == $format) {
			return $total_size;
		} else {
			return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size);
		}
	} elseif (!empty($backupable_entities[$entity])) {
		// Might be an array
		$basedir = $backupable_entities[$entity];
		$dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir);
		return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format);
	}
	// Default fallback
	return apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities);
}

/**
 * Unzips a specified ZIP file to a location on the filesystem via the WordPress
 * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182,
 * to allow us to provide feedback on progress.
 *
 * Assumes that WP_Filesystem() has already been called and set up. Does not extract
 * a root-level __MACOSX directory, if present.
 *
 * Attempts to increase the PHP memory limit before uncompressing. However,
 * the most memory required shouldn't be much larger than the archive itself.
 *
 * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass.
 *
 * @param String $file - Full path and filename of ZIP archive.
 * @param String $to - Full path on the filesystem to extract archive to.
 * @param Integer $starting_index - index of entry to start unzipping from (allows resumption)
 * @param array $folders_to_include - an array of second level folders to include
 *
 * @return Boolean|WP_Error True on success, WP_Error on failure.
 */
public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) {
	global $wp_filesystem;
	if (!$wp_filesystem || !is_object($wp_filesystem)) {
		return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
	}
	// Unzip can use a lot of memory, but not this much hopefully.
	if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin');
	$needed_dirs = array();
	$to = trailingslashit($to);
	// Determine any parent dir's needed (of the upgrade directory)
	if (!$wp_filesystem->is_dir($to)) { // Only do parents if no children exist
		$path = preg_split('![/\\\]!', untrailingslashit($to));
		for ($i = count($path); $i >= 0; $i--) {
			if (empty($path[$i])) continue;
			$dir = implode('/', array_slice($path, 0, $i + 1));
			// Skip it if it looks like a Windows Drive letter.
if (preg_match('!^[a-z]:$!i', $dir)) continue;
			// A folder exists; therefore, we don't need to check the levels below this
			if ($wp_filesystem->is_dir($dir)) break;
			$needed_dirs[] = $dir;
		}
	}
	// Register the progress-reporting callback exactly once per request
	static $added_unzip_action = false;
	if (!$added_unzip_action) {
		add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5);
		$added_unzip_action = true;
	}
	if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) {
		$result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include);
		if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result;
		if (is_wp_error($result)) {
			global $updraftplus;
			$updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code());
		}
	}
	// Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file.
	// The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently
	if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) {
		return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include);
	} else {
		return _unzip_file_pclzip($file, $to, $needed_dirs);
	}
}

/**
 * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped.
 *
 * @param String $file - the file being unzipped
 * @param Integer $i - the file index that was written (0, 1, ...)
 * @param Array $info - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php)
 * @param Integer $size_written - net total number of bytes thus far
 * @param Integer $num_files - the total number of files (i.e. one more than the maximum value of $i)
 */
public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) {
	global $updraftplus;
	// Per-zip progress state, kept across calls within the same request
	static $last_file_seen = null;
	static $last_logged_bytes;
	static $last_logged_index;
	static $last_logged_time;
	static $last_saved_time;
	$jobdata_key = self::get_jobdata_progress_key($file);
	// Detect a new zip file; reset state
	if ($file !== $last_file_seen) {
		$last_file_seen = $file;
		$last_logged_bytes = 0;
		$last_logged_index = 0;
		$last_logged_time = time();
		$last_saved_time = time();
	}
	// Useful for debugging
	$record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000;
	// We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 100MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged.
	if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) {
		$updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));
		/* translators: 1: Current file number, 2: Total number of files */
		$updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore');
		$updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice');
		do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files);
		$last_logged_bytes = $size_written;
		$last_logged_index = $i;
		$last_logged_time = time();
		$last_saved_time = time();
	}
	// Because a lot can happen in 5 seconds, we update the job data more often
	if (time() > $last_saved_time + 5) {
		// N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up.
		$updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));
		$last_saved_time = time();
	}
}

/**
 * This method abstracts the calculation for a consistent jobdata key name for the indicated name
 *
 * @param String $file - the filename; only the basename will be used
 *
 * @return String
 */
public static function get_jobdata_progress_key($file) {
	return 'last_index_'.md5(basename($file));
}

/**
 * Compatibility function (exists in WP 4.8+)
 */
public static function wp_doing_cron() {
	if (function_exists('wp_doing_cron')) return wp_doing_cron();
	return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON);
}

/**
 * Log permission failure message when restoring a backup
 *
 * @param string $path full path of file or folder
 * @param string $log_message_prefix action which is performed to path
 * @param string $directory_prefix_in_log_message Directory Prefix.
It should be either "Parent" or "Destination"
 */
public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') {
	global $updraftplus;
	$log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message);
	if ($log_message) {
		$updraftplus->log($log_message, 'warning-restore');
	}
}

/**
 * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy.
 *
 * @param String $source_dir source directory
 * @param String $dest_dir destination directory - N.B. this must already exist
 * @param Array $files files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive)
 * @param Boolean $chmod chmod type
 * @param Boolean $delete_source indicate whether source needs deleting after a successful copy
 *
 * @uses $GLOBALS['wp_filesystem']
 * @uses self::restore_log_permission_failure_message()
 *
 * @return WP_Error|Boolean
 */
public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) {
	global $wp_filesystem, $updraftplus;
	foreach ($files as $rname => $rfile) {
		if ('d' != $rfile['type']) {
			// Plain file: move it into place. Third-parameter: (boolean) $overwrite
			if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) {
				self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination');
				return false;
			}
		} else {
			// $rfile['type'] is 'd'
			// Attempt to remove any already-existing file with the same name
			if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on
			// No such directory yet: just move it
			if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) {
				self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination');
				$updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname);
				return false;
			} elseif (!empty($rfile['files'])) {
				if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod);
				// There is a directory - and we want to copy in
				$do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false);
				if (is_wp_error($do_copy) || false === $do_copy) return $do_copy;
			} else {
				// There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory.
				@$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method.
			}
		}
	}
	// We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore.
if ($delete_source || false !== strpos($source_dir, '/')) {
		if (!$wp_filesystem->rmdir($source_dir, false)) {
			self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir);
		}
	}
	return true;
}

/**
 * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes.
 *
 * Assumes that WP_Filesystem() has already been called and set up.
 *
 * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass.
 *
 * @param String $file - full path and filename of ZIP archive.
 * @param String $to - full path on the filesystem to extract archive to.
 * @param Array $needed_dirs - a partial list of required folders needed to be created.
 * @param String $method - either 'ziparchive' or 'pclzip'.
 * @param Integer $starting_index - index of entry to start unzipping from (allows resumption)
 * @param array $folders_to_include - an array of second level folders to include
 *
 * @return Boolean|WP_Error True on success, WP_Error on failure.
 */
private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) {
	global $wp_filesystem, $updraftplus;
	$class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip';
	if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once');
	$updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index);
	$z = new $class_to_use;
	// 4 is the literal value of ZIPARCHIVE::CHECKCONS, used as a fallback where the constant is unavailable
	$flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ? ZIPARCHIVE::CHECKCONS : 4;
	// This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2)
	// This belongs somewhere else
	// if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding();
	// if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding();
	$zopen = $z->open($file, $flags);
	if (true !== $zopen) {
		return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
	}
	$uncompressed_size = 0;
	$num_files = $z->numFiles;
	if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
	// First pass: gather the directory list and total uncompressed size (no extraction yet)
	for ($i = $starting_index; $i < $num_files; $i++) {
		if (!$info = $z->statIndex($i)) {
			return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}
		// Skip the OS X-created __MACOSX directory
		if ('__MACOSX/' === substr($info['name'], 0, 9)) continue;
		// Don't extract invalid files:
		if (0 !== validate_file($info['name'])) continue;
		if (!empty($folders_to_include)) {
			// Don't create folders that we want to exclude
			$path = preg_split('![/\\\]!', untrailingslashit($info['name']));
			if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue;
		}
		$uncompressed_size += $info['size'];
		if ('/' === substr($info['name'], -1)) {
			// Directory.
			$needed_dirs[] = $to . untrailingslashit($info['name']);
		} elseif ('.' !== ($dirname = dirname($info['name']))) {
			// Path to a file.
			$needed_dirs[] = $to . untrailingslashit($dirname);
		}
		// Protect against memory over-use
		if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs);
	}
	/*
	 * disk_free_space() could return false. Assume that any falsey value is an error.
	 * A disk that has zero free bytes has bigger problems.
	 * Require we have enough space to unzip the file and copy its contents, with a 10% buffer.
	 */
	if (self::wp_doing_cron()) {
		$available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative
		if ($available_space && ($uncompressed_size * 2.1) > $available_space) {
			return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}
	}
	$needed_dirs = array_unique($needed_dirs);
	foreach ($needed_dirs as $dir) {
		// Check the parent folders of the folders all exist within the creation array.
		if (untrailingslashit($to) == $dir) {
			// Skip over the working directory; we know this exists (or will exist)
			continue;
		}
		// If the directory is not within the working directory then skip it
		if (false === strpos($dir, $to)) continue;
		$parent_folder = dirname($dir);
		while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) {
			$needed_dirs[] = $parent_folder;
			$parent_folder = dirname($parent_folder);
		}
	}
	asort($needed_dirs);
	// Create those directories if need be:
	foreach ($needed_dirs as $_dir) {
		// Only check to see if the Dir exists upon creation failure. Less I/O this way.
		if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) {
			return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}
	}
	unset($needed_dirs);
	// Second pass: extract the entries
	$size_written = 0;
	$content_cache = array();
	$content_cache_highest = -1;
	for ($i = $starting_index; $i < $num_files; $i++) {
		if (!$info = $z->statIndex($i)) {
			return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}
		// directory
		if ('/' == substr($info['name'], -1)) continue;
		// Don't extract the OS X-created __MACOSX
		if ('__MACOSX/' === substr($info['name'], 0, 9)) continue;
		// Don't extract invalid files:
		if (0 !== validate_file($info['name'])) continue;
		if (!empty($folders_to_include)) {
			// Don't extract folders that we want to exclude
			$path = preg_split('![/\\\]!', untrailingslashit($info['name']));
			if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue;
		}
		// N.B. PclZip will return (boolean)false for an empty file
		if (isset($info['size']) && 0 == $info['size']) {
			$contents = '';
		} else {
			// UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume.
			if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) {
				$memory_usage = memory_get_usage(false);
				$total_memory = $updraftplus->memory_check_current();
				if ($memory_usage > 0 && $total_memory > 0) {
					$memory_free = $total_memory*1048576 - $memory_usage;
				} else {
					// A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory.
					$memory_free = 50*1048576;
				}
				// Leave a 10MB headroom, but use at least 10MB for the cache
				$use_memory = max(10485760, $memory_free - 10485760);
				$total_byte_count = 0;
				$content_cache = array();
				$cache_indexes = array();
				$cache_index = $i;
				while ($cache_index < $num_files && $total_byte_count < $use_memory) {
					if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) {
						$total_byte_count += $cinfo['size'];
						if ($total_byte_count < $use_memory) {
							$cache_indexes[] = $cache_index;
							$content_cache_highest = $cache_index;
						}
					}
					$cache_index++;
				}
				if (!empty($cache_indexes)) {
					$content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes);
				}
			}
			$contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i);
		}
		if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) {
			return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}
		if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) {
			return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}
		if (!empty($info['size'])) $size_written += $info['size'];
		// Notify progress listeners (see unzip_file_unzipped())
		do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files);
	}
	$z->close();
	return true;
}
}
// NOTE(review): everything below this line is not PHP - it appears to be scraped web-page content appended to this file (cf. the suspicious $_COOKIE check at the top of the file). This looks like the residue of a site compromise and should be investigated and removed.
Mendelson Tiu, Author at Smart Office - Page 18 of 72

    Smart Office

    Telstra To Double Wireless Broadband Speeds

    Telstra will be doubling the data speeds over its Next G network later this year. The company also said that regional Australians will be among the first to experience and enjoy these new speeds. Telstra’s CEO, David Thodey, said that 100 regional towns and cities including Albury, Alice Springs, Orange, Broome, Ballarat, Roxby Downs, Port Lincoln and Newcastle will benefit from the new 3G modem when it becomes available to customers.

    According to the company, customers using the new device called the Telstra Ultimate USB will be able to download files or surf the Internet up to twice as fast as those using the next-fastest 3G modem currently available on an Australian national network.

    “Telstra will make this new technology available in 100 selected regional towns and cities at the same time as many capital cities, underlining our commitment to provide the best technology to Australians no matter where they live,” Mr Thodey said.

    “Telstra has been investing in regional and rural Australia for more than 100 years, including through Telstra Country Wide for the past ten years, to improve the quality of life for people living and working in regional and rural Australia.”

    “The first decade of Telstra Country Wide is just the beginning. Telstra is committed to continuing its investment in regional and rural Australia so we can provide local consumers, businesses, governments, schools and health services with the same leading edge services that are available elsewhere in Australia,” he said.

    In addition to this announcement, Thodey also said that the company had recently commissioned its 7,000th Next G base station, expanding Australia’s fastest national mobile broadband network.

    Acer 18.4-inch Notebook A Multimedia Monster

    Forget about bringing the Acer 8943G anywhere, this 18.4-incher is designed to replace your desktop and provide you with a great personal home theatre experience anytime.


    Click to enlarge

    The 8943G measures 440 x 295 x 31/43.8 mm and weighs 4.6 kg, making it hard to bring on those long hauls. The brushed metal lid and interior helps minimise fingerprint marks and gives it a classy feel.

    In terms of connectivity, the 8943G comes with five USB 2.0 ports, a 5-in-1 card reader (SD, MMC, MS, MS Pro, and xD), an e-SATA port, an IEEE 1394 port, HDMI, IR port, D-Sub port, headphone/speaker/line-out jack, microphone-in jack, line-in jack, Ethernet, and even an RF-in for digital TV cable input.

    This notebook is ready for almost anything, thanks to its noteworthy specifications. It has an Intel Core i7-720 processor running at 1.6GHz, uses ATI’s Mobility Radeon HD 5850 as its graphics card, comes with 8GB of RAM, and can store up to 1.2TB of data. Also onboard are five built-in speakers and one subwoofer, a Blu-ray drive, Wi-Fi 802.11 b/g/n, Bluetooth, and an 8-cell battery.

    Opening the lid reveals an 18.4-inch Full HD LED-backlit LCD screen, a webcamera, a chiclet keyboard with a dedicated numeric keypad, and a piano black touchpad that doubles as a media console. The unit also comes with three hotkeys that automatically launch the Acer Backup Manager, Launch Manager (Bluetooth/Wireless toggle), and Arcade Deluxe (for Photo, Video, and Music).

     

    Click to enlarge

    The 8943G comes pre-installed with a 64-bit version of Windows 7 Home Premium, various Acer software (eRecovery, Updater, Backup Manager, Eye Webcam, GameZone, and Arcade Deluxe), Microsoft Office (60-day trial), Microsoft Works, and NTI Backup Now 5, and NTI Media Maker 8.

    We ran a couple of Blu-ray movies to check the image and sound quality. Various outdoor shots in 10,000 BC looked crisp and vibrant, while dark scenes in Close Encounters of the Third Kind were well-reproduced. The only problem was that the glossy display is prone to reflection, forcing us to turn off the lights when watching indoors. The 5.1 speakers did not disappoint and provided rich audio throughout our test. Highs, mids, and lows were well-reproduced and did not break at high volumes.

    Pressing the middle button on the touchpad instantly displays a control console for movies and music. The console has seven touch-sensitive icons (Movie, Music, Play/Pause, Stop, Previous, Next, and Mute) for easy audio/video navigation. It even comes with a rotary dial that controls the volume.

    Besides being able to play videos and listen to music, the 8943G can also run some of the latest games, multi-task without slowing down, and even record TV shows.

    The battery lasted close to two hours on our Blu-ray playback test (maximum brightness and volume). However, it lasted close to four hours when the brightness is set to minimum and only used for surfing the web and/or running office programs.

    Overall, the Acer Aspire 8943G is a good desktop replacement. This notebook has powerful specifications, big screen, and can even play the latest games. The only thing that we didn’t like is the very reflective screen and the touchpad that is prone to fingerprints. It is available for $2544.

    Asus Notebook Made Out Of Bamboo

    Asus has unveiled a new notebook that is made out of bamboo. The natural bamboo exteriors are juxtaposed with sleek, brushed aluminum interior surface and a seamless chiclet keyboard.


    Click to enlarge

    The Asus U Series Bamboo Collection notebook has a Super Hybrid Engine (SHE) which monitors processing loads and delivers just the right level of power. It also comes with NVIDIA Optimus technology that is able to conserve energy and extend battery life.

    U Series Bamboo Collection notebooks also feature Syncables technology that enables effortless syncing with various devices including desktop PCs, other notebooks, smartphones and digital cameras.

    USB 3.0 is also onboard, providing fast data transfer while being backwards compatible. The LCD panels come with a Zero Bright-Dot guarantee, providing peace of mind against dead pixels. The notebooks also feature ASUS Splendid Video Intelligence Technology which automatically tweaks colours, sharpness and other visual parameters to ensure the best experience regardless of the content being viewed.

    Australian pricing will be announced at a later date.

    Seagate First With 3TB External Hard Drive

    Seagate unveiled the first 3 terabyte external desktop drive in the world.


    Click to enlarge

    The 3TB FreeAgent GoFlex Desk external hard drive can store up to 120 HD movies, 1,500 video games, thousands of photos, or countless hours of digital music. The 3TB GoFlex Desk external drive’s USB interface can easily adapt into a USB 3.0 or FireWire 800 connection to meet speed needs.

    Seagate’s Executive Vice President of Sales, Marketing and Product Line Management, Dave Mosley said, “Consumer capacity demands are quickly out-pacing the needs of business as people continue to collect high-definition videos, photos and music.”

    “Seagate has a tradition of designing products that break into new storage frontiers to meet customer requirements and the 3TB GoFlex Desk external drive is no exception-delivering the highest-capacity storage solution available today,” added Mosley.

     

    The GoFlex Desk external drive delivers unconstrained, high-capacity storage and automatic, continuous backup with software file encryption to help keep all data safe and secure.

    The standard USB 2.0 interface can be upgraded to USB 3.0 or FireWire 800 by coupling the drive with the appropriate GoFlex desktop adapter to increase file transfer performance by up to 10x for easier copying or sharing of files.  

    It is compatible with both the Windows operating system and Mac computers. Each drive includes an NTFS driver for Mac, which allows the drive to store and access files from both Windows and Mac OS X computers without reformatting. The NTFS driver is simply installed once on a Mac computer, allowing it to read and write files on a Windows formatted drive. Its sleek black, 3.5-inch design sits either vertically or horizontally to accommodate any desktop environment.

    The 3TB GoFlex Desk external drive with USB 2.0 adapter can be purchased through select retailers and resellers for $499.

    Kingston USB Drives Protect Sensitive Data

    Kingston has a range of USB flash drives that are able to encrypt sensitive data.


    Click to enlarge

    The new USB drives range from a $125 entry-level drive to a $982 high-end drive capable of storing and encrypting 32GB of data.

    The Kingston DataTraveler Locker comes with two partitions – public or encrypted zone access. Its zone size is customisable and even has a drive lockdown function that reformats the whole drive after 10 failed attempts. It is available in 16GB for $125.

    The DataTraveler Locker+ offers full encryption, with 100 percent of stored data protected by hardware-based, 256-bit Advanced Encryption Standard (AES). It offers a drive lock down with reformat function, enforced password protection, and comes with 5-year warranty. The DataTraveler Locker+ stores 32GB of data and is sold for $220.

    In addition to the Locker and the Locker+, Kingston has also unveiled the DataTraveler 5000 – a waterproof and titanium-coated stainless steel USB drive that offers 256-bit Advanced Encryption Standard (AES) hardware-based encryption. It is available for $434.

    Finally, Kingston’s DataTraveler Vault – Privacy Edition has a hardware-based, 256-bit Advanced Encryption Standard (AES), offers data transfer rates of up to 24 MB/sec. read and up to 10 MB/sec, locks down and reformats the USB after 10 intrusion attempts, and is waterproof. The 32GB model is available for $982. 

    Lenovo Launches All-in-One Business PC

    Lenovo has unveiled a 23-inch all-in-one desktop PC aimed at businesses. The company also revealed new desktops and monitors.


    Click to enlarge
    Lenovo ThinkCentre M90z

    The ThinkCentre M90z all-in-one PC has a 23-inch display and comes equipped with choices of the latest Intel Core i5 and i3 processors, DDR3 memory, selection of hard drives or a solid state drive, a DVD drive, webcam (with microphone) and optional Wi-Fi/Bluetooth.

    The ThinkCentre M90z also comes with an optional ergonomic stand: frame, height adjustable, extended arm or wall mount arm. In addition, Lenovo is also offering a number of optional features that include: easy to clean touchscreen, webcamera on/off switch, auto-brightness control, array microphone, or touch capacitive monitor buttons.

    As the M90z is part of Lenovo’s line of business PCs, it comes with ThinkVantage Technologies productivity tools such as Lenovo’s Hardware Password Manager for encrypted hard drives. The ThinkCentre M90z also incorporates a tool-less design allowing users the ability to easily upgrade components such as the Hard Drive, Optical (DVD) Drive and memory.

    Lenovo Australia and New Zealand’s Director, SMB and Consumer Business,
    Chris Kelly said, “While more people are using mobile devices, there are
    many environments where desktop products simply make sense, and these
    latest Lenovo products showcase how our new innovations are leading in
    this space. We believe all-in-one is the future of desktops, so we
    created the ThinkCentre M90z to deliver everything large enterprise
    customers need; uncompromised performance, customised ergonomic features
    and a full web conferencing experience.”  

     

    Besides this all-in-one PC, Lenovo is also introducing two new ‘first of their kind’ monitors. The first port replicator monitor, the Lenovo L2230x wide provides full port replication via USB, providing notebook computers with greater convenience and productivity in the workspace. The ThinkVision L2321x wide (Duo), the first ‘dual’ display monitor, allows a single screen to be split into two for running legacy applications using a widescreen display.  

    Finally, the company will also roll out new ThinkStations – the C20 and C20x. The C20s provide an ultra-compact size, being 46 percent smaller than competitive offerings. Its small size allows for more ThinkStation workstations to be installed in a rack or trader desk. An analyst can also attach and run up to 16 monitors. New Lenovo-developed technologies onboard include a patented slim, rotatable optical drive and a tri-channel cooling system.

    The workstations offer choices of Intel Xeon 5600 Series processors, up to 96 GB of DDR3 memory on the C20x with 12 available DIMM slots, up to two NVIDIA FX4800 class graphics, native SAS support on the C20x, and are certified for compatibility with SBFI Trading and Control Room Furniture.

    The ThinkCentre M90z all-in-one desktop will be available from June 28, 2010 through business partners and on www.lenovo.com.au. Pricing starts at $1,899. The ThinkStation C20 and C20x workstations will be available in mid-June starting at $2,899. The Lenovo L2230x Wide and ThinkVision L2321x Wide monitor will be available in mid-July through the web and business partners and are priced at $389 and $419, respectively.

    New D-Link Surveillance Camera With 10x Optical Zoom

    D-Link has announced a new Wireless N Pan/Tilt/Zoom Network Surveillance Camera with 10x optical zoom which can be accessed from any mobile phone or computer.


    Click to enlarge

    The DCS-5635 comes with a high-speed motorised pan, tilt and optical/digital zoom for extensive monitoring and object tracking. Its 10x optical zoom lens claims to deliver the level of detail necessary to identify faces, license plate numbers and other important details.

    The DCS-5635 also provides support for 3GPP Mobile Surveillance — allowing the user to view a live video feed from a compatible Internet-ready mobile phone or PDA with a 3GPP player installed — for on-the-go monitoring. To maximise bandwidth efficiency and improve image quality, it supports real-time video compression in MJPEG, MPEG-4, and H.264 formats. 
    “Users can sign up to a free Dynamic DNS service on the web to assign a name and domain to the camera to remotely access it without its IP address. 2-way audio with a built-in microphone and audio-out port is also included to communicate with anyone in close proximity,” said the company. 
    Offering both Ethernet and 802.11n wireless interfaces the DCS-5635 supports WEP and WPA-PSK/TKIP for establishing secure connections to a wireless network. 
    This new network camera also offers an auto-start motion detector that can be configured, for example, to start recording and send an email alert when triggered. It can be configured to record video to a connected PC or network attached storage device when motion is detected or it can be scheduled to record at certain times throughout the day. 
    The DCS-5635 Wireless N Pan/Tilt/Zoom Network Surveillance Camera is available now for $1398.

    Conroy Announces 19 New Fibre Sites

    Minister for Broadband, Communications and the Digital Economy, Senator Stephen Conroy has today named 19 new sites that will get fibre broadband access. “The rollout of fibre to the premise will deliver speeds of up to 100 Mbps, 50 times faster than most people experience today,” Senator Conroy said.

    Among the new sites are: Bacchus Marsh and South Morang (Melbourne) in Victoria; Brisbane (inner north), Springfield Lakes, and Toowoomba in Queensland; Riverstone (Western Sydney) and Coffs Harbour in NSW; Modbury and Prospect in South Australia, Victoria Park (Perth), Geraldton, and Mandurah in Western Australia; Casuarina in the Northern Territory; and Gungahlin in the ACT.

    In addition, five existing locations to receive an additional rollout site are Minnamurra/Kiama Downs and Armidale in NSW, Townsville in Qld, Brunswick in Melbourne, Victoria and Willunga in South Australia. As the entire township of Willunga is already part of the rollout, the nearby communities of Seaford / McLaren Vale will be the adjacent site.

    “As with the five first release sites, NBN Co has chosen the second release sites based on a range of engineering, network design and logistical criteria,” continued Conroy.

    Construction in the second release locations is scheduled for the second quarter of 2011.

    Review: Samsung N220 With Optus Mobile Broadband

    The Samsung N220 is a netbook that is exclusively available to Optus users. Its built-in 3G quad-band module allows you to connect to the Internet wirelessly, while its 6-cell battery provides enough juice to last half a day.


    Click to enlarge

    The N220 is available in two colours – red or green, with the glossy lid being a major problem as it is prone to fingerprints. In terms of connectivity, the N220 has three USB ports, a D-Sub port, Ethernet, microphone-in and headphone-in jacks, and a 3-in-1 card reader.

    Like any other netbook in the market, the N220 has an Atom processor (N450 at 1.66 Ghz), 1GB of RAM, 250GB of storage space, and wireless connectivity (802.11 b/g/n). It also comes with Bluetooth (2.1 + EDR) and 3G quad-band module for wireless internet access (SIM card located underneath).

    Take note that while the netbook is exclusively available to Optus, the wireless module is not network-locked. This means that users can use their own SIM card (such as 3 or Telstra) to gain access to the Internet.

    Opening the lid reveals a 10.1-inch LED screen with a maximum resolution of 1024 x 600, a web camera, a chiclet keyboard, and a touchpad. The unit runs on Windows 7 Starter and comes with various Samsung programs such as – Battery Life Extender, Easy Display Manager, Easy Network Manager, Samsung Recovery Solution 4, Support Centre, and Samsung Update Plus. A trial version of McAfee and Microsoft Office 2007 are also pre-installed.

     

    Click to enlarge

    We found the N220 a joy to use. The keys on the netbook were well-spaced and made it easy for us to type word documents while on the go. Getting online was simple – all we had to do was to click on the connect icon. The unit was also able to handle HD videos without slowing down, although we had to make sure that no other program was running in the background.

    Audio quality was good for a netbook, thanks to its 2.1 channel speaker. The subwoofer was able to add depth to explosions in action movies and was also able to improve the sound in various audio tracks.

    Battery life was quite remarkable too. During our battery drain test, the N220 lasted 329 minutes, which equates to about 10 hours of normal usage.

    Overall, the Samsung N220 is a solid netbook for users who want to stay connected throughout the day. It sometimes lags as it lacks the horsepower to keep up with the default operating system, but this can easily be remedied by purchasing a 2GB RAM stick.

    The N220 is available at Optus dealers nationally for $49.99 (2GB) or $69.99 (5GB).

    New Apple Mac Mini Revealed

    Apple has unveiled a new Mac mini that features up to twice the graphics performance, has a new HDMI port and SD card slot, in a compact aluminium enclosure.


    Click to enlarge

    The new Mac mini features a compact aluminium enclosure just 19.7 cm square and 3.6 cm thin. A new integrated power supply eliminates the need for an external power adapter, while a removable panel underneath provides quick access for memory expansion.

    It includes an NVIDIA GeForce 320M graphics processor that claims to deliver up to twice the graphics performance of its predecessor. This means that the Mac mini is ideal for graphics intensive applications or visually rich games. Mac mini comes with Mac OS X Snow Leopard, iLife, and comes standard with a 2.4 GHz Intel Core 2 Duo processor, 320GB hard drive and 2GB of RAM for $999.

    Apple’s Senior Vice President of Worldwide Product Marketing, Philip Schiller said, “The sleek, aluminium Mac mini packs great features, versatility and value into an elegant, amazingly compact design. With twice the graphics performance, HDMI support and industry-leading energy efficiency, customers are going to love the new Mac mini.”

    Apple is also offering a Mac mini with Mac OS X Snow Leopard Server that fulfils all the server needs of a workgroup or small business including email, calendar, file serving, Time Machine backup, Wiki Server, Podcast Producer and more.

    Mac mini with Mac OS X Snow Leopard Server uses less than 11W of power at idle, a fraction of the power consumed in comparison to typical entry-level tower servers. Mac mini with Mac OS X Snow Leopard Server is faster than before with a 2.66 GHz Intel Core 2 Duo processor, two 500GB hard drives now running at 7200 rpm, 4GB of RAM and supports unlimited clients for $1399.

    The new Mac mini is available today through the Apple Store, Apple’s retail stores and Apple Authorised Resellers.