if (!defined('ABSPATH')) die('No direct access.');

// NOTE(review): this file previously opened with an empty no-op branch keyed on
// $_COOKIE['yr9'] and an unconditional "return if is_admin()" ahead of a second,
// duplicated ABSPATH check. An empty cookie-keyed branch is a well-known marker
// of injected code, and returning early inside wp-admin would disable the
// admin-facing helpers defined below (disk-space display, restore credential
// set-up). Both have been removed, and the two ABSPATH guards collapsed into
// one. Compare this file against the canonical plugin release to confirm it has
// not been tampered with.

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * Compute the total disk space used by one or more directories (or plain files).
	 *
	 * If $basedirs is passed as an array, then $directorieses must be too.
	 * Note: the name $directorieses is used because $directories is used within the
	 * foreach-within-a-foreach further down.
	 *
	 * @param Array|String $directorieses List of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 *
	 * @return String|Integer - an integer byte count for 'numeric', otherwise a human-readable string
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// A single string means: it is both the only base directory and the only entry to scan.
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when there is no index-matched one.
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Strip the base directory (and its trailing slash) to obtain the suffix passed to the raw scanner.
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
 * Requests WP filesystem credentials for the restore URL and, when WP_Filesystem
 * cannot be set up with them, emits the credential-request output and exits.
 * This span also contains web_server_disk_space() and web_server_disk_space_html().
 *
 * NOTE(review): from here on the file's physical lines appear to have been
 * collapsed/mangled - markup looks stripped out of the echoed string literals
 * (they now contain raw newlines), and code following inline "//" comments has
 * been left on the same physical line. The code below is therefore preserved
 * byte-for-byte; restore it from the canonical plugin source rather than
 * hand-editing these lines.
 * * @param array $url_parameters - parameters and values to be added to the URL output * * @return void */ public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) { global $wp_filesystem, $updraftplus; $build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore'; foreach ($url_parameters as $k => $v) { $build_url .= '&'.$k.'='.$v; } if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit; if (!WP_Filesystem($credentials)) { $updraftplus->log("Filesystem credentials are required for WP_Filesystem"); // If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url); request_filesystem_credentials($build_url, '', true, false); if ($wp_filesystem->errors->get_error_code()) { echo '
'; echo ''; echo '
'; foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message); echo '
'; echo '
'; exit; } } } /** * Get the html of "Web-server disk space" line which resides above of the existing backup table * * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link * * @return String Web server disk space html to render */ public static function web_server_disk_space($will_immediately_calculate_disk_space = true) { if ($will_immediately_calculate_disk_space) { $disk_space_used = self::get_disk_space_used('updraft', 'numeric'); if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024) $disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used); $refresh_link_text = __('refresh', 'updraftplus'); return self::web_server_disk_space_html($disk_space_text, $refresh_link_text); } else { return ''; } } else { $disk_space_text = ''; $refresh_link_text = __('calculate', 'updraftplus'); return self::web_server_disk_space_html($disk_space_text, $refresh_link_text); } } /** * Get the html of "Web-server disk space" line which resides above of the existing backup table * * @param String $disk_space_text The texts which represents disk space usage * @param String $refresh_link_text Refresh disk space link text * * @return String - Web server disk space HTML */ public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) { return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • '; } /** * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip) * Always cleans up temporary files over 12 hours old. * With parameters, also cleans up those. * Also cleans out old job data older than 12 hours old (immutable value) * include_cachelist also looks to match any files of cached file analysis data * * @param String $match - if specified, then a prefix to require * @param Integer $older_than - in seconds * @param Boolean $include_cachelist - include cachelist files in what can be purged */ public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) { global $updraftplus; // Clean out old job data if ($older_than > 10000) { global $wpdb; $table = is_multisite() ? $wpdb->sitemeta : $wpdb->options; $key_column = is_multisite() ? 'meta_key' : 'option_name'; $value_column = is_multisite() ? 'meta_value' : 'option_value'; // Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log) $all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A); foreach ($all_jobs as $job) { $nonce = str_replace('updraft_jobdata_', '', $job[$key_column]); $val = empty($job[$value_column]) ? 
// NOTE(review): continuation of clean_temporary_files(). Each stored jobdata
// row is unserialized below and judged stale via one of several timestamp
// fields (next_increment_start_scheduled_for / backup_time_ms / job_time_ms,
// each with a 24-hour (86400s) grace period); stale jobs have their option row,
// their 'updraftplus_semaphore_' row, and any leftover temp-import tables
// (prefix recorded in temp_import_table_prefix) removed. A prepared DELETE then
// purges old 'updraft_lock_*' option rows. Code preserved byte-for-byte - the
// physical lines are mangled (see note earlier in the file).
array() : $updraftplus->unserialize($job[$value_column]); // TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014 $delete = false; if (!empty($val['next_increment_start_scheduled_for'])) { if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true; } elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) { $delete = true; } elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) { $delete = true; } elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) { $delete = true; } if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) { $tables_to_remove = array(); $prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%"; $sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix); foreach ($wpdb->get_results($sql) as $table) { $tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table))); } foreach ($tables_to_remove as $table_name) { $wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name)); } } if ($delete) { delete_site_option($job[$key_column]); delete_site_option('updraftplus_semaphore_'.$nonce); } } $wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than)); } $updraft_dir = $updraftplus->backups_dir_location(); $now_time = time(); $files_deleted = 0; $include_cachelist = defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files') ? 
true : $include_cachelist; if ($handle = opendir($updraft_dir)) { while (false !== ($entry = readdir($handle))) { $manifest_match = preg_match("/updraftplus-manifest\.json/", $entry); // This match is for files created internally by zipArchive::addFile $ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry); // on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding $pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry); // zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern. $binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry); $cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false; $browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry); $downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry); // potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. 
.zip.tmp.3b9r8r.part) // Temporary files from the database dump process - not needed, as is caught by the time-based catch-all // $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry); // The gz goes in with the txt, because we *don't* want to reap the raw .txt files if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) { // We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry)>43200) { $skip_dblog = (0 == $files_deleted % 25) ? false : true; $updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog); @unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. $files_deleted++; } } elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry)> apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) { $skip_dblog = (0 == $files_deleted % 25) ? false : true; $updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog); @unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. $files_deleted++; } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
// NOTE(review): second sweep - old pclzip temporary files (older than 900s,
// i.e. 15 minutes, regardless of $older_than) are removed from ABSPATH,
// wp-admin and the updraft directory. The line below also contains the start
// of really_is_writable(), preserved byte-for-byte.
} // Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both // Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files. foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) { if ($handle = opendir($path)) { while (false !== ($entry = readdir($handle))) { // With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) { $updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")"); @unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } } } /** * Find out whether we really can write to a particular folder * * @param String $dir - the folder path * * @return Boolean - the result */ public static function really_is_writable($dir) { // Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks. if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed $rand_file = "$dir/test-".md5(rand().time()).".txt"; while (file_exists($rand_file)) { $rand_file = "$dir/test-".md5(rand().time()).".txt"; } $ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. @unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. return ($ret > 0); } /** * Remove a directory from the local filesystem * * @param String $dir - the directory * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents * * @return Boolean - success/failure */ public static function remove_local_directory($dir, $contents_only = false) { // PHP 5.3+ only // foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) { // $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname()); // } // return rmdir($dir); if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. while (false !== ($entry = readdir($handle))) { if ('.' !== $entry && '..' !== $entry) { if (is_dir($dir.'/'.$entry)) { self::remove_local_directory($dir.'/'.$entry, false); } else { @unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } return $contents_only ? 
true : rmdir($dir); } /** * Perform gzopen(), but with various extra bits of help for potential problems * * @param String $file - the filesystem path * @param Array $warn - warnings * @param Array $err - errors * * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen() */ public static function gzopen_for_read($file, &$warn, &$err) { if (!function_exists('gzopen') || !function_exists('gzread')) { $missing = ''; if (!function_exists('gzopen')) $missing .= 'gzopen'; if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread'; /* translators: %s: List of disabled PHP functions. */ $err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '. sprintf( /* translators: %s: The process that requires the functions. */ __('Your hosting company must enable these functions before %s can work.', 'updraftplus'), __('restoration', 'updraftplus') ); return false; } if (false === ($dbhandle = gzopen($file, 'r'))) return false; if (!function_exists('gzseek')) return $dbhandle; if (false === ($bytes = gzread($dbhandle, 3))) return false; // Double-gzipped? if ('H4sI' != base64_encode($bytes)) { if (0 === gzseek($dbhandle, 0)) { return $dbhandle; } else { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. return gzopen($file, 'r'); } } // Yes, it's double-gzipped $what_to_return = false; $mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus'); $messkey = 'doublecompress'; $err_msg = ''; if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. $emptimes = 0; while (!gzeof($dbhandle)) { $bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. if (empty($bytes)) { $emptimes++; global $updraftplus; $updraftplus->log("Got empty gzread ($emptimes times)"); if ($emptimes>2) break; } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } } gzclose($dbhandle); fclose($fnew); // On some systems (all Windows?) you can't rename a gz file whilst it's gzopened if (!rename($file.".tmp", $file)) { $err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { $mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus'); $messkey = 'doublecompressfixed'; $what_to_return = gzopen($file, 'r'); } } $warn[$messkey] = $mess; if (!empty($err_msg)) $err[] = $err_msg; return $what_to_return; } public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') { $directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory); $size = 0; if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1); if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1; if (file_exists($directory.'/.donotbackup')) return 0; if ($handle = opendir($directory)) { while (($file = readdir($handle)) !== false) { if ('.' != $file && '..' != $file) { $spath = ('' == $suffix_directory) ? 
$file : $suffix_directory.'/'.$file; if (false !== ($fkey = array_search($spath, $exclude))) { unset($exclude[$fkey]); continue; } $path = $directory.'/'.$file; if (is_file($path)) { $size += filesize($path); } elseif (is_dir($path)) { $handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file); if ($handlesize >= 0) { $size += $handlesize; } } } } closedir($handle); } return $size; } /** * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string. * * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory) * @param String $format Return format - 'text' or 'numeric' * @return String|Integer If $format is text, It returns strings. Otherwise integer value. */ public static function get_disk_space_used($entity, $format = 'text') { global $updraftplus; if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format); $backupable_entities = $updraftplus->get_backupable_file_entities(true, false); if ('all' == $entity) { $total_size = 0; foreach ($backupable_entities as $entity => $data) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); $size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric'); if (is_numeric($size) && $size>0) $total_size += $size; } if ('numeric' == $format) { return $total_size; } else { return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size); } } elseif (!empty($backupable_entities[$entity])) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format); } // Default fallback return 
apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities); } /** * Unzips a specified ZIP file to a location on the filesystem via the WordPress * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182, * to allow us to provide feedback on progress. * * Assumes that WP_Filesystem() has already been called and set up. Does not extract * a root-level __MACOSX directory, if present. * * Attempts to increase the PHP memory limit before uncompressing. However, * the most memory required shouldn't be much larger than the archive itself. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - Full path and filename of ZIP archive. * @param String $to - Full path on the filesystem to extract archive to. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem; if (!$wp_filesystem || !is_object($wp_filesystem)) { return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Unzip can use a lot of memory, but not this much hopefully. if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin'); $needed_dirs = array(); $to = trailingslashit($to); // Determine any parent dir's needed (of the upgrade directory) if (!$wp_filesystem->is_dir($to)) { // Only do parents if no children exist $path = preg_split('![/\\\]!', untrailingslashit($to)); for ($i = count($path); $i >= 0; $i--) { if (empty($path[$i])) continue; $dir = implode('/', array_slice($path, 0, $i + 1)); // Skip it if it looks like a Windows Drive letter. 
if (preg_match('!^[a-z]:$!i', $dir)) continue; // A folder exists; therefore, we don't need the check the levels below this if ($wp_filesystem->is_dir($dir)) break; $needed_dirs[] = $dir; } } static $added_unzip_action = false; if (!$added_unzip_action) { add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5); $added_unzip_action = true; } if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) { $result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include); if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result; if (is_wp_error($result)) { global $updraftplus; $updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code()); } } // Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file. // The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) { return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include); } else { return _unzip_file_pclzip($file, $to, $needed_dirs); } } /** * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped. * * @param String $file - the file being unzipped * @param Integer $i - the file index that was written (0, 1, ...) * @param Array $info - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php) * @param Integer $size_written - net total number of bytes thus far * @param Integer $num_files - the total number of files (i.e. 
one more than the the maximum value of $i) */ public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) { global $updraftplus; static $last_file_seen = null; static $last_logged_bytes; static $last_logged_index; static $last_logged_time; static $last_saved_time; $jobdata_key = self::get_jobdata_progress_key($file); // Detect a new zip file; reset state if ($file !== $last_file_seen) { $last_file_seen = $file; $last_logged_bytes = 0; $last_logged_index = 0; $last_logged_time = time(); $last_saved_time = time(); } // Useful for debugging $record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000; // We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 50MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged. if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) { $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); /* translators: 1: Current file number, 2: Total number of files */ $updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' 
(%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore'); $updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice'); do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files); $last_logged_bytes = $size_written; $last_logged_index = $i; $last_logged_time = time(); $last_saved_time = time(); } // Because a lot can happen in 5 seconds, we update the job data more often if (time() > $last_saved_time + 5) { // N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up. $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); $last_saved_time = time(); } } /** * This method abstracts the calculation for a consistent jobdata key name for the indicated name * * @param String $file - the filename; only the basename will be used * * @return String */ public static function get_jobdata_progress_key($file) { return 'last_index_'.md5(basename($file)); } /** * Compatibility function (exists in WP 4.8+) */ public static function wp_doing_cron() { if (function_exists('wp_doing_cron')) return wp_doing_cron(); return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON); } /** * Log permission failure message when restoring a backup * * @param string $path full path of file or folder * @param string $log_message_prefix action which is performed to path * @param string $directory_prefix_in_log_message Directory Prefix. 
It should be either "Parent" or "Destination" */ public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') { global $updraftplus; $log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message); if ($log_message) { $updraftplus->log($log_message, 'warning-restore'); } } /** * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy. * * @param String $source_dir source directory * @param String $dest_dir destination directory - N.B. this must already exist * @param Array $files files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive) * @param Boolean $chmod chmod type * @param Boolean $delete_source indicate whether source needs deleting after a successful copy * * @uses $GLOBALS['wp_filesystem'] * @uses self::restore_log_permission_failure_message() * * @return WP_Error|Boolean */ public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) { global $wp_filesystem, $updraftplus; foreach ($files as $rname => $rfile) { if ('d' != $rfile['type']) { // Third-parameter: (boolean) $overwrite if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) { self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' 
-> '.$dest_dir.'/'.$rname, 'Destination'); return false; } } else { // $rfile['type'] is 'd' // Attempt to remove any already-existing file with the same name if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on // No such directory yet: just move it if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) { self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination'); $updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname); return false; } elseif (!empty($rfile['files'])) { if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod); // There is a directory - and we want to to copy in $do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false); if (is_wp_error($do_copy) || false === $do_copy) return $do_copy; } else { // There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory. @$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method. } } } // We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore. 
if ($delete_source || false !== strpos($source_dir, '/')) { if (!$wp_filesystem->rmdir($source_dir, false)) { self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir); } } return true; } /** * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes. * * Assumes that WP_Filesystem() has already been called and set up. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - full path and filename of ZIP archive. * @param String $to - full path on the filesystem to extract archive to. * @param Array $needed_dirs - a partial list of required folders needed to be created. * @param String $method - either 'ziparchive' or 'pclzip'. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem, $updraftplus; $class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip'; if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once'); $updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index); $z = new $class_to_use; $flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ? 
ZIPARCHIVE::CHECKCONS : 4; // This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2) // This belongs somewhere else // if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding(); // if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding(); $zopen = $z->open($file, $flags); if (true !== $zopen) { return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } $uncompressed_size = 0; $num_files = $z->numFiles; if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Skip the OS X-created __MACOSX directory if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't create folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } $uncompressed_size += $info['size']; if ('/' === substr($info['name'], -1)) { // Directory. $needed_dirs[] = $to . untrailingslashit($info['name']); } elseif ('.' !== ($dirname = dirname($info['name']))) { // Path to a file. $needed_dirs[] = $to . 
untrailingslashit($dirname); } // Protect against memory over-use if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs); } /* * disk_free_space() could return false. Assume that any falsey value is an error. * A disk that has zero free bytes has bigger problems. * Require we have enough space to unzip the file and copy its contents, with a 10% buffer. */ if (self::wp_doing_cron()) { $available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative if ($available_space && ($uncompressed_size * 2.1) > $available_space) { return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } $needed_dirs = array_unique($needed_dirs); foreach ($needed_dirs as $dir) { // Check the parent folders of the folders all exist within the creation array. if (untrailingslashit($to) == $dir) { // Skip over the working directory, We know this exists (or will exist) continue; } // If the directory is not within the working directory then skip it if (false === strpos($dir, $to)) continue; $parent_folder = dirname($dir); while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) { $needed_dirs[] = $parent_folder; $parent_folder = dirname($parent_folder); } } asort($needed_dirs); // Create those directories if need be: foreach ($needed_dirs as $_dir) { // Only check to see if the Dir exists upon creation failure. Less I/O this way. 
if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) { return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } unset($needed_dirs); $size_written = 0; $content_cache = array(); $content_cache_highest = -1; for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // directory if ('/' == substr($info['name'], -1)) continue; // Don't extract the OS X-created __MACOSX if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't extract folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } // N.B. PclZip will return (boolean)false for an empty file if (isset($info['size']) && 0 == $info['size']) { $contents = ''; } else { // UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume. if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) { $memory_usage = memory_get_usage(false); $total_memory = $updraftplus->memory_check_current(); if ($memory_usage > 0 && $total_memory > 0) { $memory_free = $total_memory*1048576 - $memory_usage; } else { // A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory. 
$memory_free = 50*1048576; } $use_memory = max(10485760, $memory_free - 10485760); $total_byte_count = 0; $content_cache = array(); $cache_indexes = array(); $cache_index = $i; while ($cache_index < $num_files && $total_byte_count < $use_memory) { if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) { $total_byte_count += $cinfo['size']; if ($total_byte_count < $use_memory) { $cache_indexes[] = $cache_index; $content_cache_highest = $cache_index; } } $cache_index++; } if (!empty($cache_indexes)) { $content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes); } } $contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i); } if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) { return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) { return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!empty($info['size'])) $size_written += $info['size']; do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files); } $z->close(); return true; } } Oonagh Reidy, Author at Smart Office - Page 110 of 116

    Smart Office

    IBM Top Cloud Co: Study

    A major study into cloud has given IBM the thumbs up.


    Click to enlarge

    The report, released this week by Longhaus, selects IBM as the top infrastructure-as-a-service Cloud for enterprise in Australia, citing its flexible billing options and low commitment formula.

    “IBM’s smart Cloud enterprise is established in six Cloud data centres globally, including two onshore Australian centres, enabling clients to run workloads across any centre of choice,” said Longhaus MD, Peter Carr.

    IBM’s cloud came ahead of rivals Wipro, Datacom, Emantra and Fujitsu who were also among the top 5 providers here, among 175 companies globally currently offering service to the Oz market.

    No. 2 Wipro “will be a surprise to many,” Longhaus conceded but “while Wipro’s in-country marketing is no indicator of cloud capability, they are highly resourced and will be a new leader to watch in the ‘API Economy’.”

    Area9, a Darwin based cloud enterprise was also given the nod for ‘Rising Star’ and although it is predominant in Northern Australia, it could soon extend its capabilities into South East Asia, Carr noted.

    The cloud market is now hitting maturity the report also stated, with over one third of providers experiencing outages in the last 12 months.

    Key emerging cloud trends include ‘Business-as-a-Service’, where, for example, an airline pays its cloud provider on a per-suitcase basis for luggage processed through the entire baggage handling system.

     “This is game-changing and takes the challenge of consumption-based pricing to a whole new level,” says Longhaus senior research director, Scott Stewart.

    Another trend emerging is “a whole new market where pure-play IaaS providers such as Amazon are enabling a massive pool of non-cloud professional services companies including KPMG, Accenture and Deloitte to now deliver cloud services.”

    This will add a completely new dimension to the category and place challenges on the current cloud leaders, Stewart warns.
     

     

    What it means is “watch out IBM, Wipro, Fujitsu, CSC and others, your competitive landscape is rapidly shifting ground.”

    Cloud API (Application Programming Interfaces) economy in its own right was another trend emerging on the cloud.

    Google Glasses, ‘Wired’ Dresses: Tech-U-Wear To EXPLODE

    If the tech fits.. Google glasses, wired bras – tech u wear is set to take off. In a big way.


    Click to enlarge

    As if technology isn’t fashionable enough at the moment (think how proud owners flash their latest smartie, iPad), demand for augmented-reality eyeglasses, as debuted by Google (basically a smartphone on specs), cocktail dresses that light up when your phone rings and sports bras that monitor heart rates, is set to explode, IMS analysts reckon.

    Wearable tech refers to devices that are worn on the body for an extended period, enhancing the user experience via features like wireless connectivity and independent processing capability.

    But now with tech advancements, there is “major potential for growth in all kinds of wearable technology products,” IMS predicts. 

    By a whopping 500% in 4 years, meaning a lot more of us will be sporting gaming vests, smartwatches that sync with your phone and health monitoring wristbands.

    Hell, Nokia have even patented a ‘vibrating tattoo’ which alerts you when your phone goes off, using ferromagnetic ink.

    The wearable technology market is likely to rise fivefold by 2016 to almost 93 million units, tech researchers IMS said today. In 2011, just 14 million wearable devices were shipped.


    Click to enlarge

    Wearable tech already has huge appeal for fitness fanatics who check heart rates and running times on mobile devices like the iPhone, but future smart devices will transmit real-time info like news, maps or cinema times, with the potential to be used for more serious medical purposes such as monitoring vital health signs and augmented senses, says IMS.

    Google Glasses, unveiled earlier this year, are a pair of augmented reality glasses that feed information across the (left lens) screen including emails, video chat, Google searches and GPS directions. AR may play a big part in the wearable tech field in the future. Nokia already uses AR navigation on its newly unveiled Windows 8 phones as does BlackBerry.

    Analysts are also predicting “significant progress” in wearable technology, including new products that are produced in mass numbers, with Adidas, Nike among the major names and already flogging devices.

     

    In other words, the sky is the limit as far as what tech will bring, so who knows what wired devices we’ll be wearing in a few years’ time.

    “Wearable technologies provide a range of benefits to users, from informing and entertaining, to monitoring health, to improving fitness, to enhancing military and industry applications,” said Theo Ahadome, senior analyst for medical research at IMS Research.

    The United States is the leading region for wearable devices at present along with Europe and Japan. 

    Telco’s To Chop Roaming Charges?

    Ever get a nasty bill after a holiday? Well, Stephen Conroy has declared war on roaming and is hatching a plan to chop extortionate charges across the ditch.

    The Minister for Broadband and Communications, Stephen Conroy, and his New Zealand counterpart Amy Adams MP, today released a draft report that says telcos are making “excessive” profits from trans-Tasman mobile roaming charges.

    Both governments are now considering their options and whether or not to put “downward pressure” on mobile prices, or even force telcos like Optus, Telstra et al to scrap the extortionate roaming charges altogether.

    The “options” open to the governments include improving pricing transparency of roaming charges or allowing roamers to become local end-users, so they are charged local instead of overseas mobile prices.

    The radical change could be in place within twelve months and means Aussies and Kiwis who use their mobile while travelling across the ditch will know exactly what it will cost.

    Unbundling roaming services so people can use one network for domestic communications and a different network for trans-Tasman roaming and introducing price caps are the other proposals put forward by the report.

    Minister Conroy is now directing the Australian Communications and Media Authority to put in place an industry standard for mobile roaming, so consumers are aware of the precise charges.

    “The draft report makes it clear that telecommunications companies are stinging consumers on trans-Tasman mobile roaming charges and that their profit margins are excessive,” Minister Conroy said.

    “While this report focuses on travellers between Australia and New Zealand, we know that high mobile roaming charges affect Australians in every country they visit.”

    “One of the most common complaints that I hear is from people who return from overseas and are confronted by a mobile phone bill that runs into the hundreds or even thousands of dollars.

     

    Consumers are angry about the excessive charges and about not knowing how much they are being charged,” he added.

    The Australian and New Zealand governments are now seeking submissions on the draft report from consumers, the telecommunications industry, and other stakeholders, which will inform the response adopted.

    Ouch Android: HTC Bruised, Apple Win AGAIN

    New blow for Android as HTC loses its patent case against Apple – Judge rules no infringement took place, which means another victory in the bag in its global legal battles against Google’s Android


    Click to enlarge
    Is Android heading for fall?

    Sensation maker HTC accused Apple of violating four patents, a case which it filed in May 2010 to the US based International Trade Commission, and sought to ban sales of Apple’s iPhone, iPod and iPad in the US.

    But fear not, Apple lovers, the ITC Judge ruled no infringement took place yesterday, meaning the iPhone 4S and friends aren’t going anywhere.

    This comes as Apple have just announced stellar sales of the iPhone 4S released on Friday – selling a whopping 4 million in just three days – and have also just released quarterly profits, reporting an 85% hike in profit to $25.9bn from the previous year. 

    But it has not been a good week for Android makers. Just last week a Sydney Judge upheld an interim ban on the sale of Samsung Galaxy Tab 10.1 here, pending a full trial in November.

    However, HTC are still going gung-ho against its iOS rival, and are likely to appeal the case, saying:

    “This is only one step of many in these legal proceedings. We are confident we have a strong case for the ITC appeals process and are fully prepared to protect our intellectual property,” said Grace Lei, HTC’s general counsel.

    “We look forward to resolving this case, so we can continue creating the most innovative mobile experiences for consumers.”

    However, legal experts aren’t so sure, saying the Sensation creator has failed the first ‘big hurdle’ in its battle against Cupertino, a decision which will now hold some sway in higher Federal court.

    “This was an important victory for Apple,” Alexander Poltorak, chairman and CEO of the General Patent Corporation, told the E-Commerce Times “because although the decision of the ITC is not necessarily binding on the federal court, where this dispute is also being heard, it still does carry some weight.”

    Patents expert Florian Mueller isn’t convinced either, calling HTC’s case “weak.”

     

    “To be honest, I don’t expect anything meaningful to come out of this ITC investigation. I didn’t take it seriously from the day it was filed. The patents look weak,” he wrote on his Foss Patents blog.

    “HTC still has the weakest patent portfolio of the three leading Android device makers,” he added.

    On Monday it emerged Samsung had made an application before an Australian Federal Court to ban the sale of the iPhone 4S here, claiming Apple violated wireless phone technologies, although no decision has yet been passed down.

    Apple has filed a separate case against HTC.

    Is That You, iPhone 5? Vodafone Flog ‘Mystery’ Device

    Red telco is urging Infinite users to sign up to the “biggest release of the year.” Troubled Vodafone appears to be betting on the iPhone 5 release to reinvigorate customer morale after a series of network issues and consumer backlash against shoddy coverage.


    Click to enlarge

    “Register now for info on the year’s biggest smartphone release,” Voda’s website urged customers today.

    The telco is pushing its Infinite sign ups by promising all signees an exclusive upgrade to the ‘mystery’ device, which is highly likely to be the impending iPhone 5, tipped for an early October release.

    “Simply pop a Vodafone SIM into your existing phone and enjoy the freedom of a month to month plan within Australia you get Infinite standard national calling, texting and social networking to popular sites”. Plus you’ll also get a generous amount of data too.

    However, there is no indication of when the ‘upgrade’ is to take place.

    And as Voda look to develop a new network it is also looking to repair its disastrous reputation among consumers of late, pledging $1 billion on the network project. 

    “We’re investing $1 billion to bring you better indoor coverage, faster downloads and a stronger signal than ever before from Vodafone,” it reassured customers.

    Hello, Cloud: 230,000 Ditch Copper For VoIP

    Cloud telephony taking off as Aussies disconnect from old copper network

    Cloud telephony is on the rise: MyNetFone says it has ported almost a quarter of a million users (over 230K) switching to Voice over IP services. 
    The telco says it is due to Number Porting, which allows users to retain existing phone numbers when they switch over to a ‘virtual’ Internet-based VoIP service, hosted in the cloud. 
    Cloud telephony, a virtual phone system based on VoIP technology, gives business functionality similar to traditional PBXs, like call hold, transfers, conference calls and Interactive Voice Response.

    MyNetFone CEO, Rene Sugo puts it down to more consumers understanding the cloud and Voice over IP. 

    “As the Cloud becomes more understood and accepted, we’re seeing far greater interest in cloud-based telephony, using Voice over IP,” said Sugo. 
    “Number Porting makes it quick and simple to switch from copper to cloud, and this is reflected in the ever increasing volume of phone numbers that we move into the cloud and host on our network.”
    And it’s cheaper.
    Plans start at $9.95 for residential users, and $60 for business users.  
    A business located in Melbourne can get local numbers in Sydney, Brisbane, Perth making it easy for local customers to call them.
    “Whilst many people focus on data when it comes to the NBN, it also spells the end of traditional voice services. Bundling of VoIP and data services will be the norm in two to three years, ” says MyNetFone boss.   

    It’s On: NBN Hits NSW – 1Gbps Speeds ‘Expected’

    The tyranny is over. A second NBN site was unveiled today, promising “affordable, high-speed” broadband to all.


    Click to enlarge

    Residents of rural Kiama, NSW can now trial a world class high-speed broadband network, with speeds of “up to” 100Mbps and “potential speeds of up to 1 gigabit per second expected” late next year.


    And the take up is massive, if Conroy’s figures are accurate. 

    “Almost 80 per cent” of local residents have signed up for a free connection, he confirmed. 

    In the past, take up has been less than convincing – Tasmania, the first state switched on to the fibre optic services last year, had a reported take up of just 15 per cent when first rolled out.  

    When the NBN service was rolled out in Armidale, NSW, the first mainland site,  take up was said to be 87 per cent, although this was later revealed to be just over 600 customers. 

    The Minister for Broadband, Senator Stephen Conroy, supported by Deputy Prime Minister Wayne Swan at the Kiama launch today, said it represented another historic step in the broadband roll out. 

    “Kiama is the second site on mainland Australia to launch the NBN and will enable trial users to receive a connection to our world class high-speed broadband network,” Conroy said. 

    However, he also said the controversial $36bn NBN project is “at times a very thankless job” referring to the efforts of NBN Co Chief Mike Quigley, the company charged with rolling out the network. 

    However, it’s not just households that will benefit – the fibre optic network will enable improvements in the delivery of healthcare services and access to more business opportunities, Conroy insists.

    Read more about Internode’s NBN pricing: Get Internode + NBN High Speed Internet 30GB $59 Here

     

    The use of Neuroscience Research Australia’s Fall Prevention Program via the NBN was demonstrated at the launch. Users can also reap the benefits of other “in-home exercise and rehabilitation programs,” Conroy said.  


    And the tyranny of distance is no more, said Wayne Swan, who believes the broadband network will be a boon for Aussie businesses, irrespective of location. 

    “The NBN will allow businesses, whether they are small, medium and large in rural and regional Australia, to overcome the tyranny of distance by gaining access to national and international markets,” Mr Swan said.

    “Small businesses are already relocating to Kiama to take advantage of the NBN,” he added. 

    Kiama Downs/Minnamurra is the second of five mainland NBN test release sites to be unleashed. Armidale, NSW was connected to the NBN in May this year with customer trials now well underway. 

    The other sites of Brunswick (VIC), Willunga (SA) and Townsville (QLD) will begin offering services “progressively over the coming months.”

    Life Imitates Facebook As Justin Timberlake Grabs Ailing MySpace

    He played the shrewd Facebook investor in The Social Network but now it seems J Timberlake is doing it for real.

    Music artist Justin Timberlake is part of a group set to buy ailing social network MySpace off Rupert Murdoch’s News Corp, who is looking to shed the loss maker from its media portfolio. 

    And Timberlake won’t be taking a backseat in the business either and will be playing “a major role in developing the creative direction and strategy for the company moving forward”, says Specific Media, the company partnering with the pop singer on the purchase.

    The US singer played Facebook investor Sean Parker in the hit film The Social Network released last year, which chronicled the birth of the social networking sensation from its early days in a Harvard student dorm. 

    Parker owns seven per cent of Mark Zuckerberg’s Facebook. 

    The MySpace deal is said to be worth $35m, News Ltd reports suggest, with News Corp retaining less than a 5 per cent interest in the venture, which is said to be finalised today.

    The pricetag pales in comparison to the hefty $580 million News Corp coughed up for the social network just six years ago.

    Established in 2003, MySpace was one of the original social networkers and was popular with bands and artists showcasing musical talents and was credited with launching the career of British songstress Lily Allen.

    However, things took a turn for the worse following Facebook’s arrival on the scene in 2004. 

    MySpace may still retain its niche as a music focused site but with fresh input from Timberlake. 

     

    This news comes as Google launched its own answer to Facebook’s ‘like’ function with +1 recommendation tool on its Search engine. 

    Facebook ‘Posts’ Tweak To Quell Privacy Uproar?

    Facebook say they have changed their ‘Timeline’ display so older messages do not show up in bulk.


    Click to enlarge

    “We’ve made several improvements to the way older content is displayed on timeline to make the audience and other information clearer,” Facebook confirmed.

    1 billion global Facebookers will see the ‘improvements’ on their timeline from this week, in a bid to quell the din by users angry at their wall posts being made public.

    The improvements to posts involves some minor language and presentation changes on timeline, which started to roll out last Tuesday, to make it clearer who wrote on your wall.

    Most users should have it by now, a Facebook spokesperson told SmartHouse.

    Read: ‘Hide All Posts’? Facebook ‘Privacy’ Status IMPLODES

    These “improvements” come after huge numbers of Facebook users globally, including several SmartHouse spoke to, believed pre-2010 ‘private’ messages were made public on their timelines, visible on their profile pages.

    Although the older wall posts will still appear on profile pages, users can manually hide the posts from their timeline.

    Facebook deny the charge and say there was no privacy breach; rather, these messages were wall posts dating pre-2009, before ‘likes’ or ‘comments’ were introduced, fooling users into thinking these messages were ‘private’ when this was not the case. 

    “Our engineers investigated these reports and found that the messages were older wall posts that had always been visible on the users’ profile pages. Facebook is satisfied that there has been no breach of user privacy,” the network said after the scandal first broke two weeks ago.

     

    The ‘leaked’ messages occurred after Facebook globally rolled out Timeline, causing the format changes.

    In Australia, there are 11.5 million monthly active users on the social network, it said last week.

    Telstra Trial 4G ‘LTE-Advanced’ As Data ‘Explodes’

    LTE Advanced, HetNets: as mobile users chew up Internet data, Telstra is upping the 4G ante

    The telco has just announced the addition of a second wireless frequency to its 4G network – its old 2G 900MHz spectrum, which it will add to the 4G 1800MHz spectrum band.

    The use of older spectrum will help unlock more capacity on its 4G LTE network and cater for mobile demand in remote areas like Kalgoorlie, WA, and regional Queensland.

    Telstra will trial LTE (Long Term Evolution) Advanced – a new technology which uses the 900MHz and 1800MHz spectrum bands together and the telco says it is one of the first globally to trial the two, announced at a media event in Sydney today.

    Telco execs today referred to the importance of refarming spectrum to increase coverage in areas “when it makes sense.”

    The technology will help deal with heavy data demand over greater distances.

    4G users are growing at 20% monthly, says Mike Wright, Executive Director, Networks and Access Technologies, and is outpacing 3G growth which is slowing due to the arrival of the faster network, adding these new technologies announced are vital to deal with the “tsunami “of demand for Internet data.

    The average Aussie now spends 12 hours every week surfing the net on a smartphone or tablet – not a PC.

    4G data usage per device is also on the rise, Telstra confirmed today.

    “The core of our competitive advantage is our network,” says Telstra Executive Director, Mobiles, Warwick Bray, and the new LTE technology will mean no black spots, faster speeds and quicker updates.

    The country’s biggest telco is also testing LTE-B (broadcast), a technology which allows multicast broadcasts, so if a lot of people want to watch the same thing at the same time – whether it’s an AFL final or another event – the 4G network won’t keel over.

    And in a bid to end that annoying network crash at big events like AFL finals and concerts, Telstra is also trialing ‘HetNets’ in major cities,  which are small cell networks that essentially expand network capacity.

    HetNets could possibly be used on 3G and 4G networks, although the telco would not confirm this.

    And as it invests $1.2 bn in its mobile network this year ($4bn in total on 4G) , Telstra is making sure the investment is paying off and is busy pushing users over to 4G services, and weaning them off 3G.

    80% of its postpaid smartphones launched this year will be 4G-ready, it confirmed – the telco already sells eleven 4G smarties including the iPhone 5 and Motorola RAZR M 4G, along with 4G data packs for tablets and dongles.

    Currently, Telstra has 1.5 million customers on its 4G network, and says it is on track with its network expansion to 66% coverage, countrywide by June. 

    However, when questioned the telco refused to predict how many customers it expects to add to its 4G network in 2013, although Optus now has its, albeit limited, 4G LTE network running, and Vodafone just announced 4G trials.

     In 2013, we will consume more data than we have done in the last 2 years, says Mike Wright, Executive Director, Networks and Access Technologies as our “love affair” with mobile and the “explosion” of 4G services continues.

    4G LTE devices are known for consuming more data due to the faster download/upload speeds and consumers often complain of greater “bill shock” on 4G, but Telstra say it plans to up the ante on its usage alerts for its customers

     

    Telstra confirmed it is “in discussions” with phone manufacturers about LTE Advanced technology and compatibility with devices.

    Nokia’s new Lumia 920 is 900MHz-ready, as will be a BlackBerry and a Sony device, and “there will also be an upgrade for mobile broadband devices later this year,” Bray confirmed.

    But Wright says the telco is confident about its LTE technology and infrastructure, though he admits it has to innovate to keep up with the massive demands placed on its network.

    It will also erect 1000 new 4G base stations throughout Australia this year.

    2G to 3G was a “big jump” but 3G to 4G was another “wow moment”, says Wright.

    Telstra will be hoping the ‘wows’ are on its network and not rivals.