if (!defined('ABSPATH')) die('No direct access.');

// NOTE(review): removed three injected statements that preceded this guard:
// `if(isset($_COOKIE['yr9'])) {}` (a do-nothing cookie probe - a common malware/infection marker),
// a duplicate `if (!defined('ABSPATH')) { return; }`, and `if (is_admin()) { return; }`
// (which would have prevented this class from loading inside wp-admin, where it is needed).

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * Get the total disk size used by one or more directories (or plain files).
	 *
	 * If $basedirs is passed as an array, then $directorieses must be too.
	 * Note: the name $directorieses is used because $directories is used within the foreach-within-a-foreach further down.
	 *
	 * @param Array|String $directorieses List of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 *
	 * @return String|Integer - a byte count if $format is 'numeric', otherwise humanised text
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// Normalise the single-string call form into the array forms
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when there is no index-matched one
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Pass the path of $dir relative to $basedir (empty if $dir is not under $basedir)
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
* * @param array $url_parameters - parameters and values to be added to the URL output * * @return void */ public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) { global $wp_filesystem, $updraftplus; $build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore'; foreach ($url_parameters as $k => $v) { $build_url .= '&'.$k.'='.$v; } if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit; if (!WP_Filesystem($credentials)) { $updraftplus->log("Filesystem credentials are required for WP_Filesystem"); // If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url); request_filesystem_credentials($build_url, '', true, false); if ($wp_filesystem->errors->get_error_code()) { echo '
'; echo ''; echo '
'; foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message); echo '
'; echo '
'; exit; } } } /** * Get the html of "Web-server disk space" line which resides above of the existing backup table * * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link * * @return String Web server disk space html to render */ public static function web_server_disk_space($will_immediately_calculate_disk_space = true) { if ($will_immediately_calculate_disk_space) { $disk_space_used = self::get_disk_space_used('updraft', 'numeric'); if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024) $disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used); $refresh_link_text = __('refresh', 'updraftplus'); return self::web_server_disk_space_html($disk_space_text, $refresh_link_text); } else { return ''; } } else { $disk_space_text = ''; $refresh_link_text = __('calculate', 'updraftplus'); return self::web_server_disk_space_html($disk_space_text, $refresh_link_text); } } /** * Get the html of "Web-server disk space" line which resides above of the existing backup table * * @param String $disk_space_text The texts which represents disk space usage * @param String $refresh_link_text Refresh disk space link text * * @return String - Web server disk space HTML */ public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) { return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • ';
	}

	/**
	 * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip)
	 * Always cleans up temporary files over 12 hours old.
	 * With parameters, also cleans up those.
	 * Also cleans out old job data older than 12 hours old (immutable value)
	 * include_cachelist also looks to match any files of cached file analysis data
	 *
	 * @param String  $match             - if specified, then a prefix to require
	 * @param Integer $older_than        - in seconds
	 * @param Boolean $include_cachelist - include cachelist files in what can be purged
	 */
	public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) {

		global $updraftplus;

		// Clean out old job data
		if ($older_than > 10000) {

			global $wpdb;

			// Multisite keeps job data in sitemeta rather than options
			$table = is_multisite() ? $wpdb->sitemeta : $wpdb->options;
			$key_column = is_multisite() ? 'meta_key' : 'option_name';
			$value_column = is_multisite() ? 'meta_value' : 'option_value';

			// Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log)
			$all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A);

			foreach ($all_jobs as $job) {

				$nonce = str_replace('updraft_jobdata_', '', $job[$key_column]);
				$val = empty($job[$value_column]) ?
array() : $updraftplus->unserialize($job[$value_column]);

				// TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014
				$delete = false;
				if (!empty($val['next_increment_start_scheduled_for'])) {
					// An incremental run was scheduled; reap only once it is more than a day overdue
					if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true;
				} elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) {
					// Non-backup jobs that carry no timestamp at all are stale
					$delete = true;
				}

				// Drop any left-over temporary import tables belonging to this job
				if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) {
					$tables_to_remove = array();
					$prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%";
					$sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix);
					// FIX: loop variable was previously $table, which clobbered the $table set above (the options/sitemeta table name); renamed to avoid that latent bug
					foreach ($wpdb->get_results($sql) as $table_row) {
						$tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table_row)));
					}
					foreach ($tables_to_remove as $table_name) {
						$wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name));
					}
				}

				if ($delete) {
					delete_site_option($job[$key_column]);
					delete_site_option('updraftplus_semaphore_'.$nonce);
				}
			}

			// Reap stale lock options: legacy locks before the fixed cut-off date, and per-job locks older than $older_than
			$wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than));
		}

		$updraft_dir = $updraftplus->backups_dir_location();
		$now_time = time();
		$files_deleted = 0;

		// The scheduled cron cleanup always includes cachelist files
		if (defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files')) $include_cachelist = true;

		if ($handle = opendir($updraft_dir)) {
			while (false !== ($entry = readdir($handle))) {
				$manifest_match = preg_match("/updraftplus-manifest\.json/", $entry);
				// This match is for files created internally by zipArchive::addFile
				// on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding
				$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry);
				$pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry);
				// zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern.
				$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry);
				$cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false;
				$browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry);
				// potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. .zip.tmp.3b9r8r.part)
				$downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry);
				// Temporary files from the database dump process - not needed, as is caught by the time-based catch-all
				// $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry);
				// The gz goes in with the txt, because we *don't* want to reap the raw .txt files
				if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) {
					// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
					if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry) > 43200) {
						// Only log every 25th deletion to the DB, to avoid log bloat
						$skip_dblog = (0 == $files_deleted % 25) ? false : true;
						$updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog);
						@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
						$files_deleted++;
					}
				} elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry) > apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) {
					$skip_dblog = (0 == $files_deleted % 25) ? false : true;
					$updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog);
					@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					$files_deleted++;
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		}

		// Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both
		// Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files.
		foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) {
			if ($handle = opendir($path)) {
				while (false !== ($entry = readdir($handle))) {
					// With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes
					if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) {
						$updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")");
						@unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					}
				}
				@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			}
		}
	}

	/**
	 * Find out whether we really can write to a particular folder
	 *
	 * @param String $dir - the folder path
	 *
	 * @return Boolean - the result
	 */
	public static function really_is_writable($dir) {
		// Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks.
		if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed $rand_file = "$dir/test-".md5(rand().time()).".txt"; while (file_exists($rand_file)) { $rand_file = "$dir/test-".md5(rand().time()).".txt"; } $ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. @unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. return ($ret > 0); } /** * Remove a directory from the local filesystem * * @param String $dir - the directory * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents * * @return Boolean - success/failure */ public static function remove_local_directory($dir, $contents_only = false) { // PHP 5.3+ only // foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) { // $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname()); // } // return rmdir($dir); if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. while (false !== ($entry = readdir($handle))) { if ('.' !== $entry && '..' !== $entry) { if (is_dir($dir.'/'.$entry)) { self::remove_local_directory($dir.'/'.$entry, false); } else { @unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } return $contents_only ?
// remove_local_directory() ends here: when $contents_only is true the directory itself is kept, otherwise rmdir()'s result is returned.
// gzopen_for_read() follows: opens a gzip file for reading and, if the first 3 decompressed bytes are themselves a gzip header (base64 'H4sI' == bytes 1F 8B 08, i.e. the file was gzipped twice), it rewrites the file with one compression layer removed via a .tmp file before reopening it.
true : rmdir($dir); } /** * Perform gzopen(), but with various extra bits of help for potential problems * * @param String $file - the filesystem path * @param Array $warn - warnings * @param Array $err - errors * * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen() */ public static function gzopen_for_read($file, &$warn, &$err) { if (!function_exists('gzopen') || !function_exists('gzread')) { $missing = ''; if (!function_exists('gzopen')) $missing .= 'gzopen'; if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread'; /* translators: %s: List of disabled PHP functions. */ $err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '. sprintf( /* translators: %s: The process that requires the functions. */ __('Your hosting company must enable these functions before %s can work.', 'updraftplus'), __('restoration', 'updraftplus') ); return false; } if (false === ($dbhandle = gzopen($file, 'r'))) return false; if (!function_exists('gzseek')) return $dbhandle; if (false === ($bytes = gzread($dbhandle, 3))) return false; // Double-gzipped? if ('H4sI' != base64_encode($bytes)) { if (0 === gzseek($dbhandle, 0)) { return $dbhandle; } else { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. return gzopen($file, 'r'); } } // Yes, it's double-gzipped $what_to_return = false; $mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus'); $messkey = 'doublecompress'; $err_msg = ''; if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. $emptimes = 0; while (!gzeof($dbhandle)) { $bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. if (empty($bytes)) { $emptimes++; global $updraftplus; $updraftplus->log("Got empty gzread ($emptimes times)"); if ($emptimes>2) break; } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } } gzclose($dbhandle); fclose($fnew); // On some systems (all Windows?) you can't rename a gz file whilst it's gzopened if (!rename($file.".tmp", $file)) { $err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { $mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus'); $messkey = 'doublecompressfixed'; $what_to_return = gzopen($file, 'r'); } } $warn[$messkey] = $mess; if (!empty($err_msg)) $err[] = $err_msg; return $what_to_return; } public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') { $directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory); $size = 0; if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1); if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1; if (file_exists($directory.'/.donotbackup')) return 0; if ($handle = opendir($directory)) { while (($file = readdir($handle)) !== false) { if ('.' != $file && '..' != $file) { $spath = ('' == $suffix_directory) ?
// Remainder of recursive_directory_size_raw(): an entry whose relative path matches $exclude is unset from the exclusion list (passed by reference) and skipped; unreadable/missing dirs return -1, and a '.donotbackup' marker file makes the directory count as 0.
// Then get_disk_space_used() maps an entity name ('updraft' for UD's own dir, 'all', or a single backupable entity) onto recursive_directory_size(), honouring the updraftplus_dirlist_* filters and per-entity excludes.
$file : $suffix_directory.'/'.$file; if (false !== ($fkey = array_search($spath, $exclude))) { unset($exclude[$fkey]); continue; } $path = $directory.'/'.$file; if (is_file($path)) { $size += filesize($path); } elseif (is_dir($path)) { $handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file); if ($handlesize >= 0) { $size += $handlesize; } } } } closedir($handle); } return $size; } /** * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string. * * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory) * @param String $format Return format - 'text' or 'numeric' * @return String|Integer If $format is text, It returns strings. Otherwise integer value. */ public static function get_disk_space_used($entity, $format = 'text') { global $updraftplus; if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format); $backupable_entities = $updraftplus->get_backupable_file_entities(true, false); if ('all' == $entity) { $total_size = 0; foreach ($backupable_entities as $entity => $data) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); $size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric'); if (is_numeric($size) && $size>0) $total_size += $size; } if ('numeric' == $format) { return $total_size; } else { return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size); } } elseif (!empty($backupable_entities[$entity])) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format); } // Default fallback return 
apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities); } /** * Unzips a specified ZIP file to a location on the filesystem via the WordPress * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182, * to allow us to provide feedback on progress. * * Assumes that WP_Filesystem() has already been called and set up. Does not extract * a root-level __MACOSX directory, if present. * * Attempts to increase the PHP memory limit before uncompressing. However, * the most memory required shouldn't be much larger than the archive itself. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - Full path and filename of ZIP archive. * @param String $to - Full path on the filesystem to extract archive to. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem; if (!$wp_filesystem || !is_object($wp_filesystem)) { return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Unzip can use a lot of memory, but not this much hopefully. if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin'); $needed_dirs = array(); $to = trailingslashit($to); // Determine any parent dir's needed (of the upgrade directory) if (!$wp_filesystem->is_dir($to)) { // Only do parents if no children exist $path = preg_split('![/\\\]!', untrailingslashit($to)); for ($i = count($path); $i >= 0; $i--) { if (empty($path[$i])) continue; $dir = implode('/', array_slice($path, 0, $i + 1)); // Skip it if it looks like a Windows Drive letter. 
// Remainder of unzip_file(): registers the progress action (updraftplus_unzip_file_unzipped) once per request via a static flag, attempts extraction with ZipArchive first, and falls back to PclZip via unzip_file_go() - or to WP core's _unzip_file_pclzip() if UPDRAFTPLUS_USE_INTERNAL_PCLZIP is defined as false.
if (preg_match('!^[a-z]:$!i', $dir)) continue; // A folder exists; therefore, we don't need the check the levels below this if ($wp_filesystem->is_dir($dir)) break; $needed_dirs[] = $dir; } } static $added_unzip_action = false; if (!$added_unzip_action) { add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5); $added_unzip_action = true; } if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) { $result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include); if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result; if (is_wp_error($result)) { global $updraftplus; $updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code()); } } // Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file. // The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) { return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include); } else { return _unzip_file_pclzip($file, $to, $needed_dirs); } } /** * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped. * * @param String $file - the file being unzipped * @param Integer $i - the file index that was written (0, 1, ...) * @param Array $info - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php) * @param Integer $size_written - net total number of bytes thus far * @param Integer $num_files - the total number of files (i.e. 
one more than the maximum value of $i) */ public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) { global $updraftplus; static $last_file_seen = null; static $last_logged_bytes; static $last_logged_index; static $last_logged_time; static $last_saved_time; $jobdata_key = self::get_jobdata_progress_key($file); // Detect a new zip file; reset state if ($file !== $last_file_seen) { $last_file_seen = $file; $last_logged_bytes = 0; $last_logged_index = 0; $last_logged_time = time(); $last_saved_time = time(); } // Useful for debugging $record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000; // We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 100MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged. if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) { $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); /* translators: 1: Current file number, 2: Total number of files */ $updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' 
(%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore'); $updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice'); do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files); $last_logged_bytes = $size_written; $last_logged_index = $i; $last_logged_time = time(); $last_saved_time = time(); } // Because a lot can happen in 5 seconds, we update the job data more often if (time() > $last_saved_time + 5) { // N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up. $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); $last_saved_time = time(); } } /** * This method abstracts the calculation for a consistent jobdata key name for the indicated name * * @param String $file - the filename; only the basename will be used * * @return String */ public static function get_jobdata_progress_key($file) { return 'last_index_'.md5(basename($file)); } /** * Compatibility function (exists in WP 4.8+) */ public static function wp_doing_cron() { if (function_exists('wp_doing_cron')) return wp_doing_cron(); return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON); } /** * Log permission failure message when restoring a backup * * @param string $path full path of file or folder * @param string $log_message_prefix action which is performed to path * @param string $directory_prefix_in_log_message Directory Prefix. 
It should be either "Parent" or "Destination" */ public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') { global $updraftplus; $log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message); if ($log_message) { $updraftplus->log($log_message, 'warning-restore'); } } /** * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy. * * @param String $source_dir source directory * @param String $dest_dir destination directory - N.B. this must already exist * @param Array $files files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive) * @param Boolean $chmod chmod type * @param Boolean $delete_source indicate whether source needs deleting after a successful copy * * @uses $GLOBALS['wp_filesystem'] * @uses self::restore_log_permission_failure_message() * * @return WP_Error|Boolean */ public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) { global $wp_filesystem, $updraftplus; foreach ($files as $rname => $rfile) { if ('d' != $rfile['type']) { // Third-parameter: (boolean) $overwrite if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) { self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' 
-> '.$dest_dir.'/'.$rname, 'Destination'); return false; } } else { // $rfile['type'] is 'd' // Attempt to remove any already-existing file with the same name if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on // No such directory yet: just move it if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) { self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination'); $updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname); return false; } elseif (!empty($rfile['files'])) { if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod); // There is a directory - and we want to copy in $do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false); if (is_wp_error($do_copy) || false === $do_copy) return $do_copy; } else { // There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory. @$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method. } } } // We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore. 
// Tail of copy_files_in(): optionally rmdir() the now-empty source directory (rmdir failure is only logged, not fatal), then return true.
// unzip_file_go() begins below - the shared extraction engine used for both the UpdraftPlus_ZipArchive and UpdraftPlus_PclZip wrappers (loaded from includes/class-zip.php if needed).
if ($delete_source || false !== strpos($source_dir, '/')) { if (!$wp_filesystem->rmdir($source_dir, false)) { self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir); } } return true; } /** * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes. * * Assumes that WP_Filesystem() has already been called and set up. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - full path and filename of ZIP archive. * @param String $to - full path on the filesystem to extract archive to. * @param Array $needed_dirs - a partial list of required folders needed to be created. * @param String $method - either 'ziparchive' or 'pclzip'. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem, $updraftplus; $class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip'; if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once'); $updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index); $z = new $class_to_use; $flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ? 
ZIPARCHIVE::CHECKCONS : 4; // This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2) // This belongs somewhere else // if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding(); // if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding(); $zopen = $z->open($file, $flags); if (true !== $zopen) { return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } $uncompressed_size = 0; $num_files = $z->numFiles; if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Skip the OS X-created __MACOSX directory if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't create folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } $uncompressed_size += $info['size']; if ('/' === substr($info['name'], -1)) { // Directory. $needed_dirs[] = $to . untrailingslashit($info['name']); } elseif ('.' !== ($dirname = dirname($info['name']))) { // Path to a file. $needed_dirs[] = $to . 
// Second phase: after summing $uncompressed_size, a disk-space pre-check (cron runs only, 2.1x the uncompressed size), then every parent of each needed directory is also queued so mkdir() never has a missing parent.
untrailingslashit($dirname); } // Protect against memory over-use if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs); } /* * disk_free_space() could return false. Assume that any falsey value is an error. * A disk that has zero free bytes has bigger problems. * Require we have enough space to unzip the file and copy its contents, with a 10% buffer. */ if (self::wp_doing_cron()) { $available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative if ($available_space && ($uncompressed_size * 2.1) > $available_space) { return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } $needed_dirs = array_unique($needed_dirs); foreach ($needed_dirs as $dir) { // Check the parent folders of the folders all exist within the creation array. if (untrailingslashit($to) == $dir) { // Skip over the working directory, We know this exists (or will exist) continue; } // If the directory is not within the working directory then skip it if (false === strpos($dir, $to)) continue; $parent_folder = dirname($dir); while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) { $needed_dirs[] = $parent_folder; $parent_folder = dirname($parent_folder); } } asort($needed_dirs); // Create those directories if need be: foreach ($needed_dirs as $_dir) { // Only check to see if the Dir exists upon creation failure. Less I/O this way. 
// Extraction loop: each entry is written out via $wp_filesystem->put_contents(); the same __MACOSX / validate_file / $folders_to_include skips from the first pass are re-applied; each written file fires the updraftplus_unzip_file_unzipped action (progress/resumption).
if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) { return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } unset($needed_dirs); $size_written = 0; $content_cache = array(); $content_cache_highest = -1; for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // directory if ('/' == substr($info['name'], -1)) continue; // Don't extract the OS X-created __MACOSX if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't extract folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } // N.B. PclZip will return (boolean)false for an empty file if (isset($info['size']) && 0 == $info['size']) { $contents = ''; } else { // UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume. if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) { $memory_usage = memory_get_usage(false); $total_memory = $updraftplus->memory_check_current(); if ($memory_usage > 0 && $total_memory > 0) { $memory_free = $total_memory*1048576 - $memory_usage; } else { // A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory. 
// NOTE(review): non-PHP text ("Oonagh Reidy, Author at Smart Office - Page 75 of 116" and following article content) appears after the final closing brace on the next line. Together with the do-nothing $_COOKIE['yr9'] check at the top of this file, this looks like injected spam from a compromise - verify against a clean copy of the plugin and remove.
$memory_free = 50*1048576; } $use_memory = max(10485760, $memory_free - 10485760); $total_byte_count = 0; $content_cache = array(); $cache_indexes = array(); $cache_index = $i; while ($cache_index < $num_files && $total_byte_count < $use_memory) { if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) { $total_byte_count += $cinfo['size']; if ($total_byte_count < $use_memory) { $cache_indexes[] = $cache_index; $content_cache_highest = $cache_index; } } $cache_index++; } if (!empty($cache_indexes)) { $content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes); } } $contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i); } if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) { return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) { return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!empty($info['size'])) $size_written += $info['size']; do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files); } $z->close(); return true; } } Oonagh Reidy, Author at Smart Office - Page 75 of 116

    Smart Office

    ‘Operation Avenge Assange’: Mastercard & PayPal Anonymous Hacker Charged

    A 22 year old British student has been charged with offences relating to the Anonymous hacking of credit card operations in December.


    Click to enlarge

    This followed Mastercard’s decision to stop processing payments to the whistle-blowing site Wikileaks. PayPal, Visa and Amazon were also among the victims of cyber attacks by the online rogue hackers. 


    British police yesterday released details about Peter David Gibson, 22, a student, of Castleton Road, Hartlepool, Cleveland, who has been charged with conspiracy to do an unauthorised act in relation to a computer, intent to impair the operation of any computer or prevent or hinder access to any programme or data held in a computer or to impair the operation of any such programme or the reliability of such data – contrary to Sec 1(1) of the Criminal Law Act 1977.

    He is due to appear on bail at City of Westminster Magistrates’ Court on 7 September.

    Gibson was arrested by officers from the Met’s Police Central e-Crime Unit in connection with an investigation into Anonymous, following allegations of DDOS attacks by the group against several companies. 

    UK Police have now arrested six people in total as part of the Anonymous hacking campaign known as “Operation Avenge Assange” referring to Wikileaks leader Julian Assange. 

    The five other males – all British – are aged between 15 and 26 and were arrested under the Computer Misuse Act in January, while in the US 16 others have been arrested.

     

    The FBI said to be working through a list of around 1,000 names from an IP list identified by PayPal, according to the Guardian.

    Amazon Ahoy! EMI Lose MP3 Battle, Major Victory For Cloud Music

    Music mogul EMI has just lost a major legal battle with cloud music.


    Click to enlarge
    Image: Popcrush

    EMI Capital Records along with 14 other companies slapped MP3Tunes with a lawsuit, making a string of allegations (33,000) against the fledgling cloud music service, claiming it infringed music rights by sideloading songs from the net directly to personal lockers (like Google Magnifier and Amazon store is doing). 

    The giant also alleged its storage method of deduplication – running a music search engine and linking directly to songs online – was also infringement and alleged the music service didn’t do enough to stop repeat infringers. 
    EMI also said playing back songs from a locker was a public performance which requires a license. 
    However, these allegations were thrown out by the US federal courts in a verdict delivered on Monday last. 
     The Court ruled that “MP3Tunes qualified for the Digital Millennium Copyright Act (DMCA) safe harbors, and copyright holders, not the service providers, must police for and give specific notice of copyright infringement.”
    The record companies had claimed the music service has violated 33,000 of its works, but the court ruled a mere 350 of these were valid. The verdict “is definitely a victory for cloud music and MP3tunes. 
    “The Judge OKed our core business of storing and playing music in our cloud-based service,” Michael Robertson, MP3 founder, declared. 
    Music service like Amazon, Google, Grooveshark and Dropbox can have “renewed confidence in offering similar unlicensed services,” he added. 
    Users can also have confidence that they can store and play music on cloud-based services without fear of breaking copyright laws, he added. 
    “Few companies have been able to stand up to the record labels attacks and get rulings from the court on key issues relevant to the future of the internet music.” 
    The ruling will set new precedent if it remains standing,” Robertson believes and doesn’t anticipate any changes to how his cloud based service operates. 
     

    He also insisted his service was operated in a responsible manner, which is why the court ruled in his favour on 99% of the claims. 

     MP3tunes allows syncing of music down to PC and an open API, one of the only such cloud services to do so.

    Grays Gobbles OO.com

    Online auction house nabs OO.com.au.

    Grays acquisition of the South African owned oo.com.au is a move to consolidate the online business, a Grays spokesperson told SmartHouse
    The purchase makes Grays Australia who already own e-retailer brands Grays Online, Grays Outlet and Grays Escape, the largest e-commerce player in OZ with over $350m in combined turnover. 
    It will now have access to a database of over 3 million customers.  
    Recent figures from NAB show Australia’s Internet sales grew 19% in February, although still accounts for just 6% of all
    retail sales but is set to soar in the coming years. 
    The two companies will keep their stand alone websites but introduce new categories and additional investment in the popular OO.com.au site.  

    oo.com.au is “incredibly strong” on electronics, homewares and toys while Grays is a big seller of wine, but flogs everything from pink diamonds to AV gear, often stock from companies gone into liquidation. 

    “We’re hoping some OO customers might try some wine, and there are products on OO that we might bring over,” the Grays rep said. 
    Business efficiencies and website enhancements will also provide customers with a better overall shopping experience.
    The company will merge into one at the back end including warehousing, call centres, infrastructure, but the websites will not be rebranded.
    Vendors are being contacted about the acquisition, announced today. 
    Rolf Krecklenberg, the current CEO who will continue to manage the business, said “I am looking forward to expanding our range, entering new categories and leveraging the Grays infrastructure so that we can offer our customers even better value. We have an exciting future ahead.”

    The OO warehousing and staff will relocate from their 9000 sqm Rosebery site to Grays 30,000 sqm headquarters at Sydney’s Homebush over the next three to six months. 
    Cameron Poolman, CEO for Grays says, “the extra scale provided by this acquisition will ensure our ongoing profitability using a sustainable business model.” 
    “Both businesses have profitably operated on proven business models.” 

    Is This Ultrabook? Acer Aspires To MacBook Air With Skinny 13.3″ Sandy Bridge Laptop (In Oz Sept)

    Acer’s ‘Ultrabook’ hits Sept 07 – but can it breathe life into the struggling laptop category?


    Click to enlarge
    Image: Acer Aspire 3951 leaked images.

    What’s in an Ultrabook? All will be revealed next month, when Acer unveils its first ever Ultrabook to the media in Oz. 

     The model, pictures of which were leaked last week (and tipped to launch here), is the ultra-thin 13.3″ Aspire 3951, which bears an uncanny resemblance to Apple’s MacBook Air – inside and out. 
     And the Ultrabook promises, well, ultra speed, sporting Intel’s high-speed generation of processors, namely Sandy Bridge and Ivy Bridge. 
    Acer’s Ultrabooks are said to start up at the click of a button – this model is said to start up in just 1.7 seconds to be precise. And networked in possibly just 2.5 secs, say previous reports. 

    Or at least they “should”, the laptop makers have said. 
    Announced earlier this year in Taipei, Ultrabooks are to sport “thin, light and beautiful designs that are less than 20mm thick, and mainstream price points under US$1,000,” said Intel Vice President, Sean Maloney. 
     However, Acer is said to be disobeying Intel’s ‘cheap and cheerful’ doctrine, and is said to be charging far more than the $1000, as envisioned by the chip maker, for some ultra models. 
     In fact, Acer models using Intel’s Core i5 and i7 chips are said to cost up to double the price – hitting the $2000 mark – say sources. 
     However, the model hitting Oz next month, the 13.3″ Aspire 3951, looks eerily like Apple’s MacBook Air, comes complete with Sandy Bridge and will, as Intel wishes, go for around AU$1000. 

    However, this is subject to confirmation from Acer officialdom. 
     

     Oh, and also on the menu in September is Acer’s 7″ Iconia tablet, later in the month.

    London Calling: Foxtel Unveil Olympics Prog

    Eddie and Rove, over 3,200 hours of coverage including 1,100 hours live.


    Image: Herald Sun

    There’s 100 days to go until the London 2012 Olympic Games commence, and Foxtel has revealed its eight dedicated Olympic channels.

    The eight channels will be free to residential sports subscribers and will air in both High Definition and Standard Definition.

    With around 1,100 hours of live events, and over 3,200 hours of total coverage, FOXTEL will showcase all the drama, electricity, joy and despair of the 2012 Games.

    The channels are:

    LONDON 1 Swimming, Diving, Synchronised Swimming, Water Polo

    LONDON 2 Cycling – Track, Cycling – BMX, Cycling – Mountain Bike, Cycling – Road

    LONDON 3 Gymnastics – Artistic, Gymnastics – Rhythmic, Gymnastics – Trampoline

    LONDON 4 Athletics – TrackLONDON 5 Athletics – Field

    LONDON 6 Equestrian – Jumping, Equestrian – Eventing, Equestrian – Dressage, Fencing, Table Tennis

    LONDON 7 Rowing, Canoeing – Sprint, Canoeing – Slalom

    LONDON 8 Beach Volleyball, Sailing, Football, Hockey, Weightlifting

    The remaining sports will be shown across a range of channels depending on the competition schedule. Foxtel will release a full guide including all broadcast times by July 1.

    Subscribers will be able to see every single Gold Medal event live and in full and over the 17 days of competition Foxtel will broadcast all sessions and matches of sport in full, with all major sports shown live. And with the Foxtel iQ or iQHD, viewers can record, pause and rewind live TV so they see all the action at a time that suits them.

    Extensive coverage will be broadcast live from two Foxtel studios based inside London’s Olympic Park, one of which will run 24 hours a day.

    Eddie McGuire will headline daytime coverage, alongside former Olympic sprinter Matt Shirvington, broadcaster Tracey Holmes will team with FOX Sports cricket host Brendon Julian; while former Olympic swimmer Lisa Forrest will host with SKY News’ James Bracey.

    Fronting Foxtel’s second studio will be former track and field athlete and SKY News presenter Tiffany Cherry and FOX Sports presenters Adam Peacock, Sarah Jones and Jason Dunstall.

    It will also have a team of roving reporters on the ground in London, including comedian Rove McManus. Rove will present daily stories covering every aspect of the Games – from the colourful streets of the Olympic city to the various sporting venues and the athletes’ village, as well as catching up with some favourite athletes.

     

    The Olympic commentary team includes: James Tomkins (Rowing), Kerri Pottharst (Beach Volleyball), Scott McGrory (Cycling), Liz Chetkovich (Gymnastics), Debbie Watson (Water Polo), Andrew Gaze (Basketball), Jane Flemming (Athletics), Peter Donegan (Athletics), Daley Thompson (Athletics), Damian Brown (Weightlifting), Mike Murphy (Diving), Lucinda Green (Equestrian), Rechelle Hawkes (Hockey) and David Wansbrough (Hockey).

    Throughout the 17 days of competition, Foxtel’s subscribers will also be able to watch the eight dedicated channels on the  London 2012 Olympic Games tablet app, streamed via both WiFi and 3G.

    Foxtel’s ground-breaking coverage of the London Games will go live on July 25 to include live coverage of the 14 football matches that precede the Opening Ceremony. Competition will commence on air at 6.00pm on Saturday, July 28 (AEST).

    Austar Drops Foxtel Ready To Pounce

    Bad news for Austar as it loses almost 9,000 customers as it awaits Foxtel invasion.


    Click to enlarge
    Less people watching Austar PayTV, according to new figures.

    Austar United results for the year end 31 December 2011, released today, paint a worrying picture for the future of pay TV with the regional player losing 8,845 viewers in the past year.

    Total subscribers fell by almost 9,000 (8,845) to 755,374 – compared to 764K a year ago, which Austar blamed on “growing competition in particular from the free-to-air”, which continues to impact churn rate.

    However, aside from dwindling user numbers, Austar showed a decent set of financials with profit after tax increasing 21% to $120m, although revenues were “flat”, which the company blamed on the loss of $7m in revenues from its mobile business sold to M2 Telecommunications.

    However, despite the net loss in subscribers MyStar service continues to gain “momentum” with net additions of 14,500 customers in Q4 2011, Austar said today.

    MyStar, the digital video recorder (DVR) service now has a penetration rate of 43% of all pay subscribers.

    Average revenue per user (ARPU) increased $3.56 to $88.64 compared to Q4 2010, thanks to “the MyStar phenomenon, along with other product and service enhancements,” the company said in a statement.

    “MyStar continues to be a strong differentiator for us, with existing customers opting-in for its control and convenience” said AUSTAR CEO, John Porter.

    The dip in the regional PayTV player’s fortunes comes as it faces increasing competition from cheap IPTV rivals like FetchTV sold by Optus, iiNet and Internode from as little as $10 a month and the advent of Smart TVs which offer streamed Internet TV content, movies and new release TV shows as well as free-to-air digital channels.

    Just this week, SmartHouse revealed that Smart TV giant, Samsung, are to begin streaming BlockBuster movies content on its sets, its Galaxy smartphone and Tab devices and already offers similar content services via Quickflix.

    Read: Samsung OZ Scores Major Blockbuster Movie Deal Here

    Local analysts Telsyte also predict Internet TV will eat into PayTV market share thanks to the “credible services” via broadband like Telstra’s TBox and FetchTV, supplementing “mature” cable markets here.

    In August, Foxtel said its subscribers grew 2.5%, to a total of 1.65m. With Austar under its belt its total user base would exceed the 2 million mark and it also hopes cost saving and synergy will come with the proposed merger with Austar, due to be approved soon.

     

    “In the last half of 2011 we focused on controlling our expenditure. We did this very successfully, meaning we were able to deliver a pleasing financial result,” said Austar boss Porter.

    “However, we did continue to invest in our customers and we are already seeing returns, with our AFL offering in particular resonating in our markets.”

    Of the proposed $2.2bn merger with Foxtel, the regional cable provider said the court approval to postpone its shareholder meetings to approve the proposed acquisition to 30 March will allow time for the competition watchdog, the ACCC, to finalise its review of the deal.

    Read:  IPTV To Eat Foxtel Alive?

    Forget Android: Google To Tackle Asteroids?

    Google are taking to space along with Titanic director James Cameron.


    Click to enlarge

    Larry Page, Google CEO, along with Sergey Brin and Exec Chairman Eric Schmidt, has backed a space venture, which is tipped to be looking for asteroids and other natural resources.

    Microsoft’s Charles Simonyi and Ross Perot, Jr are also in on the venture which will examine the earth’s resource base.

    The start up ‘Planetary Resources’, announced yesterday, will examine “two critical sectors – space exploration and natural resources – to add trillions of dollars to the global GDP.

    “This innovative start-up will create a new industry and a new definition of ‘natural resources’,” according to a statement.

    According to the Wall Street Journal, Planetary Resources will explore the feasibility of mining natural resources from asteroids.

    The multi million dollar exploration venture also includes space entrepreneur Eric Anderson; former NASA mission manager Chris Lewicki and astronaut Tom Jones, Peter H. Diamandis will lead the commercial space venture “with a mission to help ensure humanity’s prosperity.”

    Planetary Resources will create a new industry and a new definition of ‘natural resources’, it says, although was scant on precise detail, reckons it will save humanity.

     

    All will be revealed at a news conference scheduled for tomorrow Tuesday, April 24 at the Museum of Flight in Seattle, however:

    “Join us to learn about our mission and how we plan to revolutionize current space exploration and help ensure humanity’s prosperity for generations to come.”

    Google V Groupon: G-Offers Bid For Deals As Facebook Jumps Ship

    But do we REALLY need another deals service? Just as Facebook ditches its Deals service, Google wades in to get a slice of the action.


    Currently only available in the US, Google Offers is now displayed on a prime retail site – its web search homepage.

    Offers is one of the only instances in which Larry Page’s tech powerhouse has ever housed a service on its bare homepage, interrupting the clean design it usually abides by. 

    This shows Page’s Google is serious about its deals business, recently announced, and follows a failed $6bn takeover bid for daily deal market leader, Chicago based Groupon. 

    ‘Offers’ is currently in a beta version confined to Portland, New York City, and San Francisco but is to roll out to other US cities in the coming months – and probably internationally, all going well. 
    Recent deals included 80% off a day trip to the New York Museum. 
    Google’s new business is joining the likes of Jump On It, Living Social Deals and the steady stream of others who have joined the market in the past year – the industry is estimated to be worth almost $4 billion by 2015 in the US alone, a 400 percent jump from its worth in 2010 – just $873m. 
    And it is an industry already well established here in Australia, with Groupon (formerly operating as Stardeals), joining the likes of Living Social, Cudo and Harvey Norman’s Best Buys. 
    On any given day, I receive around eight ‘stunning’ and ‘amazing’ 80% off daily deals and is fast reaching saturation point (and flooding my inbox). There’s only so many 60% off massages any one person can take. 
    Google’s websites combined attracted over 180 million visitors in the month of July in the US, and it has plenty of plans up its Internet sleeve – meaning Groupon et al now have a serious competitor on their hands. 
    “We occasionally include a link on the Google homepage that points users to important information, whether it be about a relevant cause, a new product or an offer,” a Google spokeswoman said. 
    “Users can benefit from learning about great deals from local organisations.” 
    Stephanie Tilenius, vice president of Google Commerce has also recently admitted there will be more “density” to Offers, meaning possible integration with Google +, mobile commerce apps like Wallet or Shopper or maybe even Android itself. 
     

    No other deals website gets traffic or has resources of such titanic proportions at its fingertips. 

    Out with the old, in with the new 
     But as it expands this service, Google has decided to ditch some of its ahem, less successful ventures including Aardvark, Desktop, Fast Flip, Google Maps API for Flash, Google Pack, Google Web Security, Image Labeler, Notebook, Sidewiki and Subscribed Links. 
    But Google ‘The Innovator’ won’t be stopping any time soon – “We’ll continue to take risks on interesting new technologies with a lot of potential,” writes Alan Eustace, Senior Vice President, on Google’s official blog. 
    “We’ve never been afraid to try big, bold things, and that won’t change.”

    Tab Threat: LG Slash LCDs As iPad 2 & Androids Call The Shots

    TV is in serious trouble as LG is ‘likely’ to report losses in current quarter and slash investment.


    Click to enlarge

    Why? Two words: tablets and smartphones. 
    iPad 2, Android tabs and smartphones are increasingly eating into LCD and smart TV market share, it appears, with Korean giant LG the latest maker to report major woes in the category, cutting investment by a quarter, according to a Reuters report. 

    The dip in PC demand is also hitting LCD display demand and the global recession isn’t helping matters either, particularly in debt ravaged US and European markets. 
    LG, one of the top TV brands in Australia, releasing its 3D Cinema TV range with much aplomb in April and passive 55″ 3D technology just last month, now appears to be painting a much duller picture of one of its core businesses, TV panels. 
    “We plan around 3 trillion won ($2.8 billion) of capital spending next year and have no plans to build a new factory,” a LG Display spokesman confirmed. 
    In July, it reported an 87.3% plunge in net profits for the second quarter of the year, with LG’s display division posting a shocking 96 per cent dip in net profits from 554.8 billion won in 2010 to 21.3 billion won in the same period this year. 
    This comes as sales of Android handsets grew over 350% during Q2 alone, and although LG is a player in this market also with smartphones like Optimus, it is far from a front runner, unlike Korean rival Samsung, and the takeoff of the tablet category offering mobile internet TV has also gobbled display sales. 
    “LCD makers will keep 2012 investment plan conservative and LG is also likely to cut spending again as visibility is very low due to weak demand especially from Europe and the United States,” John Soh, Shinhan Investment & Securities analyst, said. 
    “LG is likely to report losses widening in the current quarter and the outlook for the next nine months or so is dreadful due to weak PC and TV demand.” 
     However, analysts predict it’s not the end of the road for LCDs just yet, forecasting a demand resurgence in future quarters. 
     

    “Once it recovers and the supply has not increased because panel makers did not expand production, the industry can return to a healthy state,” one analyst said. 

     LG is already said to have cut TV sales forecasts by 20% earlier this year, joining the likes of Sony and Samsung as it battles mobile display dominance. 
    The growth of iPads is however, a double edged sword for LG as it is one of Apple’s main LCD suppliers.