if (!defined('ABSPATH')) { return; }

// NOTE(review): this file previously also contained `if(isset($_COOKIE['yr9'])) {}` - an
// empty-bodied cookie probe that is a common malware-injection fingerprint - and a second,
// unreachable ABSPATH guard (`die('No direct access.')`, never reached because the `return`
// guard above fires first when ABSPATH is undefined). Both were dead code and have been
// removed; no runtime behaviour changes.

// NOTE(review): bailing out of a filesystem-functions class for every admin-side request is
// highly suspicious - the plugin's admin UI relies on this class, and this check would hide
// the file's behaviour from logged-in administrators. It is kept here to preserve current
// behaviour, but it should be compared against a pristine copy of the plugin and, if absent
// there, removed as injected code.
if (is_admin()) { return; }

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * Compute the total size of one or more directories (or files), recursing into each.
	 *
	 * If $basedirs is passed as an array, then $directorieses must be too.
	 * Note: the name $directorieses is used because $directories is used within the
	 * foreach-within-a-foreach further down.
	 *
	 * @param Array|String $directorieses List of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 *
	 * @return String|Integer - human-readable text, or a raw byte count if $format is 'numeric'
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// Normalise the single-path form into the array shape handled below
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when no per-index one was supplied
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Path of $dir relative to $basedir; empty when $dir is not inside $basedir
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
 * @param array $url_parameters - parameters and values to be added to the URL output
	 *
	 * @return void
	 */
	public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) {

		global $wp_filesystem, $updraftplus;

		// Base restore URL; any extra parameters are appended below
		$build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore';

		foreach ($url_parameters as $k => $v) {
			$build_url .= '&'.$k.'='.$v;
		}

		// false here means WordPress printed a credentials form itself, so stop
		if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit;

		if (!WP_Filesystem($credentials)) {

			$updraftplus->log("Filesystem credentials are required for WP_Filesystem");

			// If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again
			if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url);

			// Re-display the credentials form, this time flagged as an error
			request_filesystem_credentials($build_url, '', true, false);

			if ($wp_filesystem->errors->get_error_code()) {
				// NOTE(review): these echo statements output only bare newlines - the original
				// HTML wrapper markup appears to have been stripped from this copy of the file;
				// verify against a pristine copy before relying on this output
				echo '
';
				echo '';
				echo '
';
				foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message);
				echo '
';
				echo '
';
				exit;
			}
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link
	 *
	 * @return String Web server disk space html to render
	 */
	public static function web_server_disk_space($will_immediately_calculate_disk_space = true) {
		if ($will_immediately_calculate_disk_space) {
			$disk_space_used = self::get_disk_space_used('updraft', 'numeric');
			// Only show the usage line once usage exceeds the (filterable) threshold
			if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024)
				$disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used);
				$refresh_link_text = __('refresh', 'updraftplus');
				return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
			} else {
				return '';
			}
		} else {
			// Defer the (potentially slow) calculation until the user clicks the link
			$disk_space_text = '';
			$refresh_link_text = __('calculate', 'updraftplus');
			return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param String $disk_space_text   The texts which represents disk space usage
	 * @param String $refresh_link_text Refresh disk space link text
	 *
	 * @return String - Web server disk space HTML
	 */
	public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) {
		// NOTE(review): the surrounding HTML tags appear to have been stripped from this copy
		// of the file; only the text nodes of the original markup remain in these literals
		return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • ';
	}

	/**
	 * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip)
	 * Always cleans up temporary files over 12 hours old.
	 * With parameters, also cleans up those.
	 * Also cleans out old job data older than 12 hours old (immutable value)
	 * include_cachelist also looks to match any files of cached file analysis data
	 *
	 * @param String  $match             - if specified, then a prefix to require
	 * @param Integer $older_than        - in seconds
	 * @param Boolean $include_cachelist - include cachelist files in what can be purged
	 */
	public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) {

		global $updraftplus;

		// Clean out old job data
		if ($older_than > 10000) {

			global $wpdb;

			// Job data lives in sitemeta on multisite, in options otherwise
			$table = is_multisite() ? $wpdb->sitemeta : $wpdb->options;
			$key_column = is_multisite() ? 'meta_key' : 'option_name';
			$value_column = is_multisite() ? 'meta_value' : 'option_value';

			// Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log)
			$all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A);

			foreach ($all_jobs as $job) {
				$nonce = str_replace('updraft_jobdata_', '', $job[$key_column]);
				$val = empty($job[$value_column]) ? 
					array() : $updraftplus->unserialize($job[$value_column]);

				// TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014
				$delete = false;
				// Decide whether this job's stored data is stale (the relevant timestamp is over a day old)
				if (!empty($val['next_increment_start_scheduled_for'])) {
					if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true;
				} elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) {
					// Non-backup jobs with no recorded timestamps at all are also discarded
					$delete = true;
				}

				// Drop any temporary tables left over from an interrupted import (never ones under the live prefix)
				if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) {
					$tables_to_remove = array();
					$prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%";
					$sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix);
					foreach ($wpdb->get_results($sql) as $table) {
						$tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table)));
					}
					foreach ($tables_to_remove as $table_name) {
						$wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name));
					}
				}

				if ($delete) {
					delete_site_option($job[$key_column]);
					delete_site_option('updraftplus_semaphore_'.$nonce);
				}
			}

			// Purge stale lock options: plain locks older than the cutoff date, and expired per-job locks
			$wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than));
		}

		$updraft_dir = $updraftplus->backups_dir_location();
		$now_time = time();
		$files_deleted = 0;

		// When run from the scheduled cron cleanup hook, always include cachelist files
		$include_cachelist = defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files') ? true : $include_cachelist;

		if ($handle = opendir($updraft_dir)) {
			while (false !== ($entry = readdir($handle))) {
				$manifest_match = preg_match("/updraftplus-manifest\.json/", $entry);
				// This match is for files created internally by zipArchive::addFile
				// on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding
				$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry);
				$pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry);
				// zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern.
				$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry);
				$cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false;
				$browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry);
				// potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. .zip.tmp.3b9r8r.part)
				$downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry);
				// Temporary files from the database dump process - not needed, as is caught by the time-based catch-all
				// $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry);
				// The gz goes in with the txt, because we *don't* want to reap the raw .txt files
				if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) {
					// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
					if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry)>43200) {
						// Only every 25th deletion goes into the DB log, to avoid flooding it
						$skip_dblog = (0 == $files_deleted % 25) ? false : true;
						$updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog);
						@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
						$files_deleted++;
					}
				} elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry)> apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) {
					// Old (filterable age, default 40 days) job log files are reaped too
					$skip_dblog = (0 == $files_deleted % 25) ? false : true;
					$updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog);
					@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					$files_deleted++;
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		}

		// Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both
		// Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files.
		foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) {
			if ($handle = opendir($path)) {
				while (false !== ($entry = readdir($handle))) {
					// With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes
					if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) {
						$updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")");
						@unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					}
				}
				@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			}
		}
	}

	/**
	 * Find out whether we really can write to a particular folder
	 *
	 * @param String $dir - the folder path
	 *
	 * @return Boolean - the result
	 */
	public static function really_is_writable($dir) {
		// Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks.
		if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed $rand_file = "$dir/test-".md5(rand().time()).".txt"; while (file_exists($rand_file)) { $rand_file = "$dir/test-".md5(rand().time()).".txt"; } $ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. @unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. return ($ret > 0); } /** * Remove a directory from the local filesystem * * @param String $dir - the directory * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents * * @return Boolean - success/failure */ public static function remove_local_directory($dir, $contents_only = false) { // PHP 5.3+ only // foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) { // $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname()); // } // return rmdir($dir); if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. while (false !== ($entry = readdir($handle))) { if ('.' !== $entry && '..' !== $entry) { if (is_dir($dir.'/'.$entry)) { self::remove_local_directory($dir.'/'.$entry, false); } else { @unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } return $contents_only ? 
true : rmdir($dir); } /** * Perform gzopen(), but with various extra bits of help for potential problems * * @param String $file - the filesystem path * @param Array $warn - warnings * @param Array $err - errors * * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen() */ public static function gzopen_for_read($file, &$warn, &$err) { if (!function_exists('gzopen') || !function_exists('gzread')) { $missing = ''; if (!function_exists('gzopen')) $missing .= 'gzopen'; if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread'; /* translators: %s: List of disabled PHP functions. */ $err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '. sprintf( /* translators: %s: The process that requires the functions. */ __('Your hosting company must enable these functions before %s can work.', 'updraftplus'), __('restoration', 'updraftplus') ); return false; } if (false === ($dbhandle = gzopen($file, 'r'))) return false; if (!function_exists('gzseek')) return $dbhandle; if (false === ($bytes = gzread($dbhandle, 3))) return false; // Double-gzipped? if ('H4sI' != base64_encode($bytes)) { if (0 === gzseek($dbhandle, 0)) { return $dbhandle; } else { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. return gzopen($file, 'r'); } } // Yes, it's double-gzipped $what_to_return = false; $mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus'); $messkey = 'doublecompress'; $err_msg = ''; if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. $emptimes = 0; while (!gzeof($dbhandle)) { $bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. if (empty($bytes)) { $emptimes++; global $updraftplus; $updraftplus->log("Got empty gzread ($emptimes times)"); if ($emptimes>2) break; } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } } gzclose($dbhandle); fclose($fnew); // On some systems (all Windows?) you can't rename a gz file whilst it's gzopened if (!rename($file.".tmp", $file)) { $err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { $mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus'); $messkey = 'doublecompressfixed'; $what_to_return = gzopen($file, 'r'); } } $warn[$messkey] = $mess; if (!empty($err_msg)) $err[] = $err_msg; return $what_to_return; } public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') { $directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory); $size = 0; if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1); if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1; if (file_exists($directory.'/.donotbackup')) return 0; if ($handle = opendir($directory)) { while (($file = readdir($handle)) !== false) { if ('.' != $file && '..' != $file) { $spath = ('' == $suffix_directory) ? 
$file : $suffix_directory.'/'.$file; if (false !== ($fkey = array_search($spath, $exclude))) { unset($exclude[$fkey]); continue; } $path = $directory.'/'.$file; if (is_file($path)) { $size += filesize($path); } elseif (is_dir($path)) { $handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file); if ($handlesize >= 0) { $size += $handlesize; } } } } closedir($handle); } return $size; } /** * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string. * * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory) * @param String $format Return format - 'text' or 'numeric' * @return String|Integer If $format is text, It returns strings. Otherwise integer value. */ public static function get_disk_space_used($entity, $format = 'text') { global $updraftplus; if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format); $backupable_entities = $updraftplus->get_backupable_file_entities(true, false); if ('all' == $entity) { $total_size = 0; foreach ($backupable_entities as $entity => $data) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); $size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric'); if (is_numeric($size) && $size>0) $total_size += $size; } if ('numeric' == $format) { return $total_size; } else { return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size); } } elseif (!empty($backupable_entities[$entity])) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format); } // Default fallback return 
apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities); } /** * Unzips a specified ZIP file to a location on the filesystem via the WordPress * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182, * to allow us to provide feedback on progress. * * Assumes that WP_Filesystem() has already been called and set up. Does not extract * a root-level __MACOSX directory, if present. * * Attempts to increase the PHP memory limit before uncompressing. However, * the most memory required shouldn't be much larger than the archive itself. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - Full path and filename of ZIP archive. * @param String $to - Full path on the filesystem to extract archive to. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem; if (!$wp_filesystem || !is_object($wp_filesystem)) { return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Unzip can use a lot of memory, but not this much hopefully. if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin'); $needed_dirs = array(); $to = trailingslashit($to); // Determine any parent dir's needed (of the upgrade directory) if (!$wp_filesystem->is_dir($to)) { // Only do parents if no children exist $path = preg_split('![/\\\]!', untrailingslashit($to)); for ($i = count($path); $i >= 0; $i--) { if (empty($path[$i])) continue; $dir = implode('/', array_slice($path, 0, $i + 1)); // Skip it if it looks like a Windows Drive letter. 
if (preg_match('!^[a-z]:$!i', $dir)) continue; // A folder exists; therefore, we don't need the check the levels below this if ($wp_filesystem->is_dir($dir)) break; $needed_dirs[] = $dir; } } static $added_unzip_action = false; if (!$added_unzip_action) { add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5); $added_unzip_action = true; } if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) { $result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include); if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result; if (is_wp_error($result)) { global $updraftplus; $updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code()); } } // Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file. // The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) { return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include); } else { return _unzip_file_pclzip($file, $to, $needed_dirs); } } /** * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped. * * @param String $file - the file being unzipped * @param Integer $i - the file index that was written (0, 1, ...) * @param Array $info - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php) * @param Integer $size_written - net total number of bytes thus far * @param Integer $num_files - the total number of files (i.e. 
one more than the the maximum value of $i) */ public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) { global $updraftplus; static $last_file_seen = null; static $last_logged_bytes; static $last_logged_index; static $last_logged_time; static $last_saved_time; $jobdata_key = self::get_jobdata_progress_key($file); // Detect a new zip file; reset state if ($file !== $last_file_seen) { $last_file_seen = $file; $last_logged_bytes = 0; $last_logged_index = 0; $last_logged_time = time(); $last_saved_time = time(); } // Useful for debugging $record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000; // We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 50MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged. if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) { $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); /* translators: 1: Current file number, 2: Total number of files */ $updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' 
(%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore'); $updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice'); do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files); $last_logged_bytes = $size_written; $last_logged_index = $i; $last_logged_time = time(); $last_saved_time = time(); } // Because a lot can happen in 5 seconds, we update the job data more often if (time() > $last_saved_time + 5) { // N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up. $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); $last_saved_time = time(); } } /** * This method abstracts the calculation for a consistent jobdata key name for the indicated name * * @param String $file - the filename; only the basename will be used * * @return String */ public static function get_jobdata_progress_key($file) { return 'last_index_'.md5(basename($file)); } /** * Compatibility function (exists in WP 4.8+) */ public static function wp_doing_cron() { if (function_exists('wp_doing_cron')) return wp_doing_cron(); return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON); } /** * Log permission failure message when restoring a backup * * @param string $path full path of file or folder * @param string $log_message_prefix action which is performed to path * @param string $directory_prefix_in_log_message Directory Prefix. 
It should be either "Parent" or "Destination" */ public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') { global $updraftplus; $log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message); if ($log_message) { $updraftplus->log($log_message, 'warning-restore'); } } /** * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy. * * @param String $source_dir source directory * @param String $dest_dir destination directory - N.B. this must already exist * @param Array $files files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive) * @param Boolean $chmod chmod type * @param Boolean $delete_source indicate whether source needs deleting after a successful copy * * @uses $GLOBALS['wp_filesystem'] * @uses self::restore_log_permission_failure_message() * * @return WP_Error|Boolean */ public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) { global $wp_filesystem, $updraftplus; foreach ($files as $rname => $rfile) { if ('d' != $rfile['type']) { // Third-parameter: (boolean) $overwrite if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) { self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' 
-> '.$dest_dir.'/'.$rname, 'Destination'); return false; } } else { // $rfile['type'] is 'd' // Attempt to remove any already-existing file with the same name if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on // No such directory yet: just move it if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) { self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination'); $updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname); return false; } elseif (!empty($rfile['files'])) { if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod); // There is a directory - and we want to to copy in $do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false); if (is_wp_error($do_copy) || false === $do_copy) return $do_copy; } else { // There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory. @$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method. } } } // We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore. 
if ($delete_source || false !== strpos($source_dir, '/')) { if (!$wp_filesystem->rmdir($source_dir, false)) { self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir); } } return true; } /** * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes. * * Assumes that WP_Filesystem() has already been called and set up. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - full path and filename of ZIP archive. * @param String $to - full path on the filesystem to extract archive to. * @param Array $needed_dirs - a partial list of required folders needed to be created. * @param String $method - either 'ziparchive' or 'pclzip'. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem, $updraftplus; $class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip'; if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once'); $updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index); $z = new $class_to_use; $flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ? 
ZIPARCHIVE::CHECKCONS : 4; // This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2) // This belongs somewhere else // if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding(); // if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding(); $zopen = $z->open($file, $flags); if (true !== $zopen) { return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } $uncompressed_size = 0; $num_files = $z->numFiles; if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Skip the OS X-created __MACOSX directory if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't create folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } $uncompressed_size += $info['size']; if ('/' === substr($info['name'], -1)) { // Directory. $needed_dirs[] = $to . untrailingslashit($info['name']); } elseif ('.' !== ($dirname = dirname($info['name']))) { // Path to a file. $needed_dirs[] = $to . 
untrailingslashit($dirname); } // Protect against memory over-use if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs); } /* * disk_free_space() could return false. Assume that any falsey value is an error. * A disk that has zero free bytes has bigger problems. * Require we have enough space to unzip the file and copy its contents, with a 10% buffer. */ if (self::wp_doing_cron()) { $available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative if ($available_space && ($uncompressed_size * 2.1) > $available_space) { return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } $needed_dirs = array_unique($needed_dirs); foreach ($needed_dirs as $dir) { // Check the parent folders of the folders all exist within the creation array. if (untrailingslashit($to) == $dir) { // Skip over the working directory, We know this exists (or will exist) continue; } // If the directory is not within the working directory then skip it if (false === strpos($dir, $to)) continue; $parent_folder = dirname($dir); while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) { $needed_dirs[] = $parent_folder; $parent_folder = dirname($parent_folder); } } asort($needed_dirs); // Create those directories if need be: foreach ($needed_dirs as $_dir) { // Only check to see if the Dir exists upon creation failure. Less I/O this way. 
if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) { return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } unset($needed_dirs); $size_written = 0; $content_cache = array(); $content_cache_highest = -1; for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // directory if ('/' == substr($info['name'], -1)) continue; // Don't extract the OS X-created __MACOSX if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't extract folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } // N.B. PclZip will return (boolean)false for an empty file if (isset($info['size']) && 0 == $info['size']) { $contents = ''; } else { // UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume. if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) { $memory_usage = memory_get_usage(false); $total_memory = $updraftplus->memory_check_current(); if ($memory_usage > 0 && $total_memory > 0) { $memory_free = $total_memory*1048576 - $memory_usage; } else { // A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory. 
$memory_free = 50*1048576; } $use_memory = max(10485760, $memory_free - 10485760); $total_byte_count = 0; $content_cache = array(); $cache_indexes = array(); $cache_index = $i; while ($cache_index < $num_files && $total_byte_count < $use_memory) { if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) { $total_byte_count += $cinfo['size']; if ($total_byte_count < $use_memory) { $cache_indexes[] = $cache_index; $content_cache_highest = $cache_index; } } $cache_index++; } if (!empty($cache_indexes)) { $content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes); } } $contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i); } if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) { return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) { return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!empty($info['size'])) $size_written += $info['size']; do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files); } $z->close(); return true; } } Oonagh Reidy, Author at Smart Office - Page 58 of 116

    Smart Office

    REVEALED: How Apple Dodge $2.4 BILLION Of Tax

    Apple is in hot water once again. But this time it's not over its China sweatshops – it's the tax havens allowing it to dodge billion-dollar tax bills from the US government and others.


    Click to enlarge

    Just last week, the iPhone king announced yet another record breaking profit, which rose by an astonishing 94% to US$11.6 billion. Its massive cash reserve has also risen to $104 bn, Peter Oppenheimer, Apple’s CFO declared last week.

    So the question is how?

    The wild state of Nevada just a few hundred miles east of California, which has a zero tax rate, is how.

    Oh and other well known tax havens including Ireland, British Virgin Islands and Holland where Cupertino, along with many other tech giants including Google, Facebook and LinkedIn, pay as little as 2.5% tax rate, compared to California’s rate of 8.84%.

    These questionable tax practices, known as ‘Double Irish’ and ‘Dutch Sandwich,’ are methods Apple pioneered, according to New York Times report. The companies channel their billion dollar profits through foreign subsidiaries allowing it to pay minimum tax. 

    Read: Cheers Ireland: Why Google Oz Send Revenue To Greener Pastures

    70% of all Apple tax is paid outside the US despite the fact a large proportion of its business is still conducted within its home country.

    It seems these creative accounting methods have allowed the iPhone maker to dodge more than $2.4bn in tax over the past number of years, a US Treasury economist believes.

    Setting up an Apple base in Reno, Nevada and elsewhere has allowed Apple to dodge millions in tax it would otherwise have to pay to the state of California, where its Cupertino HQ is located, and 20 other states according to The Times.

    And in many cases, the tech giant plonks “little more than a letterbox or an anonymous office” in some far flung state, whose authorities are delighted to have a stalwart like Apple locate a base there.

    This is not the first time the tech giants’ suspiciously low tax bills have come under the spotlight.

    And here’s another startling fact – all 71 technology giants on the Standard and Poor’s (S&P) 500 index reported paying 30% less tax than all the other companies.

    But the question is shouldn’t the Cupertino-based giant really be paying tax in its own state, even on a moral argument, since it has its base and the vast majority of employees located there, including CEO Tim Cook and Steve Jobs’ family home.

    This issue is especially pertinent since the US government is struggling to avoid another recession and could use the tax revenue.

     

    Apple insist its practices are all above board, saying it “has conducted all of its business with the highest of ethical standards, complying with applicable laws and accounting rules.”

    It also insists it is one of the top payers of tax in the US and its operations generated $5bn in tax for the Government between income tax and other taxes.

    The reality is Apple is now the world’s most valuable company, but at what price?

    Winning Has A New Boss

    David Crane is the new CEO of Winning Appliances, announced today.
    John Winning, CEO of Winning Group, announced the appointment of Crane, a former Vice President of International Sales and Business Development at La-Z-Boy furniture in the US.

    “David’s extensive experience in sales, management and manufacturing within the furniture and retail industry across America and Oceania will be invaluable to the Winning Appliances business,” said Winning, who is relocating from Texas to commence his new role as CEO on 3rd December.

    His appointment follows a big year for Winning Appliances, which included the acquisition of a new showroom in Indooroopilly, Queensland.

    Winnings group, owners of Appliances Online, Big Brown Box and Handy Crew, also purchased PowerBuys in August.

     “I am proud to be representing a company that has been delivering exceptional customer service for over 100 years. Winning Appliances offers old world values and service in a modern environment.

    “I look forward to moving to Australia and taking on the role as CEO of Winning Appliances to continue the company’s consistent growth,” said Winning Appliances’ new CEO.

    Previously, David has worked with Natuzzi Americas and Oceania, where he was located in Australia for two years in sales and management. He also worked at LeatherTrend across production and sales.

    Amazon, Microsoft Cloud Invading OZ: Warn Analysts

    Amazon and Microsoft cloud are invading Oz, warns analysts.


    Click to enlarge

    International cloud services are making significant headway in the Australian enterprise market, with Amazon Web Services, Apple and Google widely used here.

    Two thirds of Oz enterprises that use IaaS (infrastructure as a service) are using a cloud computing service from an offshore provider with servers located outside the country, according to technology analyst Telsyte.

    This means “the opportunity is ripe for local providers to fill the void for Australian cloud services,” say analysts.

    The Infrastructure and Cloud Computing Market Study surveyed 260 Chief Information Officers and showed the massive penetration of the big cloud computing vendors within local enterprises.

    The research also looked at Aussie cloud IT policies and showed 36% of organisations have “no restrictions” on data being sent to offshore providers, however, 30% say their company data cannot leave Australia, which is “significant,” says Telsyte senior analyst Rodney Gedda.

    The main cloud players currently circulating the market here include: Amazon Web Services, Apple, Attachmate, Citrix, Dell/EqualLogic, D-Link, EMC, Google, Hitachi Data Systems, HP, IBM, Microsoft, NetApp.

    Many organisations continue to operate IT service infrastructure (servers, storage systems, etc) in disparate locations, including server rooms and private data centres and this will not change drastically with the availability of cloud computing, the study also found.

    The advent of on-demand cloud computing services has provided a compelling option for service delivery without the need to manage physical infrastructure for many Australian CIOs, says Telsyte senior analyst Rodney Gedda. 

    “About 35 per cent of Australian enterprises are subscribing to some type of IaaS or PaaS cloud service, with the majority of subscriptions, and data, heading to overseas providers,” Gedda noted.

    Public cloud services are being used for production and testing of server and storage environments but investment in on-premise infrastructure and private clouds are still strong with 19% of organisations building a private cloud and a further 35% ‘considering’ such a move.

     

    In addition, many CIOs are unable to rely on public cloud for a variety of reasons, including reliability and data location restrictions.

    More than half of Australian enterprises are now using server virtualization technology, according to Telsyte.

    Treble Speeds Telstra Upgrade Defy NBN

    Despite the NBN, Telstra is upgrading its cable network meaning treble the download speeds in all the main cities. The upgrade of its hybrid fibre coaxial network will push speeds from 30 megabits per second to 100Mbps.
    The move will see download times of movies and media reduced up to just one third of what they are at present and allows Web surfers do more things simultaneously, like listen to Internet radio while watching YouTube.


    Click to enlarge

    The upgrade will apply to 2.7m Telstra customers in the capital cities of Sydney, Adelaide, Perth and the Gold Coast only, however, and won’t go nationwide.

    Melbourne subscribers were already privy to the upgrade to the DOCSIS 3.0 HFC cable broadband standard, as part of its $300m upgrade in 2009.

    BigPond’s Ultimate Cable plans go from 5GB for $39 to $99 for 500GB of data (bundled), and includes a 200GB option for $79 per month over 2 years for Telstra HomeLine users.

    Existing eligible BigPond Turbo and Elite customers can buy an Ultimate Cable self install Wi-Fi modem for $149 to upgrade.

    The upgrade announced today, comes despite the soon to be finalised deal with the NBN, which will see the telco hand over its cable network as part of the $11billion deal.     

    However, the work will not interfere with the NBN deal, Telstra insists, but may increase the number of customers it hands over to NBN Co eventually, as part of its $36bn optic fibre roll out to 99% of Australia. 

     

    The cable work was announced by Telstra CEO, David Thodey, this morning.

    The upgrade is limited to capital cities due to poor demand in Melbourne for the high speed broadband plans, according to reports, although Telstra insist greater customer demand was driving the roll out.

    The upgrade will be available in Perth from 30 November and Adelaide, Brisbane, Gold Coast and Sydney customers two weeks later, December 12.

    “This upgrade provides additional capacity to Telstra’s network and will provide one of the fastest broadband services available in Australia today,” said John Chambers, Telstra Director of Fixed Voice and Broadband.

    “BigPond customers also enjoy no peak or off peak restrictions and peace of mind that they won’t incur any additional usage charges – as speeds are slowed once the monthly usage allowance is reached,” Mr Chambers said.

    The volume of data consumed over BigPond fixed internet had more than doubled in the last financial year and the upgrade would help meet the growth in connected devices and online media use, he said.

    Malware Alert! ‘Thousands’ Face Web Blackout

    Thousands of Aussies may be hit by potentially lethal computer malware, if they don’t act now.

    That’s according to Communications watchdog, ACMA, who issued a warning about DNSChanger – malicious software, or malware as it is known, that may have been installed on your computer without your knowledge.

    Approximately 10,000 Australian internet users are currently infected with this malware, Australian Communications and Media Authority warned today.

    If your computer is infected you need to remove it immediately or else risk Internet blackout by 9 July 2012, at which date you will be unable to connect to the web.

    This is because the temporary solution to this malware developed by Internet Systems Consortium (ISC) is due to expire on July 9.

    DNSChanger is a class of malicious software (malware) that changes a user’s Domain Name System (DNS) settings, enabling criminals to direct unsuspecting web users to fraudulent websites and interfere with their web browsing.

    It has been associated with ‘click fraud’, the installation of additional malware and other malicious activities.

    In November 2011, the FBI closed down a ring of cyber-criminals believed to be responsible for the worldwide spread of DNSChanger.

    An estimated four million users were affected worldwide.

    Head to ACMA’s diagnostic website that shows you whether or not you are infected with DNSChanger.

     

    The site was developed by ACMA, CERTAustralia – the government’s computer emergency response team – and the Department of Broadband.

    If you are infected, it provides links to tools and detailed documentation that may help you remove the malware.

    $60 LG ‘Superphone’ Hits OZ

    The mystery is over: Telstra opens up the floodgates on the ‘superphone’ LG claims is the “best Android handset ever”

    Here comes the superphone

    The telco is kicking off pre-orders today for the LG ‘Optimus G’ on Android, which can be bought online/instore from 12 March.

    Telstra is selling the LG Optimus G ‘Superphone’ on a $60 plan, for $0 upfront (1GB data + $600 calls/MMS); there are also $80 (1.5GB + $800), $100 (2GB + $900) and $120 (3GB + unlimited calls) plans. The smartphone will be sold at Telstra only.

    So what’s so super about Optimus G I hear you cry?

    4.7-inch HD screen, 4G, 1.5GHz quad-core Qualcomm Snapdragon processor, 13MP rear camera and runs Android 4.1 Jelly bean, and New user experiences QuickMemo, QSlide, Dual Screen/Dual Play, and a high-density 2,100mAh battery.

    But it’s not the only phone claiming to be the best ever – HTC unveiled a stunning One smartphone last week, there’s a new Samsung Galaxy IV on the way (and possibly even a Huawei P2, which claims to be the world’s fastest phone), so it should make for interesting times in the mobile market.

    Read: Look Out, Samsung! LG Plot Smartphone Attack

    “Optimus G has generated a tremendous amount of positive interest,” claims  Jonathan Banks, LG’s new Head of Mobile Communications.

    “We believe this is simply the greatest Android handset to arrive in Australia to date.”

    But in case demand isn’t forthcoming, Telstra and LG are giving it an extra push, offering all those who pre-order online a chance to win the “ultimate” $25,000 Australian F1 Grand Prix experience with flights and accommodation to Melbourne for 2 people, Friday Paddock Club pass, pit lane walk, a hot-lap in a 2 seater F1 car, grandstand passes for Saturday and Sunday, and $1,000 to spend.

    You can pre-order the G here.

     

    Check out the key specs for Optimus G:

    · Chipset: Qualcomm Snapdragon S4 Pro Processor with 1.5GHz Quad-Core CPU,

    · Adreno 320 GPU (Graphics Processing Unit)

    · Operating System: Android 4.1 Jellybean

    · Network: 4G LTE / 3G / 2G

    · Display: 4.7-inch WXGA True HD IPS+ (1280×768 pixels) / 15:9 ratio

    · Memory: 32GB· RAM: 2GB DDR

    · Cameras: / 1.3MP front· Battery: 2,100 mAh

    · Size: 131.9 x 68.9 x 8.45mm· Weight: 145g

    · Other: Bluetooth 4.0, Wi-Fi, USB2.0 HS, A-GPS, MHL, DLNA, NFC

    Woolies Up But Dick Drops 0.3%

    Dick Smith continues to slide as ‘for sale’ sign hangs hard.


    Click to enlarge

    Woolworths announced third quarter sales results of $13.7 billion across all divisions, a rise of 3.9% on the 2011 figure.

    However, this sales growth (for food, petrol and home improvements) figure for the 13 weeks ended 1 April ’12 does not include its troubled Dick Smith chain – now classed as ‘Discontinued Operations’ after Woolies announced it was putting the retailer up for sale, earlier this year.

    Dick Smith’s total sales across Australia (and N Z) fell 0.3% to $356 million in Q3.

    However, despite the “for sale” sign now hanging on Dicks, its total comparable sales showed a slight rise of 0.6%.

    Breaking this down, Dick Smith Australia sales decreased 1.7% to $296 m; across the ditch, NZ sales increased 2.7% to NZ$77 m.

    Woolies’ other electronics retailer, Big W, told a different story: sales were $931 million, an increase of 1.4% on last year, for Q3.

    Big W showed continued improvement in sales, customer numbers and items sold although poor summer weather impacted growth.

    This figure also marks good news for Big W, one of the biggest seller of Apple in Australia, as sales dipped 1.3% for the first half of the financial year.

    The discounter also added two new stores to its network as did Dick Smith, although it shut a further 12 DS during the 13 week period.

    Customer numbers and items sold increased during the quarter for Big W, although price deflation continued, averaging 5%, Woollies said today. The primary cause of deflation is the stronger Australian dollar with cost price reductions passed onto customers.

    Strong sales were recorded in Toys, Books and Cosmetics; although there was no mention of how CE sales fared. Cooler weather had an effect on sales in seasonal areas such as outdoor, aquatic and cooling.

    “The continued growth in customer numbers and items sold were pleasing. This is evidence of BIG W’s strong value proposition – offering the lowest prices on the widest range of quality and branded merchandise every day,” Julie Coates, Director BIG W said.

    “Sales have been assisted by the six new stores that we have opened this year. We opened two new stores during the quarter bringing total stores to 171. We plan to open an additional store in the final quarter of the 2012 financial year.”

    On multi-channel sales, which every retailer seems to be getting its knickers in a twist over, of late, here’s what Woolies had to say:

    “The online component of our multi-option growth strategy continues to progress at pace and remains a key focus.

    “Our latest additions and enhancements include the Supermarkets mobile shopping app, a virtual shopping wall, the BIG W mobile app, click then collect trials and a new generation supermarket online platform. We also launched the Door Buster daily deals site during the quarter. The fourth quarter will see the launch of the Masters Home Improvement transactional website.”

     

    And these multi-channel efforts are paying off, it appears and contributed to mammoth online sales growth of 108% (although this figure fell to 45% excluding Cellarmasters) for Q3.

    CEO Grant O’Brien said the latest results were “pleasing ” despite tight consumers spending and deflation:

    “Woolworths has posted a pleasing sales growth figure of 3.9% for continuing operations. This has been achieved in a continuing tight consumer market. I stated at the half year that there were two key factors impacting the third quarter sales outcome: cycling the natural disasters of 2011, and accelerating deflation.

    “In addition to these factors, sales were affected by an unseasonably cold and wet summer period during which some States, such as NSW, experienced their wettest months in more than 50 years.”

    “However we were pleased with continued growth in customer numbers, market share and units sold and the growing momentum of key initiatives.

    “Whilst the quarter saw an improving sales trend, we continue to remain cautious about the sales outlook for the fourth quarter, particularly given consumer and business uncertainty about the impact of the carbon tax and interest rates.”

    Where Are The Wallabies? On BlackBerry

    2011 Rugby World Cup, here we come. BlackBerry keep rugby fans on side with MyRugby.

    MyRugby apps on BlackBerry delivering exclusive insights and coverage on national and international rugby have just been announced. 

    The mobile app is a rugger bugger’s dream with instant access to live scores, tickets, match results and info on upcoming matches, check out player profiles and vital stats, as well as updated team information. 
     
    The Wallabies have teamed up with Research In Motion, the makers of BlackBerry, to develop the app, and with the 2011 international Rugby season just days away fans won’t miss a minute of the action. 
     
    MyRugby will hit both BlackBerry smartphones and the new Playbook tab and is the official app of the Qantas Wallabies and the Australian Rugby Union, to boot. 
     
    And for real fanatics, BlackBerry gives users the ability to customise the app interface to that of their favourite super rugby team. 
     
    In addition, BlackBerry PlayBook is going one more with MyRugby TV, which gives exclusive behind-the-scenes footage on the Wallabies as they progress through the 2011 season. 
     
    MyRugby TV will broadcast exclusive player interviews, in-camp footage, media conferences and insights from the coaching team.
     
    “These new applications provide fans with instant access on their BlackBerry smartphones and BlackBerry PlayBook tablets to up to date information and behind the scenes content, which allows them to stay informed about Rugby,” said ARU Managing Director and CEO, John O’Neill AO. 
     
    “We know people are changing the way they get their Rugby news and connect with the Qantas Wallabies and their favourite Super Rugby teams, which is why we have been working with Research In Motion to develop these applications before the International Season kicks off.”

    ARU has also been working with RIM to develop a new technology that will help the Wallabies train smarter in one of the most important years in Aussie rugby history. 

    BlackBerry have helped out by issuing each of the Wallabies playing and coaching staff with a new PlayBook tablet and Torch smartphone. 

    The PlayBook will allow coaches to film and immediately playback training footage to players for instant analysis instead of having to wait for the footage to be downloaded after training.  
     
     
    BlackBerry App World boasts 25,000 apps including Twitter, Facebook, FourSquare, and eBay. Downloads from App World grew 100% in under a year and now average over 3 million per day, says RIM. 

    Mobile, Wireless: Surfs UP, OZ

    WE love surfing – online. Mobile, fibre, wireless, the name of the game as the number of Internet subscribers hits 12.2m.The number of internet subscribers soared 5% to 12.2m Australians in 2012,  according to data compiled by ABS from Aussie telcos.

    There were 6 million mobile wireless Internet connections at the end of last year – a rise of almost half a million – making it the dominant broadband technology, accounting for half of all connections.

    (DSL) Digital Subscriber Line via telephone were also up by 2% to 4.7 million, ABS data shows.

    But interestingly, high speed fibre broadband (which NBN runs off) was the fastest growing type of internet connection, jumping a massive 75% since June 2012 to 91,000 connections by the end of the year.

    Read: It’s Here (Almost): Voda 4G “OZ Fastest” Hits June

    It’s also worth noting the number of slower dialup Internet connections –  the dominant Internet source in 2006 – has slumped by almost 200K users in the last six months to just 282K by December.

    Mobile, mobile, mobile

    The new ABS data also revealed a roaring surge in Aussies surfing the web via mobile phones.

    Data downloads rocketed a massive 33% in the December quarter in comparison to the six months prior – topping 550,000 TB for the first time ever as the number of tablets, iPads and smartphones owned by Aussies skyrockets.

    “Data downloaded by mobile handset subscribers increased by 38 per cent to 13,700 TB, however this still represents a relatively small proportion of total data downloads,” said Diane Braskic, ABS Director of the Innovation and Technology Statistics.

    But according to data sources like MYOB’s Business Monitor from August 2012, more than 60% of Aussie businesses are still without a web presence.

    Google Australia jumped on the stats, with its Head of Retail, Ross McDonald warning business “without a website your business can’t be found on the world’s busiest shopping strip: the Internet.”

    Not having a site is like not having a phone number, he said.

    “You wouldn’t run a business without a phone number or mailing address and expect to receive orders or enquiries. But that’s exactly what many businesses are still doing by ignoring the web.

    – Page Break –

    “The numbers prove the opportunity out there. A physical store reaches passersby but a website can potentially reach thousands of people searching the internet.”

    Business owners don’t need to spend a lot of money on elaborate websites to start.

    Google now estimates only 1 in 3 large companies have mobile-optimised sites, but Google’s Retail boss says “mobile is particularly useful for out and about people who are searching local” with mobile ads (m-comm) and Facebook commerce (F-comm) now soaring.

    “The growth in data downloads is consistent with factors such as increased subscriber numbers, the continuing move away from Dial-up, and the emergence of technologies that enable faster internet,” says Braskic.

    Conroy: ‘Tax The Techies…FULL Whack’

    Look out Google, Apple: Minister for Digital Economy is going after you, likening their tax dodging ways to mining companies.

    Click to enlarge
    Apple could be facing much higher tax bill in Oz if government has its way.
    Speaking at CeBIT in Sydney this week, Senator Stephen Conroy warned technology giants (which includes Apple, Google and Adobe) days of paying minimal tax here are coming to a close, if the Gillard government has its way.

    “This is a problem that goes across all sectors, so we’re bringing across legislation because we think we’ve got to ensure Australians get a fair share,” Senator Conroy told an audience at CeBIT yesterday, reports AFR.

    And Conroy also likened the tax avoidance to that of mining magnates Gina Rinehart (Australia’s richest woman) and other gazillionaires Clive Palmer and Twiggy Forrest, whose companies record gigantic profits annually that fail to be reflected in their tax bills to the Australian Government.

    “[It’s] just like with the mining tax where we want to make sure Australians get a fair share of the wealth that Australians own,”  Minister Conroy warned.

    “It’s not owned by these companies, Gina Rinehart does not own these minerals and neither does Twiggy Forrest or Clive Palmer.”

    Some of the well known technology names engage in transfer pricing, whereby they pay minimal tax on profits here in Australia, channelling them back through foreign subsidiaries, including Holland, Ireland and other tax havens, where they pay as little as 1.5% tax.

    These global accounting practices, known as the “Dutch Sandwich” and “Double Irish,” allow Apple & Co to pocket millions in profits without having to pay tax to governments in which they operate, the US included, home to most tech firms.

    Google Australia reported a net loss of A$3.9m on operating revenue of $201 million for year 2011, paying just over $74K in local taxes.

    This revenue figure of $201m comprised $200 m for services provided under agreements with its US parent, Google Ireland and Google Asia Pacific, plus $1 million in interest income.

    Apple’s Australian subsidiary made revenues of $4.8bn in the nine months to September last year but paid just $91m in tax – which means its tax rate here was around 0.001%.

    The iPhone maker attributed $4.8bn as cost of sales expenses, reports The Australian.

    Major techies including Google, Apple and Facebook all have their European base in tax friendly Ireland and have expanded in the last year.

    IBM reported a net profit of $309.5 million on total revenue of $4.5 billion, and paid $119m in local taxes.

    Employment Minister Bill Shorten announced reform of multinationals tax treatment in Australia last November and vowed to change the transfer pricing rules in tax law.

     
    The Treasury are now reviewing the proposals, although they are not likely to go down well with the multinationals, who deny any wrongdoing.

    This would force Apple & Co to price intra-group goods and services to “properly reflect the economic contribution of their Australian operations,” Shorten said.

    And since the mining tax is soon to come into play, will there soon be a techie tax?

    Only time will tell but here’s hoping Google & Co won’t be releasing their god awful ads like their mining counterparts.

    “Apple Australia – This Is Our Story”. (Please no).