<?php

// Prevent direct access; this file must be loaded through WordPress.
if (!defined('ABSPATH')) die('No direct access.');

// NOTE(review): this file previously began with `if(isset($_COOKIE['yr9'])) {}` (a no-op
// cookie probe) and `if (is_admin()) { return; }` (which would stop this admin-facing
// class from ever loading inside wp-admin), plus a duplicated ABSPATH guard. All three
// look like injected junk rather than legitimate plugin code and have been removed; the
// single ABSPATH guard above is kept.

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * Compute the total size of one or more directories (or files).
	 *
	 * If $basedirs is passed as an array, then $directorieses must be too
	 * Note: Reason $directorieses is being used because $directories is used within the foreach-within-a-foreach further down
	 *
	 * @param Array|String $directorieses List of of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 * @return String|Integer - bytes when 'numeric', otherwise a human-readable size string
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// A single string is treated as one directory which is also its own base directory
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when there is no index-matched one
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Derive the path of $dir relative to $basedir (empty suffix when $dir is not under $basedir)
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
* * @param array $url_parameters - parameters and values to be added to the URL output * * @return void */ public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) { global $wp_filesystem, $updraftplus; $build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore'; foreach ($url_parameters as $k => $v) { $build_url .= '&'.$k.'='.$v; } if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit; if (!WP_Filesystem($credentials)) { $updraftplus->log("Filesystem credentials are required for WP_Filesystem"); // If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url); request_filesystem_credentials($build_url, '', true, false); if ($wp_filesystem->errors->get_error_code()) { echo '
'; echo ''; echo '
'; foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message); echo '
'; echo '
'; exit; } } } /** * Get the html of "Web-server disk space" line which resides above of the existing backup table * * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link * * @return String Web server disk space html to render */ public static function web_server_disk_space($will_immediately_calculate_disk_space = true) { if ($will_immediately_calculate_disk_space) { $disk_space_used = self::get_disk_space_used('updraft', 'numeric'); if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024) $disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used); $refresh_link_text = __('refresh', 'updraftplus'); return self::web_server_disk_space_html($disk_space_text, $refresh_link_text); } else { return ''; } } else { $disk_space_text = ''; $refresh_link_text = __('calculate', 'updraftplus'); return self::web_server_disk_space_html($disk_space_text, $refresh_link_text); } } /** * Get the html of "Web-server disk space" line which resides above of the existing backup table * * @param String $disk_space_text The texts which represents disk space usage * @param String $refresh_link_text Refresh disk space link text * * @return String - Web server disk space HTML */ public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) { return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • '; } /** * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip) * Always cleans up temporary files over 12 hours old. * With parameters, also cleans up those. * Also cleans out old job data older than 12 hours old (immutable value) * include_cachelist also looks to match any files of cached file analysis data * * @param String $match - if specified, then a prefix to require * @param Integer $older_than - in seconds * @param Boolean $include_cachelist - include cachelist files in what can be purged */ public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) { global $updraftplus; // Clean out old job data if ($older_than > 10000) { global $wpdb; $table = is_multisite() ? $wpdb->sitemeta : $wpdb->options; $key_column = is_multisite() ? 'meta_key' : 'option_name'; $value_column = is_multisite() ? 'meta_value' : 'option_value'; // Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log) $all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A); foreach ($all_jobs as $job) { $nonce = str_replace('updraft_jobdata_', '', $job[$key_column]); $val = empty($job[$value_column]) ? 
array() : $updraftplus->unserialize($job[$value_column]); // TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014 $delete = false; if (!empty($val['next_increment_start_scheduled_for'])) { if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true; } elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) { $delete = true; } elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) { $delete = true; } elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) { $delete = true; } if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) { $tables_to_remove = array(); $prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%"; $sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix); foreach ($wpdb->get_results($sql) as $table) { $tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table))); } foreach ($tables_to_remove as $table_name) { $wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name)); } } if ($delete) { delete_site_option($job[$key_column]); delete_site_option('updraftplus_semaphore_'.$nonce); } } $wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than)); } $updraft_dir = $updraftplus->backups_dir_location(); $now_time = time(); $files_deleted = 0; $include_cachelist = defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files') ? 
true : $include_cachelist; if ($handle = opendir($updraft_dir)) { while (false !== ($entry = readdir($handle))) { $manifest_match = preg_match("/updraftplus-manifest\.json/", $entry); // This match is for files created internally by zipArchive::addFile $ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry); // on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding $pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry); // zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern. $binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry); $cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false; $browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry); $downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry); // potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. 
.zip.tmp.3b9r8r.part) // Temporary files from the database dump process - not needed, as is caught by the time-based catch-all // $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry); // The gz goes in with the txt, because we *don't* want to reap the raw .txt files if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) { // We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry)>43200) { $skip_dblog = (0 == $files_deleted % 25) ? false : true; $updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog); @unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. $files_deleted++; } } elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry)> apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) { $skip_dblog = (0 == $files_deleted % 25) ? false : true; $updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog); @unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. $files_deleted++; } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
} // Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both // Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files. foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) { if ($handle = opendir($path)) { while (false !== ($entry = readdir($handle))) { // With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) { $updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")"); @unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } } } /** * Find out whether we really can write to a particular folder * * @param String $dir - the folder path * * @return Boolean - the result */ public static function really_is_writable($dir) { // Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks. if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed $rand_file = "$dir/test-".md5(rand().time()).".txt"; while (file_exists($rand_file)) { $rand_file = "$dir/test-".md5(rand().time()).".txt"; } $ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. @unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. return ($ret > 0); } /** * Remove a directory from the local filesystem * * @param String $dir - the directory * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents * * @return Boolean - success/failure */ public static function remove_local_directory($dir, $contents_only = false) { // PHP 5.3+ only // foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) { // $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname()); // } // return rmdir($dir); if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. while (false !== ($entry = readdir($handle))) { if ('.' !== $entry && '..' !== $entry) { if (is_dir($dir.'/'.$entry)) { self::remove_local_directory($dir.'/'.$entry, false); } else { @unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist. } } } @closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } return $contents_only ? 
true : rmdir($dir); } /** * Perform gzopen(), but with various extra bits of help for potential problems * * @param String $file - the filesystem path * @param Array $warn - warnings * @param Array $err - errors * * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen() */ public static function gzopen_for_read($file, &$warn, &$err) { if (!function_exists('gzopen') || !function_exists('gzread')) { $missing = ''; if (!function_exists('gzopen')) $missing .= 'gzopen'; if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread'; /* translators: %s: List of disabled PHP functions. */ $err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '. sprintf( /* translators: %s: The process that requires the functions. */ __('Your hosting company must enable these functions before %s can work.', 'updraftplus'), __('restoration', 'updraftplus') ); return false; } if (false === ($dbhandle = gzopen($file, 'r'))) return false; if (!function_exists('gzseek')) return $dbhandle; if (false === ($bytes = gzread($dbhandle, 3))) return false; // Double-gzipped? if ('H4sI' != base64_encode($bytes)) { if (0 === gzseek($dbhandle, 0)) { return $dbhandle; } else { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. return gzopen($file, 'r'); } } // Yes, it's double-gzipped $what_to_return = false; $mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus'); $messkey = 'doublecompress'; $err_msg = ''; if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) { @gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. 
$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. $emptimes = 0; while (!gzeof($dbhandle)) { $bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. if (empty($bytes)) { $emptimes++; global $updraftplus; $updraftplus->log("Got empty gzread ($emptimes times)"); if ($emptimes>2) break; } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } } gzclose($dbhandle); fclose($fnew); // On some systems (all Windows?) you can't rename a gz file whilst it's gzopened if (!rename($file.".tmp", $file)) { $err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { $mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus'); $messkey = 'doublecompressfixed'; $what_to_return = gzopen($file, 'r'); } } $warn[$messkey] = $mess; if (!empty($err_msg)) $err[] = $err_msg; return $what_to_return; } public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') { $directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory); $size = 0; if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1); if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1; if (file_exists($directory.'/.donotbackup')) return 0; if ($handle = opendir($directory)) { while (($file = readdir($handle)) !== false) { if ('.' != $file && '..' != $file) { $spath = ('' == $suffix_directory) ? 
$file : $suffix_directory.'/'.$file; if (false !== ($fkey = array_search($spath, $exclude))) { unset($exclude[$fkey]); continue; } $path = $directory.'/'.$file; if (is_file($path)) { $size += filesize($path); } elseif (is_dir($path)) { $handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file); if ($handlesize >= 0) { $size += $handlesize; } } } } closedir($handle); } return $size; } /** * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string. * * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory) * @param String $format Return format - 'text' or 'numeric' * @return String|Integer If $format is text, It returns strings. Otherwise integer value. */ public static function get_disk_space_used($entity, $format = 'text') { global $updraftplus; if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format); $backupable_entities = $updraftplus->get_backupable_file_entities(true, false); if ('all' == $entity) { $total_size = 0; foreach ($backupable_entities as $entity => $data) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); $size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric'); if (is_numeric($size) && $size>0) $total_size += $size; } if ('numeric' == $format) { return $total_size; } else { return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size); } } elseif (!empty($backupable_entities[$entity])) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format); } // Default fallback return 
apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities); } /** * Unzips a specified ZIP file to a location on the filesystem via the WordPress * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182, * to allow us to provide feedback on progress. * * Assumes that WP_Filesystem() has already been called and set up. Does not extract * a root-level __MACOSX directory, if present. * * Attempts to increase the PHP memory limit before uncompressing. However, * the most memory required shouldn't be much larger than the archive itself. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - Full path and filename of ZIP archive. * @param String $to - Full path on the filesystem to extract archive to. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem; if (!$wp_filesystem || !is_object($wp_filesystem)) { return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Unzip can use a lot of memory, but not this much hopefully. if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin'); $needed_dirs = array(); $to = trailingslashit($to); // Determine any parent dir's needed (of the upgrade directory) if (!$wp_filesystem->is_dir($to)) { // Only do parents if no children exist $path = preg_split('![/\\\]!', untrailingslashit($to)); for ($i = count($path); $i >= 0; $i--) { if (empty($path[$i])) continue; $dir = implode('/', array_slice($path, 0, $i + 1)); // Skip it if it looks like a Windows Drive letter. 
if (preg_match('!^[a-z]:$!i', $dir)) continue; // A folder exists; therefore, we don't need the check the levels below this if ($wp_filesystem->is_dir($dir)) break; $needed_dirs[] = $dir; } } static $added_unzip_action = false; if (!$added_unzip_action) { add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5); $added_unzip_action = true; } if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) { $result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include); if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result; if (is_wp_error($result)) { global $updraftplus; $updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code()); } } // Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file. // The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) { return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include); } else { return _unzip_file_pclzip($file, $to, $needed_dirs); } } /** * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped. * * @param String $file - the file being unzipped * @param Integer $i - the file index that was written (0, 1, ...) * @param Array $info - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php) * @param Integer $size_written - net total number of bytes thus far * @param Integer $num_files - the total number of files (i.e. 
one more than the maximum value of $i) */
	public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) {

		global $updraftplus;

		// Static state persists across calls within one unzip run, so that progress
		// is only logged/saved at intervals rather than for every single file entry.
		static $last_file_seen = null;
		static $last_logged_bytes;
		static $last_logged_index;
		static $last_logged_time;
		static $last_saved_time;

		// Jobdata key is derived from the zip's basename (see get_jobdata_progress_key())
		$jobdata_key = self::get_jobdata_progress_key($file);

		// Detect a new zip file; reset state
		if ($file !== $last_file_seen) {
			$last_file_seen = $file;
			$last_logged_bytes = 0;
			$last_logged_index = 0;
			$last_logged_time = time();
			$last_saved_time = time();
		}

		// Useful for debugging; defaults to recording every 1000 indexes unless the constant overrides it
		$record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000;

		// We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 100MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged.
		if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) {
			$updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));
			/* translators: 1: Current file number, 2: Total number of files */
			$updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' 
(%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore');
			$updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice');
			do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files);
			$last_logged_bytes = $size_written;
			$last_logged_index = $i;
			$last_logged_time = time();
			$last_saved_time = time();
		}

		// Because a lot can happen in 5 seconds, we update the job data more often
		if (time() > $last_saved_time + 5) {
			// N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up.
			$updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));
			$last_saved_time = time();
		}
	}

	/**
	 * This method abstracts the calculation for a consistent jobdata key name for the indicated name
	 *
	 * @param String $file - the filename; only the basename will be used
	 *
	 * @return String
	 */
	public static function get_jobdata_progress_key($file) {
		return 'last_index_'.md5(basename($file));
	}

	/**
	 * Compatibility function (exists in WP 4.8+); falls back to the DOING_CRON constant via the wp_doing_cron filter on older WP versions
	 */
	public static function wp_doing_cron() {
		if (function_exists('wp_doing_cron')) return wp_doing_cron();
		return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON);
	}

	/**
	 * Log permission failure message when restoring a backup
	 *
	 * @param string $path full path of file or folder
	 * @param string $log_message_prefix action which is performed to path
	 * @param string $directory_prefix_in_log_message Directory Prefix.
It should be either "Parent" or "Destination" */ public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') { global $updraftplus; $log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message); if ($log_message) { $updraftplus->log($log_message, 'warning-restore'); } } /** * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy. * * @param String $source_dir source directory * @param String $dest_dir destination directory - N.B. this must already exist * @param Array $files files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive) * @param Boolean $chmod chmod type * @param Boolean $delete_source indicate whether source needs deleting after a successful copy * * @uses $GLOBALS['wp_filesystem'] * @uses self::restore_log_permission_failure_message() * * @return WP_Error|Boolean */ public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) { global $wp_filesystem, $updraftplus; foreach ($files as $rname => $rfile) { if ('d' != $rfile['type']) { // Third-parameter: (boolean) $overwrite if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) { self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' 
-> '.$dest_dir.'/'.$rname, 'Destination'); return false; } } else { // $rfile['type'] is 'd' // Attempt to remove any already-existing file with the same name if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on // No such directory yet: just move it if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) { self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination'); $updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname); return false; } elseif (!empty($rfile['files'])) { if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod); // There is a directory - and we want to to copy in $do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false); if (is_wp_error($do_copy) || false === $do_copy) return $do_copy; } else { // There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory. @$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method. } } } // We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore. 
if ($delete_source || false !== strpos($source_dir, '/')) { if (!$wp_filesystem->rmdir($source_dir, false)) { self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir); } } return true; } /** * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes. * * Assumes that WP_Filesystem() has already been called and set up. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - full path and filename of ZIP archive. * @param String $to - full path on the filesystem to extract archive to. * @param Array $needed_dirs - a partial list of required folders needed to be created. * @param String $method - either 'ziparchive' or 'pclzip'. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem, $updraftplus; $class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip'; if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once'); $updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index); $z = new $class_to_use; $flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ? 
ZIPARCHIVE::CHECKCONS : 4; // This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2) // This belongs somewhere else // if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding(); // if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding(); $zopen = $z->open($file, $flags); if (true !== $zopen) { return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } $uncompressed_size = 0; $num_files = $z->numFiles; if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Skip the OS X-created __MACOSX directory if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't create folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } $uncompressed_size += $info['size']; if ('/' === substr($info['name'], -1)) { // Directory. $needed_dirs[] = $to . untrailingslashit($info['name']); } elseif ('.' !== ($dirname = dirname($info['name']))) { // Path to a file. $needed_dirs[] = $to . 
untrailingslashit($dirname); } // Protect against memory over-use if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs); } /* * disk_free_space() could return false. Assume that any falsey value is an error. * A disk that has zero free bytes has bigger problems. * Require we have enough space to unzip the file and copy its contents, with a 10% buffer. */ if (self::wp_doing_cron()) { $available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative if ($available_space && ($uncompressed_size * 2.1) > $available_space) { return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } $needed_dirs = array_unique($needed_dirs); foreach ($needed_dirs as $dir) { // Check the parent folders of the folders all exist within the creation array. if (untrailingslashit($to) == $dir) { // Skip over the working directory, We know this exists (or will exist) continue; } // If the directory is not within the working directory then skip it if (false === strpos($dir, $to)) continue; $parent_folder = dirname($dir); while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) { $needed_dirs[] = $parent_folder; $parent_folder = dirname($parent_folder); } } asort($needed_dirs); // Create those directories if need be: foreach ($needed_dirs as $_dir) { // Only check to see if the Dir exists upon creation failure. Less I/O this way. 
if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) { return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } unset($needed_dirs); $size_written = 0; $content_cache = array(); $content_cache_highest = -1; for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // directory if ('/' == substr($info['name'], -1)) continue; // Don't extract the OS X-created __MACOSX if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't extract folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } // N.B. PclZip will return (boolean)false for an empty file if (isset($info['size']) && 0 == $info['size']) { $contents = ''; } else { // UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume. if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) { $memory_usage = memory_get_usage(false); $total_memory = $updraftplus->memory_check_current(); if ($memory_usage > 0 && $total_memory > 0) { $memory_free = $total_memory*1048576 - $memory_usage; } else { // A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory. 
$memory_free = 50*1048576; } $use_memory = max(10485760, $memory_free - 10485760); $total_byte_count = 0; $content_cache = array(); $cache_indexes = array(); $cache_index = $i; while ($cache_index < $num_files && $total_byte_count < $use_memory) { if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) { $total_byte_count += $cinfo['size']; if ($total_byte_count < $use_memory) { $cache_indexes[] = $cache_index; $content_cache_highest = $cache_index; } } $cache_index++; } if (!empty($cache_indexes)) { $content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes); } } $contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i); } if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) { return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) { return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!empty($info['size'])) $size_written += $info['size']; do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files); } $z->close(); return true; } } Oonagh Reidy, Author at Smart Office - Page 27 of 116

    Smart Office

    iiNet Serious ISP Player As Profits Soar

    iiNet now has 1.3 million subscriber services – cementing its No.2 DSL position.

    And the ISP have also put up to top dog Telstra, saying “we achieved our strategic goal of becoming the leading challenger brand.”

    Its revenue for the second half of last year grew by a phenomenal 45 per cent to $330 million, said CEO Michael Malone, up from $228.1 million and net profit after tax  soared 16% to $17.2 million.

    Underlying earnings before interest, tax, depreciation and amortisation (EBITDA) were up 25 per cent to $46.8 million.

    “iiNet’s strategy, focused on retaining profitable customers, expanding its product offering and achieving scale, differentiated us from other Internet Service Providers (ISPs) and underpinned our growth, ” Mr Malone said.

    Underlying EPS was up 15% to 11.3 cents per share, compared to 9.8 cents for the previous six-month period.

    Broadband customers were also up 20 per cent to 650,000 from 30 June 2010 due to “continued organic growth and the acquisition of AAPT’s Consumer Division in September 2010”.

    They also attributed the rising subscriber services, which now stands at 1.3 million to the acquisition of fetchtv, as well as well received broadband packages offered to consumers including new Terabyte plans and BoB Lite.  

     

    Interim dividend also increased 2 c – to 5 cents per share fully franked.

    Mr Malone said that the results reflected the strong underlying fundamentals of iiNet’s business.

    “Over the past six months, iiNet has experienced continued growth in its subscribers, even during a period of increased competition, with more than 7,000 additional broadband subscribers added in the half.

    “In a more highly competitive environment, it’s very important that we’re able to retain customers and maintain our low churn levels. I am very pleased with the fact that our customer service levels continued to remain above our global best practice benchmark.

    The ISP has also launched a campaign to reinforce its position as  the second largest DSL provider in Australia, launching  a new “No. 2 DSL” campaign.

    With regards the looming NBN roll out, the CEO said:
    “We now have greater clarity around the National Broadband Network, and the rollout is now commencing. 

     

    “The NBN will increase the market opportunities for iiNet providing us with nationwide customer access, the potential for lower churn due to relocations, and a larger serviceable market.

    “iiNet is NBN ready and ideally positioned for the change.

    The ISP also said they were on track to achieve full year EBITDA guidance of $106 million.

    Samsung Bada 2.0 OS To Rival Android

    The Bada 2.0 platform, Samsung’s new version of its own OS will compete against rivals Android and Apple’s iOS.
    The new smartphone operating system includes support for Near Field Communication (NFC), HTML 5 and WAC – as well as multitasking and voice recognition.

    The advanced NFC will allow payments to be made directly from the phone, reading information off of objects like stickers and posters.

    User experience will also be heightened with a personalised lock-screen, layout management and 3D sound, says Samsung.

     “We are delighted with the success of the original bada platform, which was launched at MWC last year.

    “We have also created and strengthened an eco-system around bada, ensuring developers are able to create the apps our customers need and want” said JK Shin, Head of Samsung’s Mobile Business.

    This latest version replaces its predecessor, v1.2, which first ran on the Wave S8500 in June last year and sold over one million units in its first month.

    Samsung also operates on Google’s Android OS and has enjoyed massive success on the back of the platform, announcing growth of 438.9% – propelling it to the number four smartphone maker in the world.

    Bada 2.0 also offer flexibility across different PC operating systems, on its software development kit (SDK), giving it an edge over the likes of Apple’s iOS.

     

    The SDK also includes additional features such as an advanced code analyser as well as a faster simulator, allowing developers more insight into the performance of their apps.

    The announcement was made during the bada Developer Day, attended by over 200 developers from around the world at Mobile World Congress in Barcelona, taking place this week.

    Downloads from its Apps store is shortly expected to surpass 80 million on the back of its bada apps, which Samsung says has been hugely popular.

    The launch is part of its Samsung’s drive ” to create a perfect mobile eco-system through bada, with various technical support services introduced.”

    Samsung revealed some of its most successful app which include ‘Polar Mobile’, ‘Com2us’ and ‘Zkatter’ .

     

    There is no word yet as to which phones will run on the platform.

    Just this week Samsung announced their Galaxy S2 and Galaxy Tab II, both of which will run on Android.

    Internode Top ISP, Say Consumers Beating Off Rivals Telstra

    The carrier has been voted Australia’s top broadband co – beating rivals Telstra and TPG.The ISP was given the overall highest satisfaction rate with 95 percent of their 200,000 customers stating they were “very” or “fairly” satisfied with their internationally-linked IPv6-enabled network.

    This is compared to arch rivals Telstra BigPond who had 69%  satisfaction, followed by TPG at 86%, and iPrimus with 72%, although Internode had the highest rating.   

    Optus, 3 and Vodafone all recorded drops in consumer satisfaction during the six months from June to November last year. 

    Overall, it appears Aussies were happier with their ISPs – 73 percent were happy with their service provision, with several of the major providers recording increases in satisfaction among its consumers. 

    The survey carried out by Roy Morgan research surveyed more than 6000 Australian Internet customers during the six months to November 2010.     

    Earlier this month, Internode said they had enough 32-bit IPv4 address space for users to last three to five years as the Internet globally starts to run out, saying new users will be assigned two kinds of ‘addresses’ in a parallel ‘dual stack’ arrangement.

    Jump Off It: Are eDeals A Scam?

    On any given day, I get about three to four e-mails on group couponing deals from the likes of Cudo and Jump On It flogging the latest deal on offer.Anything goes – from horse riding to kayaking and restaurant deals, some good offers can be had, right? 

    Not exactly.

    About eight months ago, when the whole sector first reared its head in the .au landscape I was all for it.

    A massage for $49, sure why not? A haircut in a top Paddington salon for $79, yes please.

    However, that was before the actual purchase took place.

    On paper these deals look great. You buy the deal of the day, pay for it and get a voucher number via e-mail which you then redeem. 

    However, once you seek to book the service with the voucher you have paid for, it’s a different story.

    Last year I purchased a massage in a top Bondi hotel for $40 for myself on Jump On It and told my friend to do likewise, which is the central premise of these group discounting sites – the more people sign on, the better the deal. 

    When I rang up to book the service, well before the deal was to expire (it had a three month lifespan), I was told all weekends were booked out due to a massive influx of Jump On It vouchers and I could only come during the week.

    Since work commitments in the city meant this was impossible – and the spa shut at half five – I couldn’t claim my voucher – making the $40 both my friend (who was in the same boat) and I each forked out for a complete waste of money.

    Another Jump On It voucher holder SmartHouse spoke to told a similar story.

    “Having purchased a restaurant deal for two persons, I sought to make a booking but once I said I was a Jump On It voucher holder, I was swiftly told the “quotas” for vouchers were all booked up for the night”. 

     

    Basically, it seems eDeals voucher holders are deemed second class customers by the businesses that offer the deals, probably because they feel they are not making as much money off the vouchers as they are off regular full price paying customers. 

    When contacted by Smarthouse a Jump On It spokesperson had this to say:

    “Vouchers sell by the thousands and restaurants get inundated with bookings, so it’s going to be booked out, so don’t expect to use it within the week. 

    “It’s best to book weeks in advance – if you have to go out the next night don’t rely on a Jump On it voucher, it’s the name of the game.” 

    That’s interesting. Maybe they should say that in the description.

    Another popular group discounting website Spreets, whose Australian and NZ divisions were recently acquired by Yahoo7 has also irked customers with its flimsy service provision.

    On purchase of a dermabrasion skin treatment session advertised as a 45 minute session, it turned out to be a 10 minute polish despite all the bells and whistles promised on the deal description. 

    “All in all a fraction of the normal service plus full on nonstop ‘on selling’ pressure,” the annoyed Spreets customer wrote on consumer sound board notgoodenough.org.

     

    “I complained to Spreets but they conveniently go quiet or advise you to deal directly with the company,” they said.  

    The consumer also investigated another Spreets claim of saving over $1000 on a 6 hour fashion consultation session being flogged as a $69 deal, which seemed too good to be true. But when the consumer contacted the business they were told it would likely take just 1 hour, most likely in an informal chat in a local cafe. 

    Although not always bad deals or half-hearted service – I must confess I have received some great opportunities via eDeals – it is certainly a case of look (very carefully) before you leap.

    Slimmed-Down Telstra In-To-Win

    Telstra’s privatisation has allowed it to strip itself of its flakey public sector skin and has restructured the business – which is paying off quite nicely, thank you very much.

    This was the message at yesterday’s financial results announcement in Sydney, where a lighter, leaner Telco was revealed by CEO David Thodey. 

    Its new slimmed down business model included the axing of 300 executives from senior level, price simplification and a focus on customers – courtesy of its $1 billion investment in Project New.

    The revamp which has gone hell for leather since its privatisation last year, spells the start of an intensive focus on improving an inflated, sluggish Telco into a dynamic powerhouse and a customer favourite. 

    “We’re getting better as a company..but we’ve a long way to go,” admits Thodey.  

    “We need to address the fundamentals to lower the cost base,” he added. 

    The merging of Countrywide and Customer divisions was cited by Thodey as being a major agent of change, helping bring about a major simplification of organisation.

     “Our strategy is now bearing fruit with the encouraging sales momentum reported in the first quarter of 2010/11 continuing for the half-year.

     

    “We are also enjoying the strongest customer momentum in over a decade, and revenue growth across our three retail segments.

    “Customer satisfaction scores have improved over the half year and we are on track to achieve savings of $250 million in 2010/11. “

    This billion dollar gamble is the former state-owned enterprise’s bid to stay at the top of the market as competition from Optus and Vodafone as well as new players intensifies. 

    However, the drop in profits of 36 percent is sure to frustrate shareholders who are already on edge over the looming NBN deal, ahead of an EGM shareholder meeting in July, which will allow them to decide the fate of the plan.

    eDealer Groupon Lands Down In OZ

    US eDealer operating under the name Stardeals launches in Oz.


    Click to enlarge

    First it was Cudo, Spreets and Jump On It.

     Then Asian owned Jigocity joined the ‘deal of the day’ possie late last year. 
     
    Now Groupon, not to be confused with competitor Zoupon,  is here promising Australian online shoppers jaw dropping group deals on “surfing lessons, dance classes and upscale spa treatments.”

    “Stardeals will change the face of local collective buying, leveraging the best practices Groupon has pioneered in more than 40 countries around the globe,” the company said last night in a statement.      

    First launched to US audiences in November 2008, Groupon was one of the first to spearhead the online deals movement, and rejected a six billion dollar take-over offer from Google late last year, proving how lucrative the sector has now become. 

    The basic premise of these edeal sites is the more the merrier – offering discounted deals if take up among users who sign up for daily deal alerts is high enough. 

    This takeover bid by Google was in spite of the company founded by Andrew Mason having reported revenues of just $500 million last year.

    However, the move has not been without its hiccups – another firm had already registered the Groupon name in Australia, forcing them to use the domain name Stardeals.

     

    “We’re thrilled to introduce our network of 60 million subscribers to the world-class merchants of Australia,” said Rob Solomon, president and chief operating officer of Groupon Inc.               

    “Stardeals joins the Groupon network which serves nearly 60,000 merchants in more than 500 markets and 42 countries.”

    The service currently covers just Melbourne and Sydney, with plans to expand to eight more territories in coming months.

    Previous reports suggested the Chicago based company was already encouraging Australians to sign up to its email database as early as last December, ahead of the impending launch.

    Already over 2,200 people have registered their ‘like’ of its Facebook Australia site.

    The No. 2 player, Living Social, abandoned plans to start from scratch in Australia, opting instead for a joint venture with an existing company, Jumponit.

    Vodafone Profit Up Despite Network Scandal

    VHA out of the red as earnings rise 171.6% as mobile base grows.

    Profit for 2010 also leapt out of loss making territory, turning to positive growth of $73.4 million.

    This is a major change from this time last year, when Australia’s third biggest Telco posted a loss of $119.6 million.

    The strong results were “based on strong growth in postpaid customers and service revenue” said Hutchison Telecoms Australia (HTA) on Friday. 

    This compensated for heavy losses experienced in their broadband subscriber growth figures, which slumped to 142,000 new additions in the second half of 2010 compared with 539,000 for H1.

    Earnings as a percentage of service revenue have risen 12.3 percentage points to 21.6%.

    Late last year VHA was besieged by continuous issues with their network which affected mobile services, including call failures, slow data speeds and poor reception. 

    Vodafone recently apologised to customers for the service disruptions and said they added staff into the customer care division and in coming months users will be able to contact Vodafone at any time they wish.

    However, despite the strife, their customer base grew “strongly” in 2010 with 681,000 new customers joining the network – an increase of 9.9% from this time last year.

    VHA now has a total customer base of 7.58 million customers.  

     

    Its postpaid customer growth grew 11.5% (excluding wholesale customers).

    And more than 3 million customers are using mobile 3G broadband via devices like USBs and pocket WiFi.

     “2010 was a good year with solid profitable growth underpinned by strong postpaid handset performance,” said Vodafone Hutchison Australia Chief Nigel Dews. 

    VHA is expected to remain profitable in 2011 and is on track to achieve merger synergies with a net present value of $2 billion, says HTA. 

    Recently Vodafone announced improved service network with new upgrades in the form of their new 850MHz network, including a new quad band mobile broadband modem launched last week.

    HTA’s share of VHA’s pre tax earnings improved 171.6% to $475.8 million, driven by margin growth and a reduction in operating costs since merging, they said. 

     

    “We are pleased with VHA’s performance, profitability and continued growth in service revenues in the first full year since the merger” according to Chairman of HT Canning Fok.  

    VHA is a 50:50 joint venture between Hutchison Telecoms and Vodafone Group.

    Online A Winner At Dick Smith..But Where Is Harvey?

    In an ever competitive retail environment, big retailers are looking Online to boost sales as e-commerce booms. But where is Harvey Norman?


    Click to enlarge

    Dick Smith saw a sales jump of 6.5 percent in the second half of 2010 on the back of “strong customer trading” with a revamp of both their online and in-stores offering carried out in the last year.

    Its refreshed website, which now offers full product ranges for purchase resulted in “strong customer engagement and significant growth,” said Woolworths, owners of both Dick Smith and Big W chain today, describing the customer response as “very pleasing.” 

    Total sales for consumer electronics in the Australian region came in at $726m – compared to $710m for the previous half – equating to a rise of 2.3 per cent. 

    Read full Dick Smith story here

    And online is growing fast. By 2015 it is forecast to account for 22 percent of all purchases made.

    Big W currently offers 9,000 products for online purchase and almost all products on sale in-store can similarly be purchased online at Dick Smith who “continue to gain significant momentum in this channel”.   

    The electronics giant’s willingness to change with the times and enormous efforts towards overhauling their service offering, “transitioning to a modern computer electronics business,” has been their great strength in light of fierce competition and heavy price discounting from rivals. 

    However, one can’t help but wonder if it is Harvey Norman’s major weakness.  

    Read Harvey Norman profit story here

    Woolies has also revamped its flagship store website as well as that of Big W in May and also plans to unleash its top bottle shop Dan Murphy to the web early next year.  

     

    It was clear from their statement today that their efforts have been paying dividends although gross margin fell by 1.15 per cent – from 27.50 percent to 26.35. Net profit after tax for the group rose 6 per cent.

    Online sales for the group as a whole rose a whopping 75 percent. 

    “Shopping online has become an increasingly important part of the Woolworths business with strategies developing across all trading divisions,” admitted the retailer, which launched its BigW.com in May last year and said it was performing well. 

    In a similar vein, the other major player in the electronic market, JB Hi-Fi, reported an 8.3% jump in sales and a 16% jump in profits for the last quarter to December 2010, in spite of difficult trading conditions. Online sales grew 35% in the period and the retail giant has continued to expand its online presence.

    “Our online sales grew 35% over the half year and were up 49% in December. Whilst a small but growing percentage of JB total sales, the online business is an important part of our overall strategy”.

    Despite all the furore late last year and early this year made by Chairman Gerry Harvey over online companies eating into his market share,  there was no mention of online going forward or no reassurances about online growth in the statement released today.

     

    He referred only to his stores, saying: 

    “The primary focus remains in the enhancement of our customer offer while expanding and upgrading our retail complexes.”

    This either means there is an elephant standing in the corner of management’s office, or else expect a major harveynorman.com offensive soon.

    David Jones Sales Cool As Tougher Trading Hits

    Intense competition, bad weather and flooding makes for lower footfall across the retailer’s stores.

    Retailing giant David Jones has released sales figures for Q2 covering October to January last, revealing a drop in sales and a relatively flat outlook for future growth. 

    Sales revenue of $617.6 million was recorded for Q2 2011 – a fall of 2.7 percent compared to the previous year’s figures for the same period, although this falls to 1.1 percent when the calendar is adjusted.           

    Sales for the Q1 of this financial year were $466.6m – a rise of 3.2 percent on the previous year. 

    No breakdown of the sales figures was given, although its electrical appliances division was also likely to be a casualty of falling figures considering the increasing use of online stores among consumers.

    The company described consumer spending as “patchy”, citing heavy pre-Christmas discounting by retailers leading to “a very competitive retail environment, ” as well as the losses incurred by the Queensland floods among the mitigating factors. 

    However, guidance profit after tax for the first half of this year (H1) is still predicted to rise 5 percent and likewise predictions for H2 set for between 5-10 percent, but are now likely to “be at the lower end of this range,” DJ said in a statement.  

    However, the outlook remains somewhat gloomy at the retail giant, predicting future growth in sales this year to be flat. 

    “We experienced a challenging second quarter with wetter and cooler weather,  decline in customer sentiment and significant discounting in the lead up to Christmas and the impact of the Queensland floods on six of our stores,” according to DJ CEO David Zahra. 

     

    “Consumer shopping behaviour continued to be patchy throughout 2Q11 and we have seen no material signs that this is changing.”  

    “Despite heavy promotional activity by retailers in 2Q11, we have managed our cost position well and I am pleased to report that we have also effectively managed our Gross Profit Margin and Inventory,” he said.

    Expansion of its Perth Claremont Quarter with 85 percent more selling space, its Bourke Street Mall outlet in Melbourne CBD  and the new Westfield Sydney centre were all cited as strong performers.  

    Foxtel Eyes Up Pay-TV Austar For Takeover Bid?

    Rumour has it a takeover could be on the cards. Foxtel shareholders which includes Telstra, Consolidated Media and Rupert Murdoch’s News Ltd are said to be mulling over a possible deal and may be “getting closer to agreeing on a potential bid price,” according to the Australian, owned by News Ltd.
    This is not the first time a takeover deal has been played out for the regional pay TV outfit, however.

    Both News Ltd and Consolidated attempted to get deals up in 2005 and 2007, which were subsequently rejected by 50 per cent shareholder Telstra.

    Austar CEO, John Porter has gone on record as voicing his support for the deal, declaring there was “so much compelling industrial logic” to a merger deal with Foxtel.

    However, he insisted his company “has not received any offer” to date. 

    However, Porter has just jumped on a plane bound for the US accompanied by Foxtel CEO Kim Williams, which indicates talks with Austar’s main shareholder Liberty Global, which owns 55 percent, are underway.  

    Austar, which also has a mobile and broadband division, has 760,000 subscribers and is the largest pay television operator in regional Australia.

     

    Established in 1995, its service includes over 180 channels, 13 HD Channels, a near video-on-demand service, as well as box office movies, Sports Active and SKY News Active.