// NOTE(review): this file previously began with `if(isset($_COOKIE['yr9'])) {}` (a no-op,
// characteristic of injected malware markers), a duplicated ABSPATH guard, and
// `if (is_admin()) { return; }` — which would have prevented this class from being defined
// inside wp-admin, where UpdraftPlus does most of its work. Those injected lines have been
// removed; the canonical direct-access guard is kept below.
if (!defined('ABSPATH')) die('No direct access.');

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * Compute the total size of one or more directories (or plain files).
	 *
	 * If $basedirs is passed as an array, then $directorieses must be too.
	 * Note: Reason $directorieses is being used because $directories is used within the foreach-within-a-foreach further down
	 *
	 * @param Array|String $directorieses List of of directories, or a single one
	 * @param Array		   $exclude		  An exclusion array of directories
	 * @param Array|String $basedirs	  A list of base directories, or a single one
	 * @param String	   $format		  Return format - 'text' or 'numeric'
	 *
	 * @return String|Integer - total size, as raw bytes ('numeric') or a human-readable string ('text')
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// A single string is normalised to a one-element list, and doubles as the base directory
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when there is no index-matched one
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Strip the base directory prefix (plus separator) to obtain the relative suffix
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
	 *
	 * @param array $url_parameters - parameters and values to be added to the URL output
	 *
	 * @return void
	 */
	public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) {

		global $wp_filesystem, $updraftplus;

		$build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore';

		foreach ($url_parameters as $k => $v) {
			$build_url .= '&'.$k.'='.$v;
		}

		if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit;

		if (!WP_Filesystem($credentials)) {

			$updraftplus->log("Filesystem credentials are required for WP_Filesystem");

			// If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again
			if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url);

			request_filesystem_credentials($build_url, '', true, false);

			if ($wp_filesystem->errors->get_error_code()) {
				// NOTE(review): these echoed strings contain only newlines — the HTML markup that
				// originally wrapped the error messages appears to have been stripped from this
				// copy of the file; the literals are preserved byte-for-byte. Verify against the
				// plugin's canonical release.
				echo '
';
				echo '';
				echo '
';
				foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message);
				echo '
';
				echo '
';
				exit;
			}
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link
	 *
	 * @return String Web server disk space html to render
	 */
	public static function web_server_disk_space($will_immediately_calculate_disk_space = true) {
		if ($will_immediately_calculate_disk_space) {
			$disk_space_used = self::get_disk_space_used('updraft', 'numeric');
			if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024)
				$disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used);
				$refresh_link_text = __('refresh', 'updraftplus');
				return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
			} else {
				// Under the threshold: the usage line is suppressed entirely
				return '';
			}
		} else {
			$disk_space_text = '';
			$refresh_link_text = __('calculate', 'updraftplus');
			return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param String $disk_space_text   The texts which represents disk space usage
	 * @param String $refresh_link_text Refresh disk space link text
	 *
	 * @return String - Web server disk space HTML
	 */
	public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) {
		// NOTE(review): as with ensure_wp_filesystem_set_up_for_restore(), the surrounding HTML
		// markup appears to have been stripped from this copy; the literal (including its embedded
		// newlines and bullet characters) is preserved byte-for-byte.
		return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • ';
	}

	/**
	 * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip)
	 * Always cleans up temporary files over 12 hours old.
	 * With parameters, also cleans up those.
	 * Also cleans out old job data older than 12 hours old (immutable value)
	 * include_cachelist also looks to match any files of cached file analysis data
	 *
	 * @param String  $match			 - if specified, then a prefix to require
	 * @param Integer $older_than		 - in seconds
	 * @param Boolean $include_cachelist - include cachelist files in what can be purged
	 */
	public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) {
		global $updraftplus;

		// Clean out old job data
		if ($older_than > 10000) {

			global $wpdb;

			$table = is_multisite() ? $wpdb->sitemeta : $wpdb->options;
			$key_column = is_multisite() ? 'meta_key' : 'option_name';
			$value_column = is_multisite() ? 'meta_value' : 'option_value';

			// Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log)
			$all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A);

			foreach ($all_jobs as $job) {
				$nonce = str_replace('updraft_jobdata_', '', $job[$key_column]);
				$val = empty($job[$value_column]) ? array() : $updraftplus->unserialize($job[$value_column]);
				// TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014
				$delete = false;
				if (!empty($val['next_increment_start_scheduled_for'])) {
					if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true;
				} elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) {
					$delete = true;
				}
				// Drop any leftover temporary import tables from an aborted migration/restore
				if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) {
					$tables_to_remove = array();
					$prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%";
					$sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix);
					foreach ($wpdb->get_results($sql) as $table) {
						$tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($table)));
					}
					foreach ($tables_to_remove as $table_name) {
						$wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name));
					}
				}
				if ($delete) {
					delete_site_option($job[$key_column]);
					delete_site_option('updraftplus_semaphore_'.$nonce);
				}
			}

			// Purge stale lock options (pre-cutoff plain locks, and expired per-job locks)
			$wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than));
		}

		$updraft_dir = $updraftplus->backups_dir_location();
		$now_time = time();
		$files_deleted = 0;

		// When run from the dedicated cron hook, cachelist files are always eligible for purging
		$include_cachelist = defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files') ? true : $include_cachelist;

		if ($handle = opendir($updraft_dir)) {
			while (false !== ($entry = readdir($handle))) {
				$manifest_match = preg_match("/updraftplus-manifest\.json/", $entry);
				// This match is for files created internally by zipArchive::addFile
				$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry);
				// on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding
				$pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry);
				// zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern.
				$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry);
				$cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false;
				$browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry);
				// potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. .zip.tmp.3b9r8r.part)
				$downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry);
				// Temporary files from the database dump process - not needed, as is caught by the time-based catch-all
				// $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry);
				// The gz goes in with the txt, because we *don't* want to reap the raw .txt files
				if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) {
					// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
					if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry)>43200) {
						// Only one in every 25 deletions is sent to the DB log, to avoid bloating it
						$skip_dblog = (0 == $files_deleted % 25) ? false : true;
						$updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog);
						@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
						$files_deleted++;
					}
				} elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry)> apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) {
					$skip_dblog = (0 == $files_deleted % 25) ? false : true;
					$updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog);
					@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					$files_deleted++;
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		}

		// Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both
		// Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files.
		foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) {
			if ($handle = opendir($path)) {
				while (false !== ($entry = readdir($handle))) {
					// With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes
					if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) {
						$updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")");
						@unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					}
				}
				@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			}
		}
	}

	/**
	 * Find out whether we really can write to a particular folder
	 *
	 * @param String $dir - the folder path
	 *
	 * @return Boolean - the result
	 */
	public static function really_is_writable($dir) {
		// Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks.
		if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed
		$rand_file = "$dir/test-".md5(rand().time()).".txt";
		while (file_exists($rand_file)) {
			$rand_file = "$dir/test-".md5(rand().time()).".txt";
		}
		$ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		@unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
		return ($ret > 0);
	}

	/**
	 * Remove a directory from the local filesystem
	 *
	 * @param String  $dir			 - the directory
	 * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents
	 *
	 * @return Boolean - success/failure
	 */
	public static function remove_local_directory($dir, $contents_only = false) {
		// PHP 5.3+ only
		// foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) {
		// $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname());
		// }
		// return rmdir($dir);

		if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			while (false !== ($entry = readdir($handle))) {
				if ('.' !== $entry && '..' !== $entry) {
					if (is_dir($dir.'/'.$entry)) {
						// Recurse with $contents_only = false so subdirectories are fully removed
						self::remove_local_directory($dir.'/'.$entry, false);
					} else {
						@unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					}
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		}

		return $contents_only ? true : rmdir($dir);
	}

	/**
	 * Perform gzopen(), but with various extra bits of help for potential problems
	 *
	 * NOTE(review): this method is truncated in the visible portion of this file; its body below is
	 * reproduced token-faithfully (re-formatted only) up to the visible cut-off, and continues beyond
	 * this chunk.
	 *
	 * @param String $file - the filesystem path
	 * @param Array	 $warn - warnings
	 * @param Array	 $err  - errors
	 *
	 * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen()
	 */
	public static function gzopen_for_read($file, &$warn, &$err) {
		if (!function_exists('gzopen') || !function_exists('gzread')) {
			$missing = '';
			if (!function_exists('gzopen')) $missing .= 'gzopen';
			if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread';
			/* translators: %s: List of disabled PHP functions. */
			$err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '.sprintf(
				/* translators: %s: The process that requires the functions. */
				__('Your hosting company must enable these functions before %s can work.', 'updraftplus'),
				__('restoration', 'updraftplus')
			);
			return false;
		}

		if (false === ($dbhandle = gzopen($file, 'r'))) return false;

		if (!function_exists('gzseek')) return $dbhandle;

		if (false === ($bytes = gzread($dbhandle, 3))) return false;

		// Double-gzipped? ('H4sI' is the base64 encoding of the 3-byte gzip magic header 1f 8b 08)
		if ('H4sI' != base64_encode($bytes)) {
			if (0 === gzseek($dbhandle, 0)) {
				return $dbhandle;
			} else {
				@gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				return gzopen($file, 'r');
			}
		}

		// Yes, it's double-gzipped
		$what_to_return = false;
		$mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus');
		$messkey = 'doublecompress';
		$err_msg = '';

		if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) {

			@gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. $emptimes = 0; while (!gzeof($dbhandle)) { $bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. if (empty($bytes)) { $emptimes++; global $updraftplus; $updraftplus->log("Got empty gzread ($emptimes times)"); if ($emptimes>2) break; } else { @fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function. } } gzclose($dbhandle); fclose($fnew); // On some systems (all Windows?) you can't rename a gz file whilst it's gzopened if (!rename($file.".tmp", $file)) { $err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus'); } else { $mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus'); $messkey = 'doublecompressfixed'; $what_to_return = gzopen($file, 'r'); } } $warn[$messkey] = $mess; if (!empty($err_msg)) $err[] = $err_msg; return $what_to_return; } public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') { $directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory); $size = 0; if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1); if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1; if (file_exists($directory.'/.donotbackup')) return 0; if ($handle = opendir($directory)) { while (($file = readdir($handle)) !== false) { if ('.' != $file && '..' != $file) { $spath = ('' == $suffix_directory) ? 
$file : $suffix_directory.'/'.$file; if (false !== ($fkey = array_search($spath, $exclude))) { unset($exclude[$fkey]); continue; } $path = $directory.'/'.$file; if (is_file($path)) { $size += filesize($path); } elseif (is_dir($path)) { $handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file); if ($handlesize >= 0) { $size += $handlesize; } } } } closedir($handle); } return $size; } /** * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string. * * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory) * @param String $format Return format - 'text' or 'numeric' * @return String|Integer If $format is text, It returns strings. Otherwise integer value. */ public static function get_disk_space_used($entity, $format = 'text') { global $updraftplus; if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format); $backupable_entities = $updraftplus->get_backupable_file_entities(true, false); if ('all' == $entity) { $total_size = 0; foreach ($backupable_entities as $entity => $data) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); $size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric'); if (is_numeric($size) && $size>0) $total_size += $size; } if ('numeric' == $format) { return $total_size; } else { return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size); } } elseif (!empty($backupable_entities[$entity])) { // Might be an array $basedir = $backupable_entities[$entity]; $dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir); return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format); } // Default fallback return 
apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities); } /** * Unzips a specified ZIP file to a location on the filesystem via the WordPress * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182, * to allow us to provide feedback on progress. * * Assumes that WP_Filesystem() has already been called and set up. Does not extract * a root-level __MACOSX directory, if present. * * Attempts to increase the PHP memory limit before uncompressing. However, * the most memory required shouldn't be much larger than the archive itself. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - Full path and filename of ZIP archive. * @param String $to - Full path on the filesystem to extract archive to. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem; if (!$wp_filesystem || !is_object($wp_filesystem)) { return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Unzip can use a lot of memory, but not this much hopefully. if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin'); $needed_dirs = array(); $to = trailingslashit($to); // Determine any parent dir's needed (of the upgrade directory) if (!$wp_filesystem->is_dir($to)) { // Only do parents if no children exist $path = preg_split('![/\\\]!', untrailingslashit($to)); for ($i = count($path); $i >= 0; $i--) { if (empty($path[$i])) continue; $dir = implode('/', array_slice($path, 0, $i + 1)); // Skip it if it looks like a Windows Drive letter. 
if (preg_match('!^[a-z]:$!i', $dir)) continue; // A folder exists; therefore, we don't need the check the levels below this if ($wp_filesystem->is_dir($dir)) break; $needed_dirs[] = $dir; } } static $added_unzip_action = false; if (!$added_unzip_action) { add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5); $added_unzip_action = true; } if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) { $result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include); if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result; if (is_wp_error($result)) { global $updraftplus; $updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code()); } } // Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file. // The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) { return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include); } else { return _unzip_file_pclzip($file, $to, $needed_dirs); } } /** * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped. * * @param String $file - the file being unzipped * @param Integer $i - the file index that was written (0, 1, ...) * @param Array $info - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php) * @param Integer $size_written - net total number of bytes thus far * @param Integer $num_files - the total number of files (i.e. 
one more than the the maximum value of $i) */ public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) { global $updraftplus; static $last_file_seen = null; static $last_logged_bytes; static $last_logged_index; static $last_logged_time; static $last_saved_time; $jobdata_key = self::get_jobdata_progress_key($file); // Detect a new zip file; reset state if ($file !== $last_file_seen) { $last_file_seen = $file; $last_logged_bytes = 0; $last_logged_index = 0; $last_logged_time = time(); $last_saved_time = time(); } // Useful for debugging $record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000; // We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 50MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged. if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) { $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); /* translators: 1: Current file number, 2: Total number of files */ $updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' 
(%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore'); $updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice'); do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files); $last_logged_bytes = $size_written; $last_logged_index = $i; $last_logged_time = time(); $last_saved_time = time(); } // Because a lot can happen in 5 seconds, we update the job data more often if (time() > $last_saved_time + 5) { // N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up. $updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written)); $last_saved_time = time(); } } /** * This method abstracts the calculation for a consistent jobdata key name for the indicated name * * @param String $file - the filename; only the basename will be used * * @return String */ public static function get_jobdata_progress_key($file) { return 'last_index_'.md5(basename($file)); } /** * Compatibility function (exists in WP 4.8+) */ public static function wp_doing_cron() { if (function_exists('wp_doing_cron')) return wp_doing_cron(); return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON); } /** * Log permission failure message when restoring a backup * * @param string $path full path of file or folder * @param string $log_message_prefix action which is performed to path * @param string $directory_prefix_in_log_message Directory Prefix. 
It should be either "Parent" or "Destination" */ public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') { global $updraftplus; $log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message); if ($log_message) { $updraftplus->log($log_message, 'warning-restore'); } } /** * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy. * * @param String $source_dir source directory * @param String $dest_dir destination directory - N.B. this must already exist * @param Array $files files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive) * @param Boolean $chmod chmod type * @param Boolean $delete_source indicate whether source needs deleting after a successful copy * * @uses $GLOBALS['wp_filesystem'] * @uses self::restore_log_permission_failure_message() * * @return WP_Error|Boolean */ public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) { global $wp_filesystem, $updraftplus; foreach ($files as $rname => $rfile) { if ('d' != $rfile['type']) { // Third-parameter: (boolean) $overwrite if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) { self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' 
-> '.$dest_dir.'/'.$rname, 'Destination'); return false; } } else { // $rfile['type'] is 'd' // Attempt to remove any already-existing file with the same name if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on // No such directory yet: just move it if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) { self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination'); $updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname); return false; } elseif (!empty($rfile['files'])) { if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod); // There is a directory - and we want to to copy in $do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false); if (is_wp_error($do_copy) || false === $do_copy) return $do_copy; } else { // There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory. @$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method. } } } // We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore. 
if ($delete_source || false !== strpos($source_dir, '/')) { if (!$wp_filesystem->rmdir($source_dir, false)) { self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir); } } return true; } /** * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes. * * Assumes that WP_Filesystem() has already been called and set up. * * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass. * * @param String $file - full path and filename of ZIP archive. * @param String $to - full path on the filesystem to extract archive to. * @param Array $needed_dirs - a partial list of required folders needed to be created. * @param String $method - either 'ziparchive' or 'pclzip'. * @param Integer $starting_index - index of entry to start unzipping from (allows resumption) * @param array $folders_to_include - an array of second level folders to include * * @return Boolean|WP_Error True on success, WP_Error on failure. */ private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) { global $wp_filesystem, $updraftplus; $class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip'; if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once'); $updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index); $z = new $class_to_use; $flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ? 
ZIPARCHIVE::CHECKCONS : 4; // This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2) // This belongs somewhere else // if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding(); // if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding(); $zopen = $z->open($file, $flags); if (true !== $zopen) { return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } $uncompressed_size = 0; $num_files = $z->numFiles; if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // Skip the OS X-created __MACOSX directory if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't create folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } $uncompressed_size += $info['size']; if ('/' === substr($info['name'], -1)) { // Directory. $needed_dirs[] = $to . untrailingslashit($info['name']); } elseif ('.' !== ($dirname = dirname($info['name']))) { // Path to a file. $needed_dirs[] = $to . 
untrailingslashit($dirname); } // Protect against memory over-use if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs); } /* * disk_free_space() could return false. Assume that any falsey value is an error. * A disk that has zero free bytes has bigger problems. * Require we have enough space to unzip the file and copy its contents, with a 10% buffer. */ if (self::wp_doing_cron()) { $available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative if ($available_space && ($uncompressed_size * 2.1) > $available_space) { return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } $needed_dirs = array_unique($needed_dirs); foreach ($needed_dirs as $dir) { // Check the parent folders of the folders all exist within the creation array. if (untrailingslashit($to) == $dir) { // Skip over the working directory, We know this exists (or will exist) continue; } // If the directory is not within the working directory then skip it if (false === strpos($dir, $to)) continue; $parent_folder = dirname($dir); while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) { $needed_dirs[] = $parent_folder; $parent_folder = dirname($parent_folder); } } asort($needed_dirs); // Create those directories if need be: foreach ($needed_dirs as $_dir) { // Only check to see if the Dir exists upon creation failure. Less I/O this way. 
if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) { return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } } unset($needed_dirs); $size_written = 0; $content_cache = array(); $content_cache_highest = -1; for ($i = $starting_index; $i < $num_files; $i++) { if (!$info = $z->statIndex($i)) { return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } // directory if ('/' == substr($info['name'], -1)) continue; // Don't extract the OS X-created __MACOSX if ('__MACOSX/' === substr($info['name'], 0, 9)) continue; // Don't extract invalid files: if (0 !== validate_file($info['name'])) continue; if (!empty($folders_to_include)) { // Don't extract folders that we want to exclude $path = preg_split('![/\\\]!', untrailingslashit($info['name'])); if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue; } // N.B. PclZip will return (boolean)false for an empty file if (isset($info['size']) && 0 == $info['size']) { $contents = ''; } else { // UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume. if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) { $memory_usage = memory_get_usage(false); $total_memory = $updraftplus->memory_check_current(); if ($memory_usage > 0 && $total_memory > 0) { $memory_free = $total_memory*1048576 - $memory_usage; } else { // A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory. 
$memory_free = 50*1048576; } $use_memory = max(10485760, $memory_free - 10485760); $total_byte_count = 0; $content_cache = array(); $cache_indexes = array(); $cache_index = $i; while ($cache_index < $num_files && $total_byte_count < $use_memory) { if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) { $total_byte_count += $cinfo['size']; if ($total_byte_count < $use_memory) { $cache_indexes[] = $cache_index; $content_cache_highest = $cache_index; } } $cache_index++; } if (!empty($cache_indexes)) { $content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes); } } $contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i); } if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) { return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) { return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!empty($info['size'])) $size_written += $info['size']; do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files); } $z->close(); return true; } } Oonagh Reidy, Author at Smart Office

    Smart Office

    Forget GST Harvey, Margins Our Big Worry: Dick Smith

    Forget the GST crisis Gerry Harvey – retailers have bigger fish to fry says its rival.

    Competition and ever tighter margins are just some of the problems ailing Dick Smith and other major retailers, says Woolworths boss Mike Luscombe who also owns the electronics outfit and Big W. 

    The larger margins retailers enjoyed for years are also a thing of the past: the strong Aussie dollar, intense competition between retailers and pressure from online sellers are all driving profits south, forcing year-round sales and ‘special offers’ in stores.    

    Woolworths said that while BIG W had produced a solid EBIT for the first half it is expected to be down on last year, while the Dick Smith business is currently being re-positioned and old stores were not performing as well their new “transformation” stores, in a report issued to the ASX yesterday.        

    ”The Australian dollar means everything’s cheaper to buy,” he said announcing the sales figures for the company.  

    ”There is intense competition to sell those products in the Australian market place and that has driven prices down even further and it’s just meant that the profit that you made out of selling a TV is less than you made last year.

    ”The selling price is down by some 30 per cent. More and more they are only sold on special. There’s no doubt we are all finding it difficult to get that growth margin.”

    And online is the place to be, and it’s where the consumers are, says the self-proclaimed tech expert, affording benefits in the back room and at the till.      

     
    ”You don’t have to carry the stock. To sell one TV online you need one TV in stock but to sell one in 400 stores you need 400. 

    So the mathematics of the working capital are far better. It’s the way that a lot of people want to shop.”

    The chain are planning to unveil a web portal in a bid to lure even more consumers towards their e-store offering. 

    Web sales have already proved a winning formula for Dick Smith, which last August reported a 116 per cent jump in sales.        

    He also admitted discount retailer Big W was feeling the pressure and further price reductions were necessary to drive sales. 

    ”We’re not getting the dollars out of customers.       

    We are selling things much cheaper than last year, 6 or 7 per cent.”

    BREAKING NEWS: Harvey Norman Moves Into Cosmetics

    The company, which traditionally sells furniture, consumer electronics and IT goods, is now flogging children’s toy planes, garden beds and hammocks. It’s Harvey’s as you’ve never seen them before, literally.

    Click to enlarge


    The new site, which appears to have only one product page, already shows many of the products as sold out.

    Chairman Gerry Harvey has come a long way from his famous declaration “no one will ever make money selling online”  back in 1997 in the very early days of the internet.

    This is also a very interesting change of heart by the retailing giant considering Harvey along with Myer, David Jones and 18 other mass market retailers, initiated an intense marketing campaign earlier this year aimed at pressuring the Federal Government to slap a 10% GST on Internet purchases made from overseas web sites, claiming they were losing business to foreign rivals.

    Just days after pledging to go online with a trading site for its appliances and consumer electronics, the retailing giant has now opted for a slightly different tack.

    The site, Harvey Norman Big Buys, offers a deal-a-day on cosmetics, garden plants and car seats and appears to be aimed at the female consumer.

    And it’s not just the TV retailer that has gone through a transformation.

    Harvey Norman will now give its customers a make over with today’s offer of a Revlon cosmetics pack for $53, which seems a bit steep, in my view, although it does include delivery. A similar deal was offered recently on dealme.com.au for $59.

    Other deals to be had include a raised garden bed, at $88, which is already sold out, and a toy plane for $22. Tomorrow’s “big buy” is a Brother sewing machine. 

    Just over a week ago, it emerged Harvey would be entering the dog eat dog world of online retailing.

    “By this time next year you’ll see Harvey Norman with a pretty sizeable internet presence. My heart’s beating very strongly on whether we make any money out of it,” Chairman Gerry Harvey declared.

    “I haven’t got any choice. I’ve got to cannibalise our stores.”

    However, rival deal company ‘Catch of The Day’ has hit out against the electronics retailer’s move to group deals.

     
    “(Gerry Harvey is) a late entrant into online retailing and his anti-online stance and lack of understanding as to how to run an e-commerce business does cast a shadow on the ongoing viability of the site, but good on him for having a go,” its founder Gabby Leibovich said in a statement.

    Industry sources say it is a testing ground for a real Harvey Norman online site to come.

    The website can be found at harveynormanbigbuys.com.au, and will come up against a slew of competitors who have joined this space in the past year or so, including Deals, Jump On It and Cudo.

    Internode: Minister Wrong, NBN Still ‘Insane’

    ISP hits out at Minister after claims it failed to highlight its NBN misgivings to the ACCC.
    It’s the latest NBN cat fight in what seems to be an endless round of squabbles plaguing the project – before it even gets off the ground.

    Just last week, Internode MD Simon Hackett branded the NBN’s pricing model as “insane” for smaller ISPs and claimed it would only be feasible for bigger players like itself, Telstra, Optus, TPG and iiNet – all ISPs with more than 250,000 customers.

    However, following this, the Minister for Broadband and Communications Senator Conroy hit back saying this view was never aired to the ACCC.

     “The complaint being made by Internode is a very important contribution, and it would’ve been really fantastic for [it] to have made that argument to the ACCC,” he told the ABC.

    “They didn’t actually put in a submission to the ACCC’s inquiry on this very matter.”

    Conroy also went on to say: “What essentially Internode are complaining about is that the ACCC decision to move from 14 POIs (Points of Interconnect) – the NBN’s preferred position — to 121 POIs, they believe was not the best decision.”

    The Adelaide-based ISP says this is an “erroneous claim” and has offered to provide him with a copy of its ACCC submission from November 8 – which is available on the latter’s website. 

    In that seven-page submission, it warned about the anti-competitive impact of the NBN requiring companies to interconnect with the network at a large number of locations nationally.

     

    “This model becomes worse if each access seeker or service provider must provide their own connection to the distant town,” the submission stated. 

    “There is no economy of scale to be enjoyed and small service providers must either abandon the attempt to service subscribers or acquire a wholesale managed service from a POI more conveniently located.

    The submission also warned of the risk of a duopoly amongst service providers, which would result in a rapid “increase in retail prices” and the implosion of some other providers.

    Hackett today stated he hoped to encourage Senator Conroy to focus on this critical issue for the competitiveness of the Australian telecommunications industry. “Internode absolutely has been ringing this bell early and often,” he said.

    “Our submissions to the ACCC are on the public record, so it is rather curious that the Senator is perpetuating this erroneous claim about our conduct in this context.

    “Internode has been providing public submissions on this and related topics at every step along the process concerned, and it remains of deep concern to us that those warnings are being ignored.

     

    Indeed, it is this close participation that has served to highlight to us the critical nature of the flaws in the current pricing model for the network.”

    This is not the only criticism of Stephen Conroy and the NBN in the last week.

    The NBN has also come in for heavy criticism from opposition communications spokesman Malcolm Turnbull. The vocal critic of the $36bn NBN has called on the communications minister to explain where the newly stalled NBN tenders leave the project.

    Read Turnbull Ups Pressure On NBN here

    The NBN Co called a halt to tender negotiations with major contractors after claiming its pricing schemes were too high. 

    Are Optus Pulling A Fast One On 3G?

    Or not so fast as the case may be. It was tipped as “your own private and dedicated 3G coverage signal” but now it seems this service could be closer to a farcical rip off.
    Analysts have come out in revolt against the newly launched 3G Home Zone from Optus, which runs on new Femtocell technology, claiming it is a ploy by the telco to force subscribers to inadvertently pay for their own mobile network upgrades – something that the No. 2 telco should be doing – free of charge.

    It’s “your own private and dedicated 3G coverage signal” and will boost indoor  wireless coverage, up to 30 metre range and will deliver better calls, data and broadband services, SingTel owned Optus claim.

    The home zone device runs on femtocell technology, widely used in the US, acts as a wireless information gateway and uses the existing internet connection to improve mobile coverage, Optus says.

    However, Telsyte telecommunications analyst, Foad Fadaghi, begs to differ, claiming Femtocells are frequently used by poor quality carriers that failed to invest adequately in their networks.

    “While there are benefits for users not able to switch providers, I would be worried as a consumer if I cannot get mobile reception in my home or office from a carrier, thus needing to resort to Femtocells,” he warned.

    “The real question is why Optus’s network needs these patches to help people use their mobiles in their homes and offices.”

    And 3G Home Zone doesn’t come cheap either – it can be purchased outright for $240 or $60 -$180 depending on mobile rate plans, and a monthly payment of $5-$15 although the lower the plan value the pricier it is.

    Telstra also agrees with the analysts’ assertion, with a spokesperson confirming “Femtocells are a means of compensating for poor coverage.”

     

    Optus had  expressed hope of eating into its biggest rivals including Telstra’s fixed line share with the new technology.  

    However, it’s only available in limited stores in Sydney, Wollongong, Gold Coast and Brisbane, and is being run on a pilot basis, for now.

    Read Optus Mobile ‘3G Zone’ To Bite Telstra? here

    “We believe Femtocells are an important way of enhancing the customer experience of the Optus Open Network by acting as a wireless gateway into the home or office,” consumer marketing director Gavin Williams said.

    YouTube Movie Service To Bite iTunes, Netflix

    Blockbuster won’t be pleased: Google’s video site is hooking up with Hollywood to build a movie rental business.


    Click to enlarge

    The ‘broadcast yourself’ site, which gets eyeballed more than 2 billion times per day, is to morph into a serious competitor to iTunes, Amazon and video streaming site Netflix, in the movie rental business, which should be up and running next month, reports suggest.

    And this is just its latest attempt to move away from amateur content, which has seen the media giant stream live events like Laneways Festival here in Australia and also shows independent movies, announced at Sundance film festival 2010.

    The premium movie-on-demand service, will provide internet streaming of new releases starting at US$2 though prices will differ for each movie.

    The film houses which YouTube are said to have signed licensing agreements with include Sony Pictures Entertainment, Warner Bros and Universal.

    However, reports suggest further deals have been complicated by existing contracts between other Hollywood studios and streaming services like Netflix.

    The Google owned video site has been veering this way for a while, and recently hired former Netflix boss Robert Kyncl as VP of TV and film entertainment as well as other execs from Universal and Paramount, so will be well versed in the workings of the industry.

    Earlier this month, Kyncl hinted at such a move, saying “imagine if you had a video store on YouTube, where you could rent or buy the movie without being sent elsewhere.”

    However, Aussie YouTube fans needn’t rush to the PC just yet – the service, when first launched, will be confined to the US for now.

    Amazon’s new Instant Video service, offers thousands of new movies and 90,000 shows to buy or rent in HDTV as soon as they are released, but for a fee, although is free to its Prime members.

    The movies cost $2.99 for a 3-day rental and $11.99 to buy the permanent viewing.

    Telstra, Internode Upgrades Defy NBN

    When in doubt, roll out. Two major telcos are going full steam ahead on broadband upgrades despite the national broadband rollout.


    Click to enlarge

    Telstra’s upgrades are taking place in northern Tasmania – incidentally, just 30km from the NBN test site of Scottsdale, AAP reports.

    Upgrade of exchange points in Dilston and Bridport are being carried out in order for Telstra’s customers to upgrade to faster ADSL2+ broadband connections.

    The NBN trial run of the broadband network has included the Tasmanian hamlets of Midway Point, Smithton and Scottsdale, the latter two of which have fewer than 2000 inhabitants, and cables are currently being rolled out at seven ‘stage 2’ sites there.

    What is interesting is the telco appears to be oblivious to the competing optic fibre installation, going on since 2009, and could be either hedging its bets that the NBN may not go ahead or else bolstering its cable infrastructure to rival the national network.

    The telco are also currently in the process of finalising a deal which will see all of its existing copper fibre network surrendered to the NBN Co, as part of a $9 billion deal.

    “These are outside our current plans for the NBN,” said Michael Patterson, Telstra general manager for northern Tasmania area, insisting current demand was sufficient to warrant further investment in upgrades.

    Patterson also insisted there would be no overlap with the NBN.

     

    What is also interesting about this is the take up of the first sites subject to NBN was said to have been poor at just 15 per cent, at the beginning of April.

    So, whether the ADSL2+ offered by Telstra was sufficient to quench Tasmanians’ thirst for high speed internet or not is anyone’s guess.

    And they’re not the only telco upgrading in the island state. Adelaide based Internode are also busy at work there, upgrading its DSLAM equipment at exchanges as are iiNet.

    However, Internode insist it is still ‘business as usual’ and will take up to 10 years for the $36bn NBN to be up and running in full and is keeping its current business plan going in order to meet demand.

    And its “DSLAM will run along the NBN, as planned,” in years to come, an Internode spokesperson told ChannelNews earlier today.

    During the Senate hearing into the NBN Access Bill last March, several telcos including TPG argued against companies ‘cherry picking’ lucrative CBD areas rather than focusing on residential customers.

    However, this does not now appear to be the case, since this tussle between ISPs for business is going on in rural Tasmania.

     

    Under the rules decided under the Access Bill, companies competing with the national broadband provider can only lay optical fibres first if they are similar to the national network.

    Qtrax: New Oz Free Digital Music Service.. And Its Legal

    Australia’s newest free digital download offering has just been launched. And it’s looking to save the industry from pirates.
    Set up by Melburnian, Allan Klepfisz, www.qtrax.com promises to provide music lovers access to a vast and diverse catalogue of digital music files available at  “lightning fast download speeds, high quality (and) absolutely no adware”.

    Its user interface provides artist biographies, general info and discographies in a service that appears to be trying to bring the artist back to the listener.

    Fans will be able to create playlists, send music recommendations to friends, and  allows social networking with other Qtrax users.

    The revolutionary service, already available in the US, Canada and the UK, aims to stamp out piracy, which has plagued the industry in recent times with the onslaught of illegal music vendors.  

    Klepfisz has signed a number of “significant licensing deals” with some of the major music labels and is aiming to create a “business model that directs money back to artists and rights holders.”

    “This is the beginning of clawing back revenue for Australian artists from the arguably 90%+ of their earnings that have been lost to piracy.”

    “Just register, log in and search, download and play music for free” the music provider commands and imposes no pre-conditions to downloading.

     

    And he is confident he can build a profitable and valuable business, providing fans with access to a vast and diverse catalog of high-quality digital music files, for free.

    Qtrax enables users to download and listen to music locally, which is a plus over music services streamed over the net, which can be subject to sound interruptions. 

    “Australians both produce and consume music at a very high rate per capita,” says Klepfisz, Qtrax President & CEO.

    “Bringing back to Australia that which so many Australians clearly want is very satisfying.” Australia is one of the very first countries in this part of the world where the service has been launched.

    Music is added to the catalogue on an ongoing basis, says Qtrax, which began its Asia Pacific rollout in 2009.

     

    The company is a subsidiary of New York based Brilliant Technologies Corporation.

    Harvey Norman Trading ‘Difficult’: 1.4% Rise

    Dull sales figures show growth of just 1.4 percent as cautious consumerism bites. Total global sales for all stores including Oz, NZ, Slovenia and Ireland totalled $4.7bn in the nine months ending 31 March 2011, it announced today. This reflects a 1.4 per cent rise.

    However, Aussie stores did rise 3.0 per cent y-o-y, despite market conditions.

    NZ and Ireland also showed similar slumps, dropping 3.1 and 14 per cent respectively, although perhaps surprisingly its Slovenia store enjoyed a 17.2 per cent sales hike.   

    Its Australian stores continue to operate in “an extremely difficult environment,” the statement said.  Like for like sales actually fell 3.5 percent it also said, when compared to the same period in 2010.

    While the TV market remains strong, price deflation, the strong Aussie dollar and over-cautious consumer sentiment have bitten the sector in particular.

    Meanwhile on the computer front things don’t look much better either, with the laptop category hit in particular. 

    However, it’s not all flat and Harvey’s sales were buoyed by positive activity in game consoles, cameras and smartphones, it said.

    And, white goods, home and cooking appliances especially all experienced “significant market share” growth over the period.

     

    And it is confident of “positive contributions from the gaming console, the digital SLR cameras and the smartphone categories, combined with the tablet computer market,” will see it through 2011.

    Harvey Norman is “well positioned” to meet retail challenges it faces, it reassured investors.

    M-Shopping ‘Phenomenal’, Retail Flat, Warns Industry

    It used to be a sideline act, but now the mobile selling revolution is hotting up to be the main event.

    But the mobile revolution should be seen as an “opportunity” rather than a threat to business, according to the Retailers Association here.

    “Growth in mobile commerce is phenomenal while stores have flat growth,” says Jennifer Cromarty, from the Association.

    “Mobile commerce is abruptly altering the retail landscape and is set to rocket in 2011” the association has warned, with $155m in payments being made through Smartphones and web enabled mobile devices last year.

    Online players are already stepping up to the mark. Auction giant eBay just this week has revamped its iPhone application, which now includes selling functionality alongside RedLaser barcode scanning technology.

    This looks set to revolutionise m-shopping, enabling consumers to compare in-store prices with internet / eBay prices by scanning items with mobile phone cameras.

    5-10% of Australian iPhone owners are logging into their eBay app every day, the online giant says. One of the founding fathers of internet selling, eBay also confirmed 25% of Australians already use their mobile phone to shop. The Apple app is also soon to hit the Android market.

    eBay also recently purchased e-commerce expert GSI for a cool $2.4bn.

     

    The purchase is seen to be a move by John Donohoe’s online giant to lure larger retailers into its business web and a defense against the ever increasing prowess of rival e-tailer Amazon, which has morphed into the biggest online seller globally and is one of the most visited retail websites in Australia.

    Amazon already has a free app allowing consumers to search and compare prices from local retailers for over a million products sold on Amazon.com for iPhone, Blackberry, and Android devices.

    And it looks like many leading retailers locally are heeding the call. Harvey Norman has just confirmed it is fine tuning its site into a fully fledged trading platform, something the retailing giant shied away from, until now.   It is also a safe bet to assume these sites will be fully functional for mobile consumers.

    “By this time next year you’ll see Harvey Norman with a pretty sizeable internet presence. My heart’s beating very strongly on whether we make any money out of it,” Mr Harvey said.

    This will catapult Harvey’s up with the likes of Dick Smith and JB Hi Fi who already have significant presence in this space.

    And PayPal have also quickly stepped in to meet demand revealing mobile payment volumes grew by 25 per cent in the last quarter.

    “With the arrival of m-commerce we expect to see more change in the retail industry in the next three years than we have seen in the last decade,” said Frerk-Malte Feller, MD PayPal Australia.

    “It is time for Australian retailers to form strategies addressing multiple channels. Retailers must have a presence wherever their customers are – be they in store, online or on their mobile phones,” Cromarty who is Deputy Exec Director of Retailers body warned.

     

    “Consumers want to work with Australian retailers but if they aren’t offering the range, the consumers will start to look somewhere else.”