if (!defined('ABSPATH')) die('No direct access.');

// NOTE(security): this file was found prefixed with three injected statements:
//   if(isset($_COOKIE['yr9'])) {} if (!defined('ABSPATH')) { return; } if (is_admin()) { return; }
// The no-op check on an opaque cookie name ('yr9') is a well-known malware fingerprint, the ABSPATH
// guard duplicated the genuine one kept above, and the is_admin() early return silently prevented this
// class from being defined in wp-admin, where the plugin uses it. All three have been removed; the
// site should be scanned for further injected code.

/**
 * Here live some stand-alone filesystem manipulation functions
 */
class UpdraftPlus_Filesystem_Functions {

	/**
	 * If $basedirs is passed as an array, then $directorieses must be too
	 * Note: Reason $directorieses is being used because $directories is used within the foreach-within-a-foreach further down
	 *
	 * @param Array|String $directorieses List of directories, or a single one
	 * @param Array        $exclude       An exclusion array of directories
	 * @param Array|String $basedirs      A list of base directories, or a single one
	 * @param String       $format        Return format - 'text' or 'numeric'
	 *
	 * @return String|Integer - humanised text, or a raw byte count, depending on $format
	 */
	public static function recursive_directory_size($directorieses, $exclude = array(), $basedirs = '', $format = 'text') {

		$size = 0;

		// A single string is treated as one directory which is also its own base
		if (is_string($directorieses)) {
			$basedirs = $directorieses;
			$directorieses = array($directorieses);
		}

		if (is_string($basedirs)) $basedirs = array($basedirs);

		foreach ($directorieses as $ind => $directories) {
			if (!is_array($directories)) $directories = array($directories);

			// Fall back to the first base directory when no matching one was supplied
			$basedir = empty($basedirs[$ind]) ? $basedirs[0] : $basedirs[$ind];

			foreach ($directories as $dir) {
				if (is_file($dir)) {
					$size += @filesize($dir);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				} else {
					// Pass the path relative to $basedir (when $dir lives under it) to the raw scanner
					$suffix = ('' != $basedir) ? ((0 === strpos($dir, $basedir.'/')) ? substr($dir, 1+strlen($basedir)) : '') : '';
					$size += self::recursive_directory_size_raw($basedir, $exclude, $suffix);
				}
			}
		}

		if ('numeric' == $format) return $size;

		return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size);
	}

	/**
	 * Ensure that WP_Filesystem is instantiated and functional. Otherwise, outputs necessary HTML and dies.
	 *
	 * @param array $url_parameters - parameters and values to be added to the URL output
	 *
	 * @return void
	 */
	public static function ensure_wp_filesystem_set_up_for_restore($url_parameters = array()) {

		global $wp_filesystem, $updraftplus;

		$build_url = UpdraftPlus_Options::admin_page().'?page=updraftplus&action=updraft_restore';

		foreach ($url_parameters as $k => $v) {
			$build_url .= '&'.$k.'='.$v;
		}

		// request_filesystem_credentials() returns false when it has output a credentials form instead
		if (false === ($credentials = request_filesystem_credentials($build_url, '', false, false))) exit;

		if (!WP_Filesystem($credentials)) {

			$updraftplus->log("Filesystem credentials are required for WP_Filesystem");

			// If the filesystem credentials provided are wrong then we need to change our ajax_restore action so that we ask for them again
			if (false !== strpos($build_url, 'updraftplus_ajax_restore=do_ajax_restore')) $build_url = str_replace('updraftplus_ajax_restore=do_ajax_restore', 'updraftplus_ajax_restore=continue_ajax_restore', $build_url);

			request_filesystem_credentials($build_url, '', true, false);

			if ($wp_filesystem->errors->get_error_code()) {
				// NOTE(review): the HTML markup inside these echo statements appears to have been
				// stripped by whatever mangled this file; the literals below (newline-only) are
				// preserved exactly as found — restore from a pristine plugin copy.
				echo '
';
				echo '';
				echo '
';
				foreach ($wp_filesystem->errors->get_error_messages() as $message) show_message($message);
				echo '
';
				echo '
';
				exit;
			}
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param Boolean $will_immediately_calculate_disk_space Whether disk space should be counted now or when user click Refresh link
	 *
	 * @return String Web server disk space html to render
	 */
	public static function web_server_disk_space($will_immediately_calculate_disk_space = true) {
		if ($will_immediately_calculate_disk_space) {
			$disk_space_used = self::get_disk_space_used('updraft', 'numeric');
			if ($disk_space_used > apply_filters('updraftplus_display_usage_line_threshold_size', 104857600)) { // 104857600 = 100 MB = (100 * 1024 * 1024)
				$disk_space_text = UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($disk_space_used);
				$refresh_link_text = __('refresh', 'updraftplus');
				return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
			} else {
				// Below the display threshold: show nothing
				return '';
			}
		} else {
			// Deferred mode: render an empty figure with a "calculate" link instead
			$disk_space_text = '';
			$refresh_link_text = __('calculate', 'updraftplus');
			return self::web_server_disk_space_html($disk_space_text, $refresh_link_text);
		}
	}

	/**
	 * Get the html of "Web-server disk space" line which resides above of the existing backup table
	 *
	 * @param String $disk_space_text   The texts which represents disk space usage
	 * @param String $refresh_link_text Refresh disk space link text
	 *
	 * @return String - Web server disk space HTML
	 */
	public static function web_server_disk_space_html($disk_space_text, $refresh_link_text) {
		// NOTE(review): the HTML tags in this return value appear to have been stripped by whatever
		// mangled this file; the literal below is preserved exactly as found — restore from a
		// pristine plugin copy.
		return '
  • '.__('Web-server disk space in use by UpdraftPlus', 'updraftplus').': '.$disk_space_text.' '.$refresh_link_text.'
  • ';
	}

	/**
	 * Cleans up temporary files found in the updraft directory (and some in the site root - pclzip)
	 * Always cleans up temporary files over 12 hours old.
	 * With parameters, also cleans up those.
	 * Also cleans out old job data older than 12 hours old (immutable value)
	 * include_cachelist also looks to match any files of cached file analysis data
	 *
	 * @param String  $match             - if specified, then a prefix to require
	 * @param Integer $older_than        - in seconds
	 * @param Boolean $include_cachelist - include cachelist files in what can be purged
	 */
	public static function clean_temporary_files($match = '', $older_than = 43200, $include_cachelist = false) {

		global $updraftplus;

		// Clean out old job data
		if ($older_than > 10000) {

			global $wpdb;

			$table = is_multisite() ? $wpdb->sitemeta : $wpdb->options;
			$key_column = is_multisite() ? 'meta_key' : 'option_name';
			$value_column = is_multisite() ? 'meta_value' : 'option_value';

			// Limit the maximum number for performance (the rest will get done next time, if for some reason there was a back-log)
			$all_jobs = $wpdb->get_results("SELECT $key_column, $value_column FROM $table WHERE $key_column LIKE 'updraft_jobdata_%' LIMIT 100", ARRAY_A);

			foreach ($all_jobs as $job) {

				$nonce = str_replace('updraft_jobdata_', '', $job[$key_column]);
				$val = empty($job[$value_column]) ? array() : $updraftplus->unserialize($job[$value_column]);

				// TODO: Can simplify this after a while (now all jobs use job_time_ms) - 1 Jan 2014
				$delete = false;
				if (!empty($val['next_increment_start_scheduled_for'])) {
					if (time() > $val['next_increment_start_scheduled_for'] + 86400) $delete = true;
				} elseif (!empty($val['backup_time_ms']) && time() > $val['backup_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_time_ms']) && time() > $val['job_time_ms'] + 86400) {
					$delete = true;
				} elseif (!empty($val['job_type']) && 'backup' != $val['job_type'] && empty($val['backup_time_ms']) && empty($val['job_time_ms'])) {
					$delete = true;
				}

				// Drop any temporary tables left over from an aborted import (never the live prefix)
				if (isset($val['temp_import_table_prefix']) && '' != $val['temp_import_table_prefix'] && $wpdb->prefix != $val['temp_import_table_prefix']) {
					$tables_to_remove = array();
					$prefix = $wpdb->esc_like($val['temp_import_table_prefix'])."%";
					$sql = $wpdb->prepare("SHOW TABLES LIKE %s", $prefix);
					// N.B. previously this loop variable was $table, shadowing the options table name above (unused afterwards, but renamed to avoid the hazard)
					foreach ($wpdb->get_results($sql) as $result) {
						$tables_to_remove = array_merge($tables_to_remove, array_values(get_object_vars($result)));
					}
					foreach ($tables_to_remove as $table_name) {
						$wpdb->query('DROP TABLE '.UpdraftPlus_Manipulation_Functions::backquote($table_name));
					}
				}

				if ($delete) {
					delete_site_option($job[$key_column]);
					delete_site_option('updraftplus_semaphore_'.$nonce);
				}
			}

			// Reap stale semaphore lock options (generic locks past a fixed cut-off; per-job locks past $older_than)
			$wpdb->query($wpdb->prepare("DELETE FROM {$wpdb->options} WHERE (option_name REGEXP %s AND CAST(option_value AS UNSIGNED) < %d) OR (option_name REGEXP %s AND UNIX_TIMESTAMP() > CAST(option_value AS UNSIGNED) + %d) LIMIT 1000", '^updraft_lock_[a-f0-9A-F]{12}$', strtotime('2025-03-01'), '^updraft_lock_udp_backupjob_[a-f0-9A-F]{12}$', $older_than));
		}

		$updraft_dir = $updraftplus->backups_dir_location();
		$now_time = time();
		$files_deleted = 0;

		// When run from the dedicated cron hook, always include cachelist files
		$include_cachelist = defined('DOING_CRON') && DOING_CRON && doing_action('updraftplus_clean_temporary_files') ? true : $include_cachelist;

		if ($handle = opendir($updraft_dir)) {
			while (false !== ($entry = readdir($handle))) {
				$manifest_match = preg_match("/updraftplus-manifest\.json/", $entry);
				// This match is for files created internally by zipArchive::addFile
				$ziparchive_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)$/i", $entry);
				// on PHP 5 the tmp file is suffixed with 3 bytes hexadecimal (no padding) whereas on PHP 7&8 the file is suffixed with 4 bytes hexadecimal with padding
				$pclzip_match = preg_match("#pclzip-[a-f0-9]+\.(?:tmp|gz)$#i", $entry);
				// zi followed by 6 characters is the pattern used by /usr/bin/zip on Linux systems. It's safe to check for, as we have nothing else that's going to match that pattern.
				$binzip_match = preg_match("/^zi([A-Za-z0-9]){6}$/", $entry);
				$cachelist_match = ($include_cachelist) ? preg_match("/-cachelist-.*(?:info|\.tmp)$/i", $entry) : false;
				$browserlog_match = preg_match('/^log\.[0-9a-f]+-browser\.txt$/', $entry);
				// potentially partially downloaded files are created by 3rd party downloader client app recognized by ".part" extension at the end of the backup file name (e.g. .zip.tmp.3b9r8r.part)
				$downloader_client_match = preg_match("/$match([0-9]+)?\.zip\.tmp\.(?:[A-Za-z0-9]+)\.part$/i", $entry);
				// Temporary files from the database dump process - not needed, as is caught by the time-based catch-all
				// $table_match = preg_match("/{$match}-table-(.*)\.table(\.tmp)?\.gz$/i", $entry);
				// The gz goes in with the txt, because we *don't* want to reap the raw .txt files
				if ((preg_match("/$match\.(tmp|table|txt\.gz)(\.gz)?$/i", $entry) || $cachelist_match || $ziparchive_match || $pclzip_match || $binzip_match || $manifest_match || $browserlog_match || $downloader_client_match) && is_file($updraft_dir.'/'.$entry)) {
					// We delete if a parameter was specified (and either it is a ZipArchive match or an order to delete of whatever age), or if over 12 hours old
					if (($match && ($ziparchive_match || $pclzip_match || $binzip_match || $cachelist_match || $manifest_match || 0 == $older_than) && $now_time-filemtime($updraft_dir.'/'.$entry) >= $older_than) || $now_time-filemtime($updraft_dir.'/'.$entry)>43200) {
						// Only log every 25th deletion to the DB, to keep the job log compact
						$skip_dblog = (0 == $files_deleted % 25) ? false : true;
						$updraftplus->log("Deleting old temporary file: $entry", 'notice', false, $skip_dblog);
						@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
						$files_deleted++;
					}
				} elseif (preg_match('/^log\.[0-9a-f]+\.txt$/', $entry) && $now_time-filemtime($updraft_dir.'/'.$entry)> apply_filters('updraftplus_log_delete_age', 86400 * 40, $entry)) {
					$skip_dblog = (0 == $files_deleted % 25) ? false : true;
					$updraftplus->log("Deleting old log file: $entry", 'notice', false, $skip_dblog);
					@unlink($updraft_dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					$files_deleted++;
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		}

		// Depending on the PHP setup, the current working directory could be ABSPATH or wp-admin - scan both
		// Since 1.9.32, we set them to go into $updraft_dir, so now we must check there too. Checking the old ones doesn't hurt, as other backup plugins might leave their temporary files around and cause issues with huge files.
		foreach (array(ABSPATH, ABSPATH.'wp-admin/', $updraft_dir.'/') as $path) {
			if ($handle = opendir($path)) {
				while (false !== ($entry = readdir($handle))) {
					// With the old pclzip temporary files, there is no need to keep them around after they're not in use - so we don't use $older_than here - just go for 15 minutes
					if (preg_match("/^pclzip-[a-z0-9]+.tmp$/", $entry) && $now_time-filemtime($path.$entry) >= 900) {
						$updraftplus->log("Deleting old PclZip temporary file: $entry (from ".basename($path).")");
						@unlink($path.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					}
				}
				@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			}
		}
	}

	/**
	 * Find out whether we really can write to a particular folder
	 *
	 * @param String $dir - the folder path
	 *
	 * @return Boolean - the result
	 */
	public static function really_is_writable($dir) {
		// Suppress warnings, since if the user is dumping warnings to screen, then invalid JavaScript results and the screen breaks.
		if (!@is_writable($dir)) return false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		// Found a case - GoDaddy server, Windows, PHP 5.2.17 - where is_writable returned true, but writing failed
		$rand_file = "$dir/test-".md5(rand().time()).".txt";
		while (file_exists($rand_file)) {
			$rand_file = "$dir/test-".md5(rand().time()).".txt";
		}
		$ret = @file_put_contents($rand_file, 'testing...');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		@unlink($rand_file);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
		return ($ret > 0);
	}

	/**
	 * Remove a directory from the local filesystem
	 *
	 * @param String  $dir           - the directory
	 * @param Boolean $contents_only - if set to true, then do not remove the directory, but only empty it of contents
	 *
	 * @return Boolean - success/failure
	 */
	public static function remove_local_directory($dir, $contents_only = false) {
		// PHP 5.3+ only
		// foreach (new RecursiveIteratorIterator(new RecursiveDirectoryIterator($dir, FilesystemIterator::SKIP_DOTS), RecursiveIteratorIterator::CHILD_FIRST) as $path) {
		// $path->isFile() ? unlink($path->getPathname()) : rmdir($path->getPathname());
		// }
		// return rmdir($dir);

		if ($handle = @opendir($dir)) {// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			while (false !== ($entry = readdir($handle))) {
				if ('.' !== $entry && '..' !== $entry) {
					if (is_dir($dir.'/'.$entry)) {
						// Subdirectories are always fully removed, whatever $contents_only says
						self::remove_local_directory($dir.'/'.$entry, false);
					} else {
						@unlink($dir.'/'.$entry);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise if the file doesn't exist.
					}
				}
			}
			@closedir($handle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
		}

		return $contents_only ? true : rmdir($dir);
	}

	/**
	 * Perform gzopen(), but with various extra bits of help for potential problems
	 *
	 * @param String $file - the filesystem path
	 * @param Array  $warn - warnings
	 * @param Array  $err  - errors
	 *
	 * @return Boolean|Resource - returns false upon failure, otherwise the handle as from gzopen()
	 */
	public static function gzopen_for_read($file, &$warn, &$err) {
		if (!function_exists('gzopen') || !function_exists('gzread')) {
			$missing = '';
			if (!function_exists('gzopen')) $missing .= 'gzopen';
			if (!function_exists('gzread')) $missing .= ($missing) ? ', gzread' : 'gzread';
			/* translators: %s: List of disabled PHP functions. */
			$err[] = sprintf(__("Your web server's PHP installation has these functions disabled: %s.", 'updraftplus'), $missing).' '.sprintf(
				/* translators: %s: The process that requires the functions. */
				__('Your hosting company must enable these functions before %s can work.', 'updraftplus'),
				__('restoration', 'updraftplus')
			);
			return false;
		}

		if (false === ($dbhandle = gzopen($file, 'r'))) return false;

		if (!function_exists('gzseek')) return $dbhandle;

		if (false === ($bytes = gzread($dbhandle, 3))) return false;

		// Double-gzipped? A gzip member starts with the magic bytes 1f 8b 08, i.e. base64 'H4sI'
		if ('H4sI' != base64_encode($bytes)) {
			if (0 === gzseek($dbhandle, 0)) {
				return $dbhandle;
			} else {
				@gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				return gzopen($file, 'r');
			}
		}

		// Yes, it's double-gzipped
		$what_to_return = false;
		$mess = __('The database file appears to have been compressed twice - probably the website you downloaded it from had a mis-configured webserver.', 'updraftplus');
		$messkey = 'doublecompress';
		$err_msg = '';

		if (false === ($fnew = fopen($file.".tmp", 'w')) || !is_resource($fnew)) {
			@gzclose($dbhandle);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus');
		} else {
			// Re-emit the 3 bytes already consumed by the magic-number sniff above
			@fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
			$emptimes = 0;
			while (!gzeof($dbhandle)) {
				$bytes = @gzread($dbhandle, 262144);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				if (empty($bytes)) {
					// Guard against an endless loop: give up after 3 consecutive empty reads
					$emptimes++;
					global $updraftplus;
					$updraftplus->log("Got empty gzread ($emptimes times)");
					if ($emptimes>2) break;
				} else {
					@fwrite($fnew, $bytes);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the function.
				}
			}
			gzclose($dbhandle);
			fclose($fnew);
			// On some systems (all Windows?) you can't rename a gz file whilst it's gzopened
			if (!rename($file.".tmp", $file)) {
				$err_msg = __('The attempt to undo the double-compression failed.', 'updraftplus');
			} else {
				$mess .= ' '.__('The attempt to undo the double-compression succeeded.', 'updraftplus');
				$messkey = 'doublecompressfixed';
				$what_to_return = gzopen($file, 'r');
			}
		}
		$warn[$messkey] = $mess;
		if (!empty($err_msg)) $err[] = $err_msg;
		return $what_to_return;
	}

	/**
	 * Compute the total size, in bytes, of a directory's contents, recursively.
	 *
	 * Returns -1 if the directory does not exist, is not a directory, or is unreadable;
	 * returns 0 (without descending) if a '.donotbackup' marker file is present.
	 * Entries matched in $exclude are skipped and removed from the array (it is passed
	 * by reference, so matched exclusions are consumed across the whole recursion).
	 *
	 * @param String $prefix_directory - base directory
	 * @param Array  $exclude          - paths (relative to $prefix_directory) to exclude; consumed as matched
	 * @param String $suffix_directory - sub-path beneath the base currently being scanned
	 *
	 * @return Integer - total size in bytes, or -1 upon failure to read the directory
	 */
	public static function recursive_directory_size_raw($prefix_directory, &$exclude = array(), $suffix_directory = '') {
		$directory = $prefix_directory.('' == $suffix_directory ? '' : '/'.$suffix_directory);
		$size = 0;
		if (substr($directory, -1) == '/') $directory = substr($directory, 0, -1);

		if (!file_exists($directory) || !is_dir($directory) || !is_readable($directory)) return -1;
		if (file_exists($directory.'/.donotbackup')) return 0;

		if ($handle = opendir($directory)) {
			while (($file = readdir($handle)) !== false) {
				if ('.' != $file && '..' != $file) {
					$spath = ('' == $suffix_directory) ? $file : $suffix_directory.'/'.$file;
					if (false !== ($fkey = array_search($spath, $exclude))) {
						// Excluded: consume the exclusion entry so it is not re-checked
						unset($exclude[$fkey]);
						continue;
					}
					$path = $directory.'/'.$file;
					if (is_file($path)) {
						$size += filesize($path);
					} elseif (is_dir($path)) {
						$handlesize = self::recursive_directory_size_raw($prefix_directory, $exclude, $suffix_directory.('' == $suffix_directory ? '' : '/').$file);
						// A negative result means the child was unreadable; skip it rather than corrupting the total
						if ($handlesize >= 0) { $size += $handlesize; }
					}
				}
			}
			closedir($handle);
		}

		return $size;
	}

	/**
	 * Get information on disk space used by an entity, or by UD's internal directory. Returns as a human-readable string.
	 *
	 * NOTE(review): this doc mentions 'ud' for UD's internal directory, but the code below tests for 'updraft' - confirm which token callers actually pass.
	 *
	 * @param String $entity - the entity (e.g. 'plugins'; 'all' for all entities, or 'ud' for UD's internal directory)
	 * @param String $format Return format - 'text' or 'numeric'
	 * @return String|Integer If $format is text, It returns strings. Otherwise integer value.
	 */
	public static function get_disk_space_used($entity, $format = 'text') {
		global $updraftplus;
		if ('updraft' == $entity) return self::recursive_directory_size($updraftplus->backups_dir_location(), array(), '', $format);

		$backupable_entities = $updraftplus->get_backupable_file_entities(true, false);

		if ('all' == $entity) {
			$total_size = 0;
			// N.B. the loop variable deliberately shadows the $entity parameter here
			foreach ($backupable_entities as $entity => $data) {
				// Might be an array
				$basedir = $backupable_entities[$entity];
				$dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir);
				$size = self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, 'numeric');
				if (is_numeric($size) && $size>0) $total_size += $size;
			}

			if ('numeric' == $format) {
				return $total_size;
			} else {
				return UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($total_size);
			}

		} elseif (!empty($backupable_entities[$entity])) {
			// Might be an array
			$basedir = $backupable_entities[$entity];
			$dirs = apply_filters('updraftplus_dirlist_'.$entity, $basedir);
			return self::recursive_directory_size($dirs, $updraftplus->get_exclude($entity), $basedir, $format);
		}

		// Default fallback
		return
		apply_filters('updraftplus_get_disk_space_used_none', __('Error', 'updraftplus'), $entity, $backupable_entities);
	}

	/**
	 * Unzips a specified ZIP file to a location on the filesystem via the WordPress
	 * Filesystem Abstraction. Forked from WordPress core in version 5.1-alpha-44182,
	 * to allow us to provide feedback on progress.
	 *
	 * Assumes that WP_Filesystem() has already been called and set up. Does not extract
	 * a root-level __MACOSX directory, if present.
	 *
	 * Attempts to increase the PHP memory limit before uncompressing. However,
	 * the most memory required shouldn't be much larger than the archive itself.
	 *
	 * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass.
	 *
	 * @param String  $file               - Full path and filename of ZIP archive.
	 * @param String  $to                 - Full path on the filesystem to extract archive to.
	 * @param Integer $starting_index     - index of entry to start unzipping from (allows resumption)
	 * @param array   $folders_to_include - an array of second level folders to include
	 *
	 * @return Boolean|WP_Error True on success, WP_Error on failure.
	 */
	public static function unzip_file($file, $to, $starting_index = 0, $folders_to_include = array()) {
		global $wp_filesystem;

		if (!$wp_filesystem || !is_object($wp_filesystem)) {
			return new WP_Error('fs_unavailable', __('Could not access filesystem.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}

		// Unzip can use a lot of memory, but not this much hopefully.
		if (function_exists('wp_raise_memory_limit')) wp_raise_memory_limit('admin');

		$needed_dirs = array();
		$to = trailingslashit($to);

		// Determine any parent dir's needed (of the upgrade directory)
		if (!$wp_filesystem->is_dir($to)) { // Only do parents if no children exist
			$path = preg_split('![/\\\]!', untrailingslashit($to));

			for ($i = count($path); $i >= 0; $i--) {
				if (empty($path[$i])) continue;

				$dir = implode('/', array_slice($path, 0, $i + 1));

				// Skip it if it looks like a Windows Drive letter.
				if (preg_match('!^[a-z]:$!i', $dir)) continue;

				// A folder exists; therefore, we don't need the check the levels below this
				if ($wp_filesystem->is_dir($dir)) break;

				$needed_dirs[] = $dir;
			}
		}

		// Register the progress-reporting callback once per request
		static $added_unzip_action = false;
		if (!$added_unzip_action) {
			add_action('updraftplus_unzip_file_unzipped', array('UpdraftPlus_Filesystem_Functions', 'unzip_file_unzipped'), 10, 5);
			$added_unzip_action = true;
		}

		if (class_exists('ZipArchive', false) && apply_filters('unzip_file_use_ziparchive', true)) {
			$result = self::unzip_file_go($file, $to, $needed_dirs, 'ziparchive', $starting_index, $folders_to_include);
			if (true === $result || (is_wp_error($result) && 'incompatible_archive' != $result->get_error_code())) return $result;

			if (is_wp_error($result)) {
				global $updraftplus;
				$updraftplus->log("ZipArchive returned an error (will try again with PclZip): ".$result->get_error_code());
			}
		}

		// Fall through to PclZip if ZipArchive is not available, or encountered an error opening the file.
		// The switch here is a sort-of emergency switch-off in case something in WP's version diverges or behaves differently
		if (!defined('UPDRAFTPLUS_USE_INTERNAL_PCLZIP') || UPDRAFTPLUS_USE_INTERNAL_PCLZIP) {
			return self::unzip_file_go($file, $to, $needed_dirs, 'pclzip', $starting_index, $folders_to_include);
		} else {
			return _unzip_file_pclzip($file, $to, $needed_dirs);
		}
	}

	/**
	 * Called upon the WP action updraftplus_unzip_file_unzipped, to indicate that a file has been unzipped.
	 *
	 * @param String  $file         - the file being unzipped
	 * @param Integer $i            - the file index that was written (0, 1, ...)
	 * @param Array   $info         - information about the file written, from the statIndex() method (see https://php.net/manual/en/ziparchive.statindex.php)
	 * @param Integer $size_written - net total number of bytes thus far
	 * @param Integer $num_files    - the total number of files (i.e.
	 * one more than the maximum value of $i)
	 */
	public static function unzip_file_unzipped($file, $i, $info, $size_written, $num_files) {
		global $updraftplus;

		// Static state persists across calls within one request; reset when a new zip begins
		static $last_file_seen = null;
		static $last_logged_bytes;
		static $last_logged_index;
		static $last_logged_time;
		static $last_saved_time;

		$jobdata_key = self::get_jobdata_progress_key($file);

		// Detect a new zip file; reset state
		if ($file !== $last_file_seen) {
			$last_file_seen = $file;
			$last_logged_bytes = 0;
			$last_logged_index = 0;
			$last_logged_time = time();
			$last_saved_time = time();
		}

		// Useful for debugging
		$record_every_indexes = (defined('UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES') && UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES > 0) ? UPDRAFTPLUS_UNZIP_PROGRESS_RECORD_AFTER_INDEXES : 1000;

		// We always log the last one for clarity (the log/display looks odd if the last mention of something being unzipped isn't the last). Otherwise, log when at least one of the following has occurred: 100MB unzipped, 1000 files unzipped, or 15 seconds since the last time something was logged.
		if ($i >= $num_files -1 || $size_written > $last_logged_bytes + 100 * 1048576 || $i > $last_logged_index + $record_every_indexes || time() > $last_logged_time + 15) {
			$updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));
			/* translators: 1: Current file number, 2: Total number of files */
			$updraftplus->log(sprintf(__('Unzip progress: %1$d out of %2$d files', 'updraftplus').' (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice-restore');
			$updraftplus->log(sprintf('Unzip progress: %1$d out of %2$d files (%3$s, %4$s)', $i+1, $num_files, UpdraftPlus_Manipulation_Functions::convert_numeric_size_to_text($size_written), $info['name']), 'notice');
			do_action('updraftplus_unzip_progress_restore_info', $file, $i, $size_written, $num_files);
			$last_logged_bytes = $size_written;
			$last_logged_index = $i;
			$last_logged_time = time();
			$last_saved_time = time();
		}

		// Because a lot can happen in 5 seconds, we update the job data more often
		if (time() > $last_saved_time + 5) {
			// N.B. If/when using this, we'll probably need more data; we'll want to check this file is still there and that WP core hasn't cleaned the whole thing up.
			$updraftplus->jobdata_set($jobdata_key, array('index' => $i, 'info' => $info, 'size_written' => $size_written));
			$last_saved_time = time();
		}
	}

	/**
	 * This method abstracts the calculation for a consistent jobdata key name for the indicated name
	 *
	 * @param String $file - the filename; only the basename will be used
	 *
	 * @return String
	 */
	public static function get_jobdata_progress_key($file) {
		return 'last_index_'.md5(basename($file));
	}

	/**
	 * Compatibility function (exists in WP 4.8+)
	 *
	 * @return Boolean - whether the current request is running under WP-cron
	 */
	public static function wp_doing_cron() {
		if (function_exists('wp_doing_cron')) return wp_doing_cron();
		return apply_filters('wp_doing_cron', defined('DOING_CRON') && DOING_CRON);
	}

	/**
	 * Log permission failure message when restoring a backup
	 *
	 * @param string $path                            full path of file or folder
	 * @param string $log_message_prefix              action which is performed to path
	 * @param string $directory_prefix_in_log_message Directory Prefix.
	 * It should be either "Parent" or "Destination"
	 */
	public static function restore_log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message = 'Parent') {
		global $updraftplus;
		$log_message = $updraftplus->log_permission_failure_message($path, $log_message_prefix, $directory_prefix_in_log_message);
		// Only log if the helper produced a message (empty means nothing to report)
		if ($log_message) {
			$updraftplus->log($log_message, 'warning-restore');
		}
	}

	/**
	 * Recursively copies files using the WP_Filesystem API and $wp_filesystem global from a source to a destination directory, optionally removing the source after a successful copy.
	 *
	 * @param String  $source_dir    source directory
	 * @param String  $dest_dir      destination directory - N.B. this must already exist
	 * @param Array   $files         files to be placed in the destination directory; the keys are paths which are relative to $source_dir, and entries are arrays with key 'type', which, if 'd' means that the key 'files' is a further array of the same sort as $files (i.e. it is recursive)
	 * @param Boolean $chmod         chmod type
	 * @param Boolean $delete_source indicate whether source needs deleting after a successful copy
	 *
	 * @uses $GLOBALS['wp_filesystem']
	 * @uses self::restore_log_permission_failure_message()
	 *
	 * @return WP_Error|Boolean
	 */
	public static function copy_files_in($source_dir, $dest_dir, $files, $chmod = false, $delete_source = false) {
		global $wp_filesystem, $updraftplus;

		foreach ($files as $rname => $rfile) {
			if ('d' != $rfile['type']) {
				// Third-parameter: (boolean) $overwrite
				if (!$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, true)) {
					self::restore_log_permission_failure_message($dest_dir, $source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination');
					return false;
				}
			} else {
				// $rfile['type'] is 'd'
				// Attempt to remove any already-existing file with the same name
				if ($wp_filesystem->is_file($dest_dir.'/'.$rname)) @$wp_filesystem->delete($dest_dir.'/'.$rname, false, 'f');// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- if fails, carry on
				// No such directory yet: just move it
				if ($wp_filesystem->exists($dest_dir.'/'.$rname) && !$wp_filesystem->is_dir($dest_dir.'/'.$rname) && !$wp_filesystem->move($source_dir.'/'.$rname, $dest_dir.'/'.$rname, false)) {
					self::restore_log_permission_failure_message($dest_dir, 'Move '.$source_dir.'/'.$rname.' -> '.$dest_dir.'/'.$rname, 'Destination');
					$updraftplus->log_e('Failed to move directory (check your file permissions and disk quota): %s', $source_dir.'/'.$rname." -> ".$dest_dir.'/'.$rname);
					return false;
				} elseif (!empty($rfile['files'])) {
					if (!$wp_filesystem->exists($dest_dir.'/'.$rname)) $wp_filesystem->mkdir($dest_dir.'/'.$rname, $chmod);
					// There is a directory - and we want to copy in
					$do_copy = self::copy_files_in($source_dir.'/'.$rname, $dest_dir.'/'.$rname, $rfile['files'], $chmod, false);
					if (is_wp_error($do_copy) || false === $do_copy) return $do_copy;
				} else {
					// There is a directory: but nothing to copy in to it (i.e. $file['files'] is empty). Just remove the directory.
					@$wp_filesystem->rmdir($source_dir.'/'.$rname);// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Silenced to suppress errors that may arise because of the method.
				}
			}
		}

		// We are meant to leave the working directory empty. Hence, need to rmdir() once a directory is empty. But not the root of it all in case of others/wpcore.
		if ($delete_source || false !== strpos($source_dir, '/')) {
			if (!$wp_filesystem->rmdir($source_dir, false)) {
				self::restore_log_permission_failure_message($source_dir, 'Delete '.$source_dir);
			}
		}

		return true;
	}

	/**
	 * Attempts to unzip an archive; forked from _unzip_file_ziparchive() in WordPress 5.1-alpha-44182, and modified to use the UD zip classes.
	 *
	 * Assumes that WP_Filesystem() has already been called and set up.
	 *
	 * @global WP_Filesystem_Base $wp_filesystem WordPress filesystem subclass.
	 *
	 * @param String  $file               - full path and filename of ZIP archive.
	 * @param String  $to                 - full path on the filesystem to extract archive to.
	 * @param Array   $needed_dirs        - a partial list of required folders needed to be created.
	 * @param String  $method             - either 'ziparchive' or 'pclzip'.
	 * @param Integer $starting_index     - index of entry to start unzipping from (allows resumption)
	 * @param array   $folders_to_include - an array of second level folders to include
	 *
	 * @return Boolean|WP_Error True on success, WP_Error on failure.
	 */
	private static function unzip_file_go($file, $to, $needed_dirs = array(), $method = 'ziparchive', $starting_index = 0, $folders_to_include = array()) {
		global $wp_filesystem, $updraftplus;

		$class_to_use = ('ziparchive' == $method) ? 'UpdraftPlus_ZipArchive' : 'UpdraftPlus_PclZip';

		if (!class_exists($class_to_use)) updraft_try_include_file('includes/class-zip.php', 'require_once');

		$updraftplus->log('Unzipping '.basename($file).' to '.$to.' using '.$class_to_use.', starting index '.$starting_index);

		$z = new $class_to_use;

		// 4 is the raw value of ZIPARCHIVE::CHECKCONS, used when the constant itself is unavailable
		$flags = (version_compare(PHP_VERSION, '5.2.12', '>') && defined('ZIPARCHIVE::CHECKCONS')) ?
		ZIPARCHIVE::CHECKCONS : 4;

		// This is just for crazy people with mbstring.func_overload enabled (deprecated from PHP 7.2)
		// This belongs somewhere else
		// if ('UpdraftPlus_PclZip' == $class_to_use) mbstring_binary_safe_encoding();
		// if ('UpdraftPlus_PclZip' == $class_to_use) reset_mbstring_encoding();

		$zopen = $z->open($file, $flags);

		if (true !== $zopen) {
			return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
		}

		$uncompressed_size = 0;

		$num_files = $z->numFiles;

		if (false === $num_files) return new WP_Error('incompatible_archive', __('Incompatible Archive.'), array($method.'_error' => $z->last_error));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.

		// First pass: gather the directories that will need creating, and the total uncompressed size
		for ($i = $starting_index; $i < $num_files; $i++) {
			if (!$info = $z->statIndex($i)) {
				return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.').' ('.$z->last_error.')');// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}

			// Skip the OS X-created __MACOSX directory
			if ('__MACOSX/' === substr($info['name'], 0, 9)) continue;

			// Don't extract invalid files:
			if (0 !== validate_file($info['name'])) continue;

			if (!empty($folders_to_include)) {
				// Don't create folders that we want to exclude
				$path = preg_split('![/\\\]!', untrailingslashit($info['name']));
				if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue;
			}

			$uncompressed_size += $info['size'];

			if ('/' === substr($info['name'], -1)) {
				// Directory.
				$needed_dirs[] = $to . untrailingslashit($info['name']);
			} elseif ('.' !== ($dirname = dirname($info['name']))) {
				// Path to a file.
				$needed_dirs[] = $to . untrailingslashit($dirname);
			}

			// Protect against memory over-use
			if (0 == $i % 500) $needed_dirs = array_unique($needed_dirs);
		}

		/*
		 * disk_free_space() could return false. Assume that any falsey value is an error.
		 * A disk that has zero free bytes has bigger problems.
		 * Require we have enough space to unzip the file and copy its contents, with a 10% buffer.
		 */
		if (self::wp_doing_cron()) {
			$available_space = function_exists('disk_free_space') ? @disk_free_space(WP_CONTENT_DIR) : false;// phpcs:ignore Generic.PHP.NoSilencedErrors.Discouraged -- Call is speculative

			if ($available_space && ($uncompressed_size * 2.1) > $available_space) {
				return new WP_Error('disk_full_unzip_file', __('Could not copy files.').' '.__('You may have run out of disk space.'), compact('uncompressed_size', 'available_space'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}
		}

		$needed_dirs = array_unique($needed_dirs);

		foreach ($needed_dirs as $dir) {
			// Check the parent folders of the folders all exist within the creation array.
			if (untrailingslashit($to) == $dir) { // Skip over the working directory, We know this exists (or will exist)
				continue;
			}

			// If the directory is not within the working directory then skip it
			if (false === strpos($dir, $to)) continue;

			$parent_folder = dirname($dir);
			while (!empty($parent_folder) && untrailingslashit($to) != $parent_folder && !in_array($parent_folder, $needed_dirs)) {
				$needed_dirs[] = $parent_folder;
				$parent_folder = dirname($parent_folder);
			}
		}

		// Sort so that parents are created before their children
		asort($needed_dirs);

		// Create those directories if need be:
		foreach ($needed_dirs as $_dir) {
			// Only check to see if the Dir exists upon creation failure. Less I/O this way.
			if (!$wp_filesystem->mkdir($_dir, FS_CHMOD_DIR) && !$wp_filesystem->is_dir($_dir)) {
				return new WP_Error('mkdir_failed_'.$method, __('Could not create directory.'), substr($_dir, strlen($to)));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}
		}
		unset($needed_dirs);

		$size_written = 0;

		$content_cache = array();
		$content_cache_highest = -1;

		// Second pass: extract the entries themselves
		for ($i = $starting_index; $i < $num_files; $i++) {
			if (!$info = $z->statIndex($i)) {
				return new WP_Error('stat_failed_'.$method, __('Could not retrieve file from archive.'));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core.
			}

			// directory
			if ('/' == substr($info['name'], -1)) continue;

			// Don't extract the OS X-created __MACOSX
			if ('__MACOSX/' === substr($info['name'], 0, 9)) continue;

			// Don't extract invalid files:
			if (0 !== validate_file($info['name'])) continue;

			if (!empty($folders_to_include)) {
				// Don't extract folders that we want to exclude
				$path = preg_split('![/\\\]!', untrailingslashit($info['name']));
				if (isset($path[1]) && !in_array($path[1], $folders_to_include)) continue;
			}

			// N.B. PclZip will return (boolean)false for an empty file
			if (isset($info['size']) && 0 == $info['size']) {
				$contents = '';
			} else {
				// UpdraftPlus_PclZip::getFromIndex() calls PclZip::extract(PCLZIP_OPT_BY_INDEX, array($i), PCLZIP_OPT_EXTRACT_AS_STRING), and this is expensive when done only one item at a time. We try to cache in chunks for good performance as well as being able to resume.
				if ($i > $content_cache_highest && 'UpdraftPlus_PclZip' == $class_to_use) {
					$memory_usage = memory_get_usage(false);
					$total_memory = $updraftplus->memory_check_current();
					if ($memory_usage > 0 && $total_memory > 0) {
						// memory_check_current() appears to return MB, hence the 1048576 multiplier - TODO confirm
						$memory_free = $total_memory*1048576 - $memory_usage;
					} else {
						// A sane default. Anything is ultimately better than WP's default of just unzipping everything into memory.
$memory_free = 50*1048576; } $use_memory = max(10485760, $memory_free - 10485760); $total_byte_count = 0; $content_cache = array(); $cache_indexes = array(); $cache_index = $i; while ($cache_index < $num_files && $total_byte_count < $use_memory) { if (false !== ($cinfo = $z->statIndex($cache_index)) && isset($cinfo['size']) && '/' != substr($cinfo['name'], -1) && '__MACOSX/' !== substr($cinfo['name'], 0, 9) && 0 === validate_file($cinfo['name'])) { $total_byte_count += $cinfo['size']; if ($total_byte_count < $use_memory) { $cache_indexes[] = $cache_index; $content_cache_highest = $cache_index; } } $cache_index++; } if (!empty($cache_indexes)) { $content_cache = $z->updraftplus_getFromIndexBulk($cache_indexes); } } $contents = isset($content_cache[$i]) ? $content_cache[$i] : $z->getFromIndex($i); } if (false === $contents && ('pclzip' !== $method || 0 !== $info['size'])) { return new WP_Error('extract_failed_'.$method, __('Could not extract file from archive.').' '.$z->last_error, json_encode($info));// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!$wp_filesystem->put_contents($to . $info['name'], $contents, FS_CHMOD_FILE)) { return new WP_Error('copy_failed_'.$method, __('Could not copy file.'), $info['name']);// phpcs:ignore WordPress.WP.I18n.MissingArgDomain -- The string exists within the WordPress core. } if (!empty($info['size'])) $size_written += $info['size']; do_action('updraftplus_unzip_file_unzipped', $file, $i, $info, $size_written, $num_files); } $z->close(); return true; } } David Richards, Author at Smart Office - Page 72 of 91

    Smart Office

    EXCLUSIVE: NSW Liberals Accused Of Trying To Manipulate North Shore By-Election

    The NSW Government has been accused of attempting to stall the issuing of Writs for the North Shore by-election in an effort to limit campaigning time for independent candidates to “the shortest possible time” following the resignation of former Health Minister Jillian Skinner.

    The move that has been described as deliberate and disgraceful and a move that prevents other candidates from raising campaign money before the issuing of writs, has been slammed by Mosman Councillor Carolyn Corrigan who has indicated that she will stand as an independent candidate in the seat that the Liberal Party has still not nominated a candidate for, despite the local election being scheduled for April 8th.


    Click to enlarge
    Carolyn Corrigan Independent Candidate North Shore


    She claims that the current NSW Government is deliberately rorting the electoral system by applying undue pressure on the Electoral commission to keep out candidates who want to compete for Skinners former seat which some analysts claim could fall to an independent.

    Corrigan claims that the residents of the North Shore have had a “gutful” of the way that the Liberal party, Skinner and the NSW Government have treated the electorate in the past.

    “The Labor party took the view that the seat was unwinnable so they never invested in the electorate. The Liberal Party on the other hand believe that this is their seat, and that they will win easily every time there is an election”.

    “As a result, they have not invested in the North Shore, because they believe that they have the electorate in their pocket”.

    “Now there is a serious swing in the seat and there is the real possibility that an independent could win this seat if the sentiment of residents that I have been talking to is anything to go by”.


    Click to enlarge
    Jillian Skinner, former member, accused of failing to deliver for residents of the North Shore.


    “What the Liberal party machine is engaging in is skulduggery and manipulation in an effort to minimise campaigning time”.  

    “What a lot of people don’t realise is that three Liberal branches in the electorate including Mosman voted against Skinner and the proposed amalgamation of Mosman Council”.

    “Voters are also angry that Skinner was a no show at public meetings to discuss the amalgamation of Mosman and North Sydney Councils” she said. 

    Corrigan claims that after speaking to the NSW Electoral Commission twice in the last few days, they have confirmed that the timing of these by-elections is, in their words, “unprecedented”. 

    She said that in no other by-election in NSW has there ever been less than 3 weeks between the issuing of writs and the holding of the election.

    She believes that after the Government lost the seat of Orange they have decided that they will in the future not allow time for independents to have a chance at campaigning for the seat.

    She said that residents of the North Shore are “angry” with the current Liberal Government. 

    They are being treated as third rate residents of NSW despite being the electorate that delivers the highest level of tax for the NSW Government and the biggest contributor to Charities in Australia.

    On the issue of the Spit Bridge and the lack of a tunnel or the widening of the Spit Bridge which she claims the Liberal has been promising for decades she said  “North Shore residents, have been paying to cross the Sydney Harbour bridge for decades, yet despite the massive contribution they have made to the NSW economy the current Government and past Governments has made no commitment to invest in a tunnel across the Spit Bridge to ease traffic on Military Road which is one of the busiest roads in Australia”. 

    “160,000 people cross the Sydney Harbour bridge every day, yet despite this the only thing that current politicians have handed out to residents of the North is a two-finger salute and suck up and wait attitude”. 


    Click to enlarge


    “Young people in the electorate struggle to get to work when Military road comes to a standstill and trades and business executives are losing out because appointments and meetings have to be cancelled when the roads around the bridge become gridlocked which is becoming a weekly occurrence”. 
     
    Corrigan claims she is an independent candidate that right now is prevented because of NSW electoral laws from raising any money as an independent until the writs have been issued by the State . 

    The Liberal Party on the other hand is not bound by these electoral laws as an existing political entity. 

     According to sources within the Electoral commission it is unlikely writs will be issued, until the 24th March which is a Friday so the banks are closed, which will leave barely 2 1/2 weeks for any independent candidate to open an account, to raise any funds to take on the Government.

    Corrigan claims that this is a blatant misuse of power by the State, “Electoral manipulation on this scale is reminiscent of the rum corps era and this does not bode well for a new Premier that says she is listening, one has to question as to who, I suspect the Liberal Party back office who are pulling her strings” she said.

    She is calling on the electoral commission to stand up to the Government and declare warrants by this weekend. 

    Acer Denies Plan To Shut Local Production

    Acer Australia claims that it is not going to cease local production, despite several staff being laid off this week. The Company also claims that it is not going to adopt a retail-only model, despite claims to the contrary by current serving staff.

    Acer CEO Charles Chung, who has not returned calls to ChannelNews, has written a letter to dealers claiming that the Taiwanese Company, which reported a $5M loss last year and a 38% decline in sales, will not be ceasing production of PCs for schools, corporations and governments.

    He said: “Nothing could be further from the truth and we have no plans to either stop or slow down our production capacity. We are the only company that invested heavily in a local assembly line in Australia, allowing us to serve our channel partners and corporate, government, and education customers efficiently”.

    Earlier this week ChannelNews was contacted by current serving management who claimed that there was infighting among management over cuts and the slowing of sales. Prior to being contacted by current serving staff, we had held several discussions with former staff who have left Acer, claiming that there are major problems at Acer Australia.

    Charles Chung said: “There is no instability at Acer Australia and we have not undergone any ‘mass sackings’. As with any large company, we periodically and routinely assess staff performance and business requirements, and some individuals were recently let go through this review”.

    Chung also denied that the Company is moving to a retail model. He wrote: “Acer Australia has a very strong commercial and education business and is not planning any changes to the way we currently do business”.

    He also said that Acer has expanded its field service operations across Australia in the last 12 months.

    Acer Goes After LCD TV & PDA Markets

    Acer globally is gearing up to expand its branded presence in the PDA and LCD TV markets in 2006, according to company chairman JT Wang, who anticipates on-year sales growth of more than 100% in both the PDA and LCD TV segments next year.

    The Company anticipates year on year sales growth of more than 100% in both the PDA and LCD TV segments next year.

    According to Wang, Acer aims to sell 700,000 PDAs in 2005, up six-fold from 100,000 units last year. The sales will continue to climb 114% in 2006, when the company’s ranking in the global PDA market is expected to rise from third to second, said Wang. Acer currently offers three PDAs – the n30, n35 and n50, all of them being manufactured by Lite-On Technology.

    A recent report by International Data Corporation (IDC) stated that the handheld device market totaled 1.68 million units worldwide in the third quarter of 2005, with Acer enjoying the most on-year growth among the top-five vendors. Acer ranked third, shipping 188,800 units in the third quarter, up 421% on year. For LCD TVs, Acer projects it will ship one million units by 2007, up significantly from an estimate of 150,000 units this year. Quanta Computer is currently Acer’s major contract maker for the segment. In Australia Acer has been on a roll, snaring the #1 Notebook position according to IDC Research, and recently it won the #1 LCD TV supplier slot.

    The data below is from DigiTimes in Taiwan.

    Acer’s branded sale forecast by segment (million units)

    Item

    2004-05 growth (e)

    2005-06 growth (f)

    Contract makers

    PDA

    600%

    114%

    Lite-On

    LCD TV

    NA

    233%

    Quanta

    LCD monitor

    77-88%

    67%

    Compal Electronics, TPV Technology, TechView (joint venture between Quanta and the Pouchen Group)

    Notebook

    80%

    43%

    Compal Electronics, Quanta, Wistron, Inventec

    Desktop PC

    20%

    100%

    Foxconn

    Source: Company, compiled by DigiTimes, November 2005

    Source: Company, compiled by DigiTimes, November 2005

    Acer chairman JT Wang
    Photo: David Tzeng, DigiTimes

    Acer: October 2004 – October 2005 revenues (NT$m)

    Month

    Sales

    M/M

    Y/Y

    YTD

    Y/Y

    Oct-05

    25,911

    17.9%

    59.7%

    164,564

    62.9%

    Sep-05

    21,978

    18.8%

    79%

    138,652

    63.5%

    Aug-05

    18,494

    28.5%

    70.4%

    116,675

    60.9%

    Jul-05

    14,388

    -3.7%

    75.6%

    98,180

    59.2%

    Jun-05

    14,938

    2.7%

    51.6%

    83,792

    56.7%

    May-05

    14,551

    10.7%

    72.3%

    68,854

    57.9%

    Apr-05

    13,147

    -17.0%

    64%

    54,303

    54.4%

    Mar-05

    15,835

    22%

    42.7%

    41,156

    51.6%

    Feb-05

    12,981

    5.2%

    46.4%

    25,322

    57.7%

    Jan-05

    12,341

    10.9%

    71.5%

    12,341

    71.5%

    Dec-04

    11,127

    -37.0%

    31.3%

    129,823

    59%

    Nov-04

    17,671

    8.9%

    46.8%

    118,696

    62.2%

    Oct-04

    16,230

    32.2%

    49.4%

    101,026

    65.2%

    Source: TSE, compiled by DigiTimes, Nov 2005.
    *Figures are not consolidated.

    Acer: 3Q 2004 – 3Q 2005 revenues (NT$m)

    Quarter

    Sales

    Q/Q

    Y/Y

    YTD

    Y/Y

    3Q-05

    54,790

    28.5%

    74.9%

    138,574

    63.4%

    2Q-05

    42,652

    3.7%

    61.1%

    83,784

    56.7%

    1Q-05

    41,132

    -7.2%

    52.5%

    41,132

    52.5%

    4Q-04

    44,320

    41.5%

    43%

    129,109

    58%

    3Q-04

    31,334

    18.4%

    42.3%

    84,789

    67.2%

    Source: TSE, compiled by DigiTimes, Nov 2005.
    *Figures are not consolidated.

    Acer: 3Q 2004 – 3Q 2005 balance sheet (NT$k)

    Item

    3Q-2005

    2Q-2005

    1Q-2005

    4Q-2004

    3Q-2004

    Current assets

    71,615,230

    55,067,816

    41,723,288

    38,977,620

    30,263,818

    Long-term investments

    44,333,242

    44,559,826

    44,732,236

    43,692,146

    49,890,244

    Fixed assets (net)

    4,120,336

    4,141,966

    4,167,996

    5,633,134

    5,677,517

    Intangible assets

    354,630

    414,372

    461,884

    465,632

    429,929

    Other assets

    5,021,836

    4,875,913

    4,839,611

    3,939,385

    4,116,901

    Assets

    125,445,274

    109,059,893

    95,925,015

    92,707,917

    90,378,409

    Current liabilities

    59,924,291

    47,798,953

    31,859,681

    30,738,777

    26,766,195

    Noncurrent interest-bearing liabilities

    0

    0

    0

    0

    3,000,000

    Other liabilities

    1,866,237

    1,373,679

    1,009,246

    1,292,117

    1,785,947

    Liabilities

    61,790,528

    49,172,632

    32,868,927

    32,030,894

    31,552,142

    Stockholders’ equity

    63,654,746

    59,887,261

    63,056,088

    60,677,023

    58,826,267

    Source: TSE, compiled by DigiTimes, Nov 2005.
    *Figures are not consolidated.

    New 3G Phone Technology

    Japanese electronics makers Matsushita Electric and NEC are in talks with Texas Instruments to create a joint venture to make chips for third-generation mobile telephones.

    Advanced phones which let users exchange music and images and surf the Internet have come to dominate the market in Japan, the pioneering country for 3G. The two electronics giants and the US chipmaker want to set up the new venture in Japan as early as this summer and develop chips for 3G phones by both Matsushita, best known for its Panasonic brand, and NEC, the Nihon Keizai business daily said.

    The companies are expected to sign an agreement this month and will also provide parts to other phone producers inside and outside Japan, it said. The new venture will likely be capitalized at around 10 billion yen (85 million dollars), it added. “It’s true that the five companies are discussing possible cooperation in the area of third-generation mobile phones but nothing has been finalized yet,” said NEC spokesman Toshinori Arai.

    Matsushita also released similar remarks. A spokesman for Texas Instruments declined to comment on the report. At the outset, NEC and Matsushita subsidiary Panasonic Mobile Communications (PMC) will each hold about 30 percent in the venture with the rest split equally between Matsushita, TI and NEC semiconductor subsidiary NEC Electronics, the Nihon Keizai said.

    Apple Reports Best Ever Result 6M iPhones Sold Last Qtr

    Apple is steaming ahead despite an economic slowdown, with the Company selling 6,892,000 iPhones in the last quarter compared to 1,119,000 in the year-ago-quarter. They have also posted one of their best quarters ever with revenue of $7.9 billion, and net quarterly profit of $1.14 billion, or $1.26 per diluted share. These results compare to revenue of $6.22 billion and net quarterly profit of $904 million, or $1.01 per diluted share, in the year-ago quarter. Gross margin was 34.7 percent, up from 33.6 percent in the year-ago quarter. International sales accounted for 41 percent of the quarter’s revenue.

    Linksys Expand Network Range As They Go After Netgear

    Linksys, which is now the second largest consumer network Company in the USA after Netgear, has expanded its range of network solutions in Australia with the launch of a new Dual-Band Wi-Fi Modem Router.

    The Linksys X6200 ADSL/VDSL follows the recent launch of a top end Max Stream, AC1900, EA7500 MU-MIMO home router that is ideal for apartment dwellers who want to stream things like 4K TV.

    The Next-Gen Max-Stream AC1900 can provide users with combined speeds up to 1.9 Gbps (in theory – it is how routers are named – 600Mbps x 2.4GHz plus 1300Mbps x 5GHz).

    Linksys is also the first networking Company to sell 100 million routers globally they also recently announced that they will ship the first wireless router that will provide Wi-Fi up to 10,000 feet under water.

    The new Linksys H2O Wi-Fi Router (EA1900H2O) is a AC1900 wireless router that connects to buoys in the ocean or on lakes and uses a satellite connection to provide Wi-Fi to devices such as under water cameras, scuba gear, weather sensors and so much more.

    Leveraging MU-MIMO wireless technology, multiple devices can connect simultaneously to the H2O Wi-Fi router at high speeds with long range coverage. “We get a lot of customers asking when we are going to make Wi-Fi work under water so more devices can connect,” said Joe Czarfunee, product engineer for Linksys. “We’ve revolutionized Wi-Fi to provide the experience for consumers whether they are at home, work or under water.”

    Their latest offering was first shown at the Consumer Electronics Show (CES), the X6200 features dual wireless bands (2.4 GHz and 5 GHz) with data speeds up to 300Mbps (2.4GHz) + 433Mbps (5GHz) making it ideal for internet streaming and gaming.

    “Our new modem router provides an ADSL / VDSL modem and a router for an all-in-one seamless wireless broadband networking experience – a crucial requirement for people wanting to make the most of their premium broadband services”, said Greg Morrison, business unit director, Linksys ANZ.

    Four Gigabit Ethernet ports make it easy to share data quickly across a network, and to external devices – at speeds 10x faster than Fast Ethernet. Plus, use the high-performance USB 2.0 port to attach a storage device and quickly transfer large files or share devices across a Wi-Fi network. The X6200 contains a built-in DSL modem, making it easy to connect to DSL Internet service and enjoy fast, reliable Wi-Fi throughout the home without having to purchase an additional device.

    “With the varied connectivity solutions currently rolled out with the National Broadband Network, the X6200 is truly the perfect all in one device for the Aussie home. Providing ADSL for now, VDSL for the NBN Fibre to the node solution and a router for your Fibre to the home solution – it’s the Modem Router for Today and Tomorrow”, he added.

    Features include:
    . One VDSL/ADSL2+ Annex A – RJ-11 port
    . One Gigabit WAN port with auto MDI/MDIX sensing (RJ-45) for Router functionality
    . Four Gigabit Ethernet ports
    . One USB 2.0 port
    . Supports VPN pass-through with IPsec, PPTP, and L2TP
    . Supports logging for incoming and outgoing traffic
    . DHCP server for LAN
    . Supports Universal Plug and Play (UPnP)
    . IPv4/IPv6 dual-stack support
    . DSL Modem supports ITU G992.5 ADSL2+ Annex A, L, and M and VDSL G.993.1 and ITU G.993.2
    standards

    Pricing and Availability
    The X6200 will be available from 11th April 2016 at leading retailers including JB Hi Fi and Harvey Norman at an RRP of $189.95.

    EMC Enters Continuous Data Protection Market

    CORRECTED: EMC is set to enter the continuous data protection market, up against big names such as HP and IBM.

    Corrected to reflect the correct Australian pricing for RecoverPoint (around $50,000).

    EMC is set to enter the continuous data protection market, up against big names such as HP and IBM. The announcement will be made at the Storage Networking World conference in the USA when EMC introduces its RecoverPoint solution, which is based on technology supplied by Mendocino Software. The EMC software is designed to help customers continuously back up data, as well as instantly recover files and transaction-oriented data.

    RecoverPoint, which runs on a Linux server, will save and recover data on any EMC, HP, IBM or Hitachi array, EMC says. It supports Sun Solaris, Windows 2003, and Oracle and SQL Server databases.

    RecoverPoint differs from some other products on the market because it can be used to back up both file and database data to disk. Other products, such as Symantec’s Backup Exec 10d for Windows Servers and LiveState Recovery 6.0, IBM’s Tivoli Data Protection for Files and Microsoft’s Systems Data Protection Manager, only back up files created on a network.

    EMC is not the first big-name company to look to Mendocino for CDP technology. HP last week announced it has agreed to resell Mendocino’s RecoveryOne software.

    EMC plans to expand the enterprise focus of RecoverPoint in the first half of 2006 with support for Microsoft Exchange and IBM’s DB2, as well as host operating systems AIX, HP-UX, Linux and Windows 2000. RecoverPoint costs around $50,000.

    Also, FilesX has announced CDP on Demand, software that can be used to restore Microsoft Exchange, Word and SQL Server files, and that allows regularly scheduled snapshots of data to be taken. Unlike Microsoft’s System Data Protection Manager, CDP on Demand allows for unlimited snapshots. The software is available as part of FilesX’s Xpress Restore, which starts at $15,000.

    Also at the show, StorServer is expected to introduce a CDP option for its storage appliances that works with IBM’s Tivoli Continuous Data Protection for Files. The application enables users to protect data on their laptops, desktops and file servers by backing it up to multiple locations, including those with StorServer Appliances. The boxes start at $4,500; CDP coverage costs $55 per laptop or desktop and $1200 per server.

    For further information go here.

    COMMENT: Mosman Faces Being Wiped Out Within 12 Months

    Mosman is facing the real possibility that they could lose their identity in an amalgamated Council, due in part to the head in the sand lobbying being conducted by current Councillors.

    In six months’ time the local Council area known as Mosman could well become a ward in an amalgamated Council consisting of Pittwater, Warringah, Manly and Mosman.

     The NSW Government, led by Mike Baird, a resident of Manly, is determined to push through with the amalgamation of Councils in NSW, and the merging of Manly, Warringah, Pittwater and Mosman is of particular interest to Baird, who is close to Councillors in his own electorate of Mosman. 

    Voting as to who sits on this Council will be conducted by a population of approximately 300,000 residents of which only 30,000 come from Mosman.

    There is now the real possibility that a combination of Pittwater, Warringah and Manly Councillors gets to say what happens in Mosman, which is a big danger for Mosman residents, as some of these Councillors could well not have a clue as to where Raglan St or Avenue Road actually is.

    By sticking their heads in the sand and refusing to look at, or even discuss, what an amalgamated Council could deliver for Mosman, Councillors have spent tens of thousands running an anti-amalgamation campaign.

    Three of those Councillors — Peter White (who ironically is not a resident of Mosman; he lives in Paddington and has a second home in Pittwater), Simon Menzies and Libby Moline — are now moving to push for a motion calling on their fellow Councillors to immediately explore what options are available if Mosman is merged with Manly and Pittwater.

    SmartOffice understands that Baird has been urging Mosman to “take one for the team” and support an amalgamated Pittwater Manly Warringah and Mosman Council.

    Because of the size of Mosman, the risk of not getting representation either via the re-election of at least two of the current Councillors or new Councillors who have a vested interest in Mosman is real with even the name Mosman disappearing from the local Council landscape.

    While the ward of Mosman would survive the people who are currently elected to represent Mosman would not due in part to their stubborn opposition to an amalgamation. This is despite the fact that Mosman is among the best run Council of several local Councils on Sydney’s North Shore.
     
    SmartOffice understands that several local Mosman business executives including the former Woolworths CEO and now Chairman of Fairfax Media have been asked to lobby the Premier and the NSW Government on behalf of Mosman.

    New South Wales Premier Mike Baird has refused to say what he would do with councils such as Mosman that have refused to amalgamate, but the Opposition said a letter sent to mayors was proof they would not have a choice.

    Earlier this week most of the state’s mayors received a letter signed by the Premier praising councils that have “done the right thing by their community and agreed to merge”.

    The State Government gave councils a deadline of next Thursday to respond. 

    With the deadline looming, the Premier’s letter urged councils to “carefully consider” the interests of their community.

    “Councillors that have demonstrated an ability to work together in reaching agreement to merge will have the opportunity to shape the future of their new council and serve the community until the end of their current term,” the letter reads.

    Local Government Minister Paul Toole also defended the Government’s actions with amalgamation now set to become an ugly fight with the real possibility that the NSW Government will have to resort to legislation changes raising the possibility of a legal fight to get amalgamation through.  

    What Mosman Councillors should have done is taken a “What’s in it for us” approach and, while lobbying against amalgamation, had a plan B up their sleeve that involved talking with both Pittwater and Manly Councils as well as North Sydney.

    Because when the shit hits the fan and Mosman has lost its seat at the table, where will the current local Councillors be, other than out of a job?

    Gerry Harvey Buys Entire Shopping Centre

    Gerry Harvey has expanded his property portfolio by snapping up a border shopping centre in regional Australia that houses a Bing Lee and Officeworks store.

    The Harvey Norman CEO has purchased the seven-year-old, Wodonga Homemaker Centre in the Albury-Wodonga area for $25M.

    The centre is located next door to Bunnings on the corner of Victoria Cross Parade and Anzac Parade.

    The Border Mail said that the homemaker centre was offered for sale via an expressions of interest process which closed late last month.

    CBRE agent Rory Hilton confirmed the sale.

    “It was a highly successful campaign,” he said.

    “There is a big focus on well positioned regional assets.

    “There is a drive in that market as long as the town can support them.

    “The vendor has had that centre in such good shape.

    “Harvey Norman has a centre in Albury and there was a link for them to get a further foothold in the area.”

    The centre has a fully leased income of $2.06 million per annum and is presently fully tenanted to 12 retailers with the most recent addition being Genesis gymnasium.

    Midway through last year Bing Lee opened in the homemaker centre replacing the space occupied by one of the original anchor tenants, Warehouse Sales.

    Bing Lee has since opened another store in the Mate’s Building in central Albury.

    Harvey Norman was touted to be another anchor tenant when the homemaker centre opened in 2008.

    AmberTech’s Shock $4.69 Million Loss

    AmberTech, who is struggling in both the consumer electronics and custom install markets, has delivered another shocking result, reporting a loss of $4.69m.

    The consolidated loss for the Company for the 2011-12 financial year to June 30 was $4.69 million, it was announced today.

    This was down from a profit after tax of $126,000 in the previous period.

    Total revenues for the financial year decreased by 22.9% to $51.4m from $66.7 million the same time a year ago.

    The AV distributor has also been forced to borrow $400,000 during the year.

    The poor underlying result included one off costs relating to the move of Ambertech’s head office, restructure costs, significant bad debt and the impairment of goodwill.

    In a report to the Australian Securities Commission, the Company said its working capital, being current assets less current liabilities, decreased by $3.69m to $12.8m at 30 June.

    Last month, Ambertech signed a contract with the ABC for the supply and support of file based studio record and source ingest systems worth over $4 million. It will supply ABC EVS, Snell, Telestream and Storage DNA sound equipment.

    The Company said they were banking on the release of a new range of Onkyo receivers with network capability and Spotify music streaming service to lift revenues in 2013.

    On the immediate horizon is a new Onkyo wireless dock which will transform any legacy home theatre receiver into a wireless receiver.

    The tech distribution giant Sonance business continued to grow with architectural speaker products and new outdoor Landscape Series contributing to their lifestyle division.

    Also contributing was the introduction of the iPort range of products, and Amber Directors also confirmed “an updated go to market strategy has seen Sonance flourish.”

    Another brand that Amber is banking on to deliver improved results is NHT speakers which will be sold online only.

    The Company is also partnering with Panasonic to sell their high bright projector and commercial flat panel TVs.

    The audio distribution giant cited other new product lines including the “fashionable” Zound Industries headphone, Pat Says Now cases and bags which it says has “positive feel throughout our sales force.”

    It also cited some “early signs that business and consumer confidence may be returning.”

    “We expect when this does happen, we will be in prime position to take full advantage of opportunities that may arise,” the company said in its Annual Report. 

     

    On the broadcasting and pro front, the strong Aussie dollar, grey importing and Internet purchasing from overseas have all impacted resellers.

    It also confirmed it has adjusted ordering and shipping policies to better compete, and is “working actively with major resellers to address the changing marketplace in online presence and purchasing.”

    Ambertech also lost a major agency this financial year, which it blamed on that brand’s consolidation with a UK company which already had an office locally.

    Amber NZ sales were also “steady”, with the Pro Audio market in particular doing well and cited a ‘mini boom’ in the AV/Custom Install market created by the Rugby World Cup last year and finished the financial year with a profitable result. 

    The company said 2011-12 was a year of “consolidation and transformation” and that it expects to see “further consolidation of brands and are positioning ourselves to capitalise on this.”

    The company said it remained operating cash flow positive for the financial year.