diff --git a/_cmd.py b/_cmd.py
index 07ac50fa4c2d78ce5b5808ab15eebbfafd91db72..46e464be86965da69e4b8a4aea47906e1627dd3f 100644
--- a/_cmd.py
+++ b/_cmd.py
@@ -659,11 +659,25 @@ class ClusterEditConfigCommand(ClusterMirrorCommand):
         return self._show_help()
 
 
+class ClusterChangeRepositoryCommand(ClusterMirrorCommand):
+
+    def __init__(self):
+        super(ClusterChangeRepositoryCommand, self).__init__('change-repo', 'Change repository for a deployed component')
+        self.parser.add_option('-c', '--component', type='string', help="Component name to change repository.")
+        self.parser.add_option('--hash', type='string', help="Repository's hash")
+        self.parser.add_option('-f', '--force', action='store_true', help="Force change even if the restart fails.")
+
+    def _do_command(self, obd):
+        if self.cmds:
+            return obd.change_repository(self.cmds[0], self.opts)
+        else:
+            return self._show_help()
+
+
 class CLusterUpgradeCommand(ClusterMirrorCommand):
 
     def __init__(self):
         super(CLusterUpgradeCommand, self).__init__('upgrade', 'Upgrade a cluster.')
-        self.parser.add_option('-f', '--force', action='store_true', help="Force upgrade.")
         self.parser.add_option('-c', '--component', type='string', help="Component name to upgrade.")
         self.parser.add_option('-V', '--version', type='string', help="Target version.")
         self.parser.add_option('--skip-check', action='store_true', help="Skip all the possible checks.")
@@ -748,6 +762,7 @@ class ClusterMajorCommand(MajorCommand):
         self.register_command(ClusterEditConfigCommand())
         self.register_command(ClusterReloadCommand())
         self.register_command(CLusterUpgradeCommand())
+        self.register_command(ClusterChangeRepositoryCommand())
         self.register_command(ClusterTenantCommand())
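
# Illustrative sketch (not part of the patch): how the new `change-repo`
# sub-command above feeds ObdHome.change_repository. The handler passes the
# deploy name plus an optparse Values object carrying -c/--component, --hash
# and -f/--force. The deploy name 'demo' and the md5 placeholder below are
# hypothetical values, not taken from the patch.
from optparse import Values

opts = Values()
setattr(opts, 'component', 'oceanbase-ce')            # -c / --component
setattr(opts, 'hash', '<md5-of-target-repository>')   # --hash (placeholder)
setattr(opts, 'force', False)                         # -f / --force

# With an ObdHome instance `obd`, ClusterChangeRepositoryCommand._do_command
# effectively runs:
#     obd.change_repository('demo', opts)
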
diff --git a/_deploy.py b/_deploy.py
index 243d3afca2cd83638104fca8ec5f5f493f8e18d1..ed512f08776e606035967f020e9e44d5d03302fc 100644
--- a/_deploy.py
+++ b/_deploy.py
@@ -984,10 +984,10 @@ class Deploy(object):
                 self._uprade_meta = uprade_meta
         return False
 
-    def _update_component_repository(self, component, repository):
-        if not self.deploy_config.update_component_package_hash(component, repository.hash, repository.version):
+    def update_component_repository(self, repository):
+        if not self.deploy_config.update_component_package_hash(repository.name, repository.hash, repository.version):
             return False
-        self.use_model(component, repository)
+        self.use_model(repository.name, repository)
         return True
 
     def stop_upgrade(self, dest_repository=None):
@@ -995,7 +995,7 @@ class Deploy(object):
             self._uprade_meta = None
             self._dump_upgrade_meta_data()
             if dest_repository:
-                self._update_component_repository(dest_repository.name, dest_repository)
+                self.update_component_repository(dest_repository)
             return True
         return False
diff --git a/_mirror.py b/_mirror.py
index 8295f76602b12a23ef149ffea9b0dac673e01038..63c73bca37be3bcb1fa5364ee2ddc1f5fbbb1053 100644
--- a/_mirror.py
+++ b/_mirror.py
@@ -112,6 +112,13 @@ class MirrorRepository(object):
         info = self.get_exact_pkg_info(**pattern)
         return self.get_rpm_pkg_by_info(info) if info else None
 
+    def _pattern_check(self, pkg, **pattern):
+        for key in ['md5', 'name', 'version', 'release', 'arch']:
+            if pattern.get(key) is not None and getattr(pkg, key) != pattern[key]:
+                self.stdio and getattr(self.stdio, 'verbose', print)('pkg %s is %s, but %s is required' % (key, getattr(pkg, key), pattern[key]))
+                return None
+        return pkg
+
     def get_rpm_pkg_by_info(self, pkg_info):
         return None
 
@@ -428,14 +435,14 @@ class RemoteMirrorRepository(MirrorRepository):
     def get_all_pkg_info(self):
         return [self.db[key] for key in self.db]
 
-    def get_rpm_info_by_md5(self, md5):
+    def get_rpm_info_by_md5(self, md5, **pattern):
         if md5 in self.db:
-            return self.db[md5]
+            return self._pattern_check(self.db[md5], **pattern)
         for key in self.db:
             info = self.db[key]
             if info.md5 == md5:
                 self.stdio and getattr(self.stdio, 'verbose', print)('%s translate info %s' % (md5, info.md5))
-                return info
+                return self._pattern_check(info, **pattern)
         return None
 
     def get_rpm_pkg_by_info(self, pkg_info):
@@ -464,7 +471,7 @@ class RemoteMirrorRepository(MirrorRepository):
     def get_exact_pkg_info(self, **pattern):
         if 'md5' in pattern and pattern['md5']:
             self.stdio and getattr(self.stdio, 'verbose', print)('md5 is %s' % pattern['md5'])
-            return self.get_rpm_info_by_md5(pattern['md5'])
+            return self.get_rpm_info_by_md5(**pattern)
         self.stdio and getattr(self.stdio, 'verbose', print)('md5 is None')
         if 'name' not in pattern and not pattern['name']:
             self.stdio and getattr(self.stdio, 'verbose', print)('name is None')
@@ -499,7 +506,10 @@ class RemoteMirrorRepository(MirrorRepository):
         matchs = []
         if 'md5' in pattern and pattern['md5']:
             self.stdio and getattr(self.stdio, 'verbose', print)('md5 is %s' % pattern['md5'])
-            return [self.db[pattern['md5']], (0xfffffffff, )] if pattern['md5'] in self.db else matchs
+            info = None
+            if pattern['md5'] in self.db:
+                info = self._pattern_check(self.db[pattern['md5']], **pattern)
+            return [info, (0xfffffffff, )] if info else matchs
         self.stdio and getattr(self.stdio, 'verbose', print)('md5 is None')
         if 'name' not in pattern and not pattern['name']:
             self.stdio and getattr(self.stdio, 'verbose', print)('name is None')
@@ -689,7 +699,10 @@ class LocalMirrorRepository(MirrorRepository):
     def get_exact_pkg_info(self, **pattern):
         if 'md5' in pattern and pattern['md5']:
             self.stdio and getattr(self.stdio, 'verbose', print)('md5 is %s' % pattern['md5'])
-            return self.db[pattern['md5']] if pattern['md5'] in self.db else None
+            info = None
+            if pattern['md5'] in self.db:
+                info = self._pattern_check(self.db[pattern['md5']], **pattern)
+            return info
         self.stdio and getattr(self.stdio, 'verbose', print)('md5 is None')
         if 'name' not in pattern and not pattern['name']:
             self.stdio and getattr(self.stdio, 'verbose', print)('name is None')
@@ -730,7 +743,10 @@ class LocalMirrorRepository(MirrorRepository):
         matchs = []
         if 'md5' in pattern and pattern['md5']:
             self.stdio and getattr(self.stdio, 'verbose', print)('md5 is %s' % pattern['md5'])
-            return [self.db[pattern['md5']], (0xfffffffff, )] if pattern['md5'] in self.db else matchs
+            info = None
+            if pattern['md5'] in self.db:
+                info = self._pattern_check(self.db[pattern['md5']], **pattern)
+            return [info, (0xfffffffff, )] if info else matchs
         self.stdio and getattr(self.stdio, 'verbose', print)('md5 is None')
         if 'name' not in pattern and not pattern['name']:
             return matchs
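
# A self-contained sketch of the filtering rule behind the new
# MirrorRepository._pattern_check above (simplified, not obd code): a candidate
# package is accepted only when every non-None field in the pattern matches it,
# which is what now guards the md5 lookups in get_exact_pkg_info and the
# match-scoring paths. PkgInfo here is a stand-in for obd's package-info class.
from collections import namedtuple

PkgInfo = namedtuple('PkgInfo', ['md5', 'name', 'version', 'release', 'arch'])

def pattern_check(pkg, **pattern):
    # Compare only the keys the caller actually supplied; None means "don't care".
    for key in ['md5', 'name', 'version', 'release', 'arch']:
        if pattern.get(key) is not None and getattr(pkg, key) != pattern[key]:
            return None
    return pkg

pkg = PkgInfo(md5='abc123', name='oceanbase-ce', version='3.1.3', release='1.el7', arch='x86_64')
assert pattern_check(pkg, name='oceanbase-ce', version='3.1.3') is pkg  # supplied keys all match
assert pattern_check(pkg, md5='abc123', version='9.9.9') is None        # version mismatch rejects
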
diff --git a/core.py b/core.py
index 4194a947b0e3042bb1eea9222b6759d5de47728e..ee37376674781973ba9045a9ed32aa7ca163e57d 100644
--- a/core.py
+++ b/core.py
@@ -207,7 +207,7 @@ class ObdHome(object):
                 self._call_stdio(msg_lv, 'No such %s plugin for %s-%s' % (script_name, repository.name, repository.version))
         return plugins
 
-    def search_images(self, component_name, version, release=None, disable=[], usable=[], release_first=False):
+    def search_images(self, component_name, version, release=None, disable=[], usable=[], release_first=False, print_match=True):
         matchs = {}
         usable_matchs = []
         for pkg in self.mirror_manager.get_pkgs_info(component_name, version=version, release=release):
@@ -223,7 +223,7 @@ class ObdHome(object):
             else:
                 matchs[repo.md5] = repo
         if matchs:
-            self._call_stdio(
+            print_match and self._call_stdio(
                 'print_list',
                 matchs,
                 ['name', 'version', 'release', 'arch', 'md5'],
@@ -583,39 +583,49 @@ class ObdHome(object):
         return install_plugins
 
     def install_lib_for_repositories(self, repositories):
-        data = {}
-        temp_map = {}
-        for repository in repositories:
-            lib_name = '%s-libs' % repository.name
-            data[lib_name] = {'global': {
-                'version': repository.version
-            }}
-            temp_map[lib_name] = repository
+        all_data = []
+        temp_repositories = repositories
+        while temp_repositories:
+            data = {}
+            temp_map = {}
+            repositories = temp_repositories
+            temp_repositories = []
+            for repository in repositories:
+                lib_name = '%s-libs' % repository.name
+                if lib_name in data:
+                    temp_repositories.append(repository)
+                    continue
+                data[lib_name] = {'global': {
+                    'version': repository.version
+                }}
+                temp_map[lib_name] = repository
+            all_data.append((data, temp_map))
        try:
-            with tempfile.NamedTemporaryFile(suffix=".yaml", mode='w') as tf:
-                yaml_loader = YamlLoader(self.stdio)
-                yaml_loader.dump(data, tf)
-                deploy_config = DeployConfig(tf.name, yaml_loader=yaml_loader, config_parser_manager=self.deploy_manager.config_parser_manager)
-                # Look for the best suitable mirrors for the components
-                self._call_stdio('verbose', 'Search best suitable repository libs')
-                pkgs, lib_repositories, errors = self.search_components_from_mirrors(deploy_config, only_info=False)
-                if errors:
-                    self._call_stdio('error', '\n'.join(errors))
-                    return False
+            repositories_lib_map = {}
+            for data, temp_map in all_data:
+                with tempfile.NamedTemporaryFile(suffix=".yaml", mode='w') as tf:
+                    yaml_loader = YamlLoader(self.stdio)
+                    yaml_loader.dump(data, tf)
+                    deploy_config = DeployConfig(tf.name, yaml_loader=yaml_loader, config_parser_manager=self.deploy_manager.config_parser_manager)
+                    # Look for the best suitable mirrors for the components
+                    self._call_stdio('verbose', 'Search best suitable repository libs')
+                    pkgs, lib_repositories, errors = self.search_components_from_mirrors(deploy_config, only_info=False)
+                    if errors:
+                        self._call_stdio('error', '\n'.join(errors))
+                        return False
 
-                # Get the installation plugin and install locally
-                install_plugins = self.get_install_plugin_and_install(lib_repositories, pkgs)
-                if not install_plugins:
-                    return False
-                repositories_lib_map = {}
-                for lib_repository in lib_repositories:
-                    repository = temp_map[lib_repository.name]
-                    install_plugin = install_plugins[lib_repository]
-                    repositories_lib_map[repository] = {
-                        'repositories': lib_repository,
-                        'install_plugin': install_plugin
-                    }
-                return repositories_lib_map
+                    # Get the installation plugin and install locally
+                    install_plugins = self.get_install_plugin_and_install(lib_repositories, pkgs)
+                    if not install_plugins:
+                        return False
+                    for lib_repository in lib_repositories:
+                        repository = temp_map[lib_repository.name]
+                        install_plugin = install_plugins[lib_repository]
+                        repositories_lib_map[repository] = {
+                            'repositories': lib_repository,
+                            'install_plugin': install_plugin
+                        }
+            return repositories_lib_map
         except:
             self._call_stdio('exception', 'Failed to create lib-repo config file')
             pass
@@ -798,6 +808,8 @@ class ObdHome(object):
 
         self._call_stdio('verbose', 'Create deploy by configuration path')
         deploy = self.deploy_manager.create_deploy_config(name, config_path)
+        if not deploy:
+            return False
 
         self._call_stdio('verbose', 'Get deploy configuration')
         deploy_config = deploy.deploy_config
@@ -1857,6 +1869,115 @@ class ObdHome(object):
                 return True
         return False
 
+    def change_repository(self, name, options=Values()):
+        self._call_stdio('verbose', 'Get Deploy by name')
+        deploy = self.deploy_manager.get_deploy_config(name)
+        if not deploy:
+            self._call_stdio('error', 'No such deploy: %s.' % name)
+            return False
+
+        deploy_info = deploy.deploy_info
+        self._call_stdio('verbose', 'Deploy status judge')
+        if deploy_info.status in [DeployStatus.STATUS_DESTROYED, DeployStatus.STATUS_CONFIGURED, DeployStatus.STATUS_UPRADEING]:
+            self._call_stdio('error', 'Deploy "%s" is %s' % (name, deploy_info.status.value))
+            return False
+
+        component = getattr(options, 'component')
+        usable = getattr(options, 'hash')
+        if not component:
+            self._call_stdio('error', 'Specify the components you want to change.')
+            return False
+        if not usable:
+            self._call_stdio('error', 'Specify the hash you want to upgrade.')
+            return False
+        if component not in deploy_info.components:
+            self._call_stdio('error', 'Not found %s in Deploy "%s" ' % (component, name))
+            return False
+
+        deploy_config = deploy.deploy_config
+
+        self._call_stdio('start_loading', 'Get local repositories and plugins')
+        # Get the repository
+        repositories = self.load_local_repositories(deploy_info)
+        for current_repository in repositories:
+            if current_repository.name == component:
+                break
+
+        stop_plugins = self.search_py_script_plugin([current_repository], 'stop')
+        start_plugins = self.search_py_script_plugin([current_repository], 'start')
+        change_repo_plugin = self.plugin_manager.get_best_py_script_plugin('change_repo', 'general', '0.1')
+        self._call_stdio('stop_loading', 'succeed')
+
+        self._call_stdio('verbose', 'search target repository')
+        dest_repository = self.repository_manager.get_repository(current_repository.name, version=current_repository.version, tag=usable)
+        if not dest_repository:
+            pkg = self.mirror_manager.get_exact_pkg(name=current_repository.name, version=current_repository.version, md5=usable)
+            if not pkg:
+                self._call_stdio('error', 'No such package %s-%s-%s' % (component, current_repository.version, usable))
+                return False
+            repositories = []
+            install_plugins = self.get_install_plugin_and_install(repositories, [pkg])
+            if not install_plugins:
+                return False
+            dest_repository = repositories[0]
+        else:
+            install_plugins = self.search_plugins([dest_repository], PluginType.INSTALL)
+
+        if dest_repository is None:
+            self._call_stdio('error', 'Target version not found')
+            return False
+
+        if dest_repository == current_repository:
+            self._call_stdio('print', 'The current version is already %s.\nNothing to do.' % current_repository)
+            return False
+
+        # Get the client
+        ssh_clients = self.get_clients(deploy_config, [current_repository])
+        cluster_config = deploy_config.components[current_repository.name]
+
+        self._call_stdio('start_loading', 'Load cluster param plugin')
+        # Check whether the components have the parameter plugins and apply the plugins
+        self.search_param_plugin_and_apply(repositories, deploy_config)
+        self._call_stdio('stop_loading', 'succeed')
+
+        cluster_config = deploy_config.components[dest_repository.name]
+        # cluster files check
+        self.servers_repository_install(ssh_clients, cluster_config.servers, dest_repository, install_plugins[dest_repository])
+        # lib check
+        if not self.servers_repository_lib_check(ssh_clients, cluster_config.servers, dest_repository, install_plugins[dest_repository], 'warn'):
+            self._call_stdio('print', 'Try to get lib-repository')
+            repositories_lib_map = self.install_lib_for_repositories([dest_repository])
+            if repositories_lib_map is False:
+                self._call_stdio('error', 'Failed to install lib package for local')
+                return False
+            if self.servers_apply_lib_repository_and_check(ssh_clients, deploy_config, [dest_repository], repositories_lib_map):
+                self._call_stdio('error', 'Failed to install lib package for cluster servers')
+                return False
+
+
+        # Check the status for the deployed cluster
+        component_status = {}
+        cluster_status = self.cluster_status_check(ssh_clients, deploy_config, [current_repository], component_status)
+        if cluster_status is False or cluster_status == 1:
+            self._call_stdio('verbose', 'Call %s for %s' % (stop_plugins[current_repository], current_repository))
+            if not stop_plugins[current_repository](deploy_config.components.keys(), ssh_clients, cluster_config, [], options, self.stdio):
+                return False
+
+        self._call_stdio('verbose', 'Call %s for %s' % (change_repo_plugin, dest_repository))
+        if not change_repo_plugin(deploy_config.components.keys(), ssh_clients, cluster_config, [], options, self.stdio, self.home_path, dest_repository):
+            return False
+
+        if deploy_info.status == DeployStatus.STATUS_RUNNING:
+            self._call_stdio('verbose', 'Call %s for %s' % (start_plugins[current_repository], dest_repository))
+            setattr(options, 'without_parameter', True)
+            if not start_plugins[current_repository](deploy_config.components.keys(), ssh_clients, cluster_config, [], options, self.stdio, self.home_path, dest_repository.repository_dir) and getattr(options, 'force', False) is False:
+                self._call_stdio('verbose', 'Call %s for %s' % (change_repo_plugin, current_repository))
+                change_repo_plugin(deploy_config.components.keys(), ssh_clients, cluster_config, [], options, self.stdio, self.home_path, current_repository)
+                return False
+
+        deploy.update_component_repository(dest_repository)
+        return True
+
     def upgrade_cluster(self, name, options=Values()):
         self._call_stdio('verbose', 'Get Deploy by name')
         deploy = self.deploy_manager.get_deploy_config(name)
@@ -1986,31 +2107,23 @@ class ObdHome(object):
                     use_images = []
 
             pkgs = []
-            repositories = []
+            upgrade_repositories = [current_repository]
             for image in use_images:
                 if isinstance(image, Repository):
-                    pkg = self.mirror_manager.get_exact_pkg(name=image.name, md5=image.md5)
-                    if pkg:
-                        pkgs.append(pkg)
-                    else:
-                        repositories.append(image)
+                    upgrade_repositories.append(image)
                 else:
-                    pkgs.append(image)
-
-            if pkgs:
-                install_plugins = self.get_install_plugin_and_install(repositories, pkgs)
-                if not install_plugins:
-                    return False
-
-            upgrade_repositories = [current_repository]
-            for image in use_images:
-                upgrade_repositories.append(self.repository_manager.get_repository(image.name, version=image.version, tag=image.md5))
+                    repository = self.repository_manager.get_repository_by_version(name=image.name, version=image.version, tag=image.md5)
+                    if repository:
+                        upgrade_repositories.append(repository)
+                    else:
+                        pkg = self.mirror_manager.get_exact_pkg(name=image.name, version=image.version, md5=image.md5)
+                        if not pkg:
+                            return False
+                        install_plugins = self.get_install_plugin_and_install(upgrade_repositories, [pkg])
+                        if not install_plugins:
+                            return False
             upgrade_repositories.append(dest_repository)
 
-            install_plugins = self.get_install_plugin_and_install(upgrade_repositories, [])
-            if not install_plugins:
-                return False
-
             upgrade_check_plugins = self.search_py_script_plugin(upgrade_repositories, 'upgrade_check', no_found_act='warn')
             if current_repository in upgrade_check_plugins:
                 connect_plugin = self.search_py_script_plugin(upgrade_repositories, 'connect')[current_repository]
@@ -2073,9 +2186,9 @@ class ObdHome(object):
         ssh_clients = self.get_clients(deploy_config, [current_repository])
         cluster_config = deploy_config.components[current_repository.name]
 
-            install_plugins = self.get_install_plugin_and_install(upgrade_repositories, [])
-            if not install_plugins:
-                return False
+        install_plugins = self.get_install_plugin_and_install(upgrade_repositories, [])
+        if not install_plugins:
+            return False
 
         need_lib_repositories = []
         for repository in upgrade_repositories[1:]:
@@ -2749,7 +2862,7 @@ class ObdHome(object):
             else:
                 self._call_stdio('stop_loading', 'fail')
                 return False
-        if repository.name == 'obproxy':
+        if repository.name in ['obproxy', 'obproxy-ce']:
             ret = connect_plugin(deploy_config.components.keys(), ssh_clients, cluster_config, [], {}, self.stdio, target_server=opts.test_server)
diff --git a/plugins/obproxy/3.1.0/generate_config.py b/plugins/obproxy/3.1.0/generate_config.py
index a54b7997c2a8194794b3d512b33853634735bc00..b944f8e9cb93e1d2e9951249304939cf5e681289 100644
--- a/plugins/obproxy/3.1.0/generate_config.py
+++ b/plugins/obproxy/3.1.0/generate_config.py
@@ -34,22 +34,39 @@ def generate_config(plugin_context, deploy_config, *args, **kwargs):
             stdio.error("obproxy %s: missing configuration 'home_path' in configuration file" % server)
             success = False
             continue
+        cluster_config.update_server_conf(server, 'enable_cluster_checkout', False)
 
     if not success:
         stdio.stop_loading('fail')
         return
 
-    global_config = cluster_config.get_global_conf()
-    if global_config.get('enable_cluster_checkout') is None:
-        cluster_config.update_global_conf('enable_cluster_checkout', False)
+    global_config = cluster_config.get_original_global_conf()
+    if 'skip_proxy_sys_private_check' not in global_config:
+        cluster_config.update_global_conf('skip_proxy_sys_private_check', True, False)
+    if 'enable_strict_kernel_release' not in global_config:
+        cluster_config.update_global_conf('enable_strict_kernel_release', False, False)
 
-    have_depend = False
-    depends = ['oceanbase', 'oceanbase-ce']
-
-    for comp in depends:
+    ob_cluster_config = None
+    for comp in ['oceanbase', 'oceanbase-ce']:
         if comp in deploy_config.components:
-            deploy_config.add_depend_for_component('obagent', comp, False)
-            have_depend = True
+            ob_cluster_config = deploy_config.components[comp]
             break
+
+    if ob_cluster_config:
+        root_servers = {}
+        cluster_name = ob_cluster_config.get_global_conf().get('appname')
+        for server in ob_cluster_config.servers:
+            config = ob_cluster_config.get_server_conf_with_default(server)
+            zone = config['zone']
+            cluster_name = cluster_name if cluster_name else config.get('appname')
+            if zone not in root_servers:
+                root_servers[zone] = '%s:%s' % (server.ip, config['mysql_port'])
+        rs_list = ';'.join([root_servers[zone] for zone in root_servers])
+
+        cluster_name = cluster_name if cluster_name else 'obcluster'
+        if not global_config.get('rs_list'):
+            cluster_config.update_global_conf('rs_list', rs_list, False)
+        if not global_config.get('cluster_name'):
+            cluster_config.update_global_conf('cluster_name', cluster_name, False)
 
     stdio.stop_loading('succeed')
     return plugin_context.return_true()
\ No newline at end of file
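
# A standalone sketch of the rs_list / cluster_name derivation added to the
# obproxy generate_config plugin above, with simplified stand-in data (the real
# code reads servers and ports from the OceanBase cluster_config): one server
# per zone is picked and joined as 'ip:mysql_port' pairs separated by ';'.
from collections import OrderedDict, namedtuple

Server = namedtuple('Server', ['ip'])
servers = OrderedDict([
    (Server('10.0.0.1'), {'zone': 'zone1', 'mysql_port': 2881}),
    (Server('10.0.0.2'), {'zone': 'zone1', 'mysql_port': 2881}),  # same zone, skipped
    (Server('10.0.0.3'), {'zone': 'zone2', 'mysql_port': 2881}),
])

root_servers = OrderedDict()
for server, config in servers.items():
    zone = config['zone']
    if zone not in root_servers:
        root_servers[zone] = '%s:%s' % (server.ip, config['mysql_port'])
rs_list = ';'.join(root_servers[zone] for zone in root_servers)

print(rs_list)  # 10.0.0.1:2881;10.0.0.3:2881
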
diff --git a/plugins/oceanbase/3.1.0/upgrade_route.py b/plugins/oceanbase/3.1.0/upgrade_route.py
index 4267e7041531c954d0c7ba522869e78bd2b658c3..9147d6903b3a5063332e6b2ac8c0d7b7357a1d6c 100644
--- a/plugins/oceanbase/3.1.0/upgrade_route.py
+++ b/plugins/oceanbase/3.1.0/upgrade_route.py
@@ -23,7 +23,7 @@ from __future__ import absolute_import, division, print_function
 import os
 
 from _rpm import Version, Release, PackageInfo
-from tool import YamlLoader
+from tool import YamlLoader, FileUtil
 
 
 class VersionNode(PackageInfo):
@@ -186,7 +186,7 @@ def upgrade_route(plugin_context, current_repository, dest_repository, *args, **kwargs):
     yaml = YamlLoader(stdio)
 
     try:
-        with open(upgrade_dep_path, encoding='utf-8') as f:
+        with FileUtil.open(upgrade_dep_path, encoding='utf-8') as f:
             data = yaml.load(f)
         graph = ObVersionGraph(data)
         route = graph.findShortestUpgradePath(current_repository, dest_repository)
diff --git a/plugins/sysbench/3.1.0/run_test.py b/plugins/sysbench/3.1.0/run_test.py
index 35ceb29f221b5d0fdb7f86cddfb10a32219fff8c..e60d74f81efb511c2276bfde57408811e2267dfc 100644
--- a/plugins/sysbench/3.1.0/run_test.py
+++ b/plugins/sysbench/3.1.0/run_test.py
@@ -219,6 +219,7 @@ def run_test(plugin_context, db, cursor, odp_db, odp_cursor=None, *args, **kwargs):
         for config in system_configs:
             if config[0] == 'sleep':
                 sleep(config[1])
+                system_configs_done.append(config)
                 continue
             sql = 'show parameters like "%s"' % config[0]
             if config[4]:
diff --git a/plugins/tpcc/3.1.0/optimize.py b/plugins/tpcc/3.1.0/optimize.py
index 22c6bac6f2c91b1abe0a726bb5fc66a0474a3c17..dc05ae96783ae51556c21df9bede00f8ae942814 100644
--- a/plugins/tpcc/3.1.0/optimize.py
+++ b/plugins/tpcc/3.1.0/optimize.py
@@ -244,6 +244,7 @@ def optimize(plugin_context, cursor, odp_cursor, *args, **kwargs):
         for config in system_configs:
             if config[0] == 'sleep':
                 sleep(config[1])
+                system_configs_done.append(config)
                 continue
             if not config[5] or optimization > 1:
                 sql = 'select value from oceanbase.__all_virtual_sys_parameter_stat where name="%s"' % config[0]
diff --git a/plugins/tpch/3.1.0/run_test.py b/plugins/tpch/3.1.0/run_test.py
index 33a79cb202443a9afdcbba096cc05d8815aeefe5..e8cb0939951eb0a95c1d742e86158cb6c3dbe4d3 100644
--- a/plugins/tpch/3.1.0/run_test.py
+++ b/plugins/tpch/3.1.0/run_test.py
@@ -199,6 +199,7 @@ def run_test(plugin_context, db, cursor, *args, **kwargs):
         for config in system_configs:
             if config[0] == 'sleep':
                 time.sleep(config[1])
+                system_configs_done.append(config)
                 continue
             sql = 'show parameters like "%s"' % config[0]
             if config[4]:
diff --git a/profile/obd.sh b/profile/obd.sh
index ce7de20c0a58bd69fbf5be8088e67233be47b7a0..80b5e067e6e2a7ae017a86fdfb53cb3f029766ec 100644
--- a/profile/obd.sh
+++ b/profile/obd.sh
@@ -11,7 +11,7 @@ function _obd_complete_func
     cur="${COMP_WORDS[COMP_CWORD]}"
     prev="${COMP_WORDS[COMP_CWORD-1]}"
     obd_cmd="mirror cluster test update repo"
-    cluster_cmd="autodeploy tenant start deploy redeploy restart reload destroy stop edit-config list display upgrade chst check4ocp"
+    cluster_cmd="autodeploy tenant start deploy redeploy restart reload destroy stop edit-config list display upgrade chst check4ocp change-repo"
     tenant_cmd="create drop"
     mirror_cmd="clone create list update enable disable"
     repo_cmd="list"
diff --git a/rpm/build.sh b/rpm/build.sh
index 30ec8f70619d95bb6ce8f5c011f6a8467a9f43ec..cffee228a6775d954960f5f2ebc3b91bd84ffe25 100755
--- a/rpm/build.sh
+++ b/rpm/build.sh
@@ -2,7 +2,7 @@
 
 python_bin='python'
 W_DIR=`pwd`
-VERSION=${VERSION:-'1.3.2'}
+VERSION=${VERSION:-'1.3.3'}
 
 
 function python_version()
diff --git a/rpm/ob-deploy.spec b/rpm/ob-deploy.spec
index 76b7ac2d98829885f7abeb5bc3a2275e9828ed61..d044d7f00199f053c1ec811a114c248c08112084 100644
--- a/rpm/ob-deploy.spec
+++ b/rpm/ob-deploy.spec
@@ -114,6 +114,9 @@ echo -e 'Installation of obd finished successfully\nPlease source /etc/profile.d
 #/sbin/chkconfig obd on
 
 %changelog
+* Tue Apr 26 2022 obd 1.3.3
+ - new features: change repository for a deployed component
+ - fix bug: check kernel version when autodeploy obproxy
 * Wed Apr 20 2022 obd 1.3.2
 - fix bug: remote install will return None when success
 * Wed Apr 20 2022 obd 1.3.1
diff --git a/tool.py b/tool.py
index 4a83bcfca3cd34256d8d9732263cff84c84a7943..04d454f366b2b2106b6840e87f444dfe902184a1 100644
--- a/tool.py
+++ b/tool.py
@@ -35,6 +35,7 @@ from ruamel.yaml import YAML, YAMLContextManager, representer
 if sys.version_info.major == 2:
     from collections import OrderedDict
     from backports import lzma
+    from io import open
 
 class TimeoutError(OSError):
     def __init__(self, *args, **kwargs):
@@ -315,11 +316,11 @@ class FileUtil(object):
         return False
 
     @staticmethod
-    def open(path, _type='r', stdio=None):
+    def open(path, _type='r', encoding=None, stdio=None):
         stdio and getattr(stdio, 'verbose', print)('open %s for %s' % (path, _type))
         if os.path.exists(path):
             if os.path.isfile(path):
-                return open(path, _type)
+                return open(path, _type, encoding=encoding)
             info = '%s is not file' % path
             if stdio:
                 getattr(stdio, 'error', print)(info)
@@ -328,7 +329,7 @@ class FileUtil(object):
             raise IOError(info)
         dir_path, file_name = os.path.split(path)
         if not dir_path or DirectoryUtil.mkdir(dir_path, stdio=stdio):
-            return open(path, _type)
+            return open(path, _type, encoding=encoding)
         info = '%s is not file' % path
         if stdio:
             getattr(stdio, 'error', print)(info)
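
# A minimal sketch (not obd code) of why tool.py now imports io.open under
# Python 2: the Python 2 built-in open() has no encoding parameter, while
# io.open() does and matches Python 3's open(), so FileUtil.open can forward
# encoding= on both interpreters -- which the upgrade_route.py change relies on
# when reading its YAML route file as UTF-8.
import sys

if sys.version_info.major == 2:
    from io import open  # gives open() an encoding= keyword on Python 2

def read_text(path, encoding='utf-8'):
    # Behaves identically on Python 2 and 3 thanks to the conditional import above.
    with open(path, 'r', encoding=encoding) as f:
        return f.read()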