#!/usr/bin/python # -*-coding:utf-8-*- # coding: iso-8859-15 '''haclient.py, the GUI manamgement tool for Linux-HA''' import sys, os, string, socket, syslog, webbrowser, pickle, xml, gc, time, binascii, thread, tarfile, tempfile from stat import * from xml.dom.minidom import parseString from xml.dom.minidom import getDOMImplementation import re import locale, gettext app_name = "haclient" sys.path.append("/usr/share/heartbeat-gui") sys.path.append("/usr/lib64/heartbeat-gui") from IPy import IP from pymgmt import * import pygtk pygtk.require('2.0') import gobject import datetime app_name = "ha-api" gettext.bindtextdomain(app_name) gettext.textdomain(app_name) gettext.install(app_name, "/usr/share/locale", unicode=1) gettext.translation(app_name, "/usr/share/locale", languages=["zh_CN"]).install(True) support_pyxml = False try: from xml.parsers.xmlproc.xmldtd import load_dtd_string support_pyxml = True except ImportError: pass support_lxml = False try: from lxml import etree support_lxml = True except ImportError: pass support_gv = False try: import gv support_gv = True except ImportError: pass validate_type = "dtd" mode_level = 0 name_cap = lambda name: name.replace("-", " ").replace("_", " ").title().replace("Crmd", "CRMD"). \ replace("Crm", "CRM").replace("Pe ", "PE ").replace("Dc ", "DC "). \ replace("Cib", "CIB").replace("Ccm", "CCM").replace("Ha", "HA"). \ replace("Id", "ID").replace("Dtd", "DTD").replace("Uuid", "UUID"). \ replace("HAve", "Have").replace("Op ", "Operation ").replace("Rsc", "Resource"). \ replace("Lrm", "LRM").replace("Acl", "ACL") manager = None debug_level = 0 last_rsc_view_page_num = 0 last_compound_view_page_num = 0 last_cluster_view_page_num = 0 selected_row_num = 0 treeview_status = dict() class RAMeta: name = "" version = None desc = "" parameters = [] actions = [] class Manager: ''' Manager will connect to mgmtd and control the main window ''' connected = False server = None username = None password = None cache = {} no_update_cache = {} parent = {} io_tag = None update_timer = -1 active_nodes = [] all_nodes = [] try_nodes = [] failed_reason = "" dtd_elems = {} session_lock = thread.allocate_lock() async_ret_lock = thread.allocate_lock() # async_ret_str = {} async_ret_list = {} xml_nodes = {} active_cib = None validate_name = None profile = {} # neoshineha ming.liu add def diskhb_labelbond(self, disklist, label): for device in disklist: cmd = "read -n16 line <" + " " + device + " " + "&&" + " " + "echo $line" # ret_label = self.do_cmd("system\n%s"%(cmd)) ret_label = os.popen(cmd).read().split('\n') if ret_label is None: continue if ret_label[0] == label: return device return False def check_device(self, disk): m = re.match('^[a-zA-Z0-9._/-]+$', disk) if m is None: return False if not disk.startswith("/dev/") or len(disk) < 6: return False return True def get_quorum_policy(self): have_quorum = False for attr_name in ["have-quorum", "have_quorum"]: try: attr_value = self.xml_nodes["cib"].getAttribute(attr_name) except: return False if attr_value != "": if attr_value in ["true", "yes", "1"]: have_quorum = True break if not have_quorum: cluster_property_sets = self.xml_nodes["cib"].getElementsByTagName("cluster_property_set") if len(cluster_property_sets) == 0: return False # elif len(cluster_property_sets) == 1 : else: for nvpair in cluster_property_sets[0].getElementsByTagName("nvpair"): if nvpair.getAttribute("name") == "no-quorum-policy" and nvpair.getAttribute("value") == "ignore": return True return False return True def update_database(self, cmd): ret = self.do_cmd_twice("update_sqldata\n%s\n%s" % ("", str(cmd))) if self.failed_reason != "": # msgbox(_("Please refresh and retry!\n") + self.failed_reason) log(_("Please refresh and retry!\n") + self.failed_reason) return False for node in self.get_dc(): if node in self.get_local_node(): continue ip = self.get_one_node_ip(node) if ip is None: # msgbox(_("Can not get the ipaddress of dc!") + _("Or can not be connected!") + "\n" + _("Please make sure that the environment is configured correctly!")) log(_("Can not get the ipaddress of dc!") + _("Or can not be connected!") + "\n" + _( "Please make sure that the environment is configured correctly!")) return False remote_cmd = "sqlite3 /usr/lib/ocf/lib/heartbeat/db/%s.db \"%s\"" % (node, cmd) ret = self.do_cmd_twice( "ssh_node\n%s\n%s\n%s\n%s" % (str(ip), str("HASql"), str("qwert12345"), str(remote_cmd))) if self.failed_reason != "": # msgbox(_("Please refresh and retry!\n") + self.failed_reason) log(_("Please refresh and retry!\n") + self.failed_reason) return False return True def get_node_ip(self): # get_ip cmd = "crm_node -l" # ret = self.query("system\n%s"%(str(cmd))) ret = os.popen(cmd).read().split('\n') node_ip = {} for r in ret: m = re.match("^(\w*)\s(.*)\smember\s(.*)", r) if m is not None: node_id = m.groups()[0] node = m.groups()[1] node_ip[node] = [] cmd = "corosync-cfgtool -a " + node_id # ips = self.query("system\n%s"%(str(cmd)))[0].split() ips = os.popen(cmd).read().split('\n')[0].split() for ip in ips: if ip != "127.0.0.1": node_ip[node].append(ip) if len(node_ip[node]) < 1: m = re.match("^\w*\s(.*)\smember\s(.*)", r) if m is not None: node = m.groups()[0] node_ip[node] = [] ips = m.groups()[1].split() i = 0 while i < len(ips) / 2: m1 = re.match("ip\((.*)\)", ips[i * 2 + 1]) if m1 is not None and m1.groups()[0] != "127.0.0.1": node_ip[node].append(m1.groups()[0]) i = i + 1 return node_ip def get_one_node_ip(self, node): try: ip_list = self.get_node_ip()[node] except: return None for ip in ip_list: cmd = "ping -c 1 " + ip # if "100% packet loss" not in str(self.do_cmd("system\n%s"%(str(cmd)))): if "100% packet loss" not in str(os.popen(cmd).read().split('\n')): return ip return None def del_node_constraints(self, del_node_id): if del_node_id is not None: cmd = "crm_node --force -R %s" % (str(del_node_id)) # self.do_cmd("system\n%s"%(str(cmd))) os.popen(cmd) if self.failed_reason != "": # msgbox(self.failed_reason) log(self.failed_reason) location_ids = [] for rsc in self.get_top_rsc(): constraints = self.do_cmd("get_rsc_constraints\n%s" % (rsc)) for cons in constraints: # m = re.match('^.*\'(.*)\':(.*)=(.*)$', cons) m = re.match('^\s*:\sNode\s(.*)\s*\(score=(.*)\,\sid=(.*)\)', cons) if m is None: continue if m.groups()[0].strip() == del_node_id: # location location_ids.append(m.groups()[2].strip()) rsc_location_rule = [] xml_constraints = self.xml_nodes["cib"].getElementsByTagName("constraints")[0] for rsc_location in self.xml_nodes["cib"].getElementsByTagName("rsc_location"): id = str(rsc_location.getAttribute("id")) if id in location_ids: xml_constraints.removeChild(rsc_location) # xml_str = rsc_location.toxml().replace("\n", "") # self.cib_do_cmd("cib_delete\n%s\n%s"%("", str(xml_str))) # if self.failed_reason != "" : # msgbox(self.failed_reason) else: for rule in rsc_location.getElementsByTagName("rule"): id = str(rule.getAttribute("id")) if id in location_ids: rsc_location.removeChild(rule) if len(rsc_location.getElementsByTagName("rule")) == 0: xml_constraints.removeChild(rsc_location) xml_str = xml_constraints.toxml().replace("\n", "") self.cib_do_cmd("cib_replace\n%s\n%s" % (str("constraints"), str(xml_str))) if self.failed_reason != "": # msgbox(self.failed_reason) log(self.failed_reason) constes_nodes = [] for node in self.xml_nodes["cib"].getElementsByTagName("conste_nodes"): if node.getAttribute('rscid') == del_node_id: constes_nodes.append(node) for conste in constes_nodes: xml_str = conste.toxml().replace("\n", "") self.cib_do_cmd("cib_delete\nconstes\n%s" % (str(xml_str))) def get_constraint(self, type, id): if type == "rsc_location": location_attr_names = ["id", "rsc", "score", "boolean_op"] expr_attr_names = ["id", "attribute", "operation", "value"] attrs = self.query("get_co\nrsc_location\n" + id) if attrs is None: return None location = dict(zip(location_attr_names, attrs[:4])) location["exprs"] = [] for i in range((len(attrs) - len(location_attr_names)) / len(expr_attr_names)): expr = dict(zip(expr_attr_names, attrs[4 + i * 4:8 + i * 4])) location["exprs"].append(expr) return location elif type == "rsc_order": order_attr_names = ["id", "first", "type", "then"] attrs = self.query("get_co\nrsc_order\n" + id) if attrs is None: return None order = dict(zip(order_attr_names, attrs)) return order elif type == "rsc_colocation": colocation_attr_names = ["id", "with-rsc", "rsc", "score"] attrs = self.query("get_co\nrsc_colocation\n" + id) if attrs is None: return None colocation = dict(zip(colocation_attr_names, attrs)) return colocation def get_status_node_links(self): try: for status in self.query("status_hblinks"): if "FAULTY" in status: return False except: return True return True # neoshineha ming.liu end # cache functions def __init__(self): self.load_profile() def cache_lookup(self, key): if key in self.cache: return self.cache[key] if key in self.no_update_cache: return self.no_update_cache[key] return None def cache_update(self, key, data, keep_in_cache=False): if not keep_in_cache: self.cache[key] = data else: self.no_update_cache[key] = data def cache_delkey(self, key): if key in self.cache: del self.cache[key] def cache_clear(self): self.cache.clear() # internal functions def split_attr_list(self, attrs, keys): attr_list = [] if attrs is not None: for i in range(0, len(attrs), len(keys)): attr = {} for j in range(0, len(keys)): attr[keys[j]] = attrs[i + j] attr_list.append(attr) return attr_list def run(self): gtk.gdk.threads_init() # gtk.main() if self.connected: mgmt_disconnect() # connection functions def load_profile(self): keys = ["server", "user", "width", "height"] self.profile["server"] = "127.0.0.1" self.profile["user"] = "hacluster" self.profile["width"] = 790 self.profile["height"] = 550 save_path = os.environ["HOME"] + "/.haclient" if not os.path.exists(save_path): return try: values = pickle.load(file(save_path, "r")) user_profile = dict(zip(keys, values)) self.profile.update(user_profile) return except: return def save_profile(self): server = self.profile["server"] user = self.profile["user"] self.profile["width"] = width self.profile["height"] = height save_path = os.environ["HOME"] + "/.haclient" try: pickle.dump((server, user, width, height), file(save_path, "w")) except: os.remove(save_path) return def login(self, server, username, password): # connect to one of the cluster self.failed_reason = "" if string.find(server, ":") != -1: server_host, port = string.split(server, ":") else: server_host = server port = "" try: ip = socket.gethostbyname(server_host) except socket.error: print "error in socket gethostbyname" self.failed_reason = _("Can't resolve address of server ") + server_host return False try: ret = mgmt_connect(ip, username, password, port) except: print "error in mgmg_connect" self.failed_reason = _("Can't connect to server ") + server mgmt_disconnect() return False if ret != 0: if ret == -1: self.failed_reason = _("Can't connect to server ") + server elif ret == -2: self.failed_reason = \ _("Failed in the authentication.\n User Name or Password may be wrong." \ "\n or the user doesn't belong to haclient group") else: self.failed_reason = _("Can't login to server.\n The protocols may be mismatched.") mgmt_disconnect() return False if not self.check_ha_version(): self.failed_reason = _("Can't login to server.\n The license has expired.") mgmt_disconnect() return False self.connected = True if server is not None and server not in self.try_nodes: self.try_nodes.insert(0, server) self.server = server_host self.username = username self.password = password self.update_timer = gobject.timeout_add(500, self.update) self.do_cmd("regevt\nevt:cib_changed") self.do_cmd("regevt\nevt:disconnected") fd = mgmt_inputfd() self.io_tag = gobject.io_add_watch(fd, gobject.IO_IN, self.on_event, None) gobject.timeout_add(5000, self.update_crm_metadata, priority=gobject.PRIORITY_DEFAULT_IDLE) return True def check_ha_version(self): version_msg = self.query("lic_verid") # Initialization/Cannot get license info if self.failed_reason != "" or version_msg is None or version_msg == []: return False elif "trial" in version_msg: return self.check_ha_expired() else: return True def check_ha_expired(self): # Initialization/Cannot get license info self.do_cmd("lic_expire") if self.failed_reason == "": return True else: return False def query(self, query, keep_in_catch=False): result = self.cache_lookup(query) if result is not None: return result result = self.do_cmd(query) self.cache_update(query, result, keep_in_catch) return result # 动态添加脚本能自动识别 def query1(self, query, keep_in_catch=False): result = self.do_cmd(query) return result def do_cmd_twice(self, command): ret = self.do_cmd(command) if self.failed_reason != "" or ret is None: time.sleep(0.3) ret = self.do_cmd(command) return ret def do_cmd(self, command): self.failed_reason = "" self.session_lock.acquire() ret_str = mgmt_sendmsg(command) self.session_lock.release() if ret_str is None: debug(str(string.split(command, "\n")) + ":None") self.failed_reason = "return None" return None while len(ret_str) >= 4 and ret_str[:4] == "evt:": gobject.idle_add(self.on_event, None, None, ret_str) self.session_lock.acquire() ret_str = mgmt_recvmsg() self.session_lock.release() if ret_str is None: debug(str(string.split(command, "\n")) + ":None") self.failed_reason = "return None" return None return self.ret_str2list(ret_str, command) def ret_str2list(self, ret_str, command): self.failed_reason = "" ret_list = string.split(ret_str, "\n") if ret_list[0] != "o": debug(str(string.split(command, "\n")) + ":" + str(ret_list)) if len(ret_list) > 1: self.failed_reason = string.join(ret_list[1:], "\n") return None debug(str(string.split(command, "\n")) + ":" + str(ret_list)) return ret_list[1:] def process_ret(self, command, ret_str): self.failed_reason = "" if ret_str is None: debug(str(string.split(command, "\n")) + ":None") self.failed_reason == "return None" return None return self.ret_str2list(ret_str, command) def async_sendmsg(self, command): self.session_lock.acquire() gobject.source_remove(self.io_tag) async_ret_str = mgmt_thread_sendmsg(command) fd = mgmt_inputfd() self.io_tag = gobject.io_add_watch(fd, gobject.IO_IN, self.on_event, None) self.session_lock.release() self.async_ret_lock.acquire() self.async_ret_list[command] = self.process_ret(command, async_ret_str) self.async_ret_lock.release() def async_do_cmd(self, command): self.async_ret_lock.acquire() # self.async_ret_str = {} # self.async_ret_list = {} if command in self.async_ret_list: self.async_ret_list.pop(command) self.async_ret_lock.release() thread.start_new(self.async_sendmsg, (command,)) def logout(self): mgmt_disconnect() # gobject.source_remove(self.io_tag) self.connected = False # event handler def on_reconnect(self): if self.all_nodes == []: return False try_nodes = self.try_nodes[:] '''for node in self.all_nodes : if node not in try_nodes : try_nodes.append(node)''' for server in try_nodes: if self.login(server, self.username, self.password): return False return True def process_event(self, event): if event == "evt:cib_changed": self.set_update() elif event is None or event == "evt:disconnected": self.logout() try_nodes = self.try_nodes[:] '''for active_node in self.active_nodes : if active_node not in try_nodes : try_nodes.append(active_node)''' for server in try_nodes: if self.login(server, self.username, self.password): break gobject.timeout_add(1000, self.on_reconnect) def set_update(self): if self.update_timer != -1: gobject.source_remove(self.update_timer) self.update_timer = gobject.timeout_add(500, self.update) def on_event(self, source, condition, event_str): if event_str is None: # called by gtk self.session_lock.acquire() event = mgmt_recvmsg() self.session_lock.release() debug("on_event:" + str(event)) self.process_event(event) return True else: # called by do_cmd event = event_str log("on_event: from message queue: " + str(event)) self.process_event(event) return False def cib_do_cmd(self, command): self.failed_reason = "" ret_str = mgmt_sendmsg(command) if ret_str is None: debug(command + ":None") self.failed_reason = "return None" return None ret_list = string.split(ret_str, "\n") if ret_list[0] != "o": debug(str(string.split(command, "\n")) + ":" + str(ret_list)) if len(ret_list) > 1: self.failed_reason = string.join(ret_list[1:], "\n") return None return string.join(ret_list[1:], "\n") def update_cib_xml(self): global validate_type xml_nodes = self.xml_nodes cib_xml = self.cib_do_cmd("cib_query\ncib") if self.failed_reason != "": # msgbox(self.failed_reason) log(self.failed_reason) return False if cib_xml is None or cib_xml == "": # msgbox(_("Cannot get the CIB")) log("Cannot get the CIB") return False try: xml_nodes["cib"] = parseString(cib_xml).documentElement except xml.parsers.expat.ExpatError, msg: # msgbox(_("Failed to parse the CIB") + _(": ") + str(msg)) log("Failed to parse the CIB: " + str(msg)) return False """xml_status = self.cib_do_cmd("cib_query\nstatus") xml_rscs = self.cib_do_cmd("cib_query\nresources") xml_nodes = self.cib_do_cmd("cib_query\nnodes") xml_constraints = self.cib_do_cmd("cib_query\nconstraints") xml_crm_config = self.cib_do_cmd("cib_query\ncrm_config")""" if xml_nodes.get("cib") is None: # msgbox(_("Cannot parse the CIB")) log("Cannot parse the CIB") return False validate_type = self.get_validate_type() saved_validate_name = self.validate_name self.validate_name = self.get_validate_name() if validate_type == "dtd": if not support_pyxml: log("Pacemaker GUI requires pyxml package to process DTD") # msgbox(_("Pacemaker GUI requires pyxml package to process DTD")) sys.exit() dtd_elem = self.get_dtd_elem("cib") for (name, mod) in dtd_elem.get_content_model()[1]: if mod != '': continue xml_nodes[name] = xml_nodes["cib"].getElementsByTagName(name)[0] if xml_nodes.get(name) is None: continue sub_dtd_elem = self.get_dtd_elem(name) for (sub_name, sub_mod) in sub_dtd_elem.get_content_model()[1]: if sub_mod != '': continue xml_nodes[sub_name] = xml_nodes[name].getElementsByTagName(sub_name)[0] else: if self.validate_name != saved_validate_name: self.rng_docs = {} self.rng_str_docs = {} self.update_rng_docs(self.validate_name, self.validate_name + ".rng") self.supported_rsc_types = self.get_supported_rsc_types() return True """config_xml = self.cib_xml.getElementsByTagName("configuration") self.crm_xml = config_xml.getElementsByTagName("crm_config") self.nodes_xml = config_xml.getElementsByTagName("nodes") self.rscs_xml = config_xml.getElementsByTagName("resources") self.cns_xml = config_xml.getElementsByTagName("constraints") self.status_xml = self.cib_xml.getElementsByTagName("status") try: self.cib_xml = parseString(xml_cib).documentElement self.status_xml = parseString(xml_status) self.rscs_xml = parseString(xml_rscs) self.nodes_xml = parseString(xml_nodes) self.cns_xml = parseString(xml_constraints) self.crm_xml = parseString(xml_crm_config) except xml.parsers.expat.ExpatError: self.status_xml = None self.rscs_xml = None self.nodes_xml = None self.crm_xml = None self.cns_xml = None debug("fail to parse xml info") return None""" def get_active_cib(self): active_cib = self.do_cmd("active_cib") if active_cib is None or len(active_cib) == 0: return if len(active_cib[0]) > 0: self.active_cib = active_cib[0] else: self.active_cib = "" def get_supported_rsc_types(self): rsc_types = [] if validate_type == "dtd": dtd_elem = self.get_dtd_elem("resources") for (name, mod) in dtd_elem.get_content_model()[1]: rsc_types.append(name) else: sorted_rng_nodes = self.sorted_sub_rng_nodes_by_name("resources") for rng_node in sorted_rng_nodes.get("element", []): rsc_types.append(rng_node[0][1].getAttribute("name")) return rsc_types def find_xml_node(self, obj_type, obj_name): for xml_node in self.xml_nodes["cib"].getElementsByTagName(obj_type): if xml_node.getAttribute("id") == obj_name: return xml_node return None def find_attribute(self, obj_type, attribute_name): for xml_node in self.xml_nodes["cib"].getElementsByTagName(obj_type): for nv_xml_node in xml_node.getElementsByTagName("nvpair"): if nv_xml_node.getAttribute("name") == attribute_name: return nv_xml_node.getAttribute("value") return None def set_crm_attribute(self, type, name, value): for xml_node in self.xml_nodes["cib"].getElementsByTagName(type): for attr_node in xml_node.getElementsByTagName("nvpair"): if attr_node.getAttribute("name") == name: attr_id = str(attr_node.getAttribute("id")) self.do_cmd("crm_attribute\n%s\nset\n%s\n%s\n\n\n%s" % (type, name, value, attr_id)) return for child_node in xml_node.childNodes: if child_node.nodeType != xml.dom.Node.ELEMENT_NODE: continue set_id = str(child_node.getAttribute("id")) self.do_cmd("crm_attribute\n%s\nset\n%s\n%s\n\n%s\n" % (type, name, value, set_id)) return self.do_cmd("crm_attribute\n%s\nset\n%s\n%s\n\n\n" % (type, name, value)) def node_name(self, node_id): for node_xml_node in self.xml_nodes["cib"].getElementsByTagName("node"): if node_xml_node.getAttribute("id") == node_id: node_name = node_xml_node.getAttribute("uname") return node_name return "" def validate_cib(self, cib_xml_node=None): if cib_xml_node is None: cib_xml_node = self.xml_nodes["cib"] if validate_type == "dtd": is_valid = True elif not support_lxml: is_valid = True else: is_valid = self.validate_with_rng(cib_xml_node) return is_valid def validate_with_rng(self, cib_xml_node): rng_doc = self.tmp_rng_doc() if rng_doc is None: # msgbox(_("Cannot expand the Relax-NG schema")) log(_("Cannot expand the Relax-NG schema")) return False try: cib_doc = etree.fromstring(cib_xml_node.toxml()) except etree.Error, msg: # msgbox(_("Failed to parse the CIB XML") + _(": ") + str(msg)) log(_("Failed to parse the CIB XML") + _(": ") + str(msg)) return False try: relaxng = etree.RelaxNG(file=rng_doc) except etree.Error, msg: # msgbox(_("Failed to parse the Relax-NG schema") + _(": ") + str(msg)) log(_("Failed to parse the Relax-NG schema") + _(": ") + str(msg)) return False # try : # relaxng.assertValid(cib_doc) # except etree.DocumentInvalid, err_msg : # print err_msg # print relaxng.error_log try: etree.clear_error_log() except: try: etree.clearErrorLog() except: pass is_valid = relaxng.validate(cib_doc) if not is_valid: error_msg = "" for error_entry in relaxng.error_log: error_msg += error_entry.level_name + ": " + error_entry.message + "\n" # msgbox(_(error_msg)) log(_(error_msg)) delete_dir(os.path.dirname(rng_doc)) return is_valid def tmp_rng_doc(self): try: tmp_dir = tempfile.mkdtemp() except IOError, msg: # msgbox(_("I/O error") + _(": ") + str(msg)) log(_("I/O error") + _(": ") + str(msg)) return None for rng_doc_name in self.rng_str_docs: rng_doc_filename = os.path.join(tmp_dir, rng_doc_name) try: fd = os.open(rng_doc_filename, os.O_RDWR | os.O_CREAT | os.O_TRUNC, 0644) except OSError, msg: # msgbox(_("System error") + _(": ") + str(msg)) log(_("System error") + _(": ") + str(msg)) return None rng_doc_str = self.rng_str_docs[rng_doc_name] try: os.write(fd, rng_doc_str) except OSError, msg: # msgbox(_("System error") + _(": ") + str(msg)) log(_("System error") + _(": ") + str(msg)) os.close(fd) return None os.close(fd) if self.validate_name + ".rng" in self.rng_str_docs: return os.path.join(tmp_dir, self.validate_name + ".rng") else: return None def update_rng_docs(self, validate_name="", file=""): self.rng_docs[file] = self.get_start_rng_node(validate_name, file) if self.rng_docs[file] is None: return for extern_ref in self.rng_docs[file][0].getElementsByTagName("externalRef"): href_value = extern_ref.getAttribute("href") if self.rng_docs.get(href_value) is None: self.update_rng_docs(validate_name, href_value) def get_start_rng_node(self, validate_name="", file=""): schema_info = validate_name + " " + file crm_schema = self.get_crm_schema(validate_name, file) if crm_schema is None: # msgbox(_("Cannot get the Relax-NG schema") + _(": ") + schema_info) log("Cannot get the Relax-NG schema: " + schema_info) return None self.rng_str_docs[file] = crm_schema try: rng_doc = parseString(crm_schema).documentElement except xml.parsers.expat.ExpatError, msg: # msgbox(_("Failed to parse the Relax-NG schema") + _(": ") + str(msg) + schema_info) log("Failed to parse the Relax-NG schema: " + str(msg) + schema_info) return None start_nodes = rng_doc.getElementsByTagName("start") if len(start_nodes) > 0: start_node = start_nodes[0] return (rng_doc, start_node) else: # msgbox(_("Cannot get started in Relax-NG schema") +_(": ") + schema_info) log("Cannot get started in Relax-NG schema: " + schema_info) return None # sub_start_node = None # for sub_start_node in start_node.childNodes : # if sub_start_node.nodeType == xml.dom.Node.ELEMENT_NODE : # break ##sub_start_nodes = self.get_sub_rng_nodes(start_node)["req_elem_nodes"] ##if sub_start_nodes == [] : # if sub_start_node is None : # #msgbox(_("Cannot get the start element in Relax-NG schema: ") + schema_info) # log("Cannot get the start element in Relax-NG schema: " + schema_info) # return None # else : # return (rng_doc, sub_start_node) def get_rng_elem(self, elem_name): elem_node = None for (rng_doc, start_node) in self.rng_docs.values(): for elem_node in rng_doc.getElementsByTagName("element"): if elem_node.getAttribute("name") == elem_name: return (rng_doc, elem_node) return None def get_sub_rng_nodes(self, rng_doc, rng_node): sub_rng_nodes = [] # sub_rng_nodes = {} # req_elem_nodes = [] # attr_nodes = [] for child_node in rng_node.childNodes: if child_node.nodeType != xml.dom.Node.ELEMENT_NODE: continue if child_node.tagName == "ref": for def_node in rng_doc.getElementsByTagName("define"): if def_node.getAttribute("name") == child_node.getAttribute("name"): break sub_rng_nodes.extend(self.get_sub_rng_nodes(rng_doc, def_node)) elif child_node.tagName == "externalRef": nodes = self.get_sub_rng_nodes(*self.rng_docs[child_node.getAttribute("href")]) sub_rng_nodes.extend(nodes) elif child_node.tagName in ["element", "attribute", "value", "data", "text"]: sub_rng_nodes.append([(rng_doc, child_node)]) elif child_node.tagName in ["interleave", "optional", "zeroOrMore", "choice", "group", "oneOrMore"]: nodes = self.get_sub_rng_nodes(rng_doc, child_node) for node in nodes: node.append(child_node) sub_rng_nodes.extend(nodes) # sub_rng_nodes["req_elem_nodes"] = req_elem_nodes return sub_rng_nodes def sorted_sub_rng_nodes_by_name(self, obj_type): rng_node = self.get_rng_elem(obj_type) if rng_node is None or rng_node[1] is None: # msgbox(_("Cannot get %s in Relax-NG schema")%obj_type) return None return self.sorted_sub_rng_nodes_by_node(*rng_node) def sorted_sub_rng_nodes_by_node(self, rng_doc, rng_node): sub_rng_nodes = self.get_sub_rng_nodes(rng_doc, rng_node) sorted_nodes = {} for sub_rng_node in sub_rng_nodes: name = sub_rng_node[0][1].tagName if sorted_nodes.get(name) is None: sorted_nodes[name] = [] sorted_nodes[name].append(sub_rng_node) return sorted_nodes def find_decl(self, rng_node, name, first=True): decl_node_index = 0 for decl_node in rng_node[1:]: if decl_node.tagName == name: decl_node_index = rng_node.index(decl_node) - len(rng_node) if first: break return decl_node_index def get_decl_rng_nodes(self, rng_node): decl_rng_nodes = {} choice_index = self.find_decl(rng_node, "choice", False) if choice_index != 0: decl_rng_nodes["choice"] = rng_node[choice_index] first_choice_index = self.find_decl(rng_node, "choice") if first_choice_index != choice_index: decl_rng_nodes["first_choice"] = rng_node[first_choice_index] group_index = self.find_decl(rng_node, "group", False) if group_index != 0: decl_rng_nodes["group"] = rng_node[group_index] first_group_index = self.find_decl(rng_node, "group") if first_group_index != group_index: decl_rng_nodes["first_group"] = rng_node[first_group_index] return decl_rng_nodes def get_sorted_decl_nodes(self, decl_nodes_list, decl_type): sorted_nodes = [] for rng_nodes in decl_nodes_list: rng_node = rng_nodes.get(decl_type) if rng_node is not None and rng_node not in sorted_nodes: sorted_nodes.append(rng_node) return sorted_nodes def get_rng_attr_type(self, attr_rng_node): sub_rng_nodes = self.sorted_sub_rng_nodes_by_node(*attr_rng_node[0]) for sub_rng_node in sub_rng_nodes.get("data", []): return sub_rng_nodes["data"][0][0][1].getAttribute("type") return None def real_add_obj_type(self, xml_node, obj_type_name, is_wizard=False): sub_attr_is_any_name = False if validate_type == "dtd": dtd_elem = self.get_dtd_elem(obj_type_name) sub_attr_list = dtd_elem.get_attr_list() else: sorted_rng_nodes = self.sorted_sub_rng_nodes_by_name(obj_type_name) sub_attr_list = [] sub_elem_type_list = [] sub_elem_is_any_name = False if sorted_rng_nodes is not None: sub_attr_rng_nodes = sorted_rng_nodes.get("attribute", []) sub_elem_rng_nodes = sorted_rng_nodes.get("element", []) for rng_node in sub_attr_rng_nodes: name = rng_node[0][1].getAttribute("name") if name == "": sub_attr_is_any_name = True elif sub_attr_list.count(name) == 0: sub_attr_list.append(name) for rng_node in sub_elem_rng_nodes: name = rng_node[0][1].getAttribute("name") if name == "": sub_elem_is_any_name = True elif sub_elem_type_list.count(name) == 0: sub_elem_type_list.append(name) else: sub_attr_is_any_name = True sub_elem_is_any_name = True sub_elem_rng_nodes = [] if not sub_attr_is_any_name and (len(sub_attr_list) == 0 \ or ((mode_level != 2 or is_wizard) and obj_type_name in ["meta_attributes", "instance_attributes", "operations", "cluster_property_set", "utilization"])): new_mid_elem = None impl = getDOMImplementation() elem_node_list = [] for elem_node in xml_node.getElementsByTagName(obj_type_name): if elem_node in xml_node.childNodes: elem_node_list.append(elem_node) if len(elem_node_list) == 0: newdoc = impl.createDocument(None, obj_type_name, None) mid_elem = newdoc.documentElement xml_node.appendChild(mid_elem) new_mid_elem = mid_elem if sub_attr_list.count("id") > 0: mid_id = self.auto_unique_id(mid_elem, obj_type_name) mid_elem.setAttribute("id", mid_id) else: mid_elem = elem_node_list[0] if validate_type == "dtd": (sep, cont, mod) = dtd_elem.get_content_model() if len(cont) > 0: if len(sub_attr_list) == 0: sub_obj_type = cont[0][0] else: sub_obj_type = cont[1][0] sub_elem_node_list = [] for sub_elem_node in mid_elem.getElementsByTagName(sub_obj_type): if sub_elem_node in mid_elem.childNodes: sub_elem_node_list.append(sub_elem_node) if len(sub_elem_node_list) == 0: newdoc = impl.createDocument(None, sub_obj_type, None) sub_mid_elem = newdoc.documentElement mid_elem.appendChild(sub_mid_elem) mid_elem = sub_mid_elem if new_mid_elem is None: new_mid_elem = mid_elem else: mid_elem = sub_elem_node_list[0] sub_dtd_elem = self.get_dtd_elem(sub_obj_type) (sub_obj_type, mod) = sub_dtd_elem.get_content_model()[1][0] else: return (None, None, None) else: sub_obj_type = None for sub_elem_rng_node in sub_elem_rng_nodes: if self.find_decl(sub_elem_rng_node, "optional") != 0: continue sub_obj_type = sub_elem_rng_node[0][1].getAttribute("name") break if sub_obj_type is None: return (None, None, None) return (mid_elem, sub_obj_type, new_mid_elem) else: return (xml_node, obj_type_name, None) def get_obj_ids(self, xml_node): id_list = [] node_id = xml_node.getAttribute("id") if node_id != "" and id_list.count(node_id) == 0: id_list.append(node_id) for child_node in xml_node.childNodes: if child_node.nodeType != xml.dom.Node.ELEMENT_NODE: continue id_list.extend(self.get_obj_ids(child_node)) return id_list def exist_ids(self, xml_node=None): all_ids = self.get_obj_ids(self.xml_nodes["cib"]) top_types = ["crm_config", "rsc_defaults", "op_defaults", "nodes", "resources", "constraints", "acls", "status"] if xml_node is None: return all_ids top_parent = xml_node while top_parent.parentNode.tagName not in top_types: top_parent = top_parent.parentNode other_ids = self.get_obj_ids(top_parent) for id in other_ids: if id not in all_ids: all_ids.append(id) current_id = xml_node.getAttribute("id") if current_id in all_ids: all_ids.remove(current_id) return all_ids def auto_id_prefix(self, xml_node, name): parent_node = xml_node.parentNode parent_type = parent_node.tagName parent_id = parent_node.getAttribute("id") if parent_type == "crm_config": return "cib-bootstrap-options" elif parent_type in ["rsc_defaults", "op_defaults"]: return parent_type + "-options" elif parent_type == "node": if xml_node.tagName != "instance_attributes": return "nodes-" + parent_id + "-" + xml_node.tagName else: return "nodes-" + parent_id if parent_id == "": parent_parent_node = xml_node.parentNode.parentNode parent_parent_id = parent_parent_node.getAttribute("id") if parent_parent_id == "": parent_parent_type = parent_parent_node.tagName id_prefix = parent_parent_type + "-" + parent_type + "-" + name else: id_prefix = parent_parent_id + "-" + parent_type + "-" + name else: id_prefix = parent_id + "-" + name return id_prefix def get_unique_id(self, id_prefix, xml_node=None): all_exist_ids = self.exist_ids(xml_node) valid_id_prefix = self.sanify_id(id_prefix) id_suffix = "" new_id = valid_id_prefix while new_id in all_exist_ids: if id_suffix == "": id_suffix = 0 else: id_suffix += 1 new_id = valid_id_prefix + "-" + str(id_suffix) return new_id def sanify_id(self, id): valid_prefix = "_" valid_chrs = "-_." for i in range(len(id)): if id[i].isalpha() or id[i] in valid_prefix: break valid_id = "" for chr in id[i:]: if chr.isalnum() or chr in valid_chrs: valid_id += chr else: valid_id += '.' return valid_id def auto_unique_id(self, xml_node, name): id_prefix = self.auto_id_prefix(xml_node, name) return self.get_unique_id(id_prefix, xml_node) def update(self): self.cache_clear() if not self.connected: retval = False elif self.update_cib_xml(): self.get_active_cib() self.parent = {} retval = False else: retval = True gc.collect() self.update_timer = -1 return retval def get_validate_type(self): validate_list = { "": "dtd", "pacemaker-0.6": "dtd", "transitional-0.6": "dtd", "pacemaker-0.7": "rng", "pacemaker-1.0": "rng" } validate_name = self.get_validate_name() if validate_name is None: return None else: return validate_list.get(validate_name) def get_validate_name(self): if self.xml_nodes.get("cib") is not None: return self.xml_nodes["cib"].getAttribute("validate-with") else: return None def get_crm_schema(self, validate_name="", file=""): lines = self.query("crm_schema\n%s\n%s" % (str(validate_name), str(file))) if lines is None: return None schema = "\n".join(lines) return schema def get_crm_dtd(self): lines = self.query("crm_dtd", True) if lines is None: return None dtd = "\n".join(lines) return dtd def get_dtd_elem(self, elem_name): if not elem_name in self.dtd_elems: dtd = self.get_crm_schema(self.validate_name) if dtd is None: # msgbox(_("Cannot get the DTD") + _(": ") + self.validate_name) log("Cannot get the DTD:" + self.validate_name) return None complete_dtd = load_dtd_string(dtd) self.dtd_elems[elem_name] = complete_dtd.get_elem(elem_name) return self.dtd_elems[elem_name] # cluster functions def update_crm_metadata(self): for crm_cmd in ["pengine", "crmd"]: self.query("crm_metadata\n%s" % (crm_cmd), True) return False def get_crm_metadata(self, crm_cmd): if crm_cmd is None: return None lines = self.query("crm_metadata\n%s" % (crm_cmd), True) if lines is None: return None meta_data = "\n".join(lines) try: doc_xml = parseString(meta_data).documentElement except xml.parsers.expat.ExpatError, msg: log("Failed to parse the metadata of %s: %s" % (crm_cmd, msg)) return None meta = RAMeta() meta.name = doc_xml.getAttribute("name") meta.version = "" version_xml = doc_xml.getElementsByTagName("version") if version_xml != [] and version_xml[0] in doc_xml.childNodes: meta.version = version_xml[0].childNodes[0].data meta.longdesc = self.get_locale_desc(doc_xml, "longdesc"); meta.shortdesc = self.get_locale_desc(doc_xml, "shortdesc"); params = {} for param_xml in doc_xml.getElementsByTagName("parameter"): param = {} param["name"] = param_xml.getAttribute("name") param["unique"] = param_xml.getAttribute("unique") param["longdesc"] = self.get_locale_desc(param_xml, "longdesc"); param["shortdesc"] = self.get_locale_desc(param_xml, "shortdesc"); content_xml = param_xml.getElementsByTagName("content")[0] content = {} content["type"] = content_xml.getAttribute("type") content["default"] = content_xml.getAttribute("default") if content["type"] == "enum": values_tag = "Allowed values:" index = param["longdesc"].rfind(values_tag) if index != -1: strings = param["longdesc"][index + len(values_tag):].split(",") content["values"] = [] for string in strings: content["values"].append(string.strip()) param["content"] = content params[param["name"]] = param param_names = params.keys() param_names.sort() meta.parameters = [] for param_name in param_names: meta.parameters.append(params[param_name]) return meta def get_cluster_type(self): typelist = self.query("cluster_type") if typelist is not None and len(typelist) > 0: cluster_type = typelist[0] else: cluster_type = "" return cluster_type def get_hb_config(self): values = self.query("hb_config") hb_config = {} if values is not None: i = 0 while i < len(values): hb_config[values[i]] = values[i + 1] i += 2 return hb_config def get_cluster_config(self): config = {} cluster_type = self.get_cluster_type() if cluster_type == "heartbeat": config = self.get_hb_config() return config # node functions def get_dc(self): return self.query("dc") def get_local_node(self): return self.query("get_local_node") def get_all_nodes(self): all_nodes = self.query("all_nodes") if all_nodes is None: all_nodes = self.query("crm_nodes") if all_nodes is not None: self.all_nodes = all_nodes else: self.all_nodes = [] return self.all_nodes def get_nodetype(self, node): node_type = self.query("node_type\n%s" % node) if node_type is not None and len(node_type) > 0: return node_type[0] else: return None def get_normal_nodes(self): nodes = self.query("all_nodes") if nodes is None: return self.get_crm_nodes() normal_nodes = [] for node in nodes: if self.get_nodetype(node) == "normal": normal_nodes.append(node) return normal_nodes def get_active_nodes(self): active_nodes = self.query("active_nodes") if active_nodes is not None: self.active_nodes = active_nodes else: self.active_nodes = [] return self.active_nodes def get_crm_nodes(self): return self.query("crm_nodes") def get_node_config(self, node): node_attr_names = ["uname", "online", "standby", "unclean", "shutdown", "expected_up", "is_dc", "type", "pending", "standby_onfail"] values = self.query("node_config\n%s" % node) if values is None: values = ["" for i in range(8)] config = dict(zip(node_attr_names, values)) return config def get_running_rsc(self, node): return self.query("running_rsc\n%s" % node) # resource functions def get_top_rsc(self): ret = self.query("all_rsc") if ret is None: return [] else: return ret def get_rsc_type(self, rsc_id): rsc_type_ret = self.query("rsc_type\n" + str(rsc_id)) if rsc_type_ret is not None and len(rsc_type_ret) > 0: rsc_type = rsc_type_ret[0] else: rsc_type = None if rsc_type == "native": return "primitive" else: return rsc_type def get_rsc_status(self, rsc_id): status = self.query("rsc_status\n" + rsc_id) for lrm_node in self.xml_nodes["cib"].getElementsByTagName("lrm"): node_id = lrm_node.getAttribute("id") for lrm_resource_node in lrm_node.getElementsByTagName("lrm_resource"): if lrm_resource_node.getAttribute("id") == rsc_id: for lrm_rsc_op in lrm_resource_node.getElementsByTagName("lrm_rsc_op"): operation = lrm_rsc_op.getAttribute("operation") if lrm_rsc_op.getAttribute("op-status") == "-1": if operation == "start": status[0] = "starting" break elif operation == "stop": status[0] = "stopping" break return status def get_rsc_running_on(self, rsc_id): ret = self.query("rsc_running_on\n" + rsc_id) if ret is None: ret = [] return ret def get_sub_rsc(self, rsc_id): sub_rscs = self.query("sub_rsc\n" + rsc_id) if sub_rscs is not None: for sub_rsc in sub_rscs: self.parent[sub_rsc] = rsc_id else: return [] return sub_rscs def get_all_subrsc(self, rsc_id): all_subrscs = [] sub_rscs = self.get_sub_rsc(rsc_id) if sub_rscs is not None: for sub_rsc in sub_rscs: all_subrscs.append(sub_rsc) all_sub_subrscs = self.get_all_subrsc(sub_rsc) if all_sub_subrscs is not None: all_subrscs.extend(all_sub_subrscs) return all_subrscs def get_all_rsc(self): all_rscs = [] top_rscs = self.query("all_rsc") if top_rscs is not None: for top_rsc in top_rscs: all_rscs.append(top_rsc) all_subrscs = self.get_all_subrsc(top_rsc) if all_subrscs is not None: all_rscs.extend(all_subrscs) return all_rscs def get_all_real_rsc(self): all_rscs = self.get_all_rsc() all_real_rsc = [] for rsc in all_rscs: rsc_real_id = self.obj_real_id(rsc) if all_real_rsc.count(rsc_real_id) == 0: all_real_rsc.append(rsc_real_id) return all_real_rsc def get_all_real_subrsc(self, rsc_id): all_subrscs = self.get_all_subrsc(rsc_id) all_real_subrsc = [] for rsc in all_subrscs: rsc_real_id = self.obj_real_id(rsc) if all_real_subrsc.count(rsc_real_id) == 0: all_real_subrsc.append(rsc_real_id) return all_real_subrsc def obj_real_id(self, obj_id): return obj_id.split(":")[0] def op_status2str(self, op_status): str_list = self.query("op_status2str\n%s" % str(op_status), True) if str_list is not None and len(str_list) > 0: return str_list[0] else: return "" def get_rsc_info(self, rsc_id): for resources_node in self.xml_nodes["cib"].getElementsByTagName("resources"): for primitive_node in resources_node.getElementsByTagName("primitive"): if primitive_node.getAttribute("id") == rsc_id: primitive_attrs = self.get_rsc_attrs(primitive_node, ["class", "type", "provider"]) rsc_class = str(primitive_attrs.get("class", "")) rsc_type = str(primitive_attrs.get("type", "")) rsc_provider = str(primitive_attrs.get("provider", "")) return (rsc_class, rsc_type, rsc_provider) return None def get_rsc_attrs(self, primitive_node, attr_names): attrs = {} ref_node = primitive_node while ref_node and ref_node.nodeType == xml.dom.Node.ELEMENT_NODE: for attr_name in attr_names: if attrs.get(attr_name): continue attr_value = ref_node.getAttribute(attr_name) if attr_value: attrs[attr_name] = str(attr_value) template_id = ref_node.getAttribute("template") if template_id: ref_node = self.get_template(template_id) else: break return attrs def get_template(self, template_id): if not template_id: return None for resources_node in self.xml_nodes["cib"].getElementsByTagName("resources"): for template_node in resources_node.getElementsByTagName("template"): if template_node.getAttribute("id") == template_id: return template_node return None def get_all_templates(self): templates = [] for resources_node in self.xml_nodes["cib"].getElementsByTagName("resources"): for template_node in resources_node.getElementsByTagName("template"): template_id = template_node.getAttribute("id") if template_id and template_id not in templates: templates.append(template_id) return templates def get_locale_desc(self, node, tag): desc_en = "" desc_match = "" (lang, encode) = locale.getlocale() if lang is None: lang = "en" else: lang = string.lower(lang) if encode is None: encode = "" else: encode = string.lower(encode) for child in node.childNodes: if child.nodeType != node.ELEMENT_NODE: continue if child.tagName != tag: continue if len(child.childNodes) == 0: break langtag = string.lower(child.getAttribute("lang")) if langtag == "": desc_en = child.childNodes[0].data else: langtag = string.split(langtag, ".") if string.find(langtag[0], "en") != -1: desc_en = child.childNodes[0].data if len(langtag) == 1 and lang == langtag[0]: desc_match = child.childNodes[0].data if len(langtag) == 2: if lang == langtag[0] and encode == langtag[1]: desc_match = child.childNodes[0].data if desc_match != "": return desc_match return desc_en def get_rsc_meta(self, rsc_class, rsc_type, rsc_provider): if rsc_class is None or rsc_class == "" \ or rsc_type is None or rsc_type == "": return None if rsc_provider is None or rsc_provider == "": rsc_provider = "common" # "heartbeat" lines = self.query("rsc_metadata\n%s\n%s\n%s" % \ (rsc_class, rsc_type, rsc_provider), True) if lines is None: return None meta_data = "" for line in lines: if len(line) != 0: meta_data = meta_data + line + "\n" try: doc_xml = parseString(meta_data).documentElement except xml.parsers.expat.ExpatError, msg: errmsg = "Failed to parse the metadata of %s: %s" log(errmsg % (rsc_type, msg)) # msgbox(_(errmsg)%(rsc_type, _(msg))) return None meta = RAMeta() meta.name = doc_xml.getAttribute("name") meta.version = "" version_xml = doc_xml.getElementsByTagName("version") if version_xml != [] and version_xml[0] in doc_xml.childNodes: meta.version = version_xml[0].childNodes[0].data meta.longdesc = self.get_locale_desc(doc_xml, "longdesc"); meta.shortdesc = self.get_locale_desc(doc_xml, "shortdesc"); meta.parameters = [] for param_xml in doc_xml.getElementsByTagName("parameter"): param = {} param["name"] = param_xml.getAttribute("name") param["required"] = param_xml.getAttribute("required") param["unique"] = param_xml.getAttribute("unique") param["longdesc"] = self.get_locale_desc(param_xml, "longdesc"); param["shortdesc"] = self.get_locale_desc(param_xml, "shortdesc"); if param_xml.getElementsByTagName("content") != []: content_xml = param_xml.getElementsByTagName("content")[0] content = {} content["type"] = content_xml.getAttribute("type") content["default"] = content_xml.getAttribute("default") param["value"] = content["default"] param["content"] = content meta.parameters.append(param) meta.actions = [] for action_xml in doc_xml.getElementsByTagName("action"): action = {} for key in action_xml.attributes.keys(): action[key] = action_xml.getAttribute(key) meta.actions.append(action) return meta def get_rsc_classes(self): return self.query("rsc_classes", True); def get_rsc_types(self, rsc_class): return self.query1("rsc_types\n" + rsc_class, True) def get_rsc_providers(self, rsc_class, rsc_type): return self.query("rsc_providers\n%s\n%s" % (rsc_class, rsc_type), True) def lrm_op_rc2str(self, rc): str_list = self.query("lrm_op_rc2str\n%s" % str(rc), True) if str_list is not None and len(str_list) > 0: return str_list[0] else: return "" def rsc_exists(self, rsc_id): return rsc_id in self.get_all_rsc() def log(string): syslog.syslog(string) if debug_level > 0: print string def debug(string): if debug_level == 0: return syslog.syslog(string) print string def delete_dir(dir_path): real_path = os.path.realpath(dir_path) if real_path.count(os.sep) == len(real_path): # msgbox(_("Do not delete the root directory")) log(_("Do not delete the root directory")) return for root, dirs, files in os.walk(dir_path, False): for name in files: try: os.unlink(os.path.join(root, name)) except OSError, msg: # msgbox(_("System error") + _(": ") + str(msg)) log(_("System error") + _(": ") + str(msg)) continue for name in dirs: try: os.rmdir(os.path.join(root, name)) except OSError, msg: # msgbox(_("System error") + _(": ") + str(msg)) log(_("System error") + _(": ") + str(msg)) continue try: os.rmdir(dir_path) except OSError, msg: # msgbox(_("System error") + _(": ") + str(msg)) log(_("System error") + _(": ") + str(msg))