From d5a73b2056114e51fa523e9249b550840752c7ea Mon Sep 17 00:00:00 2001 From: Fei Su Date: Wed, 24 Jan 2024 10:20:47 +0800 Subject: [PATCH 001/222] format with black and isort Signed-off-by: Fei Su --- scripts/mail-alarm | 37 ++++++++++++++++++++----------------- 1 file changed, 20 insertions(+), 17 deletions(-) diff --git a/scripts/mail-alarm b/scripts/mail-alarm index 99be5c44de8..8db6fffb811 100755 --- a/scripts/mail-alarm +++ b/scripts/mail-alarm @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # # mail-alarm: uses ssmtp to send a mail message, to pool:other_config:mail-destination # @@ -11,18 +11,18 @@ # the only thing that needs be set is pool:other-config:ssmtp-mailhub from __future__ import print_function -import XenAPI -import sys + +import json import os +import re +import sys +import syslog import tempfile import traceback -import syslog -import json -import re -from xml.dom import minidom -from xml.sax.saxutils import unescape -from xml.parsers.expat import ExpatError from socket import getfqdn +from xml.dom import minidom + +import XenAPI from xcp import branding # Go read man ssmtp.conf @@ -241,7 +241,9 @@ class CpuUsageAlarmETG(EmailTextGenerator): period="%d" % self.alarm_trigger_period, level="%.1f" % (self.alarm_trigger_level * 100.0), brand_console=branding.BRAND_CONSOLE, - cls_name=(self.cls == "Host" or self.params["is_control_domain"]) and "Server" or "VM", + cls_name=(self.cls == "Host" or self.params["is_control_domain"]) + and "Server" + or "VM", ) @@ -365,7 +367,9 @@ class MemoryUsageAlarmETG(EmailTextGenerator): period="%d" % self.alarm_trigger_period, level="%d" % self.alarm_trigger_level, brand_console=branding.BRAND_CONSOLE, - cls_name=(self.cls == "Host" or self.params["is_control_domain"]) and "Server" or "VM", + cls_name=(self.cls == "Host" or self.params["is_control_domain"]) + and "Server" + or "VM", ) @@ -797,7 +801,6 @@ class XapiMessage: return self.cached_etg if self.name == "ALARM": - ( value, name, @@ -827,8 +830,10 @@ class XapiMessage: self.mail_language, self.session, ) - elif name in ["memory_free_kib", # for Host - "memory_internal_free"]: # for VM + elif name in [ + "memory_free_kib", # for Host + "memory_internal_free", # for VM + ]: etg = MemoryUsageAlarmETG( self.cls, self.obj_uuid, @@ -980,7 +985,7 @@ def main(): 'Expected at least 1 argument but got none: ["%s"].' 
% (" ".join(sys.argv)) ) raise Exception("Insufficient arguments") - + session = XenAPI.xapi_local() ma_username = "__dom0__mail_alarm" session.xenapi.login_with_password( @@ -988,8 +993,6 @@ def main(): ) try: - - other_config = get_pool_other_config(session) if "mail-min-priority" in other_config: min_priority = int(other_config["mail-min-priority"]) From dd6d4f499158f9096272bac4491f410707bbb98a Mon Sep 17 00:00:00 2001 From: Fei Su Date: Wed, 24 Jan 2024 14:32:41 +0800 Subject: [PATCH 002/222] Address the error raised by pytype Signed-off-by: Fei Su --- scripts/mail-alarm | 59 ++++++++++++++++++++++++++++------------------ 1 file changed, 36 insertions(+), 23 deletions(-) diff --git a/scripts/mail-alarm b/scripts/mail-alarm index 8db6fffb811..9cc9cbc4dc5 100755 --- a/scripts/mail-alarm +++ b/scripts/mail-alarm @@ -15,6 +15,7 @@ from __future__ import print_function import json import os import re +import subprocess import sys import syslog import tempfile @@ -107,12 +108,14 @@ def get_mail_language(other_config): def get_config_file(): try: - return open("/etc/mail-alarm.conf").read() + with open("/etc/mail-alarm.conf", "r") as file: + return file.read() except: return default_config def load_mail_language(mail_language): + mail_language_file = "" try: mail_language_file = os.path.join( mail_language_pack_path, mail_language + ".json" @@ -727,7 +730,10 @@ class XapiMessage: xmldoc = minidom.parseString(xml) def get_text(tag): - return xmldoc.getElementsByTagName(tag)[0].firstChild.toxml() + text = xmldoc.getElementsByTagName(tag)[0].firstChild + if text is None: + raise ValueError("Get text failed with tag <{}>".format(tag)) + return text.toxml() self.name = get_text("name") self.priority = get_text("priority") @@ -880,7 +886,7 @@ class XapiMessage: self.mail_language, self.session, ) - elif re.match("sr_io_throughput_total_[0-9a-f]{8}$", name): + elif name and re.match("sr_io_throughput_total_[0-9a-f]{8}$", name): etg = SRIOThroughputTotalAlertETG( self.cls, self.obj_uuid, @@ -1025,29 +1031,36 @@ def main(): config = config.replace(s, r) # Write out a temporary file containing the new config - fd, fname = tempfile.mkstemp(prefix="mail-", dir="/tmp") - try: - os.write(fd, config) - os.close(fd) + with tempfile.NamedTemporaryFile( + prefix="mail-", dir="/tmp", delete=False + ) as temp_file: + temp_file.write(config.encode()) + temp_file_path = temp_file.name + try: # Run ssmtp to send mail - chld_stdin, chld_stdout = os.popen2( - ["/usr/sbin/ssmtp", "-C%s" % fname, destination] - ) - chld_stdin.write("From: %s\n" % sender) - chld_stdin.write('Content-Type: text/plain; charset="%s"\n' % charset) - chld_stdin.write("To: %s\n" % destination.encode(charset)) - chld_stdin.write( - "Subject: %s\n" % msg.generate_email_subject().encode(charset) - ) - chld_stdin.write("\n") - chld_stdin.write(msg.generate_email_body().encode(charset)) - chld_stdin.close() - chld_stdout.close() - os.wait() - + with subprocess.Popen( + ["/usr/sbin/ssmtp", "-C%s" % temp_file_path, destination], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + ) as proc: + input_data = ( + "From: %s\n" + 'Content-Type: text/plain; charset="%s"\n' + "To: %s\n" + "Subject: %s\n" + "\n" + "%s" + ) % ( + sender, + charset, + destination.encode(charset), + msg.generate_email_subject().encode(charset), + msg.generate_email_body().encode(charset), + ) + proc.communicate(input=input_data.encode(charset)) finally: - os.unlink(fname) + os.remove(temp_file_path) finally: session.xenapi.session.logout() From 
82da5a58f19c83837e4b15ce1cb168b621b64894 Mon Sep 17 00:00:00 2001 From: Fei Su Date: Thu, 25 Jan 2024 17:01:24 +0800 Subject: [PATCH 003/222] Fix str.encode issue in python3 Signed-off-by: Fei Su --- scripts/mail-alarm | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/scripts/mail-alarm b/scripts/mail-alarm index 9cc9cbc4dc5..cc8a3419166 100755 --- a/scripts/mail-alarm +++ b/scripts/mail-alarm @@ -1024,20 +1024,20 @@ def main(): return 1 if not sender: - sender = "noreply@%s" % getfqdn().encode(charset) + sender = "noreply@%s" % getfqdn() # Replace macros in config file using search_replace list for s, r in search_replace: config = config.replace(s, r) # Write out a temporary file containing the new config - with tempfile.NamedTemporaryFile( - prefix="mail-", dir="/tmp", delete=False - ) as temp_file: - temp_file.write(config.encode()) - temp_file_path = temp_file.name - try: + with tempfile.NamedTemporaryFile( + prefix="mail-", dir="/tmp", delete=False + ) as temp_file: + temp_file.write(config.encode()) + temp_file_path = temp_file.name + # Run ssmtp to send mail with subprocess.Popen( ["/usr/sbin/ssmtp", "-C%s" % temp_file_path, destination], @@ -1054,9 +1054,9 @@ def main(): ) % ( sender, charset, - destination.encode(charset), - msg.generate_email_subject().encode(charset), - msg.generate_email_body().encode(charset), + destination, + msg.generate_email_subject(), + msg.generate_email_body(), ) proc.communicate(input=input_data.encode(charset)) finally: From adac07087423a8aecca20da56e156cd67d81d3f1 Mon Sep 17 00:00:00 2001 From: Fei Su Date: Mon, 29 Jan 2024 14:18:16 +0800 Subject: [PATCH 004/222] fix issue: json.load in python3 doesn't have 'encoding' paramteter Signed-off-by: Fei Su --- scripts/mail-alarm | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/scripts/mail-alarm b/scripts/mail-alarm index cc8a3419166..45e834d1c5e 100755 --- a/scripts/mail-alarm +++ b/scripts/mail-alarm @@ -120,8 +120,8 @@ def load_mail_language(mail_language): mail_language_file = os.path.join( mail_language_pack_path, mail_language + ".json" ) - with open(mail_language_file, "r") as fileh: - return json.load(fileh, encoding="utf-8") + with open(mail_language_file, encoding="utf-8") as fileh: + return json.load(fileh) except IOError: log_err('Read mail language pack error:["%s"]' % (mail_language_file)) return None @@ -1031,6 +1031,7 @@ def main(): config = config.replace(s, r) # Write out a temporary file containing the new config + temp_file_path = "" try: with tempfile.NamedTemporaryFile( prefix="mail-", dir="/tmp", delete=False From f42f23f9a60557c0f9b8cbb44166e47dcebbf8f1 Mon Sep 17 00:00:00 2001 From: Fei Su Date: Tue, 30 Jan 2024 09:08:43 +0800 Subject: [PATCH 005/222] add a conditiall branch for ensuring pass in python2 ut Signed-off-by: Fei Su --- scripts/mail-alarm | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/scripts/mail-alarm b/scripts/mail-alarm index 45e834d1c5e..5fd432339bf 100755 --- a/scripts/mail-alarm +++ b/scripts/mail-alarm @@ -120,8 +120,15 @@ def load_mail_language(mail_language): mail_language_file = os.path.join( mail_language_pack_path, mail_language + ".json" ) + + # this conditional branch won't be executed, it's solely for the purpose of ensuring pass in python2 ut. 
+ if sys.version_info.major == 2: + with open(mail_language_file, "r") as fileh: + return json.load(fileh, encoding="utf-8") + with open(mail_language_file, encoding="utf-8") as fileh: return json.load(fileh) + except IOError: log_err('Read mail language pack error:["%s"]' % (mail_language_file)) return None From 089ed2a08b9c516766a8f56144bc68300d702a82 Mon Sep 17 00:00:00 2001 From: Fei Su Date: Fri, 2 Feb 2024 10:48:27 +0800 Subject: [PATCH 006/222] Error: scripts/mail-alarm was changed, remove it from expected_to_fail in pyproject.toml and make sure it passes pytype checks Signed-off-by: Fei Su --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index b65a36bb062..dc0221cd329 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,8 +77,6 @@ expected_to_fail = [ "scripts/backup-sr-metadata.py", "scripts/restore-sr-metadata.py", "scripts/nbd_client_manager.py", - # No attribute 'popen2' on module 'os' [module-attr] and a couple more: - "scripts/mail-alarm", # SSLSocket.send() only accepts bytes, not unicode string as argument: "scripts/examples/python/exportimport.py", # Other fixes needed: From afb29bf8c156f81e487f8398fe254450ddf2c40d Mon Sep 17 00:00:00 2001 From: Yann Dirson Date: Thu, 18 Jan 2024 17:59:37 +0100 Subject: [PATCH 007/222] py3: make xapi-storage py3-compatible This is a redo of 4140ff117038656da0e75a8387720b9c8401e9e0, not touching `str`. This uses the same mechanism as ac683ca9815f65ddc48bffdb4e242909387c3daa, to deal with occurrence of `long` causing `make test` to fail with python3, but also occurrences of `unicode`. Signed-off-by: Yann Dirson --- ocaml/xapi-storage/python/xapi/__init__.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/ocaml/xapi-storage/python/xapi/__init__.py b/ocaml/xapi-storage/python/xapi/__init__.py index 0027af213bf..d2a0eed3f94 100644 --- a/ocaml/xapi-storage/python/xapi/__init__.py +++ b/ocaml/xapi-storage/python/xapi/__init__.py @@ -31,6 +31,11 @@ import json import argparse +# pylint: disable=invalid-name,redefined-builtin,undefined-variable +# pyright: reportUndefinedVariable=false +if sys.version_info[0] > 2: + long = int + unicode = str def success(result): return {"Status": "Success", "Value": result} From 75858d7bed6423fc88c435b7a4fd149e2148016e Mon Sep 17 00:00:00 2001 From: Yann Dirson Date: Thu, 18 Jan 2024 18:13:49 +0100 Subject: [PATCH 008/222] py3: make sure we are not using unicode type in python3 This is a redo of 48c8c3ec425c89afcd839b6a9c5b2bf7725de567, not touching `long` and `str`. `unicode` is only used in there for testing whether we have a string, so aliasing it to `str` is valid. OTOH we likely don't want to accept `bytes` where we accept `str` in python2, and `str` gets aliased to `bytes` in other areas of the code, so this might reveal issues in other places. 
Signed-off-by: Yann Dirson --- ocaml/xapi-storage/python/xapi/storage/api/datapath.py | 8 +++++++- ocaml/xapi-storage/python/xapi/storage/api/plugin.py | 8 +++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/ocaml/xapi-storage/python/xapi/storage/api/datapath.py b/ocaml/xapi-storage/python/xapi/storage/api/datapath.py index 1d5b43b0dca..69b37e5a9e7 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/datapath.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/datapath.py @@ -6,6 +6,12 @@ import argparse import traceback import logging + +# pylint: disable=invalid-name,redefined-builtin,undefined-variable +# pyright: reportUndefinedVariable=false +if sys.version_info[0] > 2: + unicode = str + class Unimplemented(Rpc_light_failure): def __init__(self, arg_0): Rpc_light_failure.__init__(self, "Unimplemented", [ arg_0 ]) @@ -398,4 +404,4 @@ def _dispatch(self, method, params): class datapath_server_test(datapath_server_dispatcher): """Create a server which will respond to all calls, returning arbitrary values. This is intended as a marshal/unmarshal test.""" def __init__(self): - datapath_server_dispatcher.__init__(self, Datapath_server_dispatcher(Datapath_test())) \ No newline at end of file + datapath_server_dispatcher.__init__(self, Datapath_server_dispatcher(Datapath_test())) diff --git a/ocaml/xapi-storage/python/xapi/storage/api/plugin.py b/ocaml/xapi-storage/python/xapi/storage/api/plugin.py index 0185d900148..1b6d37214ca 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/plugin.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/plugin.py @@ -6,6 +6,12 @@ import argparse import traceback import logging + +# pylint: disable=invalid-name,redefined-builtin,undefined-variable +# pyright: reportUndefinedVariable=false +if sys.version_info[0] > 2: + unicode = str + class Unimplemented(Rpc_light_failure): def __init__(self, arg_0): Rpc_light_failure.__init__(self, "Unimplemented", [ arg_0 ]) @@ -230,4 +236,4 @@ def _dispatch(self, method, params): class plugin_server_test(plugin_server_dispatcher): """Create a server which will respond to all calls, returning arbitrary values. This is intended as a marshal/unmarshal test.""" def __init__(self): - plugin_server_dispatcher.__init__(self, Plugin_server_dispatcher(Plugin_test())) \ No newline at end of file + plugin_server_dispatcher.__init__(self, Plugin_server_dispatcher(Plugin_test())) From 7d33cfb1368570d5dc7ba0d6bd31355903c52c2e Mon Sep 17 00:00:00 2001 From: Yann Dirson Date: Thu, 18 Jan 2024 17:33:46 +0100 Subject: [PATCH 009/222] Switch xapi-storage-scripts tests to python3 With this a "make test" after build out of OPAM on Debian 12 finishes successfully. 
Signed-off-by: Yann Dirson --- .../test/volume/org.xen.xapi.storage.dummy/plugin.py | 2 +- .../test/volume/org.xen.xapi.storage.dummy/sr.py | 10 +++++----- .../test/volume/org.xen.xapi.storage.dummy/volume.py | 10 +++++----- .../test/volume/org.xen.xapi.storage.dummyv5/plugin.py | 2 +- .../test/volume/org.xen.xapi.storage.dummyv5/sr.py | 10 +++++----- .../test/volume/org.xen.xapi.storage.dummyv5/volume.py | 10 +++++----- 6 files changed, 22 insertions(+), 22 deletions(-) diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/plugin.py b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/plugin.py index 08fb78407e0..40e3a00911c 100755 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/plugin.py +++ b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/plugin.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 """ Copyright (C) Citrix Systems, Inc. diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/sr.py b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/sr.py index 3cd7a211c8f..82c77d891db 100755 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/sr.py +++ b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/sr.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 """ Copyright (C) Citrix Systems, Inc. @@ -6,7 +6,7 @@ import os import sys -import urlparse +import urllib.parse import xapi.storage.api.volume import plugin @@ -21,11 +21,11 @@ def create(self, dbg, uri, name, description, configuration): return def detach(self, dbg, sr): - urlparse.urlparse(sr) + urllib.parse.urlparse(sr) return def ls(self, dbg, sr): - urlparse.urlparse(sr) + urllib.parse.urlparse(sr) qr = plugin.Implementation().query(dbg) return [{ "name": qr['name'], @@ -40,7 +40,7 @@ def ls(self, dbg, sr): }] def stat(self, dbg, sr): - urlparse.urlparse(sr) + urllib.parse.urlparse(sr) qr = plugin.Implementation().query(dbg) return { "sr": sr, diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/volume.py b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/volume.py index 448ee6dcbc3..848c13bfd39 100755 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/volume.py +++ b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/volume.py @@ -1,11 +1,11 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 """ Copyright (C) Citrix Systems, Inc. 
""" import uuid -import urlparse +import urllib.parse import os import sys import xapi.storage.api.volume @@ -17,7 +17,7 @@ class Implementation(xapi.storage.api.volume.Volume_skeleton): def create(self, dbg, sr, name, description, size): - urlparse.urlparse(sr) + urllib.parse.urlparse(sr) voluuid = str(uuid.uuid4()) return { "name": name, @@ -32,11 +32,11 @@ def create(self, dbg, sr, name, description, size): } def destroy(self, dbg, sr, key): - urlparse.urlparse(sr) + urllib.parse.urlparse(sr) return def stat(self, dbg, sr, key): - urlparse.urlparse(sr) + urllib.parse.urlparse(sr) qr = plugin.Implementation().query(dbg) return { "name": qr['name'], diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/plugin.py b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/plugin.py index 5816f0dd217..e9ef122ca07 100755 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/plugin.py +++ b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/plugin.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 """ Copyright (C) Citrix Systems, Inc. diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/sr.py b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/sr.py index 6100407e91d..3c649423d15 100755 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/sr.py +++ b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/sr.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 """ Copyright (C) Citrix Systems, Inc. @@ -6,7 +6,7 @@ import os import sys -import urlparse +import urllib.parse import xapi.storage.api.v5.volume import plugin @@ -22,11 +22,11 @@ def create(self, dbg, uuid, configuration, name, description): return configuration def detach(self, dbg, sr): - urlparse.urlparse(sr) + urllib.parse.urlparse(sr) return def ls(self, dbg, sr): - urlparse.urlparse(sr) + urllib.parse.urlparse(sr) qr = plugin.Implementation().query(dbg) return [{ "name": qr['name'], @@ -42,7 +42,7 @@ def ls(self, dbg, sr): }] def stat(self, dbg, sr): - urlparse.urlparse(sr) + urllib.parse.urlparse(sr) qr = plugin.Implementation().query(dbg) return { "sr": sr, diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/volume.py b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/volume.py index 20822dd8d73..fcf52ce3883 100755 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/volume.py +++ b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/volume.py @@ -1,11 +1,11 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 """ Copyright (C) Citrix Systems, Inc. 
""" import uuid -import urlparse +import urllib.parse import os import sys import xapi.storage.api.v5.volume @@ -17,7 +17,7 @@ class Implementation(xapi.storage.api.v5.volume.Volume_skeleton): def create(self, dbg, sr, name, description, size, sharable): - urlparse.urlparse(sr) + urllib.parse.urlparse(sr) voluuid = str(uuid.uuid4()) return { "name": name, @@ -33,11 +33,11 @@ def create(self, dbg, sr, name, description, size, sharable): } def destroy(self, dbg, sr, key): - urlparse.urlparse(sr) + urllib.parse.urlparse(sr) return def stat(self, dbg, sr, key): - urlparse.urlparse(sr) + urllib.parse.urlparse(sr) qr = plugin.Implementation().query(dbg) return { "name": qr['name'], From af3a3e4314b9cb94760a6a7f66bf428ab316a045 Mon Sep 17 00:00:00 2001 From: Yann Dirson Date: Tue, 23 Jan 2024 18:15:27 +0100 Subject: [PATCH 010/222] Remove now-unused PY_TEST guard Reported-by: Pau Ruiz Safont Signed-off-by: Yann Dirson --- Makefile | 2 -- 1 file changed, 2 deletions(-) diff --git a/Makefile b/Makefile index bcfc5b9eb78..ba121000e3a 100644 --- a/Makefile +++ b/Makefile @@ -62,9 +62,7 @@ test: trap "kill $${PSTREE_SLEEP_PID}" SIGINT SIGTERM EXIT; \ timeout --foreground $(TEST_TIMEOUT2) \ dune runtest --profile=$(PROFILE) --error-reporting=twice -j $(JOBS) -ifneq ($(PY_TEST), NO) dune build @runtest-python --profile=$(PROFILE) -endif stresstest: dune build @stresstest --profile=$(PROFILE) --no-buffer -j $(JOBS) From b70c604e4f29dafc25b38a3d95cec7211e142e83 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Wed, 28 Feb 2024 01:36:47 +0000 Subject: [PATCH 011/222] CP-47935: Create a subdirectory for python3-only scripts insatlled in BIN Signed-off-by: Stephen Cheng --- .codecov.yml | 41 +++++++++++++++++++++++++-- .github/workflows/main.yml | 15 +++++++++- pyproject.toml | 6 ++-- python3/Makefile | 7 +++++ {scripts => python3/bin}/hfx_filename | 31 +++++++++++--------- scripts/Makefile | 1 - 6 files changed, 81 insertions(+), 20 deletions(-) create mode 100644 python3/Makefile rename {scripts => python3/bin}/hfx_filename (82%) diff --git a/.codecov.yml b/.codecov.yml index 8380434a2a5..9be7955160d 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -158,6 +158,31 @@ coverage: # threshold: 20% + python3: + + # + # The python3 limit applies to: + # ----------------------------- + # + # - python3/** + # - excluding: **/test_*.py + # + paths: ["python3/**", "!**/test_*.py"] + + # + # For python3/** (excluding tests): + # + # For python3, coverage should not be reduced compared to its base: + # + target: auto + + # + # Exception: the threshold value given is allowed + # + # Allows for not covering 20% if the changed lines of the PR: + # + threshold: 20% + # Checks each Python version separately: python-3.11: flags: ["python3.11"] @@ -175,18 +200,26 @@ coverage: # Python modules and scripts below scripts/ (excluding tests) # scripts: + paths: ["scripts/**", "!**/test_*.py"] target: 48% threshold: 2% - paths: ["scripts/**", "!**/test_*.py"] # - # Python modules and scripts below ocaml/ + # Python modules and scripts below ocaml/ (excluding tests) # ocaml: paths: ["ocaml/**", "!**/test_*.py"] target: 51% threshold: 3% + # + # Python modules and scripts below python3/ (excluding tests) + # + python3: + paths: ["python3/**", "!**/test_*.py"] + target: 48% + threshold: 2% + # # Test files # @@ -239,6 +272,10 @@ component_management: - "ocaml/xapi-storage-script/**" - "!**/test_*.py" + - component_id: python3 + name: python3 + paths: ["python3/**", "!**/test_*.py"] + - component_id: test_cases name: test_cases paths: 
["**/test_*.py"] diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index cadf84c35c4..7b660722b20 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -56,7 +56,8 @@ jobs: - name: Install common dependencies for Python ${{matrix.python-version}} run: pip install future mock pytest-coverage pytest-mock - - name: Run Pytest and get code coverage for Codecov + - name: Run Pytest for python 2 and get code coverage for Codecov + if: ${{ matrix.python-version == '2.7' }} run: > pytest --cov=scripts --cov=ocaml/xcp-rrdd @@ -67,6 +68,18 @@ jobs: env: PYTHONDEVMODE: yes + - name: Run Pytest for python 3 and get code coverage for Codecov + if: ${{ matrix.python-version != '2.7' }} + run: > + pytest + --cov=scripts --cov=ocaml/xcp-rrdd --cov=python3/ + scripts/ ocaml/xcp-rrdd python3/ -vv -rA + --junitxml=.git/pytest${{matrix.python-version}}.xml + --cov-report term-missing + --cov-report xml:.git/coverage${{matrix.python-version}}.xml + env: + PYTHONDEVMODE: yes + - name: Upload Python ${{matrix.python-version}} coverage report to Codecov uses: codecov/codecov-action@v3 with: diff --git a/pyproject.toml b/pyproject.toml index dc0221cd329..8c902456c05 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -70,7 +70,6 @@ discard_messages_matching = [ "No Node.TEXT_NODE in module xml.dom.minidom, referenced from 'xml.dom.expatbuilder'" ] expected_to_fail = [ - "scripts/hfx_filename", "scripts/perfmon", # Need 2to3 -w and maybe a few other minor updates: "scripts/hatests", @@ -96,7 +95,6 @@ expected_to_fail = [ [tool.pytype] inputs = [ - "scripts/hfx_filename", "scripts/perfmon", "scripts/static-vdis", "scripts/Makefile", @@ -112,6 +110,10 @@ inputs = [ "scripts/yum-plugins", "scripts/*.py", + # Python 3 + "python3/bin/hfx_filename", + "python3/*.py", + # To be added later, # when converted to Python3-compatible syntax: # "ocaml/message-switch/python", diff --git a/python3/Makefile b/python3/Makefile new file mode 100644 index 00000000000..6d0089bac98 --- /dev/null +++ b/python3/Makefile @@ -0,0 +1,7 @@ +include ../config.mk + +IPROG=install -m 755 + +install: + mkdir -p $(DESTDIR)$(OPTDIR)/bin + $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin \ No newline at end of file diff --git a/scripts/hfx_filename b/python3/bin/hfx_filename similarity index 82% rename from scripts/hfx_filename rename to python3/bin/hfx_filename index cea0f808200..dd8677fc499 100755 --- a/scripts/hfx_filename +++ b/python3/bin/hfx_filename @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright (c) 2015 Citrix, Inc. # @@ -14,8 +14,8 @@ # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
-from __future__ import print_function -import sys, os, socket, urllib2, urlparse, XenAPI, traceback, xmlrpclib + +import sys, socket, urllib.request, XenAPI db_url = "/remote_db_access" @@ -28,18 +28,20 @@ def rpc(session_id, request): headers = [ "POST %s?session_id=%s HTTP/1.0" % (db_url, session_id), "Connection:close", - "content-length:%d" % (len(request)), + "content-length:%d" % (len(request.encode('utf-8'))), "" ] - #print "Sending HTTP request:" for h in headers: - s.send("%s\r\n" % h) - #print "%s\r\n" % h, - s.send(request) + s.send((h + "\r\n").encode('utf-8')) + s.send(request.encode('utf-8')) + + result = "" + while True: + chunk = s.recv(1024) + if not chunk: + break + result += chunk.decode('utf-8') - result = s.recv(1024) - #print "Received HTTP response:" - #print result if "200 OK" not in result: print("Expected an HTTP 200, got %s" % result, file=sys.stderr) return @@ -55,13 +57,15 @@ def rpc(session_id, request): s.close() def parse_string(txt): + if not txt: + raise Exception("Unable to parse string response: None") prefix = "success" if not txt.startswith(prefix): - raise "Unable to parse string response" + raise Exception("Unable to parse string response: Wrong prefix") txt = txt[len(prefix):] suffix = "" if not txt.endswith(suffix): - raise "Unable to parse string response" + raise Exception("Unable to parse string response: Wrong suffix") txt = txt[:len(txt)-len(suffix)] return txt @@ -76,7 +80,6 @@ def read_field(session_id, table, fld, rf): return response if __name__ == "__main__": - import XenAPI xapi = XenAPI.xapi_local() xapi.xenapi.login_with_password('root', '') session_id = xapi._session diff --git a/scripts/Makefile b/scripts/Makefile index 8f07e91efe7..48aea975bf4 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -150,7 +150,6 @@ install: $(IPROG) xe-syslog-reconfigure $(DESTDIR)$(LIBEXECDIR) $(IPROG) xe-install-supplemental-pack $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-enable-ipv6 $(DESTDIR)$(OPTDIR)/bin - $(IPROG) hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IPROG) pv2hvm $(DESTDIR)$(OPTDIR)/bin mkdir -p $(DESTDIR)/etc/cron.daily mkdir -p $(DESTDIR)/etc/cron.hourly From aec50b0dd15a0487123622bba10458b175a2d500 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Wed, 28 Feb 2024 04:39:42 +0000 Subject: [PATCH 012/222] CP-47935: Add unit tests for hfx_filename Signed-off-by: Stephen Cheng --- python3/unittest/import_file.py | 25 +++++ python3/unittest/test_hfx_filename.py | 132 ++++++++++++++++++++++++++ 2 files changed, 157 insertions(+) create mode 100644 python3/unittest/import_file.py create mode 100644 python3/unittest/test_hfx_filename.py diff --git a/python3/unittest/import_file.py b/python3/unittest/import_file.py new file mode 100644 index 00000000000..2589e640232 --- /dev/null +++ b/python3/unittest/import_file.py @@ -0,0 +1,25 @@ +""" +This file is used for importing a non-".py" file as a module in unit test. 
+It never runs directly, so no shebang and no main() +""" +import sys +import os +from importlib import machinery, util + +def import_from_file(module_name, file_path): + """Import a file as a module""" + loader = machinery.SourceFileLoader(module_name, file_path) + spec = util.spec_from_loader(module_name, loader) + assert spec + assert spec.loader + module = util.module_from_spec(spec) + # Probably a good idea to add manually imported module stored in sys.modules + sys.modules[module_name] = module + spec.loader.exec_module(module) + return module + +def get_module(module_name, file_path): + """get the module from a file""" + testdir = os.path.dirname(__file__) + print(testdir) + return import_from_file(module_name, testdir + file_path) diff --git a/python3/unittest/test_hfx_filename.py b/python3/unittest/test_hfx_filename.py new file mode 100644 index 00000000000..6e6964a24f2 --- /dev/null +++ b/python3/unittest/test_hfx_filename.py @@ -0,0 +1,132 @@ +#!/usr/bin/env python3 +# -*- coding: utf-8 -*- +""" +This module provides unittest for hfx_filename +""" + +import sys +import unittest +from mock import MagicMock, patch, call +from import_file import get_module + +# mock modules to avoid dependencies +sys.modules["XenAPI"] = MagicMock() + +hfx_filename = get_module("hfx_filename", "/../bin/hfx_filename") + + +@patch("socket.socket") +class TestRpc(unittest.TestCase): + """ + This class tests blow functions: + rpc() + db_get_uuid() + read_field() + """ + def test_rpc(self, mock_socket): + """ + Tests rpc + """ + mock_connected_socket = MagicMock() + mock_socket.return_value = mock_connected_socket + + recv_data = b"HTTP/1.1 200 OK\r\nContent-Length: 10\r\n\r\nHelloWorld" + # Set the return value for the first call to recv + mock_connected_socket.recv.side_effect = [recv_data, None] + + session_id = 0 + request = "socket request" + body = hfx_filename.rpc(session_id, request) + + # Assert that the socket methods were called as expected + expected_data = [ + b"POST /remote_db_access?session_id=0 HTTP/1.0\r\n", + b"Connection:close\r\n", + b"content-length:14\r\n", + b"\r\n", + b"socket request" + ] + mock_connected_socket.send.assert_has_calls([call(data) for data in expected_data]) + + expected_return = "HelloWorld" + self.assertEqual(expected_return, body) + + def test_rpc_international_character(self, mock_socket): + """ + Tests rpc using non-ascii characters + """ + mock_connected_socket = MagicMock() + mock_socket.return_value = mock_connected_socket + + recv_data = b"HTTP/1.1 200 OK\r\nContent-Length: 10\r\n\r\nHelloWorld" + # Set the return value for the first call to recv + mock_connected_socket.recv.side_effect = [recv_data, None] + + session_id = 0 + # Use international character"socket 请求" as request + request = "socket 请求" + body = hfx_filename.rpc(session_id, request) + + # Assert that the socket methods were called as expected + expected_data = [ + b"POST /remote_db_access?session_id=0 HTTP/1.0\r\n", + b"Connection:close\r\n", + b"content-length:13\r\n", + b"\r\n", + request.encode('utf-8') + ] + mock_connected_socket.send.assert_has_calls([call(data) for data in expected_data]) + + expected_return = "HelloWorld" + self.assertEqual(expected_return, body) + + def test_db_get_uuid(self, mock_socket): + """ + Tests db_get_uuid + """ + mock_connected_socket = MagicMock() + mock_socket.return_value = mock_connected_socket + + header = "HTTP/1.1 200 OK\r\nContent-Length: 10\r\n\r\n" + body = ("successHelloWorld" + "") + recv_data = (header + body).encode('utf-8') + # Set the return 
value for the first call to recv + mock_connected_socket.recv.side_effect = [recv_data, None] + + expected_response = "HelloWorld" + response = hfx_filename.db_get_by_uuid(0, "pool_patch", "22345") + self.assertEqual(expected_response, response) + + def test_read_field(self, mock_socket): + """ + Tests read_field + """ + mock_connected_socket = MagicMock() + mock_socket.return_value = mock_connected_socket + + header = "HTTP/1.1 200 OK\r\nContent-Length: 10\r\n\r\n" + body = ("successfile_name" + "") + recv_data = (header + body).encode('utf-8') + # Set the return value for the first call to recv + mock_connected_socket.recv.side_effect = [recv_data, None] + + expected_filename = "file_name" + filename = hfx_filename.read_field(0, "pool_patch", "filename", "rf") + self.assertEqual(expected_filename, filename) + + +class TestParse(unittest.TestCase): + """ + This class tests function parse_string() + """ + def test_parse_string(self): + """ + Tests parse_string + """ + txt = ("successabcde" + "") + expected_txt = "abcde" + return_txt = hfx_filename.parse_string(txt) + self.assertEqual(expected_txt, return_txt) From 60dbc3c8340ee0320b82e93cc94583f91d2298d7 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Fri, 8 Mar 2024 03:13:19 +0000 Subject: [PATCH 013/222] Set the unit test codcov target to 80% There were no python3 unit tests now. Set the unit test coverage to 80% for the py3 update. In the future, if there are cases where the scripts are not UTable or we can cover them by manual test or XenRT test, we can handle it by excluding them from the check. Signed-off-by: Stephen Cheng --- .codecov.yml | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/.codecov.yml b/.codecov.yml index 9be7955160d..f7562dbaf3c 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -168,13 +168,7 @@ coverage: # - excluding: **/test_*.py # paths: ["python3/**", "!**/test_*.py"] - - # - # For python3/** (excluding tests): - # - # For python3, coverage should not be reduced compared to its base: - # - target: auto + target: 80% # # Exception: the threshold value given is allowed @@ -278,4 +272,5 @@ component_management: - component_id: test_cases name: test_cases - paths: ["**/test_*.py"] + paths: ["python3/unittest/test_*.py"] + From 10e9c37b5f170bde28e8630dc46b143090f0d25f Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 11 Mar 2024 07:24:02 +0000 Subject: [PATCH 014/222] Only test files migrated to python3 Signed-off-by: Stephen Cheng --- .codecov.yml | 4 +--- .github/workflows/main.yml | 4 ++-- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/.codecov.yml b/.codecov.yml index f7562dbaf3c..47ef46ac090 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -180,8 +180,6 @@ coverage: # Checks each Python version separately: python-3.11: flags: ["python3.11"] - python-2.7: - flags: ["python2.7"] # # Project limits @@ -220,7 +218,7 @@ coverage: tests: # Ensure that all tests are executed (tests themselves must be 100% covered) target: 98% - paths: ["**/test_*.py"] + paths: ["python3/unittest/test_*.py"] # diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d38a825a23b..e8a6b84cbfb 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -72,8 +72,8 @@ jobs: if: ${{ matrix.python-version != '2.7' }} run: > pytest - --cov=scripts --cov=ocaml/xcp-rrdd --cov=python3/ - scripts/ ocaml/xcp-rrdd python3/ -vv -rA + --cov=python3/unittest + python3/unittest -vv -rA --junitxml=.git/pytest${{matrix.python-version}}.xml --cov-report term-missing 
--cov-report xml:.git/coverage${{matrix.python-version}}.xml From 9b73950b01641eec54e098858c3f06cd715d50ae Mon Sep 17 00:00:00 2001 From: acefei Date: Wed, 13 Mar 2024 13:52:22 +0800 Subject: [PATCH 015/222] CP-47555 Porting usb_scan.py to python3 (#5424) * formate with black * decode value produced by python3-pyudev which return bytes * remove usb_scan from expected_to_fail list in pyproject.toml * update some neccesary pylint issues * fix pytype error: unsupported operand type(s) for +: str and UsbInterface * Disable false positive in Pytype error reporting * update for comments * fix ut errors as python-pyudev return bytes instead of string * increase code coverage * format with black for test_usb_scan.py * move usb_scan.py with ut code into python3 folder * Disable the code coverage of scripts folder We're moving the python code and unittest into python3 folder, that would be resulting in a decrease in coverage rate in scripts folder * false positive for code coverage * solve the pylint warning --------- Signed-off-by: Fei Su --- .github/workflows/main.yml | 2 +- pyproject.toml | 18 +- python3/Makefile | 2 + {scripts => python3/libexec}/usb_scan.py | 207 +++++++++-------- python3/unittest/import_file.py | 2 +- python3/unittest/test_hfx_filename.py | 2 +- .../unittest}/test_usb_scan.py | 208 +++++++++--------- scripts/Makefile | 2 - 8 files changed, 234 insertions(+), 209 deletions(-) rename {scripts => python3/libexec}/usb_scan.py (79%) rename {scripts => python3/unittest}/test_usb_scan.py (66%) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e8a6b84cbfb..a51f40e91e6 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -60,7 +60,7 @@ jobs: if: ${{ matrix.python-version == '2.7' }} run: > pytest - --cov=scripts --cov=ocaml/xcp-rrdd + --cov=ocaml/xcp-rrdd scripts/ ocaml/xcp-rrdd -vv -rA --junitxml=.git/pytest${{matrix.python-version}}.xml --cov-report term-missing diff --git a/pyproject.toml b/pyproject.toml index 8ab98205e89..afc1ff32067 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,20 @@ profile = "black" combine_as_imports = true ensure_newline_before_comments = false +[tool.pylint.messages_control] +disable = [ + "missing-function-docstring", + "missing-module-docstring", + "consider-using-f-string", + "too-many-branches", + "broad-exception-caught", + "no-else-break", + "no-else-return", + "invalid-name", + "import-error", + "unnecessary-pass", + "unspecified-encoding", +] [tool.mypy] # Note mypy has no config setting for PYTHONPATH, so you need to call it with: @@ -85,10 +99,6 @@ expected_to_fail = [ "scripts/examples/python/shell.py", "scripts/examples/smapiv2.py", "scripts/static-vdis", - # add_interface: unsupported operand type(s) for +: str and UsbInterface - "scripts/usb_scan.py", - # TestUsbScan.assertIn() is called with wrong arguments(code not iterable) - "scripts/test_usb_scan.py", "scripts/plugins/extauth-hook-AD.py", ] diff --git a/python3/Makefile b/python3/Makefile index a8b6ad3d0b5..02d819443ed 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -8,6 +8,8 @@ SITE3_DIR=$(shell python3 -c "from distutils.sysconfig import get_python_lib; pr install: mkdir -p $(DESTDIR)$(OPTDIR)/bin mkdir -p $(DESTDIR)$(SITE3_DIR) + mkdir -p $(DESTDIR)$(LIBEXECDIR) $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IDATA) packages/observer.py $(DESTDIR)$(SITE3_DIR)/ + $(IPROG) libexec/usb_scan.py $(DESTDIR)$(LIBEXECDIR) diff --git a/scripts/usb_scan.py b/python3/libexec/usb_scan.py similarity index 79% rename from 
scripts/usb_scan.py rename to python3/libexec/usb_scan.py index 25290b362a9..187418741e6 100755 --- a/scripts/usb_scan.py +++ b/python3/libexec/usb_scan.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # # Copyright (C) Citrix Systems Inc. # @@ -21,16 +21,17 @@ # 2. check if device can be passed through based on policy file # 3. return the device info to XAPI in json format -from __future__ import print_function + import abc import argparse import json -import xcp.logger as log import logging -import pyudev import re import sys +import pyudev +import xcp.logger as log + def log_list(l): for s in l: @@ -43,7 +44,7 @@ def log_exit(m): def hex_equal(h1, h2): - """ check if the value of hex string are equal + """check if the value of hex string are equal :param h1:(str) lhs hex string :param h2:(str) rhs hex string @@ -56,14 +57,15 @@ def hex_equal(h1, h2): class UsbObject(dict): - """ Base class of USB classes, save USB properties in dict + """Base class of USB classes, save USB properties in dict node(str): the key, device node """ + __metaclass__ = abc.ABCMeta def __init__(self, node): - super(UsbObject, self).__init__() + super().__init__() self.node = node def get_node(self): @@ -90,11 +92,12 @@ def debug_str(self, level=0): :param level: the indent level :return: the debug string """ - return self.indent(level) + self.__class__.__name__ + ": " + \ - str((self.node, self)) + return ( + self.indent(level) + self.__class__.__name__ + ": " + str((self.node, self)) + ) def is_initialized(self): - """ check if all properties are properly set + """check if all properties are properly set :return: bool, if properties are ready """ @@ -107,24 +110,22 @@ def _is_class_hub(self, key_class): @abc.abstractmethod def is_class_hub(self): - """ check if this belongs to a hub + """check if this belongs to a hub :return: bool, if this belongs to a hub """ - pass @abc.abstractmethod def is_child_of(self, parent): - """ check if this is a child of parent + """check if this is a child of parent :param parent:(UsbObject) the parent to check against :return: """ - pass @staticmethod def validate_int(s, base=10): - """ validate if a string can be converted to int + """validate if a string can be converted to int :param s:(str) the string to be converted :param base:(int) the radix base of integer to convect @@ -138,10 +139,11 @@ def validate_int(s, base=10): class UsbDevice(UsbObject): - """ Class for USB device, save USB properties in UsbObject dict + """Class for USB device, save USB properties in UsbObject dict interfaces:([UsbInterface]) list of USB interfaces belonging to this device """ + _DESC_VENDOR = "ID_VENDOR_FROM_DATABASE" _DESC_PRODUCT = "ID_MODEL_FROM_DATABASE" @@ -156,13 +158,22 @@ class UsbDevice(UsbObject): _USB_SPEED = "speed" _PRODUCT_DESC = [_DESC_VENDOR, _DESC_PRODUCT] - _PRODUCT_DETAILS = [_VERSION, _ID_VENDOR, _ID_PRODUCT, _BCD_DEVICE, _SERIAL, - _CLASS, _CONF_VALUE, _NUM_INTERFACES, _USB_SPEED] + _PRODUCT_DETAILS = [ + _VERSION, + _ID_VENDOR, + _ID_PRODUCT, + _BCD_DEVICE, + _SERIAL, + _CLASS, + _CONF_VALUE, + _NUM_INTERFACES, + _USB_SPEED, + ] _PROPS = _PRODUCT_DESC + _PRODUCT_DETAILS _PROPS_NONABLE = _PRODUCT_DESC + [_SERIAL] def __init__(self, node, props1, props2): - """ initialise UsbDevice, set node and properties + """initialise UsbDevice, set node and properties :param node(str): device node :param props1(pyudev.Device): device, to get properties from UDEV @@ -170,14 +181,14 @@ def __init__(self, node, props1, props2): :param props2(pyudev.Device.attributes): 
device attributes, to get properties from sysfs """ - super(UsbDevice, self).__init__(node) + super().__init__(node) for p in self._PRODUCT_DESC: if props1.get(p) is not None: self[p] = props1.get(p) for p in self._PRODUCT_DETAILS: if props2.get(p) is not None: - self[p] = props2.get(p) + self[p] = props2.get(p).decode() for p in self._PROPS_NONABLE: if p not in self: self[p] = "" @@ -185,7 +196,7 @@ def __init__(self, node, props1, props2): self.interfaces = set() def debug_str(self, level=0): - s = super(UsbDevice, self).debug_str(level) + s = super().debug_str(level) for i in self.interfaces: s += i.debug_str(level + 1) return s @@ -203,7 +214,7 @@ def is_initialized(self): if not self.validate_int(self[p]): return False - return super(UsbDevice, self).is_initialized() + return super().is_initialized() def is_class_hub(self): return self._is_class_hub(self._CLASS) @@ -213,13 +224,13 @@ def is_child_of(self, parent): return False def add_interface(self, interface): - """ add an interface to this device + """add an interface to this device :param interface:(UsbInterface) the UsbInterface to add :return: None """ if interface in self.interfaces: - log.debug("overriding existing interface: " + interface) + log.debug("overriding existing interface: " + str(interface)) self.interfaces.remove(interface) self.interfaces.add(interface) @@ -230,18 +241,18 @@ def del_interface(self, interface): :return: None """ if interface in self.interfaces: - log.debug("removing interface: " + interface) + log.debug("removing interface: " + str(interface)) self.interfaces.remove(interface) def get_all_interfaces(self): - """ get all interfaces attached of this device + """get all interfaces attached of this device :return: set of all interfaces """ return self.interfaces def is_ready(self): - """ check if this device has all the interfaces attached + """check if this device has all the interfaces attached :return: bool, if it's ready to do policy check now """ @@ -250,9 +261,8 @@ def is_ready(self): class UsbInterface(UsbObject): - """ Class for USB interface, save USB properties in UsbObject dict + """Class for USB interface, save USB properties in UsbObject dict""" - """ _NUMBER = "bInterfaceNumber" _CLASS = "bInterfaceClass" _SUB_CLASS = "bInterfaceSubClass" @@ -261,20 +271,19 @@ class UsbInterface(UsbObject): _PROPS = [_NUMBER, _CLASS, _SUB_CLASS, _PROTOCOL] def __init__(self, node, props): - """ initialise UsbInterface, set node and properties + """initialise UsbInterface, set node and properties :param node(str): device node :param props(pyudev.Device.attributes): device attributes, to get properties from sysfs """ - super(UsbInterface, self).__init__(node) + super().__init__(node) for p in self._PROPS: if props.get(p) is not None: - self[p] = props.get(p) + self[p] = props.get(p).decode() def debug_str(self, level=0): - s = super(UsbInterface, self).debug_str(level) - return s + return super().debug_str(level) def is_class_hub(self): return self._is_class_hub(self._CLASS) @@ -287,13 +296,14 @@ def is_initialized(self): for p in self._PROPS: if p not in self or not self.validate_int(self[p], 16): return False - return super(UsbInterface, self).is_initialized() + return super().is_initialized() def is_child_of(self, parent): if isinstance(parent, UsbDevice) and parent.is_initialized(): conf_value = parent[UsbDevice._CONF_VALUE] - pattern = r"^{}:{}\.\d+$".format(re.escape(parent.get_node()), - re.escape(conf_value)) + pattern = r"^{}:{}\.\d+$".format( + re.escape(parent.get_node()), re.escape(conf_value) 
+ ) return re.match(pattern, self.get_node()) is not None return False @@ -318,14 +328,15 @@ def get_usb_info(): return devices, interfaces -class Policy(object): - """ Parse policy file, and check if a UsbDevice can be passed through +class Policy: + """Parse policy file, and check if a UsbDevice can be passed through Policy file spec reference: https://support.citrix.com/article/CTX119722 rule_list: the list of parsed rule """ + _PATH = "/etc/xensource/usb-policy.conf" _CLASS = "class" @@ -336,36 +347,40 @@ class Policy(object): _BCD_DEVICE = "rel" # key in policy <--> key in usb device - _KEY_MAP_DEVICE = {_ID_VENDOR: UsbDevice._ID_VENDOR, - _ID_PRODUCT: UsbDevice._ID_PRODUCT, - _BCD_DEVICE: UsbDevice._BCD_DEVICE} + _KEY_MAP_DEVICE = { + _ID_VENDOR: UsbDevice._ID_VENDOR, # pylint: disable=protected-access + _ID_PRODUCT: UsbDevice._ID_PRODUCT, # pylint: disable=protected-access + _BCD_DEVICE: UsbDevice._BCD_DEVICE, # pylint: disable=protected-access + } # key in policy <--> key in usb interface - _KEY_MAP_INTERFACE = {_CLASS: UsbInterface._CLASS, - _SUBCLASS: UsbInterface._SUB_CLASS, - _PROTOCOL: UsbInterface._PROTOCOL} - - _PAT_KEY = r"\s*({}|{}|{}|{}|{}|{})\s*".format(_CLASS, _SUBCLASS, - _PROTOCOL, _ID_VENDOR, - _ID_PRODUCT, _BCD_DEVICE) + _KEY_MAP_INTERFACE = { + _CLASS: UsbInterface._CLASS, # pylint: disable=protected-access + _SUBCLASS: UsbInterface._SUB_CLASS, # pylint: disable=protected-access + _PROTOCOL: UsbInterface._PROTOCOL, # pylint: disable=protected-access + } + + _PAT_KEY = r"\s*({}|{}|{}|{}|{}|{})\s*".format( + _CLASS, _SUBCLASS, _PROTOCOL, _ID_VENDOR, _ID_PRODUCT, _BCD_DEVICE + ) _PATTERN = r"{}=\s*([0-9a-f]+)".format(_PAT_KEY) _ALLOW = "allow" def __init__(self): - """ parse policy file, generate rule list + """parse policy file, generate rule list Note: hubs are never allowed to pass through """ self.rule_list = [] try: - with open(self._PATH, "r") as f: + with open(self._PATH) as f: log.debug("=== policy file begin") for line in f: log.debug(line[0:-1]) self.parse_line(line) log.debug("=== policy file end") - except IOError as e: + except OSError as e: # without policy file, no device will be allowed to passed through log_exit("Caught error {}, policy file error".format(str(e))) @@ -375,19 +390,21 @@ def __init__(self): def check_hex_length(self, name, value): if name in [self._CLASS, self._SUBCLASS, self._PROTOCOL]: - return 2 == len(value) + return len(value) == 2 if name in [self._ID_VENDOR, self._ID_PRODUCT, self._BCD_DEVICE]: - return 4 == len(value) + return len(value) == 4 return False @staticmethod def parse_error(pos, end, target, line): log_exit( - "Malformed policy rule, unable to parse '{}', malformed line: {}" - .format(target[pos:end], line)) + "Malformed policy rule, unable to parse '{}', malformed line: {}".format( + target[pos:end], line + ) + ) def parse_line(self, line): - """ parse one line of policy file, generate rule, and append it to + """parse one line of policy file, generate rule, and append it to self.rule_list Example: @@ -413,13 +430,10 @@ def parse_line(self, line): # 2. split action and match field # ^\s*(ALLOW|DENY)\s*:\s*([^:]*)$ try: - action, target = [part.strip() for part in line.split(":")] + action, target = (part.strip() for part in line.split(":")) except ValueError as e: if line.rstrip(): - log_exit("Caught error {}, malformed line: {}" - .format(str(e), line)) - # empty line, just return - return + log_exit("Caught error {}, malformed line: {}".format(str(e), line)) # 3. 
parse action # \s*(ALLOW|DENY)\s* @@ -429,37 +443,39 @@ def parse_line(self, line): elif action.lower() == "deny": rule[self._ALLOW] = False else: - log_exit("Malformed action'{}', malformed line: {}".format( - action, line)) + log_exit("Malformed action'{}', malformed line: {}".format(action, line)) # 4. parse key=value pairs # pattern = r"\s*(class|subclass|prot|vid|pid|rel)\s*=\s*([0-9a-f]+)" last_end = 0 - for matchNum, match in enumerate(re.finditer(self._PATTERN, target, - re.IGNORECASE)): - if last_end != match.start(): - self.parse_error(last_end, match.start(), target, line) + name = "" + value = "" + for m in re.finditer(self._PATTERN, target, re.IGNORECASE): + if last_end != m.start(): + self.parse_error(last_end, m.start(), target, line) try: - name, value = [part.lower() for part in match.groups()] + name, value = (part.lower() for part in m.groups()) # This can happen if `part` is None except AttributeError: - self.parse_error(match.start(), match.end(), target, line) + self.parse_error(m.start(), m.end(), target, line) # This should never happen, because the regexp has exactly two # matching groups except ValueError: - self.parse_error(match.start(), match.end(), target, line) + self.parse_error(m.start(), m.end(), target, line) if not self.check_hex_length(name, value): - log_exit("hex'{}' length error, malformed line {}".format( - str(value), line)) + log_exit( + "hex'{}' length error, malformed line {}".format(str(value), line) + ) if name in rule: - log_exit("duplicated tag'{}' found, malformed line {}". - format(name, line)) + log_exit( + "duplicated tag'{}' found, malformed line {}".format(name, line) + ) rule[name] = value - last_end = match.end() + last_end = m.end() if last_end != len(target): self.parse_error(last_end, len(target) + 1, target, line) @@ -477,14 +493,20 @@ def match_device_interface(self, rule, device, interface): :return:(bool) if they match """ for k in [k for k in rule if k in self._KEY_MAP_DEVICE]: - log.debug("check {} props[{}] against {}".format( - interface.get_node(), k, str(rule))) + log.debug( + "check {} props[{}] against {}".format( + interface.get_node(), k, str(rule) + ) + ) if not hex_equal(rule[k], device[self._KEY_MAP_DEVICE[k]]): return False for k in [k for k in rule if k in self._KEY_MAP_INTERFACE]: - log.debug("check {} props[{}] against {}".format( - interface.get_node(), k, str(rule))) + log.debug( + "check {} props[{}] against {}".format( + interface.get_node(), k, str(rule) + ) + ) if not hex_equal(rule[k], interface[self._KEY_MAP_INTERFACE[k]]): return False @@ -549,16 +571,19 @@ def check(self, device): def parse_args(): - parser = argparse.ArgumentParser( - description="scanner to get USB devices info") - parser.add_argument("-d", "--diagnostic", dest="diagnostic", - action="store_true", - help="enable diagnostic mode") + parser = argparse.ArgumentParser(description="scanner to get USB devices info") + parser.add_argument( + "-d", + "--diagnostic", + dest="diagnostic", + action="store_true", + help="enable diagnostic mode", + ) return parser.parse_args() def to_pusb(device): - """ convert UsbDevice to pusb dict + """convert UsbDevice to pusb dict Example pusb dict: [ @@ -612,7 +637,7 @@ def to_pusb(device): def make_pusbs_list(devices, interfaces): - """ check the USB devices and interfaces against policy file, + """check the USB devices and interfaces against policy file, and return the pusb list that can be passed through :param devices:([UsbDevice]) USB device list we found in host @@ -633,7 +658,7 @@ def 
make_pusbs_list(devices, interfaces): return [to_pusb(d) for d in devices if d.is_ready() and policy.check(d)] -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover args = parse_args() if args.diagnostic: log.logToSyslog(level=logging.DEBUG) @@ -643,8 +668,8 @@ def make_pusbs_list(devices, interfaces): # get usb info try: devices, interfaces = get_usb_info() - except Exception as e: - log_exit("Failed to get usb info: {}".format(str(e))) + except Exception as ex: + log_exit("Failed to get usb info: {}".format(str(ex))) # debug info log_list(devices) diff --git a/python3/unittest/import_file.py b/python3/unittest/import_file.py index 2589e640232..581f8f4b401 100644 --- a/python3/unittest/import_file.py +++ b/python3/unittest/import_file.py @@ -22,4 +22,4 @@ def get_module(module_name, file_path): """get the module from a file""" testdir = os.path.dirname(__file__) print(testdir) - return import_from_file(module_name, testdir + file_path) + return import_from_file(module_name, "{}/{}".format(testdir, file_path)) diff --git a/python3/unittest/test_hfx_filename.py b/python3/unittest/test_hfx_filename.py index 6e6964a24f2..0fc4f5abba3 100644 --- a/python3/unittest/test_hfx_filename.py +++ b/python3/unittest/test_hfx_filename.py @@ -12,7 +12,7 @@ # mock modules to avoid dependencies sys.modules["XenAPI"] = MagicMock() -hfx_filename = get_module("hfx_filename", "/../bin/hfx_filename") +hfx_filename = get_module("hfx_filename", "../bin/hfx_filename") @patch("socket.socket") diff --git a/scripts/test_usb_scan.py b/python3/unittest/test_usb_scan.py similarity index 66% rename from scripts/test_usb_scan.py rename to python3/unittest/test_usb_scan.py index c64d89d8276..150cc16afba 100644 --- a/scripts/test_usb_scan.py +++ b/python3/unittest/test_usb_scan.py @@ -2,21 +2,22 @@ # # unittest for usb_scan.py -try: - from collections.abc import Mapping, Container, Iterable -except ImportError: # python2 - from collections import Mapping, Container, Iterable -import mock import os import shutil import sys import tempfile import unittest +from collections.abc import Mapping + +import mock +from import_file import get_module + def nottest(obj): obj.__test__ = False return obj + sys.modules["xcp"] = mock.Mock() sys.modules["xcp.logger"] = mock.Mock() sys.modules["pyudev"] = mock.Mock() @@ -26,11 +27,11 @@ class MocDeviceAttrs(Mapping): def __init__(self, device): self.d = device.get_attr() - def __iter__(self): + def __iter__(self): # pragma: no cover for name in self.d: yield name - def __len__(self): + def __len__(self): # pragma: no cover return len(self.d) def __getitem__(self, name): @@ -38,7 +39,6 @@ def __getitem__(self, name): class MocDevice(Mapping): - def __init__(self, d): self.d = d @@ -56,11 +56,11 @@ def get_attr(self): def attributes(self): return MocDeviceAttrs(self) - def __iter__(self): + def __iter__(self): # pragma: no cover for name in self.get_prop(): yield name - def __len__(self): + def __len__(self): # pragma: no cover return len(self.get_prop()) def __getitem__(self, name): @@ -68,7 +68,6 @@ def __getitem__(self, name): class MocEnumerator(object): - def __init__(self, ds): self.ds = ds @@ -78,17 +77,16 @@ def __iter__(self): class MocContext(object): - def __init__(self, devices, interfaces): self.devices = devices self.interfaces = interfaces def list_devices(self, **kwargs): - if "usb" == kwargs.pop("subsystem"): + if kwargs.pop("subsystem") == "usb": dev_type = kwargs.pop("DEVTYPE") - if "usb_device" == dev_type: + if dev_type == "usb_device": return 
MocEnumerator(self.devices) - elif "usb_interface" == dev_type: + elif dev_type == "usb_interface": return MocEnumerator(self.interfaces) return MocEnumerator([]) @@ -97,8 +95,7 @@ def mock_setup(mod, devices, interfaces, path): mod.log.error = test_log mod.log.debug = test_log mod.Policy._PATH = path - mod.pyudev.Context = mock.Mock(return_value=MocContext( - devices, interfaces)) + mod.pyudev.Context = mock.Mock(return_value=MocContext(devices, interfaces)) @nottest @@ -107,65 +104,70 @@ def test_log(m): class TestUsbScan(unittest.TestCase): - def setUp(self): - try: - self.work_dir = tempfile.mkdtemp(prefix="test_usb_scan") - except: - raise + self.work_dir = tempfile.mkdtemp(prefix="test_usb_scan") def tearDown(self): shutil.rmtree(self.work_dir, ignore_errors=True) @nottest - def test_usb_common(self, moc_devices, moc_interfaces, moc_results, - path="./scripts/usb-policy.conf"): - import usb_scan + def test_usb_common( + self, moc_devices, moc_interfaces, moc_results, path="./scripts/usb-policy.conf" + ): + usb_scan = get_module("usb_scan", "../libexec/usb_scan.py") + mock_setup(usb_scan, moc_devices, moc_interfaces, path) devices, interfaces = usb_scan.get_usb_info() + usb_scan.log_list(devices) + usb_scan.log_list(interfaces) + pusbs = usb_scan.make_pusbs_list(devices, interfaces) # pass pusbs in json to XAPI self.assertEqual(sorted(pusbs), sorted(moc_results)) @nottest - def test_usb_exit(self, devices, interfaces, results, - path="./scripts/usb-policy.conf", msg=""): + def test_usb_exit( + self, devices, interfaces, results, + path="./scripts/usb-policy.conf", + msg="" + ): # pylint: disable=too-many-arguments with self.assertRaises(SystemExit) as cm: self.test_usb_common(devices, interfaces, results, path) if msg: - self.assertIn(msg, cm.exception.code) + # cm.exception.code is int type whose format + # looks like "duplicated tag'vid' found, + # malformed line ALLOW:vid=056a vid=0314 class=03" + self.assertIn(msg, cm.exception.code) # pytype: disable=wrong-arg-types def test_usb_dongle(self): devices = [ { "name": "1-2", - "props": { - "ID_VENDOR_FROM_DATABASE": "Feitian Technologies, Inc." - }, + "props": {"ID_VENDOR_FROM_DATABASE": "Feitian Technologies, Inc."}, "attrs": { - "idVendor": "096e", - "bNumInterfaces": " 1", - "bConfigurationValue": "1", - "bcdDevice": "010a", - "version": " 1.10", - "idProduct": "0302", - "bDeviceClass": "00", - "speed": "480" - } + "idVendor": b"096e", + "bNumInterfaces": b" 1", + "bConfigurationValue": b"1", + "bcdDevice": b"010a", + "version": b" 1.10", + "idProduct": b"0302", + "bDeviceClass": b"00", + "speed": b"480", + }, } ] interfaces = [ { "name": "1-2:1.0", "attrs": { - "bInterfaceClass": "03", - "bInterfaceSubClass": "00", - "bInterfaceProtocol": "00", - "bInterfaceNumber": "00", - } + "bInterfaceClass": b"03", + "bInterfaceSubClass": b"00", + "bInterfaceProtocol": b"00", + "bInterfaceNumber": b"00", + }, } ] results = [ @@ -178,7 +180,7 @@ def test_usb_dongle(self): "vendor-id": "096e", "path": "1-2", "serial": "", - "speed": "480" + "speed": "480", } ] self.test_usb_common(devices, interfaces, results) @@ -187,30 +189,28 @@ def test_usb_dongle_on_hub(self): devices = [ { "name": "1-2.1", - "props": { - "ID_VENDOR_FROM_DATABASE": "Feitian Technologies, Inc." 
- }, + "props": {"ID_VENDOR_FROM_DATABASE": "Feitian Technologies, Inc."}, "attrs": { - "idVendor": "096e", - "bNumInterfaces": " 1", - "bConfigurationValue": "1", - "bcdDevice": "010a", - "version": " 1.10", - "idProduct": "0302", - "bDeviceClass": "00", - "speed": "12" - } + "idVendor": b"096e", + "bNumInterfaces": b" 1", + "bConfigurationValue": b"1", + "bcdDevice": b"010a", + "version": b" 1.10", + "idProduct": b"0302", + "bDeviceClass": b"00", + "speed": b"12", + }, } ] interfaces = [ { "name": "1-2.1:1.0", "attrs": { - "bInterfaceClass": "03", - "bInterfaceSubClass": "00", - "bInterfaceProtocol": "00", - "bInterfaceNumber": "00", - } + "bInterfaceClass": b"03", + "bInterfaceSubClass": b"00", + "bInterfaceProtocol": b"00", + "bInterfaceNumber": b"00", + }, } ] results = [ @@ -223,7 +223,7 @@ def test_usb_dongle_on_hub(self): "vendor-id": "096e", "path": "1-2.1", "serial": "", - "speed": "12" + "speed": "12", } ] self.test_usb_common(devices, interfaces, results) @@ -232,66 +232,59 @@ def test_usb_dongle_unbinded(self): devices = [ { "name": "1-2", - "props": { - "ID_VENDOR_FROM_DATABASE": "Feitian Technologies, Inc." - }, + "props": {"ID_VENDOR_FROM_DATABASE": "Feitian Technologies, Inc."}, "attrs": { - "idVendor": "096e", - "bNumInterfaces": "", - "bConfigurationValue": "", - "bcdDevice": "010a", - "version": " 1.10", - "idProduct": "0302", - "bDeviceClass": "00", - } + "idVendor": b"096e", + "bNumInterfaces": b"", + "bConfigurationValue": b"", + "bcdDevice": b"010a", + "version": b" 1.10", + "idProduct": b"0302", + "bDeviceClass": b"00", + }, } ] - interfaces = [ - ] - results = [ - ] + interfaces = [] + results = [] self.test_usb_common(devices, interfaces, results) def test_usb_keyboard(self): devices = [ { "name": "1-2", - "props": { - "ID_VENDOR_FROM_DATABASE": "Dell Computer Corp." 
- }, + "props": {"ID_VENDOR_FROM_DATABASE": "Dell Computer Corp."}, "attrs": { - "idVendor": "413c", - "bNumInterfaces": " 2", - "bConfigurationValue": "1", - "bcdDevice": "0110", - "version": " 2.00", - "idProduct": "2113", - "bDeviceClass": "00", - } + "idVendor": b"413c", + "bNumInterfaces": b" 2", + "bConfigurationValue": b"1", + "bcdDevice": b"0110", + "version": b" 2.00", + "idProduct": b"2113", + "bDeviceClass": b"00", + }, } ] interfaces = [ { "name": "1-2:1.0", "attrs": { - "bInterfaceClass": "03", - "bInterfaceSubClass": "01", - "bInterfaceProtocol": "01", - "bInterfaceNumber": "00", - } + "bInterfaceClass": b"03", + "bInterfaceSubClass": b"01", + "bInterfaceProtocol": b"01", + "bInterfaceNumber": b"00", + }, }, { "name": "1-2:1.1", "attrs": { - "bInterfaceClass": "03", - "bInterfaceSubClass": "00", - "bInterfaceProtocol": "00", - "bInterfaceNumber": "01", - } - } - ] - results = [ + "bInterfaceClass": b"03", + "bInterfaceSubClass": b"00", + "bInterfaceProtocol": b"00", + "bInterfaceNumber": b"01", + }, + }, ] + results = [] self.test_usb_common(devices, interfaces, results) def test_usb_config_missing(self): @@ -309,8 +302,7 @@ def test_usb_config_error_unexpected_chars_with_comment(self): ALLOW:vid=056a pid=0314 class=03 # Wacom Intuos tablet ALLOW: # Otherwise allow everything else """ - self.test_usb_config_error_common(content, - "to unpack") + self.test_usb_config_error_common(content, "to unpack") def test_usb_config_error_duplicated_key(self): content = """# duplicated key word @@ -377,13 +369,11 @@ def test_usb_config_error_unexpected_non_empty_line(self): aa ALLOW: # Otherwise allow everything else """ - self.test_usb_config_error_common(content, - "to unpack") + self.test_usb_config_error_common(content, "to unpack") def test_usb_config_error_missing_colon(self): content = """# missing colon after action ALLOW:vid=056a pid=0314 class=03 # Wacom Intuos tablet ALLOW # Otherwise allow everything else """ - self.test_usb_config_error_common(content, - "to unpack") + self.test_usb_config_error_common(content, "to unpack") diff --git a/scripts/Makefile b/scripts/Makefile index 83b5526780a..d5984d927a7 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -110,7 +110,6 @@ install: $(IPROG) upload-wrapper logs-download $(DESTDIR)$(LIBEXECDIR) $(IDATA) usb-policy.conf $(DESTDIR)$(ETCXENDIR) $(IPROG) usb_reset.py $(DESTDIR)$(LIBEXECDIR) - $(IPROG) usb_scan.py $(DESTDIR)$(LIBEXECDIR) mkdir -p $(DESTDIR)$(OPTDIR)/packages/iso #omg XXX $(IPROG) xapi-rolling-upgrade-miami $(DESTDIR)$(LIBEXECDIR)/xapi-rolling-upgrade $(IPROG) set-hostname $(DESTDIR)$(LIBEXECDIR) @@ -195,4 +194,3 @@ endif $(IDATA) mail-languages/ja-JP.json $(DESTDIR)/etc/xapi.d/mail-languages # uefi mkdir -p $(DESTDIR)/etc/xapi.d/efi-clone - From aac47d37a1841f249ac7100ffb62e9d93b44b740 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 4 Mar 2024 05:52:09 +0000 Subject: [PATCH 016/222] CP-47334: Move nbd_client_manager.py to python3/libexec Signed-off-by: Stephen Cheng --- python3/Makefile | 3 ++- {scripts => python3/libexec}/nbd_client_manager.py | 0 python3/unittest/test_nbd_client_manager.py | 0 scripts/Makefile | 1 - 4 files changed, 2 insertions(+), 2 deletions(-) rename {scripts => python3/libexec}/nbd_client_manager.py (100%) create mode 100644 python3/unittest/test_nbd_client_manager.py diff --git a/python3/Makefile b/python3/Makefile index 02d819443ed..e85e199f705 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -10,6 +10,7 @@ install: mkdir -p $(DESTDIR)$(SITE3_DIR) mkdir -p 
$(DESTDIR)$(LIBEXECDIR) - $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IDATA) packages/observer.py $(DESTDIR)$(SITE3_DIR)/ $(IPROG) libexec/usb_scan.py $(DESTDIR)$(LIBEXECDIR) + $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin + $(IPROG) libexec/nbd_client_manager.py $(DESTDIR)$(LIBEXECDIR) diff --git a/scripts/nbd_client_manager.py b/python3/libexec/nbd_client_manager.py similarity index 100% rename from scripts/nbd_client_manager.py rename to python3/libexec/nbd_client_manager.py diff --git a/python3/unittest/test_nbd_client_manager.py b/python3/unittest/test_nbd_client_manager.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/scripts/Makefile b/scripts/Makefile index d5984d927a7..51dc1b092f6 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -30,7 +30,6 @@ install: $(IPROG) xn_diagnostics $(DESTDIR)$(LIBEXECDIR) $(IPROG) thread_diagnostics $(DESTDIR)$(LIBEXECDIR) $(IPROG) list_plugins $(DESTDIR)$(LIBEXECDIR) - $(IPROG) nbd_client_manager.py $(DESTDIR)$(LIBEXECDIR) mkdir -p $(DESTDIR)$(ETCXENDIR)/bugtool/xapi mkdir -p $(DESTDIR)$(ETCXENDIR)/bugtool/xenopsd mkdir -p $(DESTDIR)$(ETCXENDIR)/bugtool/observer From a6ede57f308ba5b2b9416ef8cba0f3362b4b0511 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 4 Mar 2024 05:59:15 +0000 Subject: [PATCH 017/222] CP-47334: Formatting by "black" tool Signed-off-by: Stephen Cheng --- python3/libexec/nbd_client_manager.py | 114 +++++++++++++++----------- 1 file changed, 65 insertions(+), 49 deletions(-) diff --git a/python3/libexec/nbd_client_manager.py b/python3/libexec/nbd_client_manager.py index bebe97a2587..a832729b847 100644 --- a/python3/libexec/nbd_client_manager.py +++ b/python3/libexec/nbd_client_manager.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 """ Provides functions and a CLI for safely connecting to and disconnecting from @@ -20,7 +20,7 @@ LOGGER = logging.getLogger("nbd_client_manager") LOGGER.setLevel(logging.DEBUG) -LOCK_FILE = '/var/run/nonpersistent/nbd_client_manager' +LOCK_FILE = "/var/run/nonpersistent/nbd_client_manager" # Don't wait more than 10 minutes for the NBD device MAX_DEVICE_WAIT_MINUTES = 10 @@ -31,14 +31,17 @@ class NbdDeviceNotFound(Exception): The NBD device file does not exist. Raised when there are no free NBD devices. 
""" + def __init__(self, nbd_device): super(NbdDeviceNotFound, self).__init__( - "NBD device '{}' does not exist".format(nbd_device)) + "NBD device '{}' does not exist".format(nbd_device) + ) self.nbd_device = nbd_device class FileLock(object): """Container for data relating to a file lock""" + def __init__(self, path): self._path = path self._lock_file = None @@ -46,7 +49,7 @@ def __init__(self, path): def _lock(self): """Acquire the lock""" flags = fcntl.LOCK_EX - self._lock_file = open(self._path, 'w+') + self._lock_file = open(self._path, "w+") fcntl.flock(self._lock_file, flags) def _unlock(self): @@ -73,25 +76,19 @@ def _call(cmd_args, error=True): """ LOGGER.debug("Running cmd %s", cmd_args) proc = subprocess.Popen( - cmd_args, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - close_fds=True + cmd_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True ) stdout, stderr = proc.communicate() if error and proc.returncode != 0: LOGGER.error( - "%s exitted with code %d: %s", - ' '.join(cmd_args), - proc.returncode, - stderr) + "%s exitted with code %d: %s", " ".join(cmd_args), proc.returncode, stderr + ) raise subprocess.CalledProcessError( - returncode=proc.returncode, - cmd=cmd_args, - output=stderr) + returncode=proc.returncode, cmd=cmd_args, output=stderr + ) return proc.returncode @@ -105,7 +102,7 @@ def _is_nbd_device_connected(nbd_device): # 1 for a non-existent file. if not os.path.exists(nbd_device): raise NbdDeviceNotFound(nbd_device) - cmd = ['nbd-client', '-check', nbd_device] + cmd = ["nbd-client", "-check", nbd_device] returncode = _call(cmd, error=False) if returncode == 0: return True @@ -133,31 +130,37 @@ def _wait_for_nbd_device(nbd_device, connected): if datetime.now() > deadline: raise Exception( "Timed out waiting for connection state of device %s to be %s" - % (nbd_device, connected)) + % (nbd_device, connected) + ) LOGGER.debug( - 'Connection status of NBD device %s not yet %s, waiting', + "Connection status of NBD device %s not yet %s, waiting", nbd_device, - connected) + connected, + ) time.sleep(0.1) + PERSISTENT_INFO_DIR = "/var/run/nonpersistent/nbd" + def _get_persistent_connect_info_filename(device): """ Return the full path for the persistent file containing the connection details. 
This is based on the device name, so /dev/nbd0 -> /var/run/nonpersistent/nbd/0 """ - number = re.search('/dev/nbd([0-9]+)', device).group(1) - return PERSISTENT_INFO_DIR + '/' + number + number = re.search("/dev/nbd([0-9]+)", device).group(1) + return PERSISTENT_INFO_DIR + "/" + number + def _persist_connect_info(device, path, exportname): if not os.path.exists(PERSISTENT_INFO_DIR): os.makedirs(PERSISTENT_INFO_DIR) filename = _get_persistent_connect_info_filename(device) - with open(filename, 'w') as info_file: - info_file.write(json.dumps({'path':path, 'exportname':exportname})) + with open(filename, "w") as info_file: + info_file.write(json.dumps({"path": path, "exportname": exportname})) + def _remove_persistent_connect_info(device): try: @@ -165,22 +168,34 @@ def _remove_persistent_connect_info(device): except OSError: pass + def connect_nbd(path, exportname): """Connects to a free NBD device using nbd-client and returns its path""" # We should not ask for too many nbds, as we might not have enough memory - _call(['modprobe', 'nbd', 'nbds_max=24']) + _call(["modprobe", "nbd", "nbds_max=24"]) retries = 0 while True: try: with FILE_LOCK: nbd_device = _find_unused_nbd_device() - cmd = ['nbd-client', '-unix', path, nbd_device, - '-timeout', '60', '-name', exportname] + cmd = [ + "nbd-client", + "-unix", + path, + nbd_device, + "-timeout", + "60", + "-name", + exportname, + ] _call(cmd) _wait_for_nbd_device(nbd_device=nbd_device, connected=True) _persist_connect_info(nbd_device, path, exportname) - nbd = (nbd_device[len('/dev/'):] - if nbd_device.startswith('/dev/') else nbd_device) + nbd = ( + nbd_device[len("/dev/") :] + if nbd_device.startswith("/dev/") + else nbd_device + ) with open("/sys/block/" + nbd + "/queue/scheduler", "w") as fd: fd.write("none") # Set the NBD queue size to the same as the qcow2 cluster size @@ -191,7 +206,7 @@ def connect_nbd(path, exportname): return nbd_device except NbdDeviceNotFound as exn: - LOGGER.warn('Failed to find free nbd device: %s', exn) + LOGGER.warn("Failed to find free nbd device: %s", exn) retries = retries + 1 if retries == 1: # We sleep for a shorter period first, in case an nbd device @@ -212,7 +227,7 @@ def disconnect_nbd_device(nbd_device): try: if _is_nbd_device_connected(nbd_device=nbd_device): _remove_persistent_connect_info(nbd_device) - cmd = ['nbd-client', '-disconnect', nbd_device] + cmd = ["nbd-client", "-disconnect", nbd_device] _call(cmd) _wait_for_nbd_device(nbd_device=nbd_device, connected=False) except NbdDeviceNotFound: @@ -220,10 +235,9 @@ def disconnect_nbd_device(nbd_device): pass - def _connect_cli(args): device = connect_nbd(path=args.path, exportname=args.exportname) - print device + print(device) def _disconnect_cli(args): @@ -234,39 +248,41 @@ def _main(): # Configure the root logger to log into syslog # (Specifically, into /var/log/user.log) syslog_handler = logging.handlers.SysLogHandler( - address='/dev/log', - facility=logging.handlers.SysLogHandler.LOG_USER) + address="/dev/log", facility=logging.handlers.SysLogHandler.LOG_USER + ) # Ensure the program name is included in the log messages: - formatter = logging.Formatter('%(name)s: [%(levelname)s] %(message)s') + formatter = logging.Formatter("%(name)s: [%(levelname)s] %(message)s") syslog_handler.setFormatter(formatter) logging.getLogger().addHandler(syslog_handler) try: parser = argparse.ArgumentParser( - description="Connect to and disconnect from an NBD device") + description="Connect to and disconnect from an NBD device" + ) - subparsers = 
parser.add_subparsers(dest='command_name') + subparsers = parser.add_subparsers(dest="command_name") parser_connect = subparsers.add_parser( - 'connect', - help='Connect to a free NBD device and return its path') + "connect", help="Connect to a free NBD device and return its path" + ) parser_connect.add_argument( - '--path', + "--path", required=True, - help="The path of the Unix domain socket of the NBD server") + help="The path of the Unix domain socket of the NBD server", + ) parser_connect.add_argument( - '--exportname', + "--exportname", required=True, - help="The export name of the device to connect to") + help="The export name of the device to connect to", + ) parser_connect.set_defaults(func=_connect_cli) parser_disconnect = subparsers.add_parser( - 'disconnect', - help='Disconnect from the given NBD device') + "disconnect", help="Disconnect from the given NBD device" + ) parser_disconnect.add_argument( - '--device', - required=True, - help="The path of the NBD device to disconnect") + "--device", required=True, help="The path of the NBD device to disconnect" + ) parser_disconnect.set_defaults(func=_disconnect_cli) args = parser.parse_args() @@ -276,5 +292,5 @@ def _main(): raise -if __name__ == '__main__': +if __name__ == "__main__": _main() From 13a51211cca0040b027bd3ea90ef9a7ee88faf81 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 4 Mar 2024 08:58:30 +0000 Subject: [PATCH 018/222] CP-47334: Migrate nbd_client_manager.py to python3 Signed-off-by: Stephen Cheng --- python3/libexec/nbd_client_manager.py | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/python3/libexec/nbd_client_manager.py b/python3/libexec/nbd_client_manager.py index a832729b847..aece3f55fed 100644 --- a/python3/libexec/nbd_client_manager.py +++ b/python3/libexec/nbd_client_manager.py @@ -6,17 +6,16 @@ """ import argparse +import fcntl +import json import logging import logging.handlers import os +import re import subprocess import time -import fcntl -import json -import re from datetime import datetime, timedelta - LOGGER = logging.getLogger("nbd_client_manager") LOGGER.setLevel(logging.DEBUG) @@ -76,7 +75,8 @@ def _call(cmd_args, error=True): """ LOGGER.debug("Running cmd %s", cmd_args) proc = subprocess.Popen( - cmd_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True + cmd_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, + universal_newlines=True ) stdout, stderr = proc.communicate() @@ -158,7 +158,7 @@ def _persist_connect_info(device, path, exportname): if not os.path.exists(PERSISTENT_INFO_DIR): os.makedirs(PERSISTENT_INFO_DIR) filename = _get_persistent_connect_info_filename(device) - with open(filename, "w") as info_file: + with open(filename, "w", encoding="utf-8") as info_file: info_file.write(json.dumps({"path": path, "exportname": exportname})) @@ -196,12 +196,12 @@ def connect_nbd(path, exportname): if nbd_device.startswith("/dev/") else nbd_device ) - with open("/sys/block/" + nbd + "/queue/scheduler", "w") as fd: + with open("/sys/block/" + nbd + "/queue/scheduler", "w", encoding="utf-8") as fd: fd.write("none") # Set the NBD queue size to the same as the qcow2 cluster size - with open("/sys/block/" + nbd + "/queue/max_sectors_kb", "w") as fd: + with open("/sys/block/" + nbd + "/queue/max_sectors_kb", "w", encoding="utf-8") as fd: fd.write("512") - with open("/sys/block/" + nbd + "/queue/nr_requests", "w") as fd: + with open("/sys/block/" + nbd + "/queue/nr_requests", "w", encoding="utf-8") as fd: fd.write("8") return 
nbd_device From 031ee7c446579e32fd6473a2b6ed5bc4d9ea407b Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Wed, 6 Mar 2024 05:31:01 +0000 Subject: [PATCH 019/222] CP-47334: Add unit tests for nbd_client_manager.py Signed-off-by: Stephen Cheng --- python3/libexec/nbd_client_manager.py | 8 +- python3/unittest/test_nbd_client_manager.py | 231 ++++++++++++++++++++ 2 files changed, 235 insertions(+), 4 deletions(-) diff --git a/python3/libexec/nbd_client_manager.py b/python3/libexec/nbd_client_manager.py index aece3f55fed..aefe1002ee3 100644 --- a/python3/libexec/nbd_client_manager.py +++ b/python3/libexec/nbd_client_manager.py @@ -37,8 +37,7 @@ def __init__(self, nbd_device): ) self.nbd_device = nbd_device - -class FileLock(object): +class FileLock: # pragma: no cover """Container for data relating to a file lock""" def __init__(self, path): @@ -243,8 +242,9 @@ def _connect_cli(args): def _disconnect_cli(args): disconnect_nbd_device(nbd_device=args.device) - -def _main(): +# The main function is covered by manual test and XenRT test +# Exclude it from unit test coverage +def _main(): # pragma: no cover # Configure the root logger to log into syslog # (Specifically, into /var/log/user.log) syslog_handler = logging.handlers.SysLogHandler( diff --git a/python3/unittest/test_nbd_client_manager.py b/python3/unittest/test_nbd_client_manager.py index e69de29bb2d..461755c8ba7 100644 --- a/python3/unittest/test_nbd_client_manager.py +++ b/python3/unittest/test_nbd_client_manager.py @@ -0,0 +1,231 @@ +#!/usr/bin/env python3 +""" +This module provides unittest for nbd_client_manager.py +""" + +import sys +import unittest +import subprocess +from mock import MagicMock, patch, mock_open, call +from import_file import get_module + +nbd_client_manager = get_module("nbd_client_manager", "../libexec/nbd_client_manager.py") + +# mock modules to avoid dependencies +sys.modules["XenAPI"] = MagicMock() + +@patch('subprocess.Popen') +class TestCallFunction(unittest.TestCase): + + def test_call_success(self, mock_popen): + mock_process = mock_popen.return_value + mock_process.communicate.return_value = ("ls -l output", "") + mock_process.returncode = 0 + + returncode = nbd_client_manager._call(["ls", "-l"]) + + self.assertEqual(returncode, 0) + + def test_call_failure(self, mock_popen): + mock_process = mock_popen.return_value + mock_process.communicate.return_value = ("", "err") + mock_process.returncode = 1 + + with self.assertRaises(subprocess.CalledProcessError) as cm: + nbd_client_manager._call(["invalid_cmd"]) + + self.assertEqual(cm.exception.returncode, 1) + +@patch('nbd_client_manager.os.path.exists') +@patch('nbd_client_manager._call') +class TestIsNbdDeviceConnected(unittest.TestCase): + + def test_nbd_device_connected(self, mock_call, mock_exists): + mock_exists.return_value = True + mock_call.return_value = 0 + + result = nbd_client_manager._is_nbd_device_connected('/dev/nbd0') + + self.assertTrue(result) + mock_call.assert_called_once_with(["nbd-client", "-check", "/dev/nbd0"], error=False) + + def test_nbd_device_not_connected(self, mock_call, mock_exists): + mock_exists.return_value = True + mock_call.return_value = 1 + + result = nbd_client_manager._is_nbd_device_connected('/dev/nbd1') + + self.assertFalse(result) + mock_call.assert_called_once_with(["nbd-client", "-check", "/dev/nbd1"], error=False) + + def test_nbd_device_not_found(self, mock_call, mock_exists): + mock_exists.return_value = False + + # Testing the function with a non-existent device + with 
self.assertRaises(nbd_client_manager.NbdDeviceNotFound): + nbd_client_manager._is_nbd_device_connected('/dev/nbd2') + +@patch('nbd_client_manager._is_nbd_device_connected') +class TestFindUnusedNbdDevice(unittest.TestCase): + def test_find_unused_nbd_device(self, mock_is_nbd_device_connected): + # Mocking the function to return True for /dev/nbd0 and False for /dev/nbd1 + mock_is_nbd_device_connected.side_effect = [True, False] + + # Testing the function + unused_device = nbd_client_manager._find_unused_nbd_device() + + # Assertion + self.assertEqual(unused_device, "/dev/nbd1") + + def test_no_unused_nbd_device(self, mock_is_nbd_device_connected): + # Mocking the function to always raise NbdDeviceNotFound + mock_is_nbd_device_connected.side_effect = nbd_client_manager.NbdDeviceNotFound('/dev/nbd1') + + # Testing the function when no unused devices are found + with self.assertRaises(nbd_client_manager.NbdDeviceNotFound): + nbd_client_manager._find_unused_nbd_device() + +@patch('nbd_client_manager._is_nbd_device_connected') +class TestWaitForNbdDevice(unittest.TestCase): + def test_wait_for_nbd_device_connected(self, mock_is_nbd_device_connected): + mock_is_nbd_device_connected.return_value = True + nbd_client_manager._wait_for_nbd_device('/dev/nbd0', connected=True) + mock_is_nbd_device_connected.assert_called_once_with(nbd_device='/dev/nbd0') + + def test_wait_for_nbd_device_disconnected(self, mock_is_nbd_device_connected): + mock_is_nbd_device_connected.return_value = False + nbd_client_manager._wait_for_nbd_device('/dev/nbd1', connected=False) + mock_is_nbd_device_connected.assert_called_once_with(nbd_device='/dev/nbd1') + +class TestGetPersistentConnectInfoFilename(unittest.TestCase): + def test_get_persistent_connect_info_filename(self): + # Test for device /dev/nbd0 + device = "/dev/nbd0" + expected_filename = f"{nbd_client_manager.PERSISTENT_INFO_DIR}/0" + self.assertEqual(nbd_client_manager._get_persistent_connect_info_filename(device), expected_filename) + +@patch('nbd_client_manager.os.makedirs') +@patch('nbd_client_manager.os.path.exists') +class TestPersistConnectInfo(unittest.TestCase): + + def test_persist_connect_info(self, mock_exists, mock_makedirs): + mock_exists.return_value = False + + # Test data + device = "/dev/nbd0" + path = "/some/path" + exportname = "example_export" + + # Setting up mock for file write + mock_file = mock_open() + with patch('builtins.open', mock_file): + # Run the function + nbd_client_manager._persist_connect_info(device, path, exportname) + + # Assertions + mock_makedirs.assert_called_once_with(nbd_client_manager.PERSISTENT_INFO_DIR) + mock_file.assert_called_once_with('/var/run/nonpersistent/nbd/0', 'w', encoding='utf-8') + mock_file().write.assert_called_once_with('{"path": "/some/path", "exportname": "example_export"}') + + def test_persist_connect_info_directory_exists(self, mock_exists, mock_makedirs): + mock_exists.return_value = True + + # Test data + device = "/dev/nbd0" + path = "/some/path" + exportname = "example_export" + + # Setting up mock for file write + mock_file = mock_open() + with patch('builtins.open', mock_file): + # Run the function + nbd_client_manager._persist_connect_info(device, path, exportname) + + # Assertions + mock_makedirs.assert_not_called() + mock_file.assert_called_once_with('/var/run/nonpersistent/nbd/0', 'w', encoding='utf-8') + mock_file().write.assert_called_once_with('{"path": "/some/path", "exportname": "example_export"}') + +class TestRemovePersistentConnectInfo(unittest.TestCase): + 
@patch('nbd_client_manager.os.remove') + def test_remove_persistent_connect_info(self, mock_os_remove): + nbd_client_manager._remove_persistent_connect_info('/dev/nbd0') + mock_os_remove.assert_called_once_with('/var/run/nonpersistent/nbd/0') + +class TestConnectNbd(unittest.TestCase): + @patch('nbd_client_manager._call') + @patch('nbd_client_manager._find_unused_nbd_device') + @patch('nbd_client_manager._wait_for_nbd_device') + @patch('nbd_client_manager._persist_connect_info') + @patch('nbd_client_manager.open') + @patch('nbd_client_manager.FILE_LOCK', MagicMock()) # Mocking FILE_LOCK + def test_connect_nbd(self, mock_open, mock_persist_info, mock_wait_for_nbd, mock_find_unused, mock_call): + # Mocking necessary functions and file operations + mock_find_unused.return_value = "/dev/nbd0" + mock_call.return_value = 0 + mock_file_scheduler = MagicMock() + mock_file_max_sectors_kb = MagicMock() + mock_file_nr_requests = MagicMock() + mock_open.side_effect = [mock_file_scheduler, mock_file_max_sectors_kb, mock_file_nr_requests] + + # Testing the function + result = nbd_client_manager.connect_nbd("/path/of/socket/file", "export_name") + + # Assertions + self.assertEqual(result, "/dev/nbd0") + mock_find_unused.assert_called_once() + mock_call.assert_called() + mock_wait_for_nbd.assert_called_once_with(nbd_device="/dev/nbd0", connected=True) + mock_persist_info.assert_called_once_with("/dev/nbd0", "/path/of/socket/file", "export_name") + # Checking open calls + mock_open.assert_has_calls([ + call("/sys/block/nbd0/queue/scheduler", "w", encoding="utf-8"), + call("/sys/block/nbd0/queue/max_sectors_kb", "w", encoding="utf-8"), + call("/sys/block/nbd0/queue/nr_requests", "w", encoding="utf-8") + ], any_order=True) + +@patch('nbd_client_manager._is_nbd_device_connected') +@patch('nbd_client_manager._remove_persistent_connect_info') +@patch('nbd_client_manager._call') +@patch('nbd_client_manager._wait_for_nbd_device') +class TestDisconnectNbdDevice(unittest.TestCase): + + def test_disconnect_nbd_device_connected(self, mock_wait_for_nbd, mock_call, mock_remove_persistent, mock_is_connected): + # Mocking _is_nbd_device_connected to return True + mock_is_connected.return_value = True + + # Testing the function when device is connected + nbd_client_manager.disconnect_nbd_device("/dev/nbd0") + + # Assertions + mock_is_connected.assert_called_once_with(nbd_device="/dev/nbd0") + mock_remove_persistent.assert_called_once_with("/dev/nbd0") + mock_call.assert_called_once_with(["nbd-client", "-disconnect", "/dev/nbd0"]) + mock_wait_for_nbd.assert_called_once_with(nbd_device="/dev/nbd0", connected=False) + + def test_disconnect_nbd_device_disconnected(self, mock_wait_for_nbd, mock_call, mock_remove_persistent, mock_is_connected): + # Mocking _is_nbd_device_connected to return False + mock_is_connected.return_value = False + + # Testing the function when device is already disconnected + nbd_client_manager.disconnect_nbd_device("/dev/nbd0") + + # Assertions + mock_is_connected.assert_called_once_with(nbd_device="/dev/nbd0") + mock_remove_persistent.assert_not_called() + mock_call.assert_not_called() + mock_wait_for_nbd.assert_not_called() + + def test_disconnect_nbd_device_not_found(self, mock_wait_for_nbd, mock_call, mock_remove_persistent, mock_is_connected): + # Mocking _is_nbd_device_connected to raise NbdDeviceNotFound + mock_is_connected.side_effect = nbd_client_manager.NbdDeviceNotFound('/dev/nbd0') + + # Testing the function when device is not found + nbd_client_manager.disconnect_nbd_device("/dev/nbd0") + 
+ # Assertions + mock_is_connected.assert_called_once_with(nbd_device="/dev/nbd0") + mock_remove_persistent.assert_not_called() + mock_call.assert_not_called() + mock_wait_for_nbd.assert_not_called() + From 8f8708801886f525f3a2eac404f5ddeda2347d6d Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Thu, 7 Mar 2024 00:57:54 +0000 Subject: [PATCH 020/222] CP-47334: Fix pylint and pytype issues Signed-off-by: Stephen Cheng --- python3/libexec/nbd_client_manager.py | 51 +++++++++++++++------ python3/unittest/test_nbd_client_manager.py | 51 ++++++++++++++------- 2 files changed, 71 insertions(+), 31 deletions(-) diff --git a/python3/libexec/nbd_client_manager.py b/python3/libexec/nbd_client_manager.py index aefe1002ee3..5179ccc21cc 100644 --- a/python3/libexec/nbd_client_manager.py +++ b/python3/libexec/nbd_client_manager.py @@ -25,6 +25,18 @@ MAX_DEVICE_WAIT_MINUTES = 10 +class NotGetNbdNumber(Exception): + """ + The NBD device should be in this format: nbd{0-100} + If we cannot match this pattern, raise this exception + """ + +class NbdConnStateTimeout(Exception): + """ + If we cannot get the connection status of a nbd device, + raise this exception. + """ + class NbdDeviceNotFound(Exception): """ The NBD device file does not exist. Raised when there are no free NBD @@ -32,8 +44,8 @@ class NbdDeviceNotFound(Exception): """ def __init__(self, nbd_device): - super(NbdDeviceNotFound, self).__init__( - "NBD device '{}' does not exist".format(nbd_device) + super().__init__( + f"NBD device '{nbd_device}' does not exist" ) self.nbd_device = nbd_device @@ -47,7 +59,8 @@ def __init__(self, path): def _lock(self): """Acquire the lock""" flags = fcntl.LOCK_EX - self._lock_file = open(self._path, "w+") + # pylint: disable=consider-using-with + self._lock_file = open(self._path, "w+", encoding="utf8") fcntl.flock(self._lock_file, flags) def _unlock(self): @@ -73,12 +86,13 @@ def _call(cmd_args, error=True): If [error] and exit code != 0, log and throws a CalledProcessError. """ LOGGER.debug("Running cmd %s", cmd_args) + # pylint: disable=consider-using-with proc = subprocess.Popen( cmd_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True, universal_newlines=True ) - stdout, stderr = proc.communicate() + _, stderr = proc.communicate() if error and proc.returncode != 0: LOGGER.error( @@ -117,19 +131,22 @@ def _find_unused_nbd_device(): Raises NbdDeviceNotFound if no devices are available. """ for device_no in range(0, 1000): - nbd_device = "/dev/nbd{}".format(device_no) + nbd_device = f"/dev/nbd{device_no}" if not _is_nbd_device_connected(nbd_device=nbd_device): return nbd_device - + # Actually `_is_nbd_device_connected` will raise an exception + # if no unused device + # Add this return for pylint check + return None def _wait_for_nbd_device(nbd_device, connected): deadline = datetime.now() + timedelta(minutes=MAX_DEVICE_WAIT_MINUTES) while _is_nbd_device_connected(nbd_device=nbd_device) != connected: if datetime.now() > deadline: - raise Exception( - "Timed out waiting for connection state of device %s to be %s" - % (nbd_device, connected) + raise NbdConnStateTimeout( + f"Timed out waiting for connection state of " + f"device {nbd_device} to be {connected}" ) LOGGER.debug( @@ -149,7 +166,10 @@ def _get_persistent_connect_info_filename(device): the connection details. 
This is based on the device name, so /dev/nbd0 -> /var/run/nonpersistent/nbd/0 """ - number = re.search("/dev/nbd([0-9]+)", device).group(1) + matched = re.search("/dev/nbd([0-9]+)", device) + if not matched: + raise NotGetNbdNumber(f"Can not get the nbd number for device: {device}") + number = matched.group(1) return PERSISTENT_INFO_DIR + "/" + number @@ -195,17 +215,20 @@ def connect_nbd(path, exportname): if nbd_device.startswith("/dev/") else nbd_device ) - with open("/sys/block/" + nbd + "/queue/scheduler", "w", encoding="utf-8") as fd: + with open("/sys/block/" + nbd + "/queue/scheduler", + "w", encoding="utf-8") as fd: fd.write("none") # Set the NBD queue size to the same as the qcow2 cluster size - with open("/sys/block/" + nbd + "/queue/max_sectors_kb", "w", encoding="utf-8") as fd: + with open("/sys/block/" + nbd + "/queue/max_sectors_kb", + "w", encoding="utf-8") as fd: fd.write("512") - with open("/sys/block/" + nbd + "/queue/nr_requests", "w", encoding="utf-8") as fd: + with open("/sys/block/" + nbd + "/queue/nr_requests", + "w", encoding="utf-8") as fd: fd.write("8") return nbd_device except NbdDeviceNotFound as exn: - LOGGER.warn("Failed to find free nbd device: %s", exn) + LOGGER.warning("Failed to find free nbd device: %s", exn) retries = retries + 1 if retries == 1: # We sleep for a shorter period first, in case an nbd device diff --git a/python3/unittest/test_nbd_client_manager.py b/python3/unittest/test_nbd_client_manager.py index 461755c8ba7..31f6d8083ca 100644 --- a/python3/unittest/test_nbd_client_manager.py +++ b/python3/unittest/test_nbd_client_manager.py @@ -14,6 +14,9 @@ # mock modules to avoid dependencies sys.modules["XenAPI"] = MagicMock() +# pylint: disable=protected-access +# pylint: disable=missing-function-docstring +# pylint: disable=missing-class-docstring @patch('subprocess.Popen') class TestCallFunction(unittest.TestCase): @@ -35,11 +38,11 @@ def test_call_failure(self, mock_popen): nbd_client_manager._call(["invalid_cmd"]) self.assertEqual(cm.exception.returncode, 1) - + @patch('nbd_client_manager.os.path.exists') -@patch('nbd_client_manager._call') class TestIsNbdDeviceConnected(unittest.TestCase): + @patch('nbd_client_manager._call') def test_nbd_device_connected(self, mock_call, mock_exists): mock_exists.return_value = True mock_call.return_value = 0 @@ -49,6 +52,7 @@ def test_nbd_device_connected(self, mock_call, mock_exists): self.assertTrue(result) mock_call.assert_called_once_with(["nbd-client", "-check", "/dev/nbd0"], error=False) + @patch('nbd_client_manager._call') def test_nbd_device_not_connected(self, mock_call, mock_exists): mock_exists.return_value = True mock_call.return_value = 1 @@ -58,13 +62,13 @@ def test_nbd_device_not_connected(self, mock_call, mock_exists): self.assertFalse(result) mock_call.assert_called_once_with(["nbd-client", "-check", "/dev/nbd1"], error=False) - def test_nbd_device_not_found(self, mock_call, mock_exists): + def test_nbd_device_not_found(self, mock_exists): mock_exists.return_value = False # Testing the function with a non-existent device with self.assertRaises(nbd_client_manager.NbdDeviceNotFound): nbd_client_manager._is_nbd_device_connected('/dev/nbd2') - + @patch('nbd_client_manager._is_nbd_device_connected') class TestFindUnusedNbdDevice(unittest.TestCase): def test_find_unused_nbd_device(self, mock_is_nbd_device_connected): @@ -102,7 +106,8 @@ def test_get_persistent_connect_info_filename(self): # Test for device /dev/nbd0 device = "/dev/nbd0" expected_filename = 
f"{nbd_client_manager.PERSISTENT_INFO_DIR}/0" - self.assertEqual(nbd_client_manager._get_persistent_connect_info_filename(device), expected_filename) + self.assertEqual(nbd_client_manager._get_persistent_connect_info_filename(device), + expected_filename) @patch('nbd_client_manager.os.makedirs') @patch('nbd_client_manager.os.path.exists') @@ -110,7 +115,7 @@ class TestPersistConnectInfo(unittest.TestCase): def test_persist_connect_info(self, mock_exists, mock_makedirs): mock_exists.return_value = False - + # Test data device = "/dev/nbd0" path = "/some/path" @@ -125,11 +130,13 @@ def test_persist_connect_info(self, mock_exists, mock_makedirs): # Assertions mock_makedirs.assert_called_once_with(nbd_client_manager.PERSISTENT_INFO_DIR) mock_file.assert_called_once_with('/var/run/nonpersistent/nbd/0', 'w', encoding='utf-8') - mock_file().write.assert_called_once_with('{"path": "/some/path", "exportname": "example_export"}') + mock_file().write.assert_called_once_with( + '{"path": "/some/path", "exportname": "example_export"}' + ) def test_persist_connect_info_directory_exists(self, mock_exists, mock_makedirs): mock_exists.return_value = True - + # Test data device = "/dev/nbd0" path = "/some/path" @@ -144,7 +151,9 @@ def test_persist_connect_info_directory_exists(self, mock_exists, mock_makedirs) # Assertions mock_makedirs.assert_not_called() mock_file.assert_called_once_with('/var/run/nonpersistent/nbd/0', 'w', encoding='utf-8') - mock_file().write.assert_called_once_with('{"path": "/some/path", "exportname": "example_export"}') + mock_file().write.assert_called_once_with( + '{"path": "/some/path", "exportname": "example_export"}' + ) class TestRemovePersistentConnectInfo(unittest.TestCase): @patch('nbd_client_manager.os.remove') @@ -159,14 +168,18 @@ class TestConnectNbd(unittest.TestCase): @patch('nbd_client_manager._persist_connect_info') @patch('nbd_client_manager.open') @patch('nbd_client_manager.FILE_LOCK', MagicMock()) # Mocking FILE_LOCK - def test_connect_nbd(self, mock_open, mock_persist_info, mock_wait_for_nbd, mock_find_unused, mock_call): + # pylint: disable=too-many-arguments + def test_connect_nbd(self, mock_openfile, mock_persist_info, + mock_wait_for_nbd, mock_find_unused, mock_call): # Mocking necessary functions and file operations mock_find_unused.return_value = "/dev/nbd0" mock_call.return_value = 0 mock_file_scheduler = MagicMock() mock_file_max_sectors_kb = MagicMock() mock_file_nr_requests = MagicMock() - mock_open.side_effect = [mock_file_scheduler, mock_file_max_sectors_kb, mock_file_nr_requests] + mock_openfile.side_effect = [mock_file_scheduler, + mock_file_max_sectors_kb, + mock_file_nr_requests] # Testing the function result = nbd_client_manager.connect_nbd("/path/of/socket/file", "export_name") @@ -176,9 +189,11 @@ def test_connect_nbd(self, mock_open, mock_persist_info, mock_wait_for_nbd, mock mock_find_unused.assert_called_once() mock_call.assert_called() mock_wait_for_nbd.assert_called_once_with(nbd_device="/dev/nbd0", connected=True) - mock_persist_info.assert_called_once_with("/dev/nbd0", "/path/of/socket/file", "export_name") + mock_persist_info.assert_called_once_with( + "/dev/nbd0", "/path/of/socket/file", "export_name" + ) # Checking open calls - mock_open.assert_has_calls([ + mock_openfile.assert_has_calls([ call("/sys/block/nbd0/queue/scheduler", "w", encoding="utf-8"), call("/sys/block/nbd0/queue/max_sectors_kb", "w", encoding="utf-8"), call("/sys/block/nbd0/queue/nr_requests", "w", encoding="utf-8") @@ -190,7 +205,8 @@ def test_connect_nbd(self, 
mock_open, mock_persist_info, mock_wait_for_nbd, mock @patch('nbd_client_manager._wait_for_nbd_device') class TestDisconnectNbdDevice(unittest.TestCase): - def test_disconnect_nbd_device_connected(self, mock_wait_for_nbd, mock_call, mock_remove_persistent, mock_is_connected): + def test_disconnect_nbd_device_connected(self, mock_wait_for_nbd, + mock_call, mock_remove_persistent, mock_is_connected): # Mocking _is_nbd_device_connected to return True mock_is_connected.return_value = True @@ -203,7 +219,8 @@ def test_disconnect_nbd_device_connected(self, mock_wait_for_nbd, mock_call, moc mock_call.assert_called_once_with(["nbd-client", "-disconnect", "/dev/nbd0"]) mock_wait_for_nbd.assert_called_once_with(nbd_device="/dev/nbd0", connected=False) - def test_disconnect_nbd_device_disconnected(self, mock_wait_for_nbd, mock_call, mock_remove_persistent, mock_is_connected): + def test_disconnect_nbd_device_disconnected(self, mock_wait_for_nbd, mock_call, + mock_remove_persistent, mock_is_connected): # Mocking _is_nbd_device_connected to return False mock_is_connected.return_value = False @@ -216,7 +233,8 @@ def test_disconnect_nbd_device_disconnected(self, mock_wait_for_nbd, mock_call, mock_call.assert_not_called() mock_wait_for_nbd.assert_not_called() - def test_disconnect_nbd_device_not_found(self, mock_wait_for_nbd, mock_call, mock_remove_persistent, mock_is_connected): + def test_disconnect_nbd_device_not_found(self, mock_wait_for_nbd, mock_call, + mock_remove_persistent, mock_is_connected): # Mocking _is_nbd_device_connected to raise NbdDeviceNotFound mock_is_connected.side_effect = nbd_client_manager.NbdDeviceNotFound('/dev/nbd0') @@ -228,4 +246,3 @@ def test_disconnect_nbd_device_not_found(self, mock_wait_for_nbd, mock_call, moc mock_remove_persistent.assert_not_called() mock_call.assert_not_called() mock_wait_for_nbd.assert_not_called() - From e7e61b0cae254b14827463cc3353af9c857d9b19 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Thu, 7 Mar 2024 02:37:18 +0000 Subject: [PATCH 021/222] CP-47334: Adjust pytype config for new python3 directory Signed-off-by: Stephen Cheng --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index afc1ff32067..8b00e6402e6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -89,7 +89,6 @@ expected_to_fail = [ "scripts/hatests", "scripts/backup-sr-metadata.py", "scripts/restore-sr-metadata.py", - "scripts/nbd_client_manager.py", # SSLSocket.send() only accepts bytes, not unicode string as argument: "scripts/examples/python/exportimport.py", # Other fixes needed: @@ -123,7 +122,8 @@ inputs = [ # Python 3 "python3/bin/hfx_filename", - "python3/*.py", + "python3/bin/*.py", + "python3/libexec/*.py", # To be added later, # when converted to Python3-compatible syntax: From 4cf9f3c8ab0e203c593ec53c5a51f849fcbcdacb Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Thu, 7 Mar 2024 06:01:34 +0000 Subject: [PATCH 022/222] CP-47334: Not using f-string During building, our build system uses both python2 and python3 to compile against .py files and f-string won't be accepted by py2 compiling. 
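
As an illustration only (the authoritative change is the diff below), the py2-parseable pattern this commit switches to looks like the following; the variable name `msg` is purely illustrative, while the string and `nbd_device` come from the patched code:

    # py3-only syntax, rejected when the py2 compiler parses the file:
    #   msg = f"NBD device '{nbd_device}' does not exist"
    # py2/py3-compatible form used instead:
    msg = "NBD device '{}' does not exist".format(nbd_device)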
Signed-off-by: Stephen Cheng --- python3/libexec/nbd_client_manager.py | 10 +++++----- python3/unittest/test_nbd_client_manager.py | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/python3/libexec/nbd_client_manager.py b/python3/libexec/nbd_client_manager.py index 5179ccc21cc..e251198c762 100644 --- a/python3/libexec/nbd_client_manager.py +++ b/python3/libexec/nbd_client_manager.py @@ -45,7 +45,7 @@ class NbdDeviceNotFound(Exception): def __init__(self, nbd_device): super().__init__( - f"NBD device '{nbd_device}' does not exist" + "NBD device '{}' does not exist".format(nbd_device) ) self.nbd_device = nbd_device @@ -131,7 +131,7 @@ def _find_unused_nbd_device(): Raises NbdDeviceNotFound if no devices are available. """ for device_no in range(0, 1000): - nbd_device = f"/dev/nbd{device_no}" + nbd_device = "/dev/nbd{}".format(device_no) if not _is_nbd_device_connected(nbd_device=nbd_device): return nbd_device # Actually `_is_nbd_device_connected` will raise an exception @@ -145,8 +145,8 @@ def _wait_for_nbd_device(nbd_device, connected): while _is_nbd_device_connected(nbd_device=nbd_device) != connected: if datetime.now() > deadline: raise NbdConnStateTimeout( - f"Timed out waiting for connection state of " - f"device {nbd_device} to be {connected}" + "Timed out waiting for connection state of device %s to be %s" + % (nbd_device, connected) ) LOGGER.debug( @@ -168,7 +168,7 @@ def _get_persistent_connect_info_filename(device): """ matched = re.search("/dev/nbd([0-9]+)", device) if not matched: - raise NotGetNbdNumber(f"Can not get the nbd number for device: {device}") + raise NotGetNbdNumber("Can not get the nbd number") number = matched.group(1) return PERSISTENT_INFO_DIR + "/" + number diff --git a/python3/unittest/test_nbd_client_manager.py b/python3/unittest/test_nbd_client_manager.py index 31f6d8083ca..0c06a4c258e 100644 --- a/python3/unittest/test_nbd_client_manager.py +++ b/python3/unittest/test_nbd_client_manager.py @@ -105,7 +105,7 @@ class TestGetPersistentConnectInfoFilename(unittest.TestCase): def test_get_persistent_connect_info_filename(self): # Test for device /dev/nbd0 device = "/dev/nbd0" - expected_filename = f"{nbd_client_manager.PERSISTENT_INFO_DIR}/0" + expected_filename = "/var/run/nonpersistent/nbd/0" self.assertEqual(nbd_client_manager._get_persistent_connect_info_filename(device), expected_filename) From 7719ef80d51df3b02ef748c38f8bea28c9214884 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Fri, 8 Mar 2024 01:44:31 +0000 Subject: [PATCH 023/222] CP-47334: Disable some pylint checks Signed-off-by: Stephen Cheng --- pyproject.toml | 1 + python3/unittest/test_nbd_client_manager.py | 3 --- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 8b00e6402e6..e7b587a89e2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,6 +47,7 @@ disable = [ "import-error", "unnecessary-pass", "unspecified-encoding", + "protected-access", ] [tool.mypy] diff --git a/python3/unittest/test_nbd_client_manager.py b/python3/unittest/test_nbd_client_manager.py index 0c06a4c258e..8ead3cfa580 100644 --- a/python3/unittest/test_nbd_client_manager.py +++ b/python3/unittest/test_nbd_client_manager.py @@ -14,9 +14,6 @@ # mock modules to avoid dependencies sys.modules["XenAPI"] = MagicMock() -# pylint: disable=protected-access -# pylint: disable=missing-function-docstring -# pylint: disable=missing-class-docstring @patch('subprocess.Popen') class TestCallFunction(unittest.TestCase): From 81205e110a0b5a8d1b75b1010567123791383e47 Mon 
Sep 17 00:00:00 2001 From: Stephen Cheng Date: Fri, 15 Mar 2024 02:50:18 +0000 Subject: [PATCH 024/222] CP-47334: Change to a more readable exception name Signed-off-by: Stephen Cheng --- python3/libexec/nbd_client_manager.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/python3/libexec/nbd_client_manager.py b/python3/libexec/nbd_client_manager.py index e251198c762..281c0ab3445 100644 --- a/python3/libexec/nbd_client_manager.py +++ b/python3/libexec/nbd_client_manager.py @@ -25,9 +25,9 @@ MAX_DEVICE_WAIT_MINUTES = 10 -class NotGetNbdNumber(Exception): +class InvalidNbdDevName(Exception): """ - The NBD device should be in this format: nbd{0-100} + The NBD device should be in this format: nbd{0-1000} If we cannot match this pattern, raise this exception """ @@ -168,7 +168,7 @@ def _get_persistent_connect_info_filename(device): """ matched = re.search("/dev/nbd([0-9]+)", device) if not matched: - raise NotGetNbdNumber("Can not get the nbd number") + raise InvalidNbdDevName("Can not get the nbd number") number = matched.group(1) return PERSISTENT_INFO_DIR + "/" + number From 2c6a0fc4a4e905e5db690f213b6d23413c0eb32c Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Fri, 15 Mar 2024 04:02:44 +0000 Subject: [PATCH 025/222] CP-47334: Raise exception if all nbd devices are connected Signed-off-by: Stephen Cheng --- python3/libexec/nbd_client_manager.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/python3/libexec/nbd_client_manager.py b/python3/libexec/nbd_client_manager.py index 281c0ab3445..e30477316d8 100644 --- a/python3/libexec/nbd_client_manager.py +++ b/python3/libexec/nbd_client_manager.py @@ -134,10 +134,9 @@ def _find_unused_nbd_device(): nbd_device = "/dev/nbd{}".format(device_no) if not _is_nbd_device_connected(nbd_device=nbd_device): return nbd_device - # Actually `_is_nbd_device_connected` will raise an exception - # if no unused device - # Add this return for pylint check - return None + + # If there are 1000 nbd devices (unlikely) and all are connected + raise NbdDeviceNotFound(nbd_device) def _wait_for_nbd_device(nbd_device, connected): deadline = datetime.now() + timedelta(minutes=MAX_DEVICE_WAIT_MINUTES) From 7b7a1f0f5a62653f5b7dfe1704ddabe7d11c6e6f Mon Sep 17 00:00:00 2001 From: acefei Date: Mon, 25 Mar 2024 11:10:43 +0800 Subject: [PATCH 026/222] CP-48466 Fix ci warnings for usb_scan.py (#5511) Signed-off-by: Fei Su --- pyproject.toml | 3 +- python3/libexec/usb_scan.py | 2 +- python3/unittest/test_usb_scan.py | 87 ++++++++++++++----------------- 3 files changed, 42 insertions(+), 50 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index e7b587a89e2..32bd0ad84d2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,14 +40,13 @@ disable = [ "missing-module-docstring", "consider-using-f-string", "too-many-branches", + "too-many-arguments", "broad-exception-caught", "no-else-break", "no-else-return", "invalid-name", "import-error", "unnecessary-pass", - "unspecified-encoding", - "protected-access", ] [tool.mypy] diff --git a/python3/libexec/usb_scan.py b/python3/libexec/usb_scan.py index 187418741e6..e940aa626f5 100755 --- a/python3/libexec/usb_scan.py +++ b/python3/libexec/usb_scan.py @@ -374,7 +374,7 @@ def __init__(self): """ self.rule_list = [] try: - with open(self._PATH) as f: + with open(self._PATH, encoding="utf-8", errors="backslashreplace") as f: log.debug("=== policy file begin") for line in f: log.debug(line[0:-1]) diff --git a/python3/unittest/test_usb_scan.py b/python3/unittest/test_usb_scan.py index 
150cc16afba..d87f9b12b27 100644 --- a/python3/unittest/test_usb_scan.py +++ b/python3/unittest/test_usb_scan.py @@ -12,12 +12,6 @@ import mock from import_file import get_module - -def nottest(obj): - obj.__test__ = False - return obj - - sys.modules["xcp"] = mock.Mock() sys.modules["xcp.logger"] = mock.Mock() sys.modules["pyudev"] = mock.Mock() @@ -82,24 +76,23 @@ def __init__(self, devices, interfaces): self.interfaces = interfaces def list_devices(self, **kwargs): - if kwargs.pop("subsystem") == "usb": - dev_type = kwargs.pop("DEVTYPE") - if dev_type == "usb_device": - return MocEnumerator(self.devices) - elif dev_type == "usb_interface": - return MocEnumerator(self.interfaces) - return MocEnumerator([]) + assert kwargs.pop("subsystem") == "usb" + dev_type = kwargs.pop("DEVTYPE") + if dev_type == "usb_device": + return MocEnumerator(self.devices) + elif dev_type == "usb_interface": + return MocEnumerator(self.interfaces) def mock_setup(mod, devices, interfaces, path): - mod.log.error = test_log - mod.log.debug = test_log + mod.log.error = verify_log + mod.log.debug = verify_log mod.Policy._PATH = path - mod.pyudev.Context = mock.Mock(return_value=MocContext(devices, interfaces)) + mod.pyudev.Context = mock.Mock( + return_value=MocContext(devices, interfaces)) -@nottest -def test_log(m): +def verify_log(_): pass @@ -110,9 +103,11 @@ def setUp(self): def tearDown(self): shutil.rmtree(self.work_dir, ignore_errors=True) - @nottest - def test_usb_common( - self, moc_devices, moc_interfaces, moc_results, path="./scripts/usb-policy.conf" + def verify_usb_common( + self, moc_devices, + moc_interfaces, + moc_results, + path="./scripts/usb-policy.conf" ): usb_scan = get_module("usb_scan", "../libexec/usb_scan.py") @@ -128,14 +123,13 @@ def test_usb_common( # pass pusbs in json to XAPI self.assertEqual(sorted(pusbs), sorted(moc_results)) - @nottest - def test_usb_exit( + def verify_usb_exit( self, devices, interfaces, results, path="./scripts/usb-policy.conf", msg="" - ): # pylint: disable=too-many-arguments + ): with self.assertRaises(SystemExit) as cm: - self.test_usb_common(devices, interfaces, results, path) + self.verify_usb_common(devices, interfaces, results, path) if msg: # cm.exception.code is int type whose format # looks like "duplicated tag'vid' found, @@ -183,7 +177,7 @@ def test_usb_dongle(self): "speed": "480", } ] - self.test_usb_common(devices, interfaces, results) + self.verify_usb_common(devices, interfaces, results) def test_usb_dongle_on_hub(self): devices = [ @@ -226,7 +220,7 @@ def test_usb_dongle_on_hub(self): "speed": "12", } ] - self.test_usb_common(devices, interfaces, results) + self.verify_usb_common(devices, interfaces, results) def test_usb_dongle_unbinded(self): devices = [ @@ -246,7 +240,7 @@ def test_usb_dongle_unbinded(self): ] interfaces = [] results = [] - self.test_usb_common(devices, interfaces, results) + self.verify_usb_common(devices, interfaces, results) def test_usb_keyboard(self): devices = [ @@ -285,83 +279,82 @@ def test_usb_keyboard(self): }, ] results = [] - self.test_usb_common(devices, interfaces, results) + self.verify_usb_common(devices, interfaces, results) def test_usb_config_missing(self): - self.test_usb_exit([], [], [], "not_exist.conf") + self.verify_usb_exit([], [], [], "not_exist.conf") - @nottest - def test_usb_config_error_common(self, content, msg): + def verify_usb_config_error_common(self, content, msg): path = os.path.join(self.work_dir, "usb-policy.conf") with open(path, "w") as f: f.write(content) - self.test_usb_exit([], [], [], 
path, msg) + self.verify_usb_exit([], [], [], path, msg) def test_usb_config_error_unexpected_chars_with_comment(self): content = """ss# unexpected words with comment ALLOW:vid=056a pid=0314 class=03 # Wacom Intuos tablet ALLOW: # Otherwise allow everything else """ - self.test_usb_config_error_common(content, "to unpack") + self.verify_usb_config_error_common(content, "to unpack") def test_usb_config_error_duplicated_key(self): content = """# duplicated key word ALLOW:vid=056a vid=0314 class=03 # Wacom Intuos tablet ALLOW: # Otherwise allow everything else """ - self.test_usb_config_error_common(content, "duplicated tag") + self.verify_usb_config_error_common(content, "duplicated tag") def test_usb_config_error_invalid_key(self): content = """# invalid key word ALLOW:vid=056a psid=0314 class=03 # Wacom Intuos tablet ALLOW: # Otherwise allow everything else """ - self.test_usb_config_error_common(content, "Malformed policy rule, " - "unable to parse") + self.verify_usb_config_error_common( + content, "Malformed policy rule, unable to parse") def test_usb_config_error_hex_length_4(self): content = """# hex length not 4 ALLOW:vid=056a pid=031 class=03 # Wacom Intuos tablet ALLOW: # Otherwise allow everything else """ - self.test_usb_config_error_common(content, "length error") + self.verify_usb_config_error_common(content, "length error") def test_usb_config_error_hex_length_2(self): content = """# hex length not 2 DENY:vid=056a pid=0314 class=035 # Wacom Intuos tablet ALLOW: # Otherwise allow everything else """ - self.test_usb_config_error_common(content, "length error") + self.verify_usb_config_error_common(content, "length error") def test_usb_config_error_action_key(self): content = """# wrong action key word ALLOWED:vid=056a pid=0314 class=03 # Wacom Intuos tablet ALLOW: # Otherwise allow everything else """ - self.test_usb_config_error_common(content, "Malformed action") + self.verify_usb_config_error_common(content, "Malformed action") def test_usb_config_error_unexpected_chars_end(self): content = """# unexpected words in the end ALLOW:vid=056a pid=0314 class=03 kk # Wacom Intuos tablet ALLOW: # Otherwise allow everything else """ - self.test_usb_config_error_common(content, "Malformed policy rule, " - "unable to parse") + self.verify_usb_config_error_common( + content, "Malformed policy rule, unable to parse") def test_usb_config_error_unexpected_chars_beg(self): content = """# unexpected words at the beginning ii ALLOW:vid=056a pid=0314 class=03 # Wacom Intuos tablet ALLOW: # Otherwise allow everything else """ - self.test_usb_config_error_common(content, "Malformed action") + self.verify_usb_config_error_common(content, "Malformed action") def test_usb_config_error_unexpected_chars_mid(self): content = """# unexpected words in the middle ALLOW:vid=056a pid=0314 jj class=03 # Wacom Intuos tablet ALLOW: # Otherwise allow everything else """ - self.test_usb_config_error_common(content, "Malformed policy rule, " - "unable to parse") + self.verify_usb_config_error_common( + content, "Malformed policy rule, unable to parse") def test_usb_config_error_unexpected_non_empty_line(self): content = """# unexpected non empty line @@ -369,11 +362,11 @@ def test_usb_config_error_unexpected_non_empty_line(self): aa ALLOW: # Otherwise allow everything else """ - self.test_usb_config_error_common(content, "to unpack") + self.verify_usb_config_error_common(content, "to unpack") def test_usb_config_error_missing_colon(self): content = """# missing colon after action ALLOW:vid=056a pid=0314 class=03 
# Wacom Intuos tablet ALLOW # Otherwise allow everything else """ - self.test_usb_config_error_common(content, "to unpack") + self.verify_usb_config_error_common(content, "to unpack") From acdab6bc723c547c8ab92d32b686764c7b213fb5 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Tue, 12 Mar 2024 07:26:49 +0000 Subject: [PATCH 027/222] CP-47653: Move perfmon.py to python3/bin Signed-off-by: Stephen Cheng --- python3/Makefile | 6 +++++- {scripts => python3/bin}/perfmon | 0 scripts/Makefile | 1 - 3 files changed, 5 insertions(+), 2 deletions(-) rename {scripts => python3/bin}/perfmon (100%) diff --git a/python3/Makefile b/python3/Makefile index e85e199f705..26e2bdfa943 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -10,7 +10,11 @@ install: mkdir -p $(DESTDIR)$(SITE3_DIR) mkdir -p $(DESTDIR)$(LIBEXECDIR) + $(IDATA) packages/observer.py $(DESTDIR)$(SITE3_DIR)/ + $(IPROG) libexec/usb_scan.py $(DESTDIR)$(LIBEXECDIR) - $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IPROG) libexec/nbd_client_manager.py $(DESTDIR)$(LIBEXECDIR) + + $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin + $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin diff --git a/scripts/perfmon b/python3/bin/perfmon similarity index 100% rename from scripts/perfmon rename to python3/bin/perfmon diff --git a/scripts/Makefile b/scripts/Makefile index 51dc1b092f6..6a850199ba6 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -126,7 +126,6 @@ install: $(IPROG) pbis-force-domain-leave $(DESTDIR)$(LIBEXECDIR) mkdir -p $(DESTDIR)/etc/sysconfig $(IPROG) sysconfig-perfmon $(DESTDIR)/etc/sysconfig/perfmon - $(IPROG) perfmon $(DESTDIR)$(OPTDIR)/bin mkdir -p $(DESTDIR)$(EXTENSIONDIR) $(IPROG) extensions/Test.test $(DESTDIR)$(EXTENSIONDIR) $(IPROG) extensions/pool_update.precheck $(DESTDIR)$(EXTENSIONDIR) From ce50f50b71312fd297be65bba78f9fb7031422b7 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Tue, 12 Mar 2024 08:20:28 +0000 Subject: [PATCH 028/222] CP-47653: Format the script using black and isort tools Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 961 ++++++++++++++++++++++++++++---------------- 1 file changed, 619 insertions(+), 342 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index a84c8eb5d61..3b68b485aa9 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -1,13 +1,13 @@ #!/usr/bin/env python # -# perfmon - a daemon for monitoring performance of the host on which it is run -# and of all the local VMs, and for generating events based on configurable +# perfmon - a daemon for monitoring performance of the host on which it is run +# and of all the local VMs, and for generating events based on configurable # triggers # # Notes: # ====== # The XAPI instance running on localhost monitors a number of variables -# for each VM running locally (i.e not on other pool members) and +# for each VM running locally (i.e not on other pool members) and # for the host itself. Each variable is stored in 16 RRDs (Round Robin Databases). # # Consolidation Number of samples in RRD @@ -17,7 +17,7 @@ # MAX 120 (10m) 120 (2h) ? ? # LAST 120 (10m) 120 (2h) ? ? # -# The "Consolidation function" tells how that RRD is built up from the +# The "Consolidation function" tells how that RRD is built up from the # one with the next highest sample rate. E.g. In the 1m/sample "AVERAGE" RRD # each sample is the average of 12 from the 1s/sample "AVERAGE" RRD, whereas # in the 1m/sample "MIN" RRD each sample is the minimum of 12 from the 1s/sample @@ -30,39 +30,47 @@ # The "cf" CGI param specfies the row. 
(All rows are returned if it's missing.) from __future__ import print_function -import sys -import os + +import commands +import gc import getopt +import os +import random +import re +import signal +import socket +import sys +import syslog +import time import traceback -import XenAPI import urllib -from xml import sax # used to parse rrd_updates because this may be large and sax is more efficient -from xml.dom import minidom # used to parse other-config:perfmon. Efficiency is less important than reliability here +# used to parse rrd_updates because this may be large and sax is more efficient +from xml import sax +# used to parse other-config:perfmon. Efficiency is less important than reliability here +from xml.dom import minidom from xml.parsers.expat import ExpatError -import time -import re -import random -import syslog -import socket -import gc -import signal -import commands + +import XenAPI + def print_debug(string): - if debug: + if debug: print("DEBUG:", string, file=sys.stderr) syslog.syslog(syslog.LOG_USER | syslog.LOG_INFO, "PERFMON(DEBUG): %s" % string) + def log_err(string): print(string, file=sys.stderr) syslog.syslog(syslog.LOG_USER | syslog.LOG_ERR, "PERFMON: %s" % string) pass + def log_info(string): print(string, file=sys.stderr) syslog.syslog(syslog.LOG_INFO | syslog.LOG_INFO, "PERFMON: %s" % string) pass + def debug_mem(): objCount = {} gc.collect() @@ -76,80 +84,100 @@ def debug_mem(): objCount[name] += 1 else: objCount[name] = 1 - + output = [] for name in objCount: output.append("%s :%s" % (name, objCount[name])) log_info("\n".join(output)) + class PerfMonException(Exception): pass + class XmlConfigException(PerfMonException): pass + class UsageException(Exception): pass # Start a session with the master of a pool. # Note: when calling http://localhost/rrd_update we must pass the session -# ID as a param. The host then uses this to verify our validity with +# ID as a param. The host then uses this to verify our validity with # the master before responding. 
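# A minimal, hypothetical sketch of that flow (the parameter names mirror the
# ones RRDUpdates.refresh() builds further down in this script; the values are
# illustrative only):
#
#   session = XapiSession()       # logs in over the local Unix-domain socket
#   url = ("http://localhost/rrd_updates?session_id=%s&start=%d&cf=AVERAGE"
#          % (session.id(), int(time.time()) - interval))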
# If the verification fails we should get a 401 response class XapiSession(XenAPI.Session): - """ Object that represents a XenAPI session with the pool master - One of these is needed to refresh a VMMonitor or HOSTMonitor config, or + """Object that represents a XenAPI session with the pool master + One of these is needed to refresh a VMMonitor or HOSTMonitor config, or to refresh an RRDUpdates object """ + def __init__(self): - XenAPI.Session.__init__(self, "http://_var_xapi_xapi", transport=XenAPI.UDSTransport()) + XenAPI.Session.__init__( + self, "http://_var_xapi_xapi", transport=XenAPI.UDSTransport() + ) self.xenapi.login_with_password("", "", "1.0", "xen-api-scripts-perfmon") - def __del__ (self): + + def __del__(self): self.xenapi.session.logout() + def id(self): return self._session + class ObjectReport: def __init__(self, objtype, uuid): - self.objtype = objtype # a string like "vm", or "host" taken from an tag - self.uuid = uuid # the object's uuid - self.vars = {} # maps rrd variable name to array of floats + self.objtype = ( + objtype # a string like "vm", or "host" taken from an tag + ) + self.uuid = uuid # the object's uuid + self.vars = {} # maps rrd variable name to array of floats + def get_uuid(self): return self.uuid + def get_var_names(self): return self.vars.keys() + def get_value(self, var_name, row): try: return (self.vars[var_name])[row] except: return 0.0 + def insert_value(self, var_name, index, value): if var_name not in self.vars: self.vars[var_name] = [] self.vars[var_name].insert(index, value) + class RRDReport: "This is just a data structure passed that is completed by RRDContentHandler" + def __init__(self): self.reset() - + def reset(self): - self.columns = 0 # num xapi vars in xml - self.rows = 0 # num samples in xml - self.start_time = 0 # timestamp of 1st sample in xml - self.end_time = 0 # timestamp of last sample in xml - self.step_time = 0 # seconds between each pair of samples - self.obj_reports = {} # maps uuids to ObjectReports, built from xml + self.columns = 0 # num xapi vars in xml + self.rows = 0 # num samples in xml + self.start_time = 0 # timestamp of 1st sample in xml + self.end_time = 0 # timestamp of last sample in xml + self.step_time = 0 # seconds between each pair of samples + self.obj_reports = {} # maps uuids to ObjectReports, built from xml + class RRDColumn: "class used internally by RRDContentHandler" + def __init__(self, paramname, obj_report): self.paramname = paramname self.obj_report = obj_report + class RRDContentHandler(sax.ContentHandler): - """ Handles data in this format: + """Handles data in this format: INTEGER @@ -177,6 +205,7 @@ class RRDContentHandler(sax.ContentHandler): """ + def __init__(self, report): "report is saved and later updated by this object. 
report should contain defaults already" self.report = report @@ -190,63 +219,66 @@ class RRDContentHandler(sax.ContentHandler): self.in_row_tag = False self.column_details = [] self.row = 0 - + def startElement(self, name, attrs): self.raw_text = "" - if name == 'start': + if name == "start": self.in_start_tag = True - elif name == 'step': + elif name == "step": self.in_step_tag = True - elif name == 'end': + elif name == "end": self.in_end_tag = True - elif name == 'rows': + elif name == "rows": self.in_rows_tag = True - elif name == 'columns': + elif name == "columns": self.in_columns_tag = True - elif name == 'entry': + elif name == "entry": self.in_entry_tag = True - elif name == 'row': + elif name == "row": self.in_row_tag = True self.col = 0 if self.in_row_tag: - if name == 't': + if name == "t": self.in_t_tag = True - elif name == 'v': + elif name == "v": self.in_v_tag = True - + def characters(self, chars): - if (self.in_start_tag or - self.in_step_tag or - self.in_end_tag or - self.in_rows_tag or - self.in_columns_tag or - self.in_entry_tag or - #self.in_row_tag # ignore text under row tag, s are just for holding and nodes - self.in_t_tag or - self.in_v_tag): + if ( + self.in_start_tag + or self.in_step_tag + or self.in_end_tag + or self.in_rows_tag + or self.in_columns_tag + or self.in_entry_tag + or + # self.in_row_tag # ignore text under row tag, s are just for holding and nodes + self.in_t_tag + or self.in_v_tag + ): self.raw_text += chars - + def endElement(self, name): - if name == 'start': + if name == "start": # This overwritten later if there are any rows self.report.start_time = int(self.raw_text) self.in_start_tag = False - elif name == 'step': + elif name == "step": self.report.step_time = int(self.raw_text) self.in_step_tag = False - elif name == 'end': + elif name == "end": # This overwritten later if there are any rows self.report.end_time = int(self.raw_text) self.in_end_tag = False - elif name == 'rows': + elif name == "rows": self.report.rows = int(self.raw_text) self.in_rows_tag = False - elif name == 'columns': + elif name == "columns": self.report.columns = int(self.raw_text) self.in_columns_tag = False - elif name == 'entry': - (_, objtype, uuid, paramname) = self.raw_text.split(':') + elif name == "entry": + (_, objtype, uuid, paramname) = self.raw_text.split(":") # lookup the obj_report corresponding to this uuid, or create if it does not exist if uuid not in self.report.obj_reports: self.report.obj_reports[uuid] = ObjectReport(objtype, uuid) @@ -255,10 +287,10 @@ class RRDContentHandler(sax.ContentHandler): # save the details of this column self.column_details.append(RRDColumn(paramname, obj_report)) self.in_entry_tag = False - elif name == 'row': + elif name == "row": self.in_row_tag = False self.row += 1 - elif name == 't': + elif name == "t": # Extract start and end time from row data as it's more reliable than the values in the meta data t = int(self.raw_text) # Last row corresponds to start time @@ -269,50 +301,53 @@ class RRDContentHandler(sax.ContentHandler): self.in_t_tag = False - elif name == 'v': + elif name == "v": v = float(self.raw_text) # Find object report and paramname for this col col_details = self.column_details[self.col] obj_report = col_details.obj_report paramname = col_details.paramname - + # Update object_report - obj_report.insert_value(paramname, index=0, value=v) # use index=0 as this is the earliest sample so far - + obj_report.insert_value( + paramname, index=0, value=v + ) # use index=0 as this is the earliest sample so far + 
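            # (Illustrative note: column N corresponds to the Nth <entry> in
            #  the legend, e.g. "AVERAGE:vm:<uuid>:cpu0"; rrd_updates returns
            #  rows newest-first, so inserting at index 0 leaves each
            #  vars[...] list in chronological order.)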
# Update position in row self.col += 1 - + self.in_t_tag = False - - + # An object of this class should persist the lifetime of the program class RRDUpdates: - """ Object used to get and parse the output the http://localhost/rrd_udpates?... - """ + """Object used to get and parse the output the http://localhost/rrd_udpates?...""" + def __init__(self): # params are what get passed to the CGI executable in the URL self.params = dict() - self.params['start'] = int(time.time()) - interval # interval seconds ago - self.params['host'] = 'true' # include data for host (as well as for VMs) - self.params['sr_uuid'] = 'all' # include data for all SRs attached to this host - self.params['cf'] = 'AVERAGE' # consolidation function, each sample averages 12 from the 5 second RRD - self.params['interval'] = str(rrd_step) # distinct from the perfmon interval - self.report = RRDReport() # data structure updated by RRDContentHandler + self.params["start"] = int(time.time()) - interval # interval seconds ago + self.params["host"] = "true" # include data for host (as well as for VMs) + self.params["sr_uuid"] = "all" # include data for all SRs attached to this host + self.params["cf"] = ( + "AVERAGE" # consolidation function, each sample averages 12 from the 5 second RRD + ) + self.params["interval"] = str(rrd_step) # distinct from the perfmon interval + self.report = RRDReport() # data structure updated by RRDContentHandler def __repr__(self): - return '' % str(self.params) - - def refresh(self, session, override_params = {}): + return "" % str(self.params) + + def refresh(self, session, override_params={}): "reread the rrd_updates over CGI and parse" params = override_params - params['session_id'] = session.id() + params["session_id"] = session.id() params.update(self.params) - paramstr = "&".join(["%s=%s" % (k,params[k]) for k in params]) + paramstr = "&".join(["%s=%s" % (k, params[k]) for k in params]) print_debug("Calling http://localhost/rrd_updates?%s" % paramstr) - # this is better than urllib.urlopen() as it raises an Exception on http 401 'Unauthorised' error + # this is better than urllib.urlopen() as it raises an Exception on http 401 'Unauthorised' error # rather than drop into interactive mode sock = urllib.URLopener().open("http://localhost/rrd_updates?%s" % paramstr) xmlsource = sock.read() @@ -323,10 +358,14 @@ class RRDUpdates: sax.parseString(xmlsource, RRDContentHandler(self.report)) # Update the time used on the next run - self.params['start'] = self.report.end_time + 1 # avoid retrieving same data twice + self.params["start"] = ( + self.report.end_time + 1 + ) # avoid retrieving same data twice - print_debug("Refreshed rrd_updates, start = %d, end = %d, rows = %d" % \ - (self.report.start_time, self.report.end_time, self.report.rows)) + print_debug( + "Refreshed rrd_updates, start = %d, end = %d, rows = %d" + % (self.report.start_time, self.report.end_time, self.report.rows) + ) def get_num_rows(self): "Return the number of samples of each parameter" @@ -338,102 +377,138 @@ class RRDUpdates: return self.report.obj_reports[uuid] except: return None - + def get_uuid_list_by_objtype(self, objtype): "Return a list of uuids corresonding to the objects of this type for which we have ObjectReports" - return [ objrep.uuid - for objrep in self.report.obj_reports.values() - if objrep.objtype == objtype ] + return [ + objrep.uuid + for objrep in self.report.obj_reports.values() + if objrep.objtype == objtype + ] # Consolidation functions: -supported_consolidation_functions = [ 'sum', 'average', 
'max', 'get_percent_fs_usage', 'get_percent_log_fs_usage', 'get_percent_mem_usage', 'get_percent_sr_usage' ] +supported_consolidation_functions = [ + "sum", + "average", + "max", + "get_percent_fs_usage", + "get_percent_log_fs_usage", + "get_percent_mem_usage", + "get_percent_sr_usage", +] + def average(mylist): - return sum(mylist)/float(len(mylist)) + return sum(mylist) / float(len(mylist)) + def get_percent_log_fs_usage(ignored): "Get the percent usage of the host filesystem for logs partition. Input list is ignored and should be empty" - fs_output = commands.getoutput('df /etc/passwd') - log_fs_output = commands.getoutput('df /var/log') - fs_output = ' '.join(fs_output.splitlines()[1:]) - log_fs_output = ' '.join(log_fs_output.splitlines()[1:]) + fs_output = commands.getoutput("df /etc/passwd") + log_fs_output = commands.getoutput("df /var/log") + fs_output = " ".join(fs_output.splitlines()[1:]) + log_fs_output = " ".join(log_fs_output.splitlines()[1:]) # Get the percent usage only when there is a separate logs partition - if (fs_output.split()[0] != log_fs_output.split()[0]): + if fs_output.split()[0] != log_fs_output.split()[0]: percentage = log_fs_output.split()[4] # remove % character and convert to float - return float(percentage[0:-1])/100.0 + return float(percentage[0:-1]) / 100.0 else: - return float('NaN') + return float("NaN") + def get_percent_fs_usage(ignored): "Get the percent usage of the host filesystem. Input list is ignored and should be empty" # this file is on the filesystem of interest in both OEM and Retail - output = commands.getoutput('df /etc/passwd') - output = ' '.join(output.splitlines()[1:]) # remove header line and rewrap on single line + output = commands.getoutput("df /etc/passwd") + output = " ".join( + output.splitlines()[1:] + ) # remove header line and rewrap on single line percentage = output.split()[4] # remove % character and convert to float - return float(percentage[0:-1])/100.0 + return float(percentage[0:-1]) / 100.0 + def get_percent_mem_usage(ignored): "Get the percent usage of Dom0 memory/swap. Input list is ignored and should be empty" try: - memfd = open('/proc/meminfo', 'r') + memfd = open("/proc/meminfo", "r") memlist = memfd.readlines() memfd.close() - memdict = [ m.split(':', 1) for m in memlist ] - memdict = dict([(k.strip(), float(re.search('\d+', v.strip()).group(0))) for (k,v) in memdict]) + memdict = [m.split(":", 1) for m in memlist] + memdict = dict( + [ + (k.strip(), float(re.search("\d+", v.strip()).group(0))) + for (k, v) in memdict + ] + ) # We consider the sum of res memory and swap in use as the hard demand # of mem usage, it is bad if this number is beyond the physical mem, as # in such case swapping is obligatory rather than voluntary, hence # degrading the performance. 
We define the percentage metrics as # (res_mem + swap_in_use) / phy_mem, which could potentially go beyond # 100% (but is considered bad when it does) - mem_in_use = memdict['MemTotal'] - memdict['MemFree'] - memdict['Buffers'] - memdict['Cached'] - swap_in_use = memdict['SwapTotal'] - memdict['SwapFree'] - return float(mem_in_use + swap_in_use) / memdict['MemTotal'] + mem_in_use = ( + memdict["MemTotal"] + - memdict["MemFree"] + - memdict["Buffers"] + - memdict["Cached"] + ) + swap_in_use = memdict["SwapTotal"] - memdict["SwapFree"] + return float(mem_in_use + swap_in_use) / memdict["MemTotal"] except Exception as e: log_err("Error %s in get_percent_mem_usage, return 0.0 instead" % e) return 0.0 + def get_percent_sr_usage(mylist): """Get the percent usage of the SR. Input list should be exactly two items: [physical_utilisation, size]""" try: if len(mylist) != 2: - raise Exception("Incorrect number of values to consolidate: %d (exactly 2 values)" % len(mylist)) + raise Exception( + "Incorrect number of values to consolidate: %d (exactly 2 values)" + % len(mylist) + ) physical_utilisation, size = mylist[0:2] return float(physical_utilisation) / size except Exception as e: log_err("Error %s in get_percent_sr_usage, return 0.0 instead" % e) return 0.0 + class VariableConfig: """Object storing the configuration of a Variable - + Initialisation parameters: xmldoc = dom object representing the nodes in the ObjectMonitor config strings. See VMMonitor.__doc__ and HOSTMonitor.__doc__ - alarm_create_callback = + alarm_create_callback = callback called by Variable.update() to create and send an alarm - get_default_variable_config = + get_default_variable_config = a function that VariableConfig.__init__() uses to lookup default tag values by variable name """ + def __init__(self, xmldoc, alarm_create_callback, get_default_variable_config): - try: name = xmldoc.getElementsByTagName('name')[0].getAttribute('value') - except IndexError: raise XmlConfigException("variable missing 'name' tag") - def get_value(tag): + try: + name = xmldoc.getElementsByTagName("name")[0].getAttribute("value") + except IndexError: + raise XmlConfigException("variable missing 'name' tag") + + def get_value(tag): try: - return xmldoc.getElementsByTagName(tag)[0].getAttribute('value') + return xmldoc.getElementsByTagName(tag)[0].getAttribute("value") except: return get_default_variable_config(name, tag) - rrd_regex = get_value('rrd_regex') - consolidation_fn = get_value('consolidation_fn') - alarm_trigger_level = get_value('alarm_trigger_level') - alarm_trigger_period = get_value('alarm_trigger_period') - alarm_auto_inhibit_period = get_value('alarm_auto_inhibit_period') - alarm_trigger_sense = get_value('alarm_trigger_sense') - alarm_priority = get_value('alarm_priority') + + rrd_regex = get_value("rrd_regex") + consolidation_fn = get_value("consolidation_fn") + alarm_trigger_level = get_value("alarm_trigger_level") + alarm_trigger_period = get_value("alarm_trigger_period") + alarm_auto_inhibit_period = get_value("alarm_auto_inhibit_period") + alarm_trigger_sense = get_value("alarm_trigger_sense") + alarm_priority = get_value("alarm_priority") # Save xmldoc: we need this when creating the body of the alarms self.xmldoc = xmldoc @@ -442,54 +517,68 @@ class VariableConfig: try: self.rrd_regex = re.compile("^%s$" % rrd_regex) except: - raise XmlConfigException("variable %s: regex %s does not compile" % (name, rrd_regex)) + raise XmlConfigException( + "variable %s: regex %s does not compile" % (name, rrd_regex) + ) if 
consolidation_fn not in supported_consolidation_functions: - raise XmlConfigException("variable %s: consolidation function %s not supported" \ - % (name, consolidation_fn)) + raise XmlConfigException( + "variable %s: consolidation function %s not supported" + % (name, consolidation_fn) + ) self.consolidation_fn = eval(consolidation_fn) try: self.alarm_trigger_period = int(alarm_trigger_period) except: - raise XmlConfigException("variable %s: alarm_trigger_period %s not an int" % \ - (name, alarm_trigger_period)) + raise XmlConfigException( + "variable %s: alarm_trigger_period %s not an int" + % (name, alarm_trigger_period) + ) try: self.alarm_auto_inhibit_period = int(alarm_auto_inhibit_period) except: - raise XmlConfigException("variable %s: alarm_auto_inhibit_period %s not an int" % \ - (name, alarm_auto_inhibit_period)) + raise XmlConfigException( + "variable %s: alarm_auto_inhibit_period %s not an int" + % (name, alarm_auto_inhibit_period) + ) try: trigger_level = float(alarm_trigger_level) except: - raise XmlConfigException("variable %s: alarm_trigger_level %s not a float" % \ - (name, alarm_trigger_level)) + raise XmlConfigException( + "variable %s: alarm_trigger_level %s not a float" + % (name, alarm_trigger_level) + ) self.alarm_priority = alarm_priority - + if alarm_trigger_sense == "high": - self.test_level = lambda : (self.value > trigger_level) + self.test_level = lambda: (self.value > trigger_level) else: - self.test_level = lambda : (self.value < trigger_level) + self.test_level = lambda: (self.value < trigger_level) self.alarm_create_callback = alarm_create_callback + def variable_configs_differ(vc1, vc2): "Say whether configuration of one variable differs from that of another" return vc1.xmldoc.toxml() != vc2.xmldoc.toxml() + class VariableState: - """ Object storing the state of a Variable - """ + """Object storing the state of a Variable""" + def __init__(self): self.value = None self.timeof_last_alarm = time.time() - self.alarm_auto_inhibit_period self.trigger_down_counter = self.alarm_trigger_period + class Variable(VariableConfig, VariableState): - """ Variable() is used by ObjectMonitor to create one Variable object for each + """Variable() is used by ObjectMonitor to create one Variable object for each variable specified in it's config string """ + def __init__(self, *args): VariableConfig.__init__(self, *args) VariableState.__init__(self) @@ -497,32 +586,43 @@ class Variable(VariableConfig, VariableState): print_debug("Created Variable %s" % self.name) def set_active(self, active): - print_debug("set_active on %s. (old, new) = (%s, %s)" % (self.name, self.active, active)) + print_debug( + "set_active on %s. (old, new) = (%s, %s)" % (self.name, self.active, active) + ) if active == self.active: - return # nothing to do + return # nothing to do self.active = active if active: - VariableState.__init__(self) # reset when reactivating + VariableState.__init__(self) # reset when reactivating def __generate_alarm(self, session): - """ Generate an alarm using callback provided by creator - - ... provided that one has not been generated in the last + """Generate an alarm using callback provided by creator + + ... provided that one has not been generated in the last self.alarm_auto_inhibit_period seconds """ t = time.time() delta = t - self.timeof_last_alarm - print_debug("Time since last alarm for var %s is %d - %d = %d. Refractory period = %d." 
% (self.name, t, self.timeof_last_alarm, delta, self.alarm_auto_inhibit_period)) + print_debug( + "Time since last alarm for var %s is %d - %d = %d. Refractory period = %d." + % ( + self.name, + t, + self.timeof_last_alarm, + delta, + self.alarm_auto_inhibit_period, + ) + ) if delta < self.alarm_auto_inhibit_period: - return # we are in the auto inhibit period - do nothing + return # we are in the auto inhibit period - do nothing self.timeof_last_alarm = t message = "value: %f\nconfig:\n%s" % (self.value, self.xmldoc.toprettyxml()) - + self.alarm_create_callback(self, session, message) def update(self, value, session): """Update the value of the variable using an RRDUpdates object - + Calls self.__generate_alarm() if level has been 'bad' for more than self.alarm_trigger_period seconds """ @@ -538,35 +638,40 @@ class Variable(VariableConfig, VariableState): else: # level good - reset trigger counter self.trigger_down_counter = self.alarm_trigger_period - + class ObjectMonitor: """Abstract class, used as base for VMMonitor and HOSTMonitor - + Public attributes are uuid, refresh_config() Inherited classes must implement a public attribute process_rrd_updates() """ + def __init__(self, uuid): - self.uuid = uuid + self.uuid = uuid self.xmlconfig = None # "variables" is the public attribute of interest self.variables = [] self.refresh_config() - + def refresh_config(self): if self.__update_xmlconfig(): # config has changed - reparse it try: self.__parse_xmlconfig() except XmlConfigException as e: - errmsg = "\n".join([ str(x) for x in e.args ]) - log_err("%s %s config error: %s" % (self.monitortype, self.uuid, errmsg)) + errmsg = "\n".join([str(x) for x in e.args]) + log_err( + "%s %s config error: %s" % (self.monitortype, self.uuid, errmsg) + ) except ExpatError as e: - errmsg = "\n".join([ str(x) for x in e.args ]) - log_err("%s %s XML parse error: %s" % (self.monitortype, self.uuid, errmsg)) + errmsg = "\n".join([str(x) for x in e.args]) + log_err( + "%s %s XML parse error: %s" % (self.monitortype, self.uuid, errmsg) + ) return True else: - return False # config unchanged + return False # config unchanged def __update_xmlconfig(self): if self.uuid not in all_xmlconfigs: @@ -578,16 +683,16 @@ class ObjectMonitor: self.xmlconfig = xmlconfig changed = True return changed - + def __parse_xmlconfig(self): if not self.xmlconfig: # Possible if this VM/host is not configured yet self.variables = [] return xmldoc = minidom.parseString(self.xmlconfig) - variable_nodes = xmldoc.getElementsByTagName('variable') + variable_nodes = xmldoc.getElementsByTagName("variable") variable_names = [] - + for vn in variable_nodes: # create a variable using the config in vn var = Variable(vn, self.alarm_create, self.get_default_variable_config) @@ -597,13 +702,16 @@ class ObjectMonitor: variable_names.append(var.name) # build list of variables already present with same name - vars_with_same_name = [ v for v in self.variables if v.name == var.name ] + vars_with_same_name = [v for v in self.variables if v.name == var.name] count = 0 append_var = True for v in vars_with_same_name: # this list should be 0 or 1 long! 
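                # (Descriptive note: this loop replaces an existing Variable
                #  only when variable_configs_differ() reports a change, so an
                #  unchanged refresh keeps the old object and its alarm state.)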
- if count > 0: - log_err("programmer error: found duplicate variable %s (uuid %s)" % (var.name, self.uuid)) + if count > 0: + log_err( + "programmer error: found duplicate variable %s (uuid %s)" + % (var.name, self.uuid) + ) self.variables.remove(v) continue count += 1 @@ -614,35 +722,46 @@ class ObjectMonitor: self.variables.remove(v) else: append_var = False - + if append_var: - print_debug("Appending %s to list of variables for %s UUID=%s" % (var.name, self.monitortype, self.uuid)) + print_debug( + "Appending %s to list of variables for %s UUID=%s" + % (var.name, self.monitortype, self.uuid) + ) self.variables.append(var) # Now delete any old variables that do not appear in the new variable_nodes - variables_to_remove = [ v for v in self.variables if v.name not in variable_names ] + variables_to_remove = [ + v for v in self.variables if v.name not in variable_names + ] for v in variables_to_remove: - print_debug("Deleting %s from list of variables for UUID=%s" % (v.name, self.uuid)) + print_debug( + "Deleting %s from list of variables for UUID=%s" % (v.name, self.uuid) + ) self.variables.remove(v) - def get_active_variables(self): return self.variables def process_rrd_updates(self, rrd_updates, session): - print_debug("%sMonitor processing rrd_updates for %s" % (self.monitortype, self.uuid)) + print_debug( + "%sMonitor processing rrd_updates for %s" % (self.monitortype, self.uuid) + ) obj_report = rrd_updates.get_obj_report_by_uuid(self.uuid) - num_rows = rrd_updates.get_num_rows() + num_rows = rrd_updates.get_num_rows() if not obj_report: return params_in_obj_report = obj_report.get_var_names() - + for var in self.get_active_variables(): # find the subset of the params returned for this object that we need to consolidate into var params_to_consolidate = filter(var.rrd_regex.match, params_in_obj_report) for row in range(num_rows): # Get the values to consolidate - values_to_consolidate = map(lambda param: obj_report.get_value(param, row), params_to_consolidate) + values_to_consolidate = map( + lambda param: obj_report.get_value(param, row), + params_to_consolidate, + ) # Consolidate them value = var.consolidation_fn(values_to_consolidate) # Pass result on to the variable object - this may result in an alarm being generated @@ -650,9 +769,15 @@ class ObjectMonitor: def alarm_create(self, var, session, message): "Callback used by Variable var to actually send an alarm" - print_debug("Creating an alarm for %s %s, message: %s" % (self.monitortype, self.uuid, message)) - session.xenapi.message.create("ALARM", var.alarm_priority, self.monitortype, self.uuid, message) - + print_debug( + "Creating an alarm for %s %s, message: %s" + % (self.monitortype, self.uuid, message) + ) + session.xenapi.message.create( + "ALARM", var.alarm_priority, self.monitortype, self.uuid, message + ) + + class VMMonitor(ObjectMonitor): """Object that maintains state of one VM @@ -674,6 +799,7 @@ class VMMonitor(ObjectMonitor): * rrd_regex matches the names of variables from (xe vm-data-sources-list uuid=$vmuuid) used to compute value (only has defaults for "cpu_usage", "network_usage", and "disk_usage") """ + def __init__(self, *args): self.monitortype = "VM" ObjectMonitor.__init__(self, *args) @@ -681,33 +807,66 @@ class VMMonitor(ObjectMonitor): def get_default_variable_config(self, variable_name, config_tag): "This allows user to not specify full set of tags for each variable in xml config" - if config_tag == 'consolidation_fn': - if variable_name == "cpu_usage": return 'average' - elif variable_name == 
"fs_usage": return 'get_percent_fs_usage' - elif variable_name == "log_fs_usage": return 'get_percent_log_fs_usage' - elif variable_name == "mem_usage": return 'get_percent_mem_usage' - else: return 'sum' - elif config_tag == 'rrd_regex': - if variable_name == "cpu_usage": return "cpu[0-9]+" - elif variable_name == "network_usage": return "vif_[0-9]+_[rt]x" - elif variable_name == "disk_usage": return "vbd_(xvd|hd)[a-z]+_(read|write)" - elif variable_name == "fs_usage": return "_$_DUMMY__" # match nothing - elif variable_name == "log_fs_usage": return "_$_DUMMY__" # match nothing - elif variable_name == "mem_usage": return "_$_DUMMY__" # match nothing - elif variable_name == "memory_internal_free": return variable_name - else:raise XmlConfigException("variable %s: no default rrd_regex - please specify one" % variable_name) - elif config_tag == 'alarm_trigger_period': return '60' # 1 minute - elif config_tag == 'alarm_auto_inhibit_period': return '3600' # 1 hour - elif config_tag == 'alarm_trigger_level': - if variable_name == "fs_usage": return '0.9' # trigger when 90% full - elif variable_name == "log_fs_usage": return '0.9' # trigger when 90% full - elif variable_name == "mem_usage": return '0.95' # tigger when mem demanded is close to phy_mem - else:raise XmlConfigException("variable %s: no default alarm_trigger_level - please specify one" % variable_name) - elif config_tag == 'alarm_trigger_sense': - if variable_name == "memory_internal_free": return "low" - else: return 'high' # trigger if *above* - elif config_tag == 'alarm_priority': return '3' # Service degradation level defined in PR-1455 - else:raise XmlConfigException("variable %s: no default available for tag %s" % (variable_name, config_tag)) + if config_tag == "consolidation_fn": + if variable_name == "cpu_usage": + return "average" + elif variable_name == "fs_usage": + return "get_percent_fs_usage" + elif variable_name == "log_fs_usage": + return "get_percent_log_fs_usage" + elif variable_name == "mem_usage": + return "get_percent_mem_usage" + else: + return "sum" + elif config_tag == "rrd_regex": + if variable_name == "cpu_usage": + return "cpu[0-9]+" + elif variable_name == "network_usage": + return "vif_[0-9]+_[rt]x" + elif variable_name == "disk_usage": + return "vbd_(xvd|hd)[a-z]+_(read|write)" + elif variable_name == "fs_usage": + return "_$_DUMMY__" # match nothing + elif variable_name == "log_fs_usage": + return "_$_DUMMY__" # match nothing + elif variable_name == "mem_usage": + return "_$_DUMMY__" # match nothing + elif variable_name == "memory_internal_free": + return variable_name + else: + raise XmlConfigException( + "variable %s: no default rrd_regex - please specify one" + % variable_name + ) + elif config_tag == "alarm_trigger_period": + return "60" # 1 minute + elif config_tag == "alarm_auto_inhibit_period": + return "3600" # 1 hour + elif config_tag == "alarm_trigger_level": + if variable_name == "fs_usage": + return "0.9" # trigger when 90% full + elif variable_name == "log_fs_usage": + return "0.9" # trigger when 90% full + elif variable_name == "mem_usage": + return "0.95" # tigger when mem demanded is close to phy_mem + else: + raise XmlConfigException( + "variable %s: no default alarm_trigger_level - please specify one" + % variable_name + ) + elif config_tag == "alarm_trigger_sense": + if variable_name == "memory_internal_free": + return "low" + else: + return "high" # trigger if *above* + elif config_tag == "alarm_priority": + return "3" # Service degradation level defined in PR-1455 + else: + raise 
XmlConfigException( + "variable %s: no default available for tag %s" + % (variable_name, config_tag) + ) + class SRMonitor(ObjectMonitor): """Object that maintains state of one SR @@ -730,6 +889,7 @@ class SRMonitor(ObjectMonitor): * rrd_regex matches the names of variables from (xe sr-data-sources-list uuid=$sruuid) used to compute value (has default for "physical_utilistaion") """ + def __init__(self, *args): self.monitortype = "SR" ObjectMonitor.__init__(self, *args) @@ -737,21 +897,43 @@ class SRMonitor(ObjectMonitor): def get_default_variable_config(self, variable_name, config_tag): "This allows user to not specify full set of tags for each variable in xml config" - if config_tag == 'consolidation_fn': - if variable_name == 'physical_utilisation': return 'get_percent_sr_usage' - else: return 'sum' - elif config_tag == 'rrd_regex': - if variable_name == 'physical_utilisation': return 'physical_utilisation|size' - elif variable_name == "sr_io_throughput_total_per_host": return '_$_DUMMY__' # (these are to drive Host RRDs and so are handled by the HOSTMonitor) - else:raise XmlConfigException("variable %s: no default rrd_regex - please specify one" % variable_name) - elif config_tag == 'alarm_trigger_period': return '60' # 1 minute - elif config_tag == 'alarm_auto_inhibit_period': return '3600' # 1 hour - elif config_tag == 'alarm_trigger_level': - if variable_name == "physical_utilistaion": return '0.8' # trigger when 80% full - else:raise XmlConfigException("variable %s: no default alarm_trigger_level - please specify one" % variable_name) - elif config_tag == 'alarm_trigger_sense': return 'high' # trigger if *above* - elif config_tag == 'alarm_priority': return '3' # Service degradation level defined in PR-1455 - else:raise XmlConfigException("variable %s: no default available for tag %s" % (variable_name, config_tag)) + if config_tag == "consolidation_fn": + if variable_name == "physical_utilisation": + return "get_percent_sr_usage" + else: + return "sum" + elif config_tag == "rrd_regex": + if variable_name == "physical_utilisation": + return "physical_utilisation|size" + elif variable_name == "sr_io_throughput_total_per_host": + return "_$_DUMMY__" # (these are to drive Host RRDs and so are handled by the HOSTMonitor) + else: + raise XmlConfigException( + "variable %s: no default rrd_regex - please specify one" + % variable_name + ) + elif config_tag == "alarm_trigger_period": + return "60" # 1 minute + elif config_tag == "alarm_auto_inhibit_period": + return "3600" # 1 hour + elif config_tag == "alarm_trigger_level": + if variable_name == "physical_utilistaion": + return "0.8" # trigger when 80% full + else: + raise XmlConfigException( + "variable %s: no default alarm_trigger_level - please specify one" + % variable_name + ) + elif config_tag == "alarm_trigger_sense": + return "high" # trigger if *above* + elif config_tag == "alarm_priority": + return "3" # Service degradation level defined in PR-1455 + else: + raise XmlConfigException( + "variable %s: no default available for tag %s" + % (variable_name, config_tag) + ) + class HOSTMonitor(ObjectMonitor): """Object that maintains state of one Host @@ -783,35 +965,58 @@ class HOSTMonitor(ObjectMonitor): This only works for that one specific variable-name, and rrd_regex must not be specified. Configuration done on the host directly (variable-name sr_io_throughput_total_xxxxxxxx) takes priority. 
""" + def __init__(self, *args): self.monitortype = "Host" self.secondary_variables = set() - self.secondary_xmlconfigs = {} # map of sr uuid to xml text + self.secondary_xmlconfigs = {} # map of sr uuid to xml text ObjectMonitor.__init__(self, *args) print_debug("Created HOSTMonitor with uuid %s" % self.uuid) def get_default_variable_config(self, variable_name, config_tag): "This allows user to not specify full set of tags for each variable in xml config" - if config_tag == 'consolidation_fn': - if variable_name == "cpu_usage": return 'average' - else: return 'sum' - elif config_tag == 'rrd_regex': - if variable_name == "cpu_usage": return "cpu[0-9]+" - elif variable_name == "network_usage": return "pif_eth[0-9]+_[rt]x" - elif variable_name == "memory_free_kib": return variable_name - elif re.match("sr_io_throughput_total_[0-9a-f]{8}$", variable_name): return variable_name[3:] - else:raise XmlConfigException("variable %s: no default rrd_regex - please specify one" % variable_name) - elif config_tag == 'alarm_trigger_period': return '60' # 1 minute - elif config_tag == 'alarm_auto_inhibit_period': return '3600' # 1 hour - elif config_tag == 'alarm_trigger_sense': - if variable_name == "memory_free_kib": return "low" - else: return 'high' # trigger if *above* level - elif config_tag == 'alarm_priority': return '3' # Service degradation level defined in PR-1455 - else:raise XmlConfigException("variable %s: no default available for tag %s" % (variable_name, config_tag)) + if config_tag == "consolidation_fn": + if variable_name == "cpu_usage": + return "average" + else: + return "sum" + elif config_tag == "rrd_regex": + if variable_name == "cpu_usage": + return "cpu[0-9]+" + elif variable_name == "network_usage": + return "pif_eth[0-9]+_[rt]x" + elif variable_name == "memory_free_kib": + return variable_name + elif re.match("sr_io_throughput_total_[0-9a-f]{8}$", variable_name): + return variable_name[3:] + else: + raise XmlConfigException( + "variable %s: no default rrd_regex - please specify one" + % variable_name + ) + elif config_tag == "alarm_trigger_period": + return "60" # 1 minute + elif config_tag == "alarm_auto_inhibit_period": + return "3600" # 1 hour + elif config_tag == "alarm_trigger_sense": + if variable_name == "memory_free_kib": + return "low" + else: + return "high" # trigger if *above* level + elif config_tag == "alarm_priority": + return "3" # Service degradation level defined in PR-1455 + else: + raise XmlConfigException( + "variable %s: no default available for tag %s" + % (variable_name, config_tag) + ) def get_active_variables(self): r = self.variables + [v for v in self.secondary_variables if v.active] - print_debug("Returning active variables: %d main, %d total" % (len(self.variables), len(r))) + print_debug( + "Returning active variables: %d main, %d total" + % (len(self.variables), len(r)) + ) return r def refresh_config(self): @@ -828,8 +1033,8 @@ class HOSTMonitor(ObjectMonitor): return secondary_changed = False - old_sruuids = set(self.secondary_xmlconfigs) # create set of keys - current_sruuids = sruuids_by_hostuuid[self.uuid] # a set already + old_sruuids = set(self.secondary_xmlconfigs) # create set of keys + current_sruuids = sruuids_by_hostuuid[self.uuid] # a set already if old_sruuids != current_sruuids: print_debug("Changed set of perfmon sruuids for host %s" % self.uuid) secondary_changed = True @@ -838,10 +1043,15 @@ class HOSTMonitor(ObjectMonitor): sr_xmlconfig = all_xmlconfigs[sruuid] # As an optimisation, if xml unchanged then do not re-parse. 
# Otherwise we would create Variables which would turn out to be same as existing ones so we would ignore them. - if sruuid in self.secondary_xmlconfigs and self.secondary_xmlconfigs[sruuid] == sr_xmlconfig: + if ( + sruuid in self.secondary_xmlconfigs + and self.secondary_xmlconfigs[sruuid] == sr_xmlconfig + ): print_debug("Unchanged sr_xmlconfig for sruuid %s" % sruuid) else: - print_debug("Found new/different sr_xmlconfig for sruuid %s" % sruuid) + print_debug( + "Found new/different sr_xmlconfig for sruuid %s" % sruuid + ) secondary_changed = True break @@ -849,11 +1059,17 @@ class HOSTMonitor(ObjectMonitor): try: self.__parse_secondary_xmlconfigs() except XmlConfigException as e: - errmsg = "\n".join([ str(x) for x in e.args ]) - log_err("%s %s secondary config error: %s" % (self.monitortype, self.uuid, errmsg)) + errmsg = "\n".join([str(x) for x in e.args]) + log_err( + "%s %s secondary config error: %s" + % (self.monitortype, self.uuid, errmsg) + ) except ExpatError as e: - errmsg = "\n".join([ str(x) for x in e.args ]) - log_err("%s %s secondary XML parse error: %s" % (self.monitortype, self.uuid, errmsg)) + errmsg = "\n".join([str(x) for x in e.args]) + log_err( + "%s %s secondary XML parse error: %s" + % (self.monitortype, self.uuid, errmsg) + ) if main_changed or secondary_changed: # Calculate which secondary variables are active, i.e. not overridden by ones configured on the host rather than the SR. @@ -862,67 +1078,102 @@ class HOSTMonitor(ObjectMonitor): v.set_active(v.name not in main_names) def __parse_secondary_xmlconfigs(self): - variable_names = set() # Names of the Variable objects we create based on the xml nodes we find + variable_names = ( + set() + ) # Names of the Variable objects we create based on the xml nodes we find self.secondary_xmlconfigs.clear() for sruuid in sruuids_by_hostuuid[self.uuid]: print_debug("Looking for config on SR uuid %s" % sruuid) sr_xmlconfig = all_xmlconfigs[sruuid] self.secondary_xmlconfigs[sruuid] = sr_xmlconfig xmldoc = minidom.parseString(sr_xmlconfig) - variable_nodes = xmldoc.getElementsByTagName('variable') + variable_nodes = xmldoc.getElementsByTagName("variable") found = False for vn in variable_nodes: try: - name_element = vn.getElementsByTagName('name')[0] - name = name_element.getAttribute('value') + name_element = vn.getElementsByTagName("name")[0] + name = name_element.getAttribute("value") except IndexError: - log_err("variable missing 'name' tag in perfmon xml config of SR %s" % sruuid) - continue # perhaps other nodes are valid - print_debug("Found variable with name %s on SR uuid %s" % (name, sruuid)) - if name != 'sr_io_throughput_total_per_host': - continue # Do nothing unless the variable is meant for the host - if len(vn.getElementsByTagName('rrd_regex')) > 0: - log_err("Configuration error: rrd_regex must not be specified in config on SR meant for each host") - continue # perhaps another node is valid + log_err( + "variable missing 'name' tag in perfmon xml config of SR %s" + % sruuid + ) + continue # perhaps other nodes are valid + print_debug( + "Found variable with name %s on SR uuid %s" % (name, sruuid) + ) + if name != "sr_io_throughput_total_per_host": + continue # Do nothing unless the variable is meant for the host + if len(vn.getElementsByTagName("rrd_regex")) > 0: + log_err( + "Configuration error: rrd_regex must not be specified in config on SR meant for each host" + ) + continue # perhaps another node is valid if found: - log_err("Configuration error: duplicate variable %s on SR %s" % (name, sruuid)) + 
log_err( + "Configuration error: duplicate variable %s on SR %s" + % (name, sruuid) + ) # A host can only have one Variable from a given SR since we only accept one kind (one name). break found = True - name_override = 'sr_io_throughput_total_%s' % sruuid[0:8] - name_element.setAttribute('value', name_override) - provenance_element = xmldoc.createElement('configured_on') - provenance_element.setAttribute('class', 'SR') - provenance_element.setAttribute('uuid', sruuid) + name_override = "sr_io_throughput_total_%s" % sruuid[0:8] + name_element.setAttribute("value", name_override) + provenance_element = xmldoc.createElement("configured_on") + provenance_element.setAttribute("class", "SR") + provenance_element.setAttribute("uuid", sruuid) vn.appendChild(provenance_element) var = Variable(vn, self.alarm_create, self.get_default_variable_config) variable_names.add(var.name) append_var = True - vars_with_same_name = [ v for v in self.secondary_variables if v.name == var.name ] + vars_with_same_name = [ + v for v in self.secondary_variables if v.name == var.name + ] for v in vars_with_same_name: # this list should be 0 or 1 long! # only replace variable in self.secondary_variables if its config has changed. # This way we don't reset its state if variable_configs_differ(var, v): - print_debug("Removing existing secondary variable to replace with new: %s" % v.name) + print_debug( + "Removing existing secondary variable to replace with new: %s" + % v.name + ) self.secondary_variables.remove(v) else: - print_debug("Found existing secondary variable with same config: %s" % v.name) + print_debug( + "Found existing secondary variable with same config: %s" + % v.name + ) append_var = False if append_var: - print_debug("Adding %s to set of secondary variables for host UUID=%s" % (var.name, self.uuid)) + print_debug( + "Adding %s to set of secondary variables for host UUID=%s" + % (var.name, self.uuid) + ) self.secondary_variables.add(var) # Now that we have read all the xml items, # delete any old variables that do not appear in the new variable_nodes - print_debug("Going to delete any secondary_variables not in %s" % variable_names) - variables_to_remove = [ v for v in self.secondary_variables if v.name not in variable_names ] + print_debug( + "Going to delete any secondary_variables not in %s" % variable_names + ) + variables_to_remove = [ + v for v in self.secondary_variables if v.name not in variable_names + ] for v in variables_to_remove: - print_debug("Deleting %s from set of secondary variables for UUID=%s" % (v.name, self.uuid)) + print_debug( + "Deleting %s from set of secondary variables for UUID=%s" + % (v.name, self.uuid) + ) self.secondary_variables.remove(v) + all_xmlconfigs = {} -sruuids_by_hostuuid = {} # Maps host uuid to a set of the uuids of the host's SRs that have other-config:perfmon +sruuids_by_hostuuid = ( + {} +) # Maps host uuid to a set of the uuids of the host's SRs that have other-config:perfmon + + def update_all_xmlconfigs(session): """Update all_xmlconfigs, a global dictionary that maps any uuid (SR, host or VM) to the xml config string in other-config:perfmon keys @@ -930,42 +1181,44 @@ def update_all_xmlconfigs(session): lookup of the other-config:perfmon xml of the SRs connected to a host""" global all_xmlconfigs global sruuids_by_hostuuid - + all_host_recs = session.xenapi.host.get_all_records() - all_vm_recs = session.xenapi.VM.get_all_records() - all_sr_recs = session.xenapi.SR.get_all_records() + all_vm_recs = session.xenapi.VM.get_all_records() + all_sr_recs = 
session.xenapi.SR.get_all_records() # build dictionary mapping uuids to other_configs all_otherconfigs = {} for recs in (all_host_recs, all_vm_recs, all_sr_recs): - all_otherconfigs.update([ - (recs[ref]['uuid'], recs[ref]['other_config']) - for ref in recs.keys() - ]) + all_otherconfigs.update( + [(recs[ref]["uuid"], recs[ref]["other_config"]) for ref in recs.keys()] + ) # rebuild dictionary mapping uuids to xmlconfigs all_xmlconfigs.clear() - all_xmlconfigs.update([ - (uuid, other_config['perfmon']) + all_xmlconfigs.update( + [ + (uuid, other_config["perfmon"]) for (uuid, other_config) in all_otherconfigs.items() - if 'perfmon' in other_config - ]) + if "perfmon" in other_config + ] + ) # Rebuild another map sruuids_by_hostuuid.clear() - for (sr, rec) in all_sr_recs.items(): - if 'perfmon' in rec['other_config']: - sruuid = rec['uuid'] + for sr, rec in all_sr_recs.items(): + if "perfmon" in rec["other_config"]: + sruuid = rec["uuid"] # If we hadn't done SR.get_all_records we would now do SR.get_PBDs. - host_refs = [session.xenapi.PBD.get_host(pbd) for pbd in rec['PBDs']] - host_uuids = [all_host_recs[ref]['uuid'] for ref in host_refs] + host_refs = [session.xenapi.PBD.get_host(pbd) for pbd in rec["PBDs"]] + host_uuids = [all_host_recs[ref]["uuid"] for ref in host_refs] for hu in host_uuids: if hu in sruuids_by_hostuuid: sruuids_by_hostuuid[hu].add(sruuid) else: sruuids_by_hostuuid[hu] = {sruuid} + # 5 minute default interval interval = 300 interval_percent_dither = 5 @@ -978,35 +1231,46 @@ config_update_period = 1800 cmdsockname = "\0perfmon" # an af_unix socket name (the "\0" stops socket.bind() creating a fs node) cmdmaxlen = 256 + def main(): global interval global interval_percent_dither global rrd_step global debug global config_update_period - maxruns=None + maxruns = None try: argv = sys.argv[1:] - opts, args = getopt.getopt(argv, "i:n:ds:c:D:", - ["interval=", "numloops=","debug","rrdstep=","config_update_period=","interval_percent_dither="]) + opts, args = getopt.getopt( + argv, + "i:n:ds:c:D:", + [ + "interval=", + "numloops=", + "debug", + "rrdstep=", + "config_update_period=", + "interval_percent_dither=", + ], + ) except getopt.GetoptError: raise UsageException - + configfname = None for opt, arg in opts: - if opt == '-i' or opt == '--interval': + if opt == "-i" or opt == "--interval": interval = int(arg) - elif opt == '-n' or opt == '--numloops': + elif opt == "-n" or opt == "--numloops": maxruns = int(arg) - elif opt == '-d' or opt == '--debug': + elif opt == "-d" or opt == "--debug": debug = True - elif opt == '-s' or opt == '--rrdstep': + elif opt == "-s" or opt == "--rrdstep": rrd_step = int(arg) if rrd_step != 5 and rrd_step != 60: raise UsageException - elif opt == '-c' or opt == '--config_update_period': + elif opt == "-c" or opt == "--config_update_period": config_update_period = int(arg) - elif opt == '-D' or opt == '--interval_percent_dither': + elif opt == "-D" or opt == "--interval_percent_dither": interval_percent_dither = int(arg) else: raise UsageException @@ -1015,23 +1279,22 @@ def main(): cmdsock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) cmdsock.bind(cmdsockname) - # The dither on each loop (prevents stampede on master) rand = random.Random().uniform - dither = (interval * interval_percent_dither)/100.0 + dither = (interval * interval_percent_dither) / 100.0 # Create a XAPI session on first run restart_session = True # Create a client for getting the rrd_updates over HTTP - rrd_updates = RRDUpdates() + rrd_updates = RRDUpdates() - # Work out when 
next to update all the xmlconfigs for all the + # Work out when next to update all the xmlconfigs for all the # hosts and all the VMs. This causes a lot of data to be retrieved # from the master, so we only do it once every config_update_period # and we cache the results next_config_update = time.time() - + # monitors for vms running on this host. # This dictionary uses uuids to lookup each monitor object vm_mon_lookup = {} @@ -1042,7 +1305,7 @@ def main(): # The monitor for the host host_mon = None - + runs = 0 while True: print_debug("Run: %d" % runs) @@ -1055,24 +1318,24 @@ def main(): restart_session = False rrd_updates.refresh(session) - + # Should we update all_xmlconfigs if time.time() >= next_config_update: print_debug("Updating all_xmlconfigs") # yes - update all the xml configs: this generates a few LARGE xapi messages from the master update_all_xmlconfigs(session) - + # Set time when to do this next next_config_update = time.time() + config_update_period # List of VMs present in rrd_updates - vm_uuid_list = rrd_updates.get_uuid_list_by_objtype('vm') + vm_uuid_list = rrd_updates.get_uuid_list_by_objtype("vm") # Remove any monitors for VMs no longer listed in rrd_updates page for uuid in vm_mon_lookup.keys(): if uuid not in vm_uuid_list: vm_mon_lookup.pop(uuid) - + # Create monitors for VMs that have just appeared in rrd_updates page for uuid in vm_uuid_list: if uuid not in vm_mon_lookup.keys(): @@ -1080,11 +1343,13 @@ def main(): else: # check if the config has changed, e.g. by XenCenter vm_mon_lookup[uuid].refresh_config() - + # Remove monitor for the host if it's no longer listed in rrd_updates page # Create monitor for the host if it has just appeared in rrd_updates page try: - host_uuid = rrd_updates.get_uuid_list_by_objtype('host')[0] # should only ever be one of these + host_uuid = rrd_updates.get_uuid_list_by_objtype("host")[ + 0 + ] # should only ever be one of these except: # list may be empty! host_uuid = None @@ -1094,14 +1359,16 @@ def main(): elif not host_mon: host_mon = HOSTMonitor(host_uuid) elif host_mon.uuid != host_uuid: - raise PerfMonException("host uuid in rrd_updates changed (old: %s, new %s)" % \ - (host_mon.uuid, host_uuid)) + raise PerfMonException( + "host uuid in rrd_updates changed (old: %s, new %s)" + % (host_mon.uuid, host_uuid) + ) else: # check if the config has changed, e.g. 
by XenCenter host_mon.refresh_config() # List of SRs present in rrd_updates - sr_uuid_list = rrd_updates.get_uuid_list_by_objtype('sr') + sr_uuid_list = rrd_updates.get_uuid_list_by_objtype("sr") print_debug("sr_uuid_list = %s" % sr_uuid_list) # Remove monitors for SRs no longer listed in the rrd_updates page @@ -1133,21 +1400,27 @@ def main(): time.sleep(2) pass - log_err("caught socket.error: (%s) - restarting XAPI session" % " ".join([str(x) for x in e.args])) + log_err( + "caught socket.error: (%s) - restarting XAPI session" + % " ".join([str(x) for x in e.args]) + ) restart_session = True except IOError as e: - if e.args[0] == 'http error' and e.args[1] in (401, 500): + if e.args[0] == "http error" and e.args[1] in (401, 500): # Error getting rrd_updates: 401=Unauthorised, 500=Internal - start new session - pass - elif e.args[0] == 'socket error': + pass + elif e.args[0] == "socket error": # This happens if we send messages or read other-config:perfmon after xapi is restarted pass else: # Don't know why we got this error - crash, die and look at logs later raise - log_err("caught IOError: (%s) - restarting XAPI session" % " ".join([str(x) for x in e.args])) + log_err( + "caught IOError: (%s) - restarting XAPI session" + % " ".join([str(x) for x in e.args]) + ) restart_session = True runs += 1 @@ -1178,26 +1451,28 @@ def main(): return 0 + def sigterm_handler(sig, stack_frame): log_err("Caught signal %d - exiting" % sig) sys.exit(1) + pidfile = "/var/run/perfmon.pid" if __name__ == "__main__": - + # setup signal handler to print out notice when killed signal.signal(signal.SIGTERM, sigterm_handler) - - if '--daemon' in sys.argv[1:]: - sys.argv.remove('--daemon') + + if "--daemon" in sys.argv[1:]: + sys.argv.remove("--daemon") if os.fork() != 0: sys.exit(0) os.setsid() - sys.stdout=open("/dev/null", 'w') - sys.stdin=open("/dev/null", 'r') - sys.stderr=sys.stdout - + sys.stdout = open("/dev/null", "w") + sys.stdin = open("/dev/null", "r") + sys.stderr = sys.stdout + # Exit if perfmon already running if os.path.exists(pidfile): pid = open(pidfile).read() @@ -1206,8 +1481,8 @@ if __name__ == "__main__": sys.exit(3) try: - # Write out pidfile - fd = open(pidfile,"w") + # Write out pidfile + fd = open(pidfile, "w") fd.write("%d" % os.getpid()) fd.close() @@ -1216,17 +1491,19 @@ if __name__ == "__main__": except UsageException as e: # Print the usage - log_err("usage: %s [-i -n -d -s -c -D ] \\\n" \ - "\t[--interval= --numloops= --debug \\\n" \ - "\t --rrdstep= --daemon]\n" \ - "\t --config_update_period=\n" \ - "\t --interval_percent_dither=\n" \ - " interval:\tseconds between reads of http://localhost/rrd_updates?...\n" \ - " loops:\tnumber of times to run before exiting\n" \ - " rrd_step:\tseconds between samples provided by rrd_updates. Valid values are 5 or 60\n" \ - " config_update_period:\tseconds between getting updates of all VM/host records from master\n" \ - " interval_percent_dither:\tmax percent dither in each loop - prevents stampede on master\n" \ - % (sys.argv[0])) + log_err( + "usage: %s [-i -n -d -s -c -D ] \\\n" + "\t[--interval= --numloops= --debug \\\n" + "\t --rrdstep= --daemon]\n" + "\t --config_update_period=\n" + "\t --interval_percent_dither=\n" + " interval:\tseconds between reads of http://localhost/rrd_updates?...\n" + " loops:\tnumber of times to run before exiting\n" + " rrd_step:\tseconds between samples provided by rrd_updates. 
Valid values are 5 or 60\n" + " config_update_period:\tseconds between getting updates of all VM/host records from master\n" + " interval_percent_dither:\tmax percent dither in each loop - prevents stampede on master\n" + % (sys.argv[0]) + ) rc = 1 except SystemExit: @@ -1239,16 +1516,16 @@ if __name__ == "__main__": log_err("Exception is of class %s" % e.__class__) ex = sys.exc_info() err = traceback.format_exception(*ex) - + # Python built-in Exception has args, # but XenAPI.Failure has details instead. Sigh. try: - errmsg = "\n".join([ str(x) for x in e.args ]) + errmsg = "\n".join([str(x) for x in e.args]) # print the exception args nicely log_err(errmsg) except Exception as ignored: try: - errmsg = "\n".join([ str(x) for x in e.details ]) + errmsg = "\n".join([str(x) for x in e.details]) # print the exception args nicely log_err(errmsg) except Exception as ignored: From d834700401e8765f5948f2312f03f3455933ceaa Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Tue, 12 Mar 2024 08:33:08 +0000 Subject: [PATCH 029/222] CP-47653: py2->py3 for perfmon Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 46 +++++++++++++++++++++++---------------------- 1 file changed, 24 insertions(+), 22 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 3b68b485aa9..1c2f3c3f95a 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # # perfmon - a daemon for monitoring performance of the host on which it is run # and of all the local VMs, and for generating events based on configurable @@ -29,9 +29,8 @@ # # The "cf" CGI param specfies the row. (All rows are returned if it's missing.) -from __future__ import print_function -import commands +import subprocess import gc import getopt import os @@ -43,9 +42,11 @@ import sys import syslog import time import traceback -import urllib +import urllib.request + # used to parse rrd_updates because this may be large and sax is more efficient from xml import sax + # used to parse other-config:perfmon. Efficiency is less important than reliability here from xml.dom import minidom from xml.parsers.expat import ExpatError @@ -139,7 +140,7 @@ class ObjectReport: return self.uuid def get_var_names(self): - return self.vars.keys() + return list(self.vars.keys()) def get_value(self, var_name, row): try: @@ -347,10 +348,8 @@ class RRDUpdates: paramstr = "&".join(["%s=%s" % (k, params[k]) for k in params]) print_debug("Calling http://localhost/rrd_updates?%s" % paramstr) - # this is better than urllib.urlopen() as it raises an Exception on http 401 'Unauthorised' error - # rather than drop into interactive mode - sock = urllib.URLopener().open("http://localhost/rrd_updates?%s" % paramstr) - xmlsource = sock.read() + sock = urllib.request.urlopen("http://localhost/rrd_updates?%s" % paramstr) + xmlsource = sock.read().decode("utf-8") sock.close() # Use sax rather than minidom and save Vvvast amounts of time and memory. @@ -405,8 +404,8 @@ def average(mylist): def get_percent_log_fs_usage(ignored): "Get the percent usage of the host filesystem for logs partition. 
Input list is ignored and should be empty" - fs_output = commands.getoutput("df /etc/passwd") - log_fs_output = commands.getoutput("df /var/log") + fs_output = subprocess.getoutput("df /etc/passwd") + log_fs_output = subprocess.getoutput("df /var/log") fs_output = " ".join(fs_output.splitlines()[1:]) log_fs_output = " ".join(log_fs_output.splitlines()[1:]) # Get the percent usage only when there is a separate logs partition @@ -421,7 +420,7 @@ def get_percent_log_fs_usage(ignored): def get_percent_fs_usage(ignored): "Get the percent usage of the host filesystem. Input list is ignored and should be empty" # this file is on the filesystem of interest in both OEM and Retail - output = commands.getoutput("df /etc/passwd") + output = subprocess.getoutput("df /etc/passwd") output = " ".join( output.splitlines()[1:] ) # remove header line and rewrap on single line @@ -755,13 +754,14 @@ class ObjectMonitor: for var in self.get_active_variables(): # find the subset of the params returned for this object that we need to consolidate into var - params_to_consolidate = filter(var.rrd_regex.match, params_in_obj_report) + params_to_consolidate = list( + filter(var.rrd_regex.match, params_in_obj_report) + ) for row in range(num_rows): # Get the values to consolidate - values_to_consolidate = map( - lambda param: obj_report.get_value(param, row), - params_to_consolidate, - ) + values_to_consolidate = [ + obj_report.get_value(param, row) for param in params_to_consolidate + ] # Consolidate them value = var.consolidation_fn(values_to_consolidate) # Pass result on to the variable object - this may result in an alarm being generated @@ -1191,7 +1191,7 @@ def update_all_xmlconfigs(session): for recs in (all_host_recs, all_vm_recs, all_sr_recs): all_otherconfigs.update( - [(recs[ref]["uuid"], recs[ref]["other_config"]) for ref in recs.keys()] + [(recs[ref]["uuid"], recs[ref]["other_config"]) for ref in recs] ) # rebuild dictionary mapping uuids to xmlconfigs @@ -1332,13 +1332,14 @@ def main(): vm_uuid_list = rrd_updates.get_uuid_list_by_objtype("vm") # Remove any monitors for VMs no longer listed in rrd_updates page - for uuid in vm_mon_lookup.keys(): + # We use .pop() inside the loop, use list(dict_var.keys()): + for uuid in list(vm_mon_lookup.keys()): if uuid not in vm_uuid_list: vm_mon_lookup.pop(uuid) # Create monitors for VMs that have just appeared in rrd_updates page for uuid in vm_uuid_list: - if uuid not in vm_mon_lookup.keys(): + if uuid not in vm_mon_lookup: vm_mon_lookup[uuid] = VMMonitor(uuid) else: # check if the config has changed, e.g. 
by XenCenter @@ -1372,12 +1373,13 @@ def main(): print_debug("sr_uuid_list = %s" % sr_uuid_list) # Remove monitors for SRs no longer listed in the rrd_updates page - for uuid in sr_mon_lookup.keys(): + # We use .pop() inside the loop, use list(dict_var.keys()): + for uuid in list(sr_mon_lookup.keys()): if uuid not in sr_uuid_list: sr_mon_lookup.pop(uuid) # Create monitors for SRs that have just appeared in rrd_updates page for uuid in sr_uuid_list: - if uuid not in sr_mon_lookup.keys(): + if uuid not in sr_mon_lookup: sr_mon_lookup[uuid] = SRMonitor(uuid) else: sr_mon_lookup[uuid].refresh_config() From a195f64225015e5650e50c323b5fa06eb185c509 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Thu, 14 Mar 2024 09:04:26 +0000 Subject: [PATCH 030/222] CP-47653: Fix pylint `line-too-long` warnings Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 167 +++++++++++++++++++++++++++++--------------- 1 file changed, 112 insertions(+), 55 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 1c2f3c3f95a..fc46a01e19a 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -29,6 +29,12 @@ # # The "cf" CGI param specfies the row. (All rows are returned if it's missing.) +# pylint: disable=too-many-lines, missing-function-docstring, missing-module-docstring +# pylint: disable=consider-using-f-string, missing-class-docstring, too-few-public-methods +# pylint: disable=too-many-instance-attributes, import-error, unnecessary-pass +# pylint: disable=too-many-branches, too-many-arguments, broad-exception-caught +# pylint: disable=no-else-break, no-else-return, invalid-name + import subprocess import gc @@ -48,7 +54,7 @@ import urllib.request from xml import sax # used to parse other-config:perfmon. Efficiency is less important than reliability here -from xml.dom import minidom +from xml.dom import minidom # pytype: disable=pyi-error from xml.parsers.expat import ExpatError import XenAPI @@ -254,7 +260,8 @@ class RRDContentHandler(sax.ContentHandler): or self.in_columns_tag or self.in_entry_tag or - # self.in_row_tag # ignore text under row tag, s are just for holding and nodes + # self.in_row_tag + # ignore text under row tag, s are just for holding and nodes self.in_t_tag or self.in_v_tag ): @@ -292,7 +299,8 @@ class RRDContentHandler(sax.ContentHandler): self.in_row_tag = False self.row += 1 elif name == "t": - # Extract start and end time from row data as it's more reliable than the values in the meta data + # Extract start and end time from row data + # as it's more reliable than the values in the meta data t = int(self.raw_text) # Last row corresponds to start time self.report.start_time = t @@ -378,7 +386,10 @@ class RRDUpdates: return None def get_uuid_list_by_objtype(self, objtype): - "Return a list of uuids corresonding to the objects of this type for which we have ObjectReports" + ''' + Return a list of uuids corresonding to the objects + of this type for which we have ObjectReports + ''' return [ objrep.uuid for objrep in self.report.obj_reports.values() @@ -403,7 +414,10 @@ def average(mylist): def get_percent_log_fs_usage(ignored): - "Get the percent usage of the host filesystem for logs partition. Input list is ignored and should be empty" + ''' + Get the percent usage of the host filesystem for logs partition. 
+ Input list is ignored and should be empty + ''' fs_output = subprocess.getoutput("df /etc/passwd") log_fs_output = subprocess.getoutput("df /var/log") fs_output = " ".join(fs_output.splitlines()[1:]) @@ -438,7 +452,7 @@ def get_percent_mem_usage(ignored): memdict = [m.split(":", 1) for m in memlist] memdict = dict( [ - (k.strip(), float(re.search("\d+", v.strip()).group(0))) + (k.strip(), float(re.search(r"\d+", v.strip()).group(0))) for (k, v) in memdict ] ) @@ -462,7 +476,10 @@ def get_percent_mem_usage(ignored): def get_percent_sr_usage(mylist): - """Get the percent usage of the SR. Input list should be exactly two items: [physical_utilisation, size]""" + """ + Get the percent usage of the SR. + Input list should be exactly two items: [physical_utilisation, size] + """ try: if len(mylist) != 2: raise Exception( @@ -480,13 +497,13 @@ class VariableConfig: """Object storing the configuration of a Variable Initialisation parameters: - xmldoc = dom object representing the nodes in the ObjectMonitor config strings. - See VMMonitor.__doc__ and HOSTMonitor.__doc__ - alarm_create_callback = - callback called by Variable.update() to create and send an alarm - get_default_variable_config = - a function that VariableConfig.__init__() uses to lookup default tag values - by variable name + xmldoc = dom object representing the nodes in the ObjectMonitor config strings. + See VMMonitor.__doc__ and HOSTMonitor.__doc__ + alarm_create_callback = + callback called by Variable.update() to create and send an alarm + get_default_variable_config = + a function that VariableConfig.__init__() uses to lookup default tag values + by variable name """ def __init__(self, xmldoc, alarm_create_callback, get_default_variable_config): @@ -753,7 +770,8 @@ class ObjectMonitor: params_in_obj_report = obj_report.get_var_names() for var in self.get_active_variables(): - # find the subset of the params returned for this object that we need to consolidate into var + # find the subset of the params returned for this object + # that we need to consolidate into var params_to_consolidate = list( filter(var.rrd_regex.match, params_in_obj_report) ) @@ -764,7 +782,8 @@ class ObjectMonitor: ] # Consolidate them value = var.consolidation_fn(values_to_consolidate) - # Pass result on to the variable object - this may result in an alarm being generated + # Pass result on to the variable object + # This may result in an alarm being generated var.update(value, session) def alarm_create(self, var, session, message): @@ -783,7 +802,8 @@ class VMMonitor(ObjectMonitor): Configured by writing an xml string into an other-config key, e.g. xe vm-param-set uuid=$vmuuid other-config:perfmon=\ - '' + ' + ' Notes: - Multiple nodes allowed @@ -791,12 +811,19 @@ class VMMonitor(ObjectMonitor): * name: what to call the variable (no default) * alarm_priority: the priority of the messages generated (default '3') * alarm_trigger_level: level of value that triggers an alarm (no default) - * alarm_trigger_sense: 'high' if alarm_trigger_level is a max, otherwise 'low'. 
(default 'high') - * alarm_trigger_period: num seconds of 'bad' values before an alarm is sent (default '60') - * alarm_auto_inhibit_period: num seconds this alarm disabled after an alarm is sent (default '3600') - * consolidation_fn: how to combine variables from rrd_updates into one value - (default is 'average' for 'cpu_usage', 'get_percent_fs_usage' for 'fs_usage', 'get_percent_log_fs_usage' for 'log_fs_usage', 'get_percent_mem_usage' for 'mem_usage', & 'sum' for everything else) - * rrd_regex matches the names of variables from (xe vm-data-sources-list uuid=$vmuuid) used to compute value + * alarm_trigger_sense: + 'high' if alarm_trigger_level is a max, otherwise 'low'. (default 'high') + * alarm_trigger_period: + num seconds of 'bad' values before an alarm is sent (default '60') + * alarm_auto_inhibit_period: + num seconds this alarm disabled after an alarm is sent (default '3600') + * consolidation_fn: + how to combine variables from rrd_updates into one value + (default is 'average' for 'cpu_usage', 'get_percent_fs_usage' for 'fs_usage', + 'get_percent_log_fs_usage' for 'log_fs_usage', + 'get_percent_mem_usage' for 'mem_usage', & 'sum' for everything else) + * rrd_regex matches the names of variables + from (xe vm-data-sources-list uuid=$vmuuid) used to compute value (only has defaults for "cpu_usage", "network_usage", and "disk_usage") """ @@ -873,7 +900,8 @@ class SRMonitor(ObjectMonitor): Configured by writing an xml string into an other-config key, e.g. xe sr-param-set uuid=$vmuuid other-config:perfmon=\ - '' + ' + ' Notes: - Multiple nodes allowed @@ -881,12 +909,18 @@ class SRMonitor(ObjectMonitor): * name: what to call the variable (no default) * alarm_priority: the priority of the messages generated (default '3') * alarm_trigger_level: level of value that triggers an alarm (no default) - * alarm_trigger_sense: 'high' if alarm_trigger_level is a max, otherwise 'low'. (default 'high') - * alarm_trigger_period: num seconds of 'bad' values before an alarm is sent (default '60') - * alarm_auto_inhibit_period: num seconds this alarm disabled after an alarm is sent (default '3600') - * consolidation_fn: how to combine variables from rrd_updates into one value - (default is 'get_percent_sr_usage' for 'physical_utilistation', & 'sum' for everything else) - * rrd_regex matches the names of variables from (xe sr-data-sources-list uuid=$sruuid) used to compute value + * alarm_trigger_sense: + 'high' if alarm_trigger_level is a max, otherwise 'low'. 
(default 'high') + * alarm_trigger_period: + num seconds of 'bad' values before an alarm is sent (default '60') + * alarm_auto_inhibit_period: + num seconds this alarm disabled after an alarm is sent (default '3600') + * consolidation_fn: + how to combine variables from rrd_updates into one value + (default is 'get_percent_sr_usage' for 'physical_utilistation', + & 'sum' for everything else) + * rrd_regex matches the names of variables + from (xe sr-data-sources-list uuid=$sruuid) used to compute value (has default for "physical_utilistaion") """ @@ -906,7 +940,8 @@ class SRMonitor(ObjectMonitor): if variable_name == "physical_utilisation": return "physical_utilisation|size" elif variable_name == "sr_io_throughput_total_per_host": - return "_$_DUMMY__" # (these are to drive Host RRDs and so are handled by the HOSTMonitor) + # (these are to drive Host RRDs and so are handled by the HOSTMonitor) + return "_$_DUMMY__" else: raise XmlConfigException( "variable %s: no default rrd_regex - please specify one" @@ -940,7 +975,8 @@ class HOSTMonitor(ObjectMonitor): Configured by writing an xml string into an other-config key, e.g. xe host-param-set uuid=$hostuuid other-config:perfmon=\ - '' + ' + ' Notes: - Multiple nodes allowed @@ -948,22 +984,30 @@ class HOSTMonitor(ObjectMonitor): * name: what to call the variable (no default) * alarm_priority: the priority of the messages generated (default '3') * alarm_trigger_level: level of value that triggers an alarm (no default) - * alarm_trigger_sense: 'high' if alarm_trigger_level is a max, otherwise 'low'. (default 'high') - * alarm_trigger_period: num seconds of 'bad' values before an alarm is sent (default '60') - * alarm_auto_inhibit_period: num seconds this alarm disabled after an alarm is sent (default '3600') + * alarm_trigger_sense: + 'high' if alarm_trigger_level is a max, otherwise 'low'. (default 'high') + * alarm_trigger_period: + num seconds of 'bad' values before an alarm is sent (default '60') + * alarm_auto_inhibit_period: + num seconds this alarm disabled after an alarm is sent (default '3600') * consolidation_fn: how to combine variables from rrd_updates into one value - (default is 'average' for 'cpu_usage' & 'sum' for everything else) - * rrd_regex matches the names of variables from (xe host-data-source-list uuid=$hostuuid) used to compute value - (only has defaults for "cpu_usage", "network_usage", "memory_free_kib" and "sr_io_throughput_total_xxxxxxxx" + (default is 'average' for 'cpu_usage' & 'sum' for everything else) + * rrd_regex matches the names of variables + from (xe host-data-source-list uuid=$hostuuid) used to compute value + (only has defaults for "cpu_usage", "network_usage", "memory_free_kib" + and "sr_io_throughput_total_xxxxxxxx" where that last one ends with the first eight characters of the SR uuid) Also, as a special case for SR throughput, it is possible to configure a Host by writing xml into the other-config key of an SR connected to it, e.g. xe sr-param-set uuid=$sruuid other-config:perfmon=\ - ' + ' + - This only works for that one specific variable-name, and rrd_regex must not be specified. - Configuration done on the host directly (variable-name sr_io_throughput_total_xxxxxxxx) takes priority. + This only works for that one specific variable-name, + and rrd_regex must not be specified. + Configuration done on the host directly + (variable-name sr_io_throughput_total_xxxxxxxx) takes priority. 
""" def __init__(self, *args): @@ -1042,7 +1086,8 @@ class HOSTMonitor(ObjectMonitor): for sruuid in sruuids_by_hostuuid[self.uuid]: sr_xmlconfig = all_xmlconfigs[sruuid] # As an optimisation, if xml unchanged then do not re-parse. - # Otherwise we would create Variables which would turn out to be same as existing ones so we would ignore them. + # Otherwise we would create Variables which would + # turn out to be same as existing ones so we would ignore them. if ( sruuid in self.secondary_xmlconfigs and self.secondary_xmlconfigs[sruuid] == sr_xmlconfig @@ -1072,7 +1117,8 @@ class HOSTMonitor(ObjectMonitor): ) if main_changed or secondary_changed: - # Calculate which secondary variables are active, i.e. not overridden by ones configured on the host rather than the SR. + # Calculate which secondary variables are active, + # i.e. not overridden by ones configured on the host rather than the SR. main_names = {v.name for v in self.variables} for v in self.secondary_variables: v.set_active(v.name not in main_names) @@ -1106,7 +1152,8 @@ class HOSTMonitor(ObjectMonitor): continue # Do nothing unless the variable is meant for the host if len(vn.getElementsByTagName("rrd_regex")) > 0: log_err( - "Configuration error: rrd_regex must not be specified in config on SR meant for each host" + "Configuration error:" \ + "rrd_regex must not be specified in config on SR meant for each host" ) continue # perhaps another node is valid if found: @@ -1114,7 +1161,8 @@ class HOSTMonitor(ObjectMonitor): "Configuration error: duplicate variable %s on SR %s" % (name, sruuid) ) - # A host can only have one Variable from a given SR since we only accept one kind (one name). + # A host can only have one Variable from a given SR + # since we only accept one kind (one name). break found = True name_override = "sr_io_throughput_total_%s" % sruuid[0:8] @@ -1228,7 +1276,8 @@ debug = False # rate to call update_all_xmlconfigs() config_update_period = 1800 -cmdsockname = "\0perfmon" # an af_unix socket name (the "\0" stops socket.bind() creating a fs node) +# an af_unix socket name (the "\0" stops socket.bind() creating a fs node) +cmdsockname = "\0perfmon" cmdmaxlen = 256 @@ -1322,7 +1371,8 @@ def main(): # Should we update all_xmlconfigs if time.time() >= next_config_update: print_debug("Updating all_xmlconfigs") - # yes - update all the xml configs: this generates a few LARGE xapi messages from the master + # yes - update all the xml configs: + # this generates a few LARGE xapi messages from the master update_all_xmlconfigs(session) # Set time when to do this next @@ -1384,7 +1434,8 @@ def main(): else: sr_mon_lookup[uuid].refresh_config() - # Go through each vm_mon and update it using the rrd_udpates - this may generate alarms + # Go through each vm_mon and update it using the rrd_udpates + # this may generate alarms for vm_mon in vm_mon_lookup.values(): vm_mon.process_rrd_updates(rrd_updates, session) @@ -1398,7 +1449,8 @@ def main(): except socket.error as e: if e.args[0] == 111: - # "Connection refused" - this happens when we try to restart session and *that* fails + # "Connection refused" + # this happens when we try to restart session and *that* fails time.sleep(2) pass @@ -1413,7 +1465,8 @@ def main(): # Error getting rrd_updates: 401=Unauthorised, 500=Internal - start new session pass elif e.args[0] == "socket error": - # This happens if we send messages or read other-config:perfmon after xapi is restarted + # This happens if we send messages or + # read other-config:perfmon after xapi is restarted pass else: # 
Don't know why we got this error - crash, die and look at logs later @@ -1494,16 +1547,20 @@ if __name__ == "__main__": except UsageException as e: # Print the usage log_err( - "usage: %s [-i -n -d -s -c -D ] \\\n" + "usage: %s [-i -n -d -s -c" \ + " -D ] \\\n" "\t[--interval= --numloops= --debug \\\n" "\t --rrdstep= --daemon]\n" "\t --config_update_period=\n" "\t --interval_percent_dither=\n" " interval:\tseconds between reads of http://localhost/rrd_updates?...\n" " loops:\tnumber of times to run before exiting\n" - " rrd_step:\tseconds between samples provided by rrd_updates. Valid values are 5 or 60\n" - " config_update_period:\tseconds between getting updates of all VM/host records from master\n" - " interval_percent_dither:\tmax percent dither in each loop - prevents stampede on master\n" + " rrd_step:\tseconds between samples provided by rrd_updates." \ + " Valid values are 5 or 60\n" + " config_update_period:\tseconds between getting updates" \ + " of all VM/host records from master\n" + " interval_percent_dither:\tmax percent dither in each loop" \ + " - prevents stampede on master\n" % (sys.argv[0]) ) rc = 1 @@ -1527,7 +1584,7 @@ if __name__ == "__main__": log_err(errmsg) except Exception as ignored: try: - errmsg = "\n".join([str(x) for x in e.details]) + errmsg = "\n".join([str(x) for x in e.details]) # pytype: disable=attribute-error # print the exception args nicely log_err(errmsg) except Exception as ignored: From 3d9546a867618800088324eff8eedd539735b0b6 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Thu, 14 Mar 2024 09:12:08 +0000 Subject: [PATCH 031/222] CP-47653: Fix pylint `redefined-outer-name` warnings Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index fc46a01e19a..365f9871238 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -1544,7 +1544,7 @@ if __name__ == "__main__": # run the main loop rc = main() - except UsageException as e: + except UsageException: # Print the usage log_err( "usage: %s [-i -n -d -s -c" \ @@ -1569,25 +1569,25 @@ if __name__ == "__main__": # we caught a signal which we have already logged pass - except Exception as e: + except Exception as exp: rc = 2 log_err("FATAL ERROR: perfmon will exit") - log_err("Exception is of class %s" % e.__class__) + log_err("Exception is of class %s" % exp.__class__) ex = sys.exc_info() err = traceback.format_exception(*ex) # Python built-in Exception has args, # but XenAPI.Failure has details instead. Sigh. 
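            # Sketch (not part of the patch): the args/details split noted in the
            # comment above could be factored into one hypothetical helper that
            # relies only on the two attributes this handler already uses:
            #
            #     def _format_error(exc):
            #         parts = getattr(exc, "details", None) or exc.args
            #         return "\n".join(str(x) for x in parts)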
try: - errmsg = "\n".join([str(x) for x in e.args]) + err_msg = "\n".join([str(x) for x in exp.args]) # print the exception args nicely - log_err(errmsg) - except Exception as ignored: + log_err(err_msg) + except Exception: try: - errmsg = "\n".join([str(x) for x in e.details]) # pytype: disable=attribute-error + err_msg = "\n".join([str(x) for x in exp.details]) # pytype: disable=attribute-error # print the exception args nicely - log_err(errmsg) - except Exception as ignored: + log_err(err_msg) + except Exception: pass # now log the traceback to syslog From 67d8ae16ad2dda8ba263301bbfa856281546a071 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Thu, 14 Mar 2024 09:24:41 +0000 Subject: [PATCH 032/222] CP-47653: Fix pylint `attribute-defined-outside-init` warnings Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 365f9871238..63005a70182 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -35,6 +35,8 @@ # pylint: disable=too-many-branches, too-many-arguments, broad-exception-caught # pylint: disable=no-else-break, no-else-return, invalid-name +# pylint: disable=global-statement + import subprocess import gc @@ -226,6 +228,10 @@ class RRDContentHandler(sax.ContentHandler): self.in_row_tag = False self.column_details = [] self.row = 0 + self.raw_text = "" + self.col = 0 + self.in_t_tag = False + self.in_v_tag = False def startElement(self, name, attrs): self.raw_text = "" From b34f3e55b0847540d87b0f1fb5d85d3d204748d9 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Fri, 15 Mar 2024 00:41:24 +0000 Subject: [PATCH 033/222] CP-47653: Fix pylint `unused-argument` warnings Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 63005a70182..08fe3329fc2 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -424,6 +424,7 @@ def get_percent_log_fs_usage(ignored): Get the percent usage of the host filesystem for logs partition. Input list is ignored and should be empty ''' + _ = ignored # unused: not sure if it'll be used later, passing pylint fs_output = subprocess.getoutput("df /etc/passwd") log_fs_output = subprocess.getoutput("df /var/log") fs_output = " ".join(fs_output.splitlines()[1:]) @@ -438,7 +439,12 @@ def get_percent_log_fs_usage(ignored): def get_percent_fs_usage(ignored): - "Get the percent usage of the host filesystem. Input list is ignored and should be empty" + ''' + Get the percent usage of the host filesystem. + Input list is ignored and should be empty + ''' + _ = ignored # unused: not sure if it'll be used later, passing pylint + # this file is on the filesystem of interest in both OEM and Retail output = subprocess.getoutput("df /etc/passwd") output = " ".join( @@ -450,7 +456,11 @@ def get_percent_fs_usage(ignored): def get_percent_mem_usage(ignored): - "Get the percent usage of Dom0 memory/swap. Input list is ignored and should be empty" + ''' + Get the percent usage of Dom0 memory/swap. 
+ Input list is ignored and should be empty + ''' + _ = ignored # unused: not sure if it'll be used later, passing pylint try: memfd = open("/proc/meminfo", "r") memlist = memfd.readlines() @@ -1514,6 +1524,7 @@ def main(): def sigterm_handler(sig, stack_frame): + _ = stack_frame # unused: not sure if it'll be used later, passing pylint log_err("Caught signal %d - exiting" % sig) sys.exit(1) From 8f3540af236999f2207cbfd3d0ea34a27f674b9e Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 18 Mar 2024 06:57:30 +0000 Subject: [PATCH 034/222] CP-47653: Fix pylint `unspecified-encoding` warnings Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 08fe3329fc2..ba7040d4bc6 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -444,7 +444,7 @@ def get_percent_fs_usage(ignored): Input list is ignored and should be empty ''' _ = ignored # unused: not sure if it'll be used later, passing pylint - + # this file is on the filesystem of interest in both OEM and Retail output = subprocess.getoutput("df /etc/passwd") output = " ".join( @@ -462,7 +462,7 @@ def get_percent_mem_usage(ignored): ''' _ = ignored # unused: not sure if it'll be used later, passing pylint try: - memfd = open("/proc/meminfo", "r") + memfd = open("/proc/meminfo", "r", encoding="utf-8") memlist = memfd.readlines() memfd.close() memdict = [m.split(":", 1) for m in memlist] @@ -1541,20 +1541,21 @@ if __name__ == "__main__": if os.fork() != 0: sys.exit(0) os.setsid() - sys.stdout = open("/dev/null", "w") - sys.stdin = open("/dev/null", "r") + # For /dev/null, encoding is not needed + sys.stdout = open("/dev/null", "w") # pylint: disable=unspecified-encoding + sys.stdin = open("/dev/null", "r") # pylint: disable=unspecified-encoding sys.stderr = sys.stdout # Exit if perfmon already running if os.path.exists(pidfile): - pid = open(pidfile).read() + pid = open(pidfile, encoding="utf-8").read() if os.path.exists("/proc/%s" % pid): log_err("perfmon already running - exiting") sys.exit(3) try: # Write out pidfile - fd = open(pidfile, "w") + fd = open(pidfile, "w", encoding="utf-8") fd.write("%d" % os.getpid()) fd.close() From 3d72b3f88d3b9e7830f50416f6a5921725fd5378 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 18 Mar 2024 07:02:32 +0000 Subject: [PATCH 035/222] CP-47653: Fix pylint `unused-variable` warnings Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index ba7040d4bc6..b844cf0ecaf 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -1270,7 +1270,7 @@ def update_all_xmlconfigs(session): # Rebuild another map sruuids_by_hostuuid.clear() - for sr, rec in all_sr_recs.items(): + for _, rec in all_sr_recs.items(): if "perfmon" in rec["other_config"]: sruuid = rec["uuid"] # If we hadn't done SR.get_all_records we would now do SR.get_PBDs. 
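     # Equivalent form, sketch only (not part of the patch): because only the
     # record values are used in the loop above, iterating all_sr_recs.values()
     # sidesteps the unused-variable warning without the "_" placeholder:
     #
     #     for rec in all_sr_recs.values():
     #         if "perfmon" in rec["other_config"]:
     #             sruuid = rec["uuid"]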
@@ -1306,7 +1306,7 @@ def main(): maxruns = None try: argv = sys.argv[1:] - opts, args = getopt.getopt( + opts, _ = getopt.getopt( argv, "i:n:ds:c:D:", [ @@ -1321,7 +1321,6 @@ def main(): except getopt.GetoptError: raise UsageException - configfname = None for opt, arg in opts: if opt == "-i" or opt == "--interval": interval = int(arg) From c5b9c6001f13b5c1106fbfe182133a8d427e3112 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 18 Mar 2024 07:24:00 +0000 Subject: [PATCH 036/222] CP-47653: Fix pylint `consider-using-in` warnings Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index b844cf0ecaf..e9758fc93da 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -1322,19 +1322,19 @@ def main(): raise UsageException for opt, arg in opts: - if opt == "-i" or opt == "--interval": + if opt in ("-i", "--interval"): interval = int(arg) - elif opt == "-n" or opt == "--numloops": + elif opt in ("-n", "--numloops"): maxruns = int(arg) - elif opt == "-d" or opt == "--debug": + elif opt in ("-d", "--debug"): debug = True - elif opt == "-s" or opt == "--rrdstep": + elif opt in ("-s", "--rrdstep"): rrd_step = int(arg) - if rrd_step != 5 and rrd_step != 60: + if rrd_step not in (5, 60): raise UsageException - elif opt == "-c" or opt == "--config_update_period": + elif opt in ("-c", "--config_update_period"): config_update_period = int(arg) - elif opt == "-D" or opt == "--interval_percent_dither": + elif opt in ("-D", "--interval_percent_dither"): interval_percent_dither = int(arg) else: raise UsageException From 6f3f525ebe22845894eb0cf1437cad92a38607b6 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 18 Mar 2024 07:49:30 +0000 Subject: [PATCH 037/222] CP-47653: Fix pylint `consider-using-with` warnings Signed-off-by: Stephen Cheng --- pyproject.toml | 3 +++ python3/bin/perfmon | 27 ++++++++++++++------------- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 32bd0ad84d2..addefd26e72 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,6 +47,9 @@ disable = [ "invalid-name", "import-error", "unnecessary-pass", + "unspecified-encoding", + "protected-access", + "no-member", # Some mutiple inheritance classes may have this issue ] [tool.mypy] diff --git a/python3/bin/perfmon b/python3/bin/perfmon index e9758fc93da..6c2b50b1957 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -362,9 +362,9 @@ class RRDUpdates: paramstr = "&".join(["%s=%s" % (k, params[k]) for k in params]) print_debug("Calling http://localhost/rrd_updates?%s" % paramstr) - sock = urllib.request.urlopen("http://localhost/rrd_updates?%s" % paramstr) - xmlsource = sock.read().decode("utf-8") - sock.close() + url = "http://localhost/rrd_updates?%s" % paramstr + with urllib.request.urlopen(url) as sock: + xmlsource = sock.read().decode("utf-8") # Use sax rather than minidom and save Vvvast amounts of time and memory. 
self.report.reset() @@ -462,9 +462,8 @@ def get_percent_mem_usage(ignored): ''' _ = ignored # unused: not sure if it'll be used later, passing pylint try: - memfd = open("/proc/meminfo", "r", encoding="utf-8") - memlist = memfd.readlines() - memfd.close() + with open("/proc/meminfo", "r", encoding="utf-8") as memfd: + memlist = memfd.readlines() memdict = [m.split(":", 1) for m in memlist] memdict = dict( [ @@ -1540,23 +1539,25 @@ if __name__ == "__main__": if os.fork() != 0: sys.exit(0) os.setsid() - # For /dev/null, encoding is not needed - sys.stdout = open("/dev/null", "w") # pylint: disable=unspecified-encoding - sys.stdin = open("/dev/null", "r") # pylint: disable=unspecified-encoding + # For /dev/null, `encoding` and `with` is not needed + # pylint: disable=unspecified-encoding, consider-using-with + sys.stdout = open("/dev/null", "w") + sys.stdin = open("/dev/null", "r") sys.stderr = sys.stdout # Exit if perfmon already running if os.path.exists(pidfile): - pid = open(pidfile, encoding="utf-8").read() + with open(pidfile, encoding="utf-8") as file: + pid = file.read() + if os.path.exists("/proc/%s" % pid): log_err("perfmon already running - exiting") sys.exit(3) try: # Write out pidfile - fd = open(pidfile, "w", encoding="utf-8") - fd.write("%d" % os.getpid()) - fd.close() + with open(pidfile, "w", encoding="utf-8") as fd: + fd.write("%d" % os.getpid()) # run the main loop rc = main() From 9942089f583165171e07b8bec9a6eecc21d150ee Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 18 Mar 2024 07:54:59 +0000 Subject: [PATCH 038/222] CP-47653: Fix pylint `raise-missing-from` warnings Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 6c2b50b1957..64922e9371f 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -525,7 +525,7 @@ class VariableConfig: try: name = xmldoc.getElementsByTagName("name")[0].getAttribute("value") except IndexError: - raise XmlConfigException("variable missing 'name' tag") + raise XmlConfigException("variable missing 'name' tag") from None def get_value(tag): try: @@ -550,7 +550,7 @@ class VariableConfig: except: raise XmlConfigException( "variable %s: regex %s does not compile" % (name, rrd_regex) - ) + ) from None if consolidation_fn not in supported_consolidation_functions: raise XmlConfigException( @@ -565,7 +565,7 @@ class VariableConfig: raise XmlConfigException( "variable %s: alarm_trigger_period %s not an int" % (name, alarm_trigger_period) - ) + ) from None try: self.alarm_auto_inhibit_period = int(alarm_auto_inhibit_period) @@ -573,14 +573,14 @@ class VariableConfig: raise XmlConfigException( "variable %s: alarm_auto_inhibit_period %s not an int" % (name, alarm_auto_inhibit_period) - ) + ) from None try: trigger_level = float(alarm_trigger_level) except: raise XmlConfigException( "variable %s: alarm_trigger_level %s not a float" % (name, alarm_trigger_level) - ) + ) from None self.alarm_priority = alarm_priority @@ -1318,7 +1318,7 @@ def main(): ], ) except getopt.GetoptError: - raise UsageException + raise UsageException from None for opt, arg in opts: if opt in ("-i", "--interval"): From 64839b6b81d4d80c52592c3769ce9deffd1195e0 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 18 Mar 2024 08:01:18 +0000 Subject: [PATCH 039/222] CP-47653: Fix pylint `bare-except` warnings Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git 
a/python3/bin/perfmon b/python3/bin/perfmon index 64922e9371f..3e76c637280 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -112,6 +112,9 @@ class UsageException(Exception): pass +class NotGetValueException(Exception): + pass + # Start a session with the master of a pool. # Note: when calling http://localhost/rrd_update we must pass the session # ID as a param. The host then uses this to verify our validity with @@ -153,7 +156,7 @@ class ObjectReport: def get_value(self, var_name, row): try: return (self.vars[var_name])[row] - except: + except NotGetValueException: return 0.0 def insert_value(self, var_name, index, value): @@ -388,7 +391,7 @@ class RRDUpdates: "Return an ObjectReport for the object with this uuid" try: return self.report.obj_reports[uuid] - except: + except NotGetValueException: return None def get_uuid_list_by_objtype(self, objtype): @@ -530,7 +533,7 @@ class VariableConfig: def get_value(tag): try: return xmldoc.getElementsByTagName(tag)[0].getAttribute("value") - except: + except NotGetValueException: return get_default_variable_config(name, tag) rrd_regex = get_value("rrd_regex") @@ -1415,7 +1418,7 @@ def main(): host_uuid = rrd_updates.get_uuid_list_by_objtype("host")[ 0 ] # should only ever be one of these - except: + except NotGetValueException: # list may be empty! host_uuid = None From 14045207b0718ba05a3076f9aaa80d855e3441a3 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 18 Mar 2024 08:29:25 +0000 Subject: [PATCH 040/222] CP-47653: Disable some pylint checks There are some long functions in the code. So disable the below checks: too-many-locals / too-many-statements / too-many-return-statements Signed-off-by: Stephen Cheng --- pyproject.toml | 4 ++++ python3/bin/perfmon | 22 +++++++++++----------- 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index addefd26e72..d171bf88358 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,6 +38,7 @@ ensure_newline_before_comments = false disable = [ "missing-function-docstring", "missing-module-docstring", + "missing-class-docstring", "consider-using-f-string", "too-many-branches", "too-many-arguments", @@ -50,6 +51,9 @@ disable = [ "unspecified-encoding", "protected-access", "no-member", # Some mutiple inheritance classes may have this issue + "too-many-locals", # Long functions. 
Need to refine the code + "too-many-statements", + "too-many-return-statements" ] [tool.mypy] diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 3e76c637280..c0adff37960 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -261,19 +261,19 @@ class RRDContentHandler(sax.ContentHandler): self.in_v_tag = True def characters(self, chars): - if ( - self.in_start_tag - or self.in_step_tag - or self.in_end_tag - or self.in_rows_tag - or self.in_columns_tag - or self.in_entry_tag - or + conditions = [ + self.in_start_tag, + self.in_step_tag, + self.in_end_tag, + self.in_rows_tag, + self.in_columns_tag, + self.in_entry_tag, + self.in_t_tag, + self.in_v_tag # self.in_row_tag # ignore text under row tag, s are just for holding and nodes - self.in_t_tag - or self.in_v_tag - ): + ] + if any(conditions): self.raw_text += chars def endElement(self, name): From 8a444f7ba6cf22997c9a6d84b73217ccc853dd35 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 18 Mar 2024 08:50:26 +0000 Subject: [PATCH 041/222] CP-47653: Fix pylint dict related warnings use-dict-literal, dangerous-default-value, consider-using-dict-comprehension Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 38 +++++++++++++++++++++++--------------- 1 file changed, 23 insertions(+), 15 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index c0adff37960..90063c35a67 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -95,8 +95,8 @@ def debug_mem(): objCount[name] = 1 output = [] - for name in objCount: - output.append("%s :%s" % (name, objCount[name])) + for name, cnt in objCount.items(): + output.append("%s :%s" % (name, cnt)) log_info("\n".join(output)) @@ -219,9 +219,12 @@ class RRDContentHandler(sax.ContentHandler): """ def __init__(self, report): - "report is saved and later updated by this object. report should contain defaults already" + ''' + report is saved and later updated by this object. 
+ report should contain defaults already + ''' + super().__init__() self.report = report - self.in_start_tag = False self.in_step_tag = False self.in_end_tag = False @@ -260,7 +263,7 @@ class RRDContentHandler(sax.ContentHandler): elif name == "v": self.in_v_tag = True - def characters(self, chars): + def characters(self, content): conditions = [ self.in_start_tag, self.in_step_tag, @@ -274,7 +277,7 @@ class RRDContentHandler(sax.ContentHandler): # ignore text under row tag, s are just for holding and nodes ] if any(conditions): - self.raw_text += chars + self.raw_text += content def endElement(self, name): if name == "start": @@ -344,7 +347,7 @@ class RRDUpdates: def __init__(self): # params are what get passed to the CGI executable in the URL - self.params = dict() + self.params = {} self.params["start"] = int(time.time()) - interval # interval seconds ago self.params["host"] = "true" # include data for host (as well as for VMs) self.params["sr_uuid"] = "all" # include data for all SRs attached to this host @@ -357,8 +360,10 @@ class RRDUpdates: def __repr__(self): return "" % str(self.params) - def refresh(self, session, override_params={}): + def refresh(self, session, override_params=None): "reread the rrd_updates over CGI and parse" + if override_params is None: + override_params = {} params = override_params params["session_id"] = session.id() params.update(self.params) @@ -467,13 +472,16 @@ def get_percent_mem_usage(ignored): try: with open("/proc/meminfo", "r", encoding="utf-8") as memfd: memlist = memfd.readlines() - memdict = [m.split(":", 1) for m in memlist] - memdict = dict( - [ - (k.strip(), float(re.search(r"\d+", v.strip()).group(0))) - for (k, v) in memdict - ] - ) + # memorylists is a list of lists, each list contains two parts: memtype and size + memorylists = [m.split(":", 1) for m in memlist] + memdict = {} + for item in memorylists: + memtype = item[0].strip() + size = item[1].strip() + match = re.search(r"\d+", size.strip()) + if match is None: + raise NotGetValueException + memdict[memtype] = float(match.group(0)) # We consider the sum of res memory and swap in use as the hard demand # of mem usage, it is bad if this number is beyond the physical mem, as # in such case swapping is obligatory rather than voluntary, hence From bec60a32b018fe59b8810a951c31b8a502c1cb5b Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 18 Mar 2024 10:29:02 +0000 Subject: [PATCH 042/222] CP-47653: Fix exception issues - In python3, socket.error, IOError are merged into OSError - ConnectionRefusedError is a subclass of OSError: - ConnectionRefusedError -> ConnectionError -> OSError - urllib.error.HTTPError is a subclass of OSError: - urllib.error.HTTPError <- urllib.error.URLError <- OSError - HTTPError doesn't have content in `args`. So we can't use `e.args[0]` Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 64 +++++++++++++++++++++------------------------ 1 file changed, 30 insertions(+), 34 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 90063c35a67..992d6966654 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -115,6 +115,10 @@ class UsageException(Exception): class NotGetValueException(Exception): pass + +class IncorrectInputException(Exception): + pass + # Start a session with the master of a pool. # Note: when calling http://localhost/rrd_update we must pass the session # ID as a param. 
The host then uses this to verify our validity with @@ -508,7 +512,7 @@ def get_percent_sr_usage(mylist): """ try: if len(mylist) != 2: - raise Exception( + raise IncorrectInputException( "Incorrect number of values to consolidate: %d (exactly 2 values)" % len(mylist) ) @@ -568,6 +572,8 @@ class VariableConfig: "variable %s: consolidation function %s not supported" % (name, consolidation_fn) ) + # It's fine to use eval here + # pylint: disable=eval-used self.consolidation_fn = eval(consolidation_fn) try: @@ -702,14 +708,12 @@ class ObjectMonitor: try: self.__parse_xmlconfig() except XmlConfigException as e: - errmsg = "\n".join([str(x) for x in e.args]) log_err( - "%s %s config error: %s" % (self.monitortype, self.uuid, errmsg) + "%s %s config error: %s" % (self.monitortype, self.uuid, str(e)) ) except ExpatError as e: - errmsg = "\n".join([str(x) for x in e.args]) log_err( - "%s %s XML parse error: %s" % (self.monitortype, self.uuid, errmsg) + "%s %s XML parse error: %s" % (self.monitortype, self.uuid, str(e)) ) return True else: @@ -1130,16 +1134,14 @@ class HOSTMonitor(ObjectMonitor): try: self.__parse_secondary_xmlconfigs() except XmlConfigException as e: - errmsg = "\n".join([str(x) for x in e.args]) log_err( "%s %s secondary config error: %s" - % (self.monitortype, self.uuid, errmsg) + % (self.monitortype, self.uuid, str(e)) ) except ExpatError as e: - errmsg = "\n".join([str(x) for x in e.args]) log_err( "%s %s secondary XML parse error: %s" - % (self.monitortype, self.uuid, errmsg) + % (self.monitortype, self.uuid, str(e)) ) if main_changed or secondary_changed: @@ -1253,6 +1255,8 @@ def update_all_xmlconfigs(session): (SR, host or VM) to the xml config string in other-config:perfmon keys and update sruuids_by_hostuuid which together with all_xmlconfigs allows lookup of the other-config:perfmon xml of the SRs connected to a host""" + # `all_xmlconfigs` and `sruuids_by_hostuuid` are updated by clear() and update() + # pylint: disable=global-variable-not-assigned global all_xmlconfigs global sruuids_by_hostuuid @@ -1472,35 +1476,28 @@ def main(): for sr_mon in sr_mon_lookup.values(): sr_mon.process_rrd_updates(rrd_updates, session) - except socket.error as e: - if e.args[0] == 111: - # "Connection refused" - # this happens when we try to restart session and *that* fails - time.sleep(2) - pass - + except ConnectionRefusedError as e: + # "Connection refused[111]" + # this happens when we try to restart session and *that* fails + time.sleep(2) log_err( - "caught socket.error: (%s) - restarting XAPI session" - % " ".join([str(x) for x in e.args]) + "caught connection refused error: (%s) - restarting XAPI session" + % str(e) ) restart_session = True - - except IOError as e: - if e.args[0] == "http error" and e.args[1] in (401, 500): - # Error getting rrd_updates: 401=Unauthorised, 500=Internal - start new session - pass - elif e.args[0] == "socket error": - # This happens if we send messages or - # read other-config:perfmon after xapi is restarted - pass + except urllib.error.HTTPError as e: + if e.code in (401, 500): + # Error getting rrd_updates: 401=Unauthorised, 500=Internal + # start new session + log_err("caught http.error: (%s) - restarting XAPI session" % str(e)) + restart_session = True else: # Don't know why we got this error - crash, die and look at logs later raise - - log_err( - "caught IOError: (%s) - restarting XAPI session" - % " ".join([str(x) for x in e.args]) - ) + except OSError as e: + # This happens if we send messages or + # read other-config:perfmon after xapi is 
restarted + log_err("caught connection error: (%s) - restarting XAPI session" % str(e)) restart_session = True runs += 1 @@ -1608,9 +1605,8 @@ if __name__ == "__main__": # Python built-in Exception has args, # but XenAPI.Failure has details instead. Sigh. try: - err_msg = "\n".join([str(x) for x in exp.args]) # print the exception args nicely - log_err(err_msg) + log_err(str(exp)) except Exception: try: err_msg = "\n".join([str(x) for x in exp.details]) # pytype: disable=attribute-error From 2533874dbeb0a04487299856e0b9d314d9b861fc Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Tue, 19 Mar 2024 01:51:21 +0000 Subject: [PATCH 043/222] CP-47653: Move pylint disable statements to the specific lines. Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 992d6966654..f0b64c347e7 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -29,14 +29,7 @@ # # The "cf" CGI param specfies the row. (All rows are returned if it's missing.) -# pylint: disable=too-many-lines, missing-function-docstring, missing-module-docstring -# pylint: disable=consider-using-f-string, missing-class-docstring, too-few-public-methods -# pylint: disable=too-many-instance-attributes, import-error, unnecessary-pass -# pylint: disable=too-many-branches, too-many-arguments, broad-exception-caught -# pylint: disable=no-else-break, no-else-return, invalid-name - -# pylint: disable=global-statement - +# pylint: disable=too-many-lines, missing-class-docstring import subprocess import gc @@ -169,6 +162,7 @@ class ObjectReport: self.vars[var_name].insert(index, value) +# pylint: disable=too-few-public-methods class RRDReport: "This is just a data structure passed that is completed by RRDContentHandler" @@ -192,6 +186,7 @@ class RRDColumn: self.obj_report = obj_report +# pylint: disable=too-many-instance-attributes class RRDContentHandler(sax.ContentHandler): """Handles data in this format: @@ -523,6 +518,7 @@ def get_percent_sr_usage(mylist): return 0.0 +# pylint: disable=too-few-public-methods class VariableConfig: """Object storing the configuration of a Variable @@ -1310,7 +1306,7 @@ config_update_period = 1800 cmdsockname = "\0perfmon" cmdmaxlen = 256 - +# pylint: disable=global-statement def main(): global interval global interval_percent_dither From 39d29feff13c4403a21df9dce8b97a51000e05aa Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Tue, 19 Mar 2024 02:01:11 +0000 Subject: [PATCH 044/222] CP-47653: Disable pytype `attribute-error` warnings Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index f0b64c347e7..22f597de601 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -614,6 +614,10 @@ class VariableState: def __init__(self): self.value = None + # Attributes `alarm_auto_inhibit_period` and `alarm_trigger_period` are defined + # in VariableConfig, and Class Varialbe multiple inherit from + # VariableConfig and VariableState + # pytype: disable=attribute-error self.timeof_last_alarm = time.time() - self.alarm_auto_inhibit_period self.trigger_down_counter = self.alarm_trigger_period @@ -1605,7 +1609,9 @@ if __name__ == "__main__": log_err(str(exp)) except Exception: try: - err_msg = "\n".join([str(x) for x in exp.details]) # pytype: disable=attribute-error + # As the comment above said, the XenAPI.Failure has `details` + # pytype: disable=attribute-error + 
err_msg = "\n".join([str(x) for x in exp.details]) # print the exception args nicely log_err(err_msg) except Exception: From 36fbf20d13f26dee37e6cb88d2af2d1d2a47ea29 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Tue, 19 Mar 2024 06:40:13 +0000 Subject: [PATCH 045/222] CP-47653: Apply pytype to the new path Signed-off-by: Stephen Cheng --- pyproject.toml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index d171bf88358..739597edef4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -91,7 +91,6 @@ discard_messages_matching = [ "No Node.TEXT_NODE in module xml.dom.minidom, referenced from 'xml.dom.expatbuilder'" ] expected_to_fail = [ - "scripts/perfmon", # Need 2to3 -w and maybe a few other minor updates: "scripts/hatests", "scripts/backup-sr-metadata.py", @@ -111,7 +110,6 @@ expected_to_fail = [ [tool.pytype] inputs = [ - "scripts/perfmon", "scripts/static-vdis", "scripts/Makefile", "scripts/generate-iscsi-iqn", @@ -129,6 +127,7 @@ inputs = [ # Python 3 "python3/bin/hfx_filename", + "python3/bin/perfmon", "python3/bin/*.py", "python3/libexec/*.py", From 9c32647aa462f3dd3a533cb7c3c93462db4c35c8 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Tue, 19 Mar 2024 10:13:00 +0000 Subject: [PATCH 046/222] CP-47653: Use general exception for not getting data. Previously, for fixing the pylint, I used a specific exception for not getting data. But by testing, it didn't catch the index error. Not sure if there are any other exceptions. So just keep the original logic, use the general exception. Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 30 +++++++++++------------------- 1 file changed, 11 insertions(+), 19 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 22f597de601..30ba6a235cf 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -30,6 +30,7 @@ # The "cf" CGI param specfies the row. (All rows are returned if it's missing.) 
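 # Note on the broad handlers restored in this patch (sketch only): the lookups
 # that fall back to a default could instead name the concrete failures, e.g.
 #
 #     try:
 #         return (self.vars[var_name])[row]
 #     except (KeyError, IndexError):
 #         return 0.0
 #
 # The patch keeps "except Exception" because it is unclear whether other
 # exception types can occur here.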
# pylint: disable=too-many-lines, missing-class-docstring +# pytype: disable=attribute-error import subprocess import gc @@ -105,10 +106,6 @@ class UsageException(Exception): pass -class NotGetValueException(Exception): - pass - - class IncorrectInputException(Exception): pass @@ -153,7 +150,7 @@ class ObjectReport: def get_value(self, var_name, row): try: return (self.vars[var_name])[row] - except NotGetValueException: + except Exception: return 0.0 def insert_value(self, var_name, index, value): @@ -395,7 +392,7 @@ class RRDUpdates: "Return an ObjectReport for the object with this uuid" try: return self.report.obj_reports[uuid] - except NotGetValueException: + except Exception: return None def get_uuid_list_by_objtype(self, objtype): @@ -473,14 +470,12 @@ def get_percent_mem_usage(ignored): memlist = memfd.readlines() # memorylists is a list of lists, each list contains two parts: memtype and size memorylists = [m.split(":", 1) for m in memlist] - memdict = {} - for item in memorylists: - memtype = item[0].strip() - size = item[1].strip() - match = re.search(r"\d+", size.strip()) - if match is None: - raise NotGetValueException - memdict[memtype] = float(match.group(0)) + memdict = { + # pytype complained that No attribute 'group' on None + # Let Exception catch the `not matched` issue and return 0.0 + k.strip(): float(re.search(r"\d+", v.strip()).group(0)) + for (k, v) in memorylists + } # We consider the sum of res memory and swap in use as the hard demand # of mem usage, it is bad if this number is beyond the physical mem, as # in such case swapping is obligatory rather than voluntary, hence @@ -541,7 +536,7 @@ class VariableConfig: def get_value(tag): try: return xmldoc.getElementsByTagName(tag)[0].getAttribute("value") - except NotGetValueException: + except Exception: return get_default_variable_config(name, tag) rrd_regex = get_value("rrd_regex") @@ -617,7 +612,6 @@ class VariableState: # Attributes `alarm_auto_inhibit_period` and `alarm_trigger_period` are defined # in VariableConfig, and Class Varialbe multiple inherit from # VariableConfig and VariableState - # pytype: disable=attribute-error self.timeof_last_alarm = time.time() - self.alarm_auto_inhibit_period self.trigger_down_counter = self.alarm_trigger_period @@ -1430,7 +1424,7 @@ def main(): host_uuid = rrd_updates.get_uuid_list_by_objtype("host")[ 0 ] # should only ever be one of these - except NotGetValueException: + except Exception: # list may be empty! host_uuid = None @@ -1609,8 +1603,6 @@ if __name__ == "__main__": log_err(str(exp)) except Exception: try: - # As the comment above said, the XenAPI.Failure has `details` - # pytype: disable=attribute-error err_msg = "\n".join([str(x) for x in exp.details]) # print the exception args nicely log_err(err_msg) From 8576a334c42cc98d95d6227412c9500c30d69887 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Thu, 21 Mar 2024 02:19:16 +0000 Subject: [PATCH 047/222] CP-47653: Fix a minor bug where the variable "in_v_tag" was incorrectly written as "in_t_tag." 
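(Context for the one-line fix below: startElement("v") sets self.in_v_tag, but
the matching end-of-element code cleared the wrong flag,

    self.in_t_tag = False

so the <v> flag was not reset there; the corrected line clears self.in_v_tag.)
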
Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 30ba6a235cf..6b6dd34ef94 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -334,7 +334,7 @@ class RRDContentHandler(sax.ContentHandler): # Update position in row self.col += 1 - self.in_t_tag = False + self.in_v_tag = False # An object of this class should persist the lifetime of the program From 9ff99ae2db59c4e8693f6b20884c7c0887fe84c1 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Fri, 22 Mar 2024 06:44:17 +0000 Subject: [PATCH 048/222] CP-47653: Add unit tests for perfmon Signed-off-by: Stephen Cheng --- .github/workflows/main.yml | 4 +- python3/bin/perfmon | 17 +- python3/unittest/test_nbd_client_manager.py | 3 - python3/unittest/test_perfmon.py | 600 ++++++++++++++++++++ 4 files changed, 612 insertions(+), 12 deletions(-) create mode 100644 python3/unittest/test_perfmon.py diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index a51f40e91e6..9d55ec60312 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -72,8 +72,8 @@ jobs: if: ${{ matrix.python-version != '2.7' }} run: > pytest - --cov=python3/unittest - python3/unittest -vv -rA + --cov=python3/ + python3/unittest python3/tests -vv -rA --junitxml=.git/pytest${{matrix.python-version}}.xml --cov-report term-missing --cov-report xml:.git/coverage${{matrix.python-version}}.xml diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 6b6dd34ef94..669182f5ec4 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -56,7 +56,7 @@ from xml.parsers.expat import ExpatError import XenAPI -def print_debug(string): +def print_debug(string): # pragma: no cover if debug: print("DEBUG:", string, file=sys.stderr) syslog.syslog(syslog.LOG_USER | syslog.LOG_INFO, "PERFMON(DEBUG): %s" % string) @@ -68,13 +68,13 @@ def log_err(string): pass -def log_info(string): +def log_info(string): # pragma: no cover print(string, file=sys.stderr) syslog.syslog(syslog.LOG_INFO | syslog.LOG_INFO, "PERFMON: %s" % string) pass -def debug_mem(): +def debug_mem(): # pragma: no cover objCount = {} gc.collect() objList = gc.get_objects() @@ -114,7 +114,7 @@ class IncorrectInputException(Exception): # ID as a param. The host then uses this to verify our validity with # the master before responding. 
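 # Illustrative request shape only (the values below are made up; the params
 # are assembled in RRDUpdates.refresh()):
 #
 #     http://localhost/rrd_updates?session_id=OpaqueRef:xxxx&start=1710000000&host=true&sr_uuid=all
 #
 # A stale or invalid session_id is what produces the 401 described above.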
# If the verification fails we should get a 401 response -class XapiSession(XenAPI.Session): +class XapiSession(XenAPI.Session): # pragma: no cover """Object that represents a XenAPI session with the pool master One of these is needed to refresh a VMMonitor or HOSTMonitor config, or to refresh an RRDUpdates object @@ -420,6 +420,9 @@ supported_consolidation_functions = [ def average(mylist): + if not mylist: + log_err("Error in average, no input data, return 0.0 instead") + return 0.0 return sum(mylist) / float(len(mylist)) @@ -1305,7 +1308,7 @@ cmdsockname = "\0perfmon" cmdmaxlen = 256 # pylint: disable=global-statement -def main(): +def main(): # pragma: no cover global interval global interval_percent_dither global rrd_step @@ -1523,7 +1526,7 @@ def main(): return 0 -def sigterm_handler(sig, stack_frame): +def sigterm_handler(sig, stack_frame): # pragma: no cover _ = stack_frame # unused: not sure if it'll be used later, passing pylint log_err("Caught signal %d - exiting" % sig) sys.exit(1) @@ -1531,7 +1534,7 @@ def sigterm_handler(sig, stack_frame): pidfile = "/var/run/perfmon.pid" -if __name__ == "__main__": +if __name__ == "__main__": # pragma: no cover # setup signal handler to print out notice when killed signal.signal(signal.SIGTERM, sigterm_handler) diff --git a/python3/unittest/test_nbd_client_manager.py b/python3/unittest/test_nbd_client_manager.py index 8ead3cfa580..48ca22be297 100644 --- a/python3/unittest/test_nbd_client_manager.py +++ b/python3/unittest/test_nbd_client_manager.py @@ -11,9 +11,6 @@ nbd_client_manager = get_module("nbd_client_manager", "../libexec/nbd_client_manager.py") -# mock modules to avoid dependencies -sys.modules["XenAPI"] = MagicMock() - @patch('subprocess.Popen') class TestCallFunction(unittest.TestCase): diff --git a/python3/unittest/test_perfmon.py b/python3/unittest/test_perfmon.py new file mode 100644 index 00000000000..a61e66aeaa4 --- /dev/null +++ b/python3/unittest/test_perfmon.py @@ -0,0 +1,600 @@ +#!/usr/bin/env python3 +""" +This module provides unittest for perfmon +""" + +import sys +import math +import unittest +from mock import MagicMock, patch, mock_open +from import_file import get_module + +# mock modules to avoid dependencies +sys.modules["XenAPI"] = MagicMock() + +perfmon = get_module("perfmon", "../bin/perfmon") + + +@patch("subprocess.getoutput") +class TestGetFsUsage(unittest.TestCase): + '''Test get_percent_log_fs_usage and get_percent_fs_usage''' + def mock_subprocess_getoutput(self, cmd): + df_etc_passwd = r"""Filesystem 1K-blocks Used Available Use% Mounted on + /dev/sda1 18402132 2244748 15213668 13% / + """ + df_var_log = r"""Filesystem 1K-blocks Used Available Use% Mounted on + /dev/sda5 4054752 59820 3785220 2% /var/log + """ + if cmd == "df /etc/passwd": + return df_etc_passwd + if cmd == "df /var/log": + return df_var_log + return None + + def mock_subprocess_getoutput_same_file_system(self, cmd): + df_etc_passwd = r"""Filesystem 1K-blocks Used Available Use% Mounted on + /dev/sda5 18402132 2244748 15213668 13% / + """ + df_var_log = r"""Filesystem 1K-blocks Used Available Use% Mounted on + /dev/sda5 4054752 59820 3785220 2% /var/log + """ + if cmd == "df /etc/passwd": + return df_etc_passwd + if cmd == "df /var/log": + return df_var_log + return None + + def test_get_percent_log_fs_usage(self, mock_getoutput): + """Assert that get_percent_log_fs_usage returns as expected""" + mock_getoutput.side_effect = self.mock_subprocess_getoutput + + expected_percentage = 0.02 + test_percentage = 
perfmon.get_percent_log_fs_usage(None) + self.assertAlmostEqual(test_percentage, expected_percentage, 7) + + def test_get_percent_log_fs_usage_same_file_system(self, mock_getoutput): + """Test where /etc/passwd and /var/log are in the same filesystem""" + mock_getoutput.side_effect = self.mock_subprocess_getoutput_same_file_system + + test_percentage = perfmon.get_percent_log_fs_usage(None) + self.assertTrue(math.isnan(test_percentage)) + + def test_get_percent_fs_usage(self, mock_getoutput): + """Assert that get_percent_fs_usage returns as expected""" + mock_getoutput.side_effect = self.mock_subprocess_getoutput + + expected_percentage = 0.13 + test_percentage = perfmon.get_percent_fs_usage(None) + self.assertAlmostEqual(test_percentage, expected_percentage, 7) + + +class TestGetMemUsage(unittest.TestCase): + '''Test get_percent_mem_usage ''' + + meminfo = '''MemTotal: 2580464 kB + MemFree: 1511024 kB + MemAvailable: 2210924 kB + Buffers: 95948 kB + Cached: 518164 kB + SwapCached: 0 kB + Active: 424468 kB + Inactive: 390016 kB + Active(anon): 207944 kB + Inactive(anon): 8740 kB + Active(file): 216524 kB + Inactive(file): 381276 kB + Unevictable: 13620 kB + Mlocked: 13620 kB + SwapTotal: 1048572 kB + SwapFree: 1048572 kB''' + @patch("builtins.open", new_callable=mock_open, read_data=meminfo) + def test_get_percent_mem_usage(self, _): + self.assertAlmostEqual(perfmon.get_percent_mem_usage([]), 0.17645198692948244) + + @patch('builtins.open', side_effect=Exception) + def test_get_percent_mem_usage_exception(self, _): + self.assertEqual(perfmon.get_percent_mem_usage(None), 0.0) + + +class TestGetPercentSRUsage(unittest.TestCase): + '''Test get_percent_sr_usage ''' + + def test_get_percent_sr_usage_correct_input(self): + input_list = [100, 200] + expected_result = 0.5 + self.assertAlmostEqual(perfmon.get_percent_sr_usage(input_list), + expected_result) + + def test_get_percent_sr_usage_incorrect_input(self): + input_list = [100] # Incorrect input, expecting two values + expected_result = 0.0 + self.assertAlmostEqual(perfmon.get_percent_sr_usage(input_list), + expected_result) + + def test_get_percent_sr_usage_zero_division(self): + input_list = [0, 200] # Physical utilization is 0 + expected_result = 0.0 + self.assertAlmostEqual(perfmon.get_percent_sr_usage(input_list), + expected_result) + + def test_get_percent_sr_usage_exception_handling(self): + input_list = ["invalid", 200] # Invalid input, should raise an exception + expected_result = 0.0 # Since exception is handled, function should return 0.0 + self.assertAlmostEqual(perfmon.get_percent_sr_usage(input_list), + expected_result) + + +class TestAverage(unittest.TestCase): + '''Test get_percent_sr_usage ''' + def test_average_empty_list(self): + result = perfmon.average([]) + self.assertEqual(result, 0.0) + + def test_average_single_element_list(self): + result = perfmon.average([5]) + self.assertEqual(result, 5.0) + + def test_average_positive_numbers(self): + result = perfmon.average([1, 2, 3, 4, 5]) + self.assertEqual(result, 3.0) + + +class TestUpdateAllXMLConfigs(unittest.TestCase): + '''Test update_all_xmlconfigs''' + def test_update_all_xmlconfigs(self): + + perfmon.all_xmlconfigs = {} + perfmon.sruuids_by_hostuuid = {} + + host_uuid = '28a574e4-bf57-4476-a83d-72cba7578d23' + vm_uuid = '2cf37285-57bc-4633-a24f-0c6c825dda66' + sr_uuid = '0e7f8fb3-1ba2-4bce-9889-48812273a316' + perfmon_config = '' \ + '' \ + '' \ + '' \ + '' \ + '' \ + '' \ + '' \ + '' \ + '' \ + '' \ + '' + + mock_session = MagicMock() + 
mock_session.xenapi.host.get_all_records.return_value = { + 'OpaqueRef:8be06dc8-bed5-4d81-d030-937eca11094a':{ + 'uuid': host_uuid, + 'name_label': 'xrtuk-11-43', + 'name_description': 'Default install', + 'memory_overhead': '631816192', + 'software_version': { + 'product_version': '8.4.0', 'product_version_text': '8', + 'product_version_text_short': '8', 'platform_name': 'XCP', + 'platform_version': '3.4.0', 'product_brand': 'XenServer', + 'build_number': 'stream', 'git_id': '0', 'hostname': 'localhost', + 'date': '20240229T15:07:05Z', 'dbv': '2024.0229', + 'is_preview_release': 'false', 'xapi': '24.11', + 'xapi_build': '24.11.0', 'xen': '4.17.3-4', + 'linux': '4.19.0+1', 'xencenter_min': '2.21', + 'xencenter_max': '2.21', 'network_backend': 'openvswitch', + 'db_schema': '5.775'}, + 'other_config': { + 'iscsi_iqn': 'iqn.2024-03.xenrtcloud:339cd227', + 'agent_start_time': '1710910331.', + 'boot_time': '1710910266.', + 'perfmon': perfmon_config} + } + } + mock_session.xenapi.VM.get_all_records.return_value = { + 'OpaqueRef:fffc65bb-b909-03b2-c20a-8277434a4495': { + 'uuid': vm_uuid, + 'other_config': { + 'storage_driver_domain': 'OpaqueRef:11de3275-b5e4-a56c-a295', + 'is_system_domain': 'true', 'perfmon': perfmon_config + } + } + } + mock_session.xenapi.SR.get_all_records.return_value = { + 'OpaqueRef:fffc65bb-b909-03b2-c20a-8277434a4495': { + 'uuid': sr_uuid, + 'other_config': { + 'storage_driver_domain': 'OpaqueRef:11de3275-b5e4-a56c-a295', + 'is_system_domain': 'true', 'perfmon': perfmon_config + }, + 'PBDs': ['pbd1', 'pbd2'] + } + } + # One SR is connected to two hosts + mock_session.xenapi.PBD.get_host.return_value = \ + 'OpaqueRef:8be06dc8-bed5-4d81-d030-937eca11094a' + + + # Call the function to test + perfmon.update_all_xmlconfigs(mock_session) + + # Check that all_xmlconfigs and sruuids_by_hostuuid were updated correctly + expect_xmlconfigs = { + host_uuid: perfmon_config, + vm_uuid: perfmon_config, + sr_uuid: perfmon_config + } + self.assertEqual(perfmon.all_xmlconfigs, expect_xmlconfigs) + print(perfmon.sruuids_by_hostuuid) + self.assertEqual(perfmon.sruuids_by_hostuuid, {host_uuid: {sr_uuid}}) + +class TestObjectReport(unittest.TestCase): + '''Test Class ObjectReport ''' + def setUp(self): + # Create an instance of ObjectReport for testing + self.obj_report = perfmon.ObjectReport(objtype="vm", + uuid="e1ae3f5d-4c8b-4575-bbb8-2af7e8a2c31e") + + def test_get_uuid(self): + self.assertEqual(self.obj_report.get_uuid(), + "e1ae3f5d-4c8b-4575-bbb8-2af7e8a2c31e") + + def test_get_var_names(self): + # Initially, there are no variables, so the list should be empty + self.assertEqual(self.obj_report.get_var_names(), []) + + # Insert a variable and check if it appears in the list + self.obj_report.insert_value("cpu_usage", 0, 0.5) + self.assertEqual(self.obj_report.get_var_names(), ["cpu_usage"]) + + def test_get_value(self): + # Insert a value for a variable and retrieve it + self.obj_report.insert_value("cpu_usage", 0, 0.5) + self.assertEqual(self.obj_report.get_value("cpu_usage", 0), 0.5) + + # Trying to retrieve a value for a non-existing variable should return 0.0 + self.assertEqual(self.obj_report.get_value("memory_usage", 0), 0.0) + + def test_insert_value(self): + # Insert a value for a variable and check if it's stored correctly + self.obj_report.insert_value("cpu_usage", 0, 0.5) + self.assertEqual(self.obj_report.vars["cpu_usage"], [0.5]) + + # Insert another value for the same variable and check if it's stored correctly + self.obj_report.insert_value("cpu_usage", 1, 0.6) + 
self.assertEqual(self.obj_report.vars["cpu_usage"], [0.5, 0.6]) + + +@patch("perfmon.XapiSession") +@patch("perfmon.get_percent_fs_usage") +@patch("perfmon.get_percent_log_fs_usage") +@patch("perfmon.get_percent_mem_usage") +class TestVMMonitor(unittest.TestCase): + '''Test getting VM performance data from VMMonitor''' + + def test_process_rrd_updates(self, mock_get_percent_mem_usage, + mock_get_percent_log_fs_usage, + mock_get_percent_fs_usage, + mock_xapisession): + uuid = 'e1ae3f5d-4c8b-4575-bbb8-2af7e8a2c31e' + perfmon.all_xmlconfigs = {'e1ae3f5d-4c8b-4575-bbb8-2af7e8a2c31e': + ''' + + + + + + + + + + + + '''} + monitor = perfmon.VMMonitor(uuid) + rrd_updates = perfmon.RRDUpdates() + obj_report = perfmon.ObjectReport("vm", uuid) + obj_report.vars = { + 'cpu0': [0.0063071, 0.0048038, 0.0045862, 0.0048865, 0.0048923], + 'cpu1': [0.0067629, 0.0055811, 0.0058988, 0.0058809, 0.0053645], + 'cpu2': [0.0088599, 0.0078701, 0.0058573, 0.0063993, 0.0056833], + 'cpu3': [0.0085826, 0.0056874, 0.005697, 0.0061155, 0.0048769], + 'cpu4': [0.0051265, 0.0045452, 0.0046137, 0.0066399, 0.0050993], + 'cpu5': [0.0062369, 0.0053982, 0.0056624, 0.00606, 0.0062017], + 'cpu6': [0.006235, 0.0041764, 0.0048101, 0.0053798, 0.0050934], + 'cpu7': [0.0050709, 0.005482, 0.0058926, 0.0052934, 0.0049544], + 'memory': [2785000000.0, 2785000000.0, 2785000000.0, + 2785000000.0, 2785000000.0] + } + rrd_updates.report.obj_reports[uuid] = obj_report + rrd_updates.report.rows = 1 + session = mock_xapisession() + + mock_get_percent_fs_usage.return_value = 0.12 + mock_get_percent_mem_usage.return_value = 0.17380 + mock_get_percent_log_fs_usage.return_value = float("NaN") + monitor.process_rrd_updates(rrd_updates, session) + mock_get_percent_fs_usage.assert_called() + mock_get_percent_log_fs_usage.assert_called() + mock_get_percent_mem_usage.assert_called() + self.assertAlmostEqual(monitor.variables[0].value, 0.12) + self.assertAlmostEqual(monitor.variables[1].value, 0.17380) + self.assertTrue(math.isnan(monitor.variables[2].value)) + + +class TestHOSTMonitor(unittest.TestCase): + '''Test getting HOST performance data from HOSTMonitor''' + + @patch("perfmon.XapiSession") + def test_process_rrd_updates(self, mock_xapisession): + uuid = 'e1ae3f5d-4c8b-4575-bbb8-2af7e8a2c31e' + perfmon.all_xmlconfigs = {'e1ae3f5d-4c8b-4575-bbb8-2af7e8a2c31e': + ''' + '''} + monitor = perfmon.HOSTMonitor(uuid) + rrd_updates = perfmon.RRDUpdates() + obj_report = perfmon.ObjectReport("vm", uuid) + obj_report.vars = { + 'cpu0': [0.0063071, 0.0048038, 0.0045862, 0.0048865, 0.0048923], + 'cpu1': [0.0067629, 0.0055811, 0.0058988, 0.0058809, 0.0053645], + 'cpu2': [0.0088599, 0.0078701, 0.0058573, 0.0063993, 0.0056833], + 'cpu3': [0.0085826, 0.0056874, 0.005697, 0.0061155, 0.0048769], + 'cpu4': [0.0051265, 0.0045452, 0.0046137, 0.0066399, 0.0050993], + 'cpu5': [0.0062369, 0.0053982, 0.0056624, 0.00606, 0.0062017], + 'cpu6': [0.006235, 0.0041764, 0.0048101, 0.0053798, 0.0050934], + 'cpu7': [0.0050709, 0.005482, 0.0058926, 0.0052934, 0.0049544], + 'memory': [2785000000.0, 2785000000.0, 2785000000.0, + 2785000000.0, 2785000000.0] + } + rrd_updates.report.obj_reports[uuid] = obj_report + rrd_updates.report.rows = 5 + session = mock_xapisession() + + monitor.process_rrd_updates(rrd_updates, session) + # Average of cpu0-cpu7 (row 5) + # [0.0048923, 0.0053645, 0.0056833, 0.0048769, + # 0.0050993, 0.0062017, 0.0050934, 0.0049544] + self.assertAlmostEqual(monitor.variables[0].value, 0.005270725) + + def test_refresh_config(self): + perfmon.all_xmlconfigs = {} + 
perfmon.sruuids_by_hostuuid = {} + + host_uuid = '28a574e4-bf57-4476-a83d-72cba7578d23' + sr_uuid = '0e7f8fb3-1ba2-4bce-9889-48812273a316' + perfmon_config = '' \ + '' \ + '' \ + '' \ + '' \ + '' \ + '' \ + '' \ + '' \ + '' \ + '' \ + '' \ + '' + + mock_session = MagicMock() + mock_session.xenapi.host.get_all_records.return_value = { + 'OpaqueRef:8be06dc8-bed5-4d81-d030-937eca11094a':{ + 'uuid': host_uuid, + 'other_config': { + 'iscsi_iqn': 'iqn.2024-03.xenrtcloud:339cd227', + 'agent_start_time': '1710910331.', + 'boot_time': '1710910266.', + 'perfmon': perfmon_config} + } + } + mock_session.xenapi.SR.get_all_records.return_value = { + 'OpaqueRef:fffc65bb-b909-03b2-c20a-8277434a4495': { + 'uuid': sr_uuid, + 'other_config': { + 'storage_driver_domain': 'OpaqueRef:11de3275-b5e4-a56c-a295', + 'is_system_domain': 'true', 'perfmon': perfmon_config + }, + 'PBDs': ['pbd1', 'pbd2'] + } + } + mock_session.xenapi.PBD.get_host.return_value = \ + 'OpaqueRef:8be06dc8-bed5-4d81-d030-937eca11094a' + perfmon.update_all_xmlconfigs(mock_session) + monitor = perfmon.HOSTMonitor(host_uuid) + monitor.refresh_config() + expected_sruuids = {sr_uuid} + self.assertEqual(set(monitor.secondary_xmlconfigs), expected_sruuids) + + +@patch("perfmon.XapiSession") +class TestSRMonitor(unittest.TestCase): + '''Test getting SR performance data from SrMonitor''' + def test_process_rrd_updates(self, mock_xapisession): + uuid = 'e1ae3f5d-4c8b-4575-bbb8-2af7e8a2c31e' + perfmon.all_xmlconfigs = {'e1ae3f5d-4c8b-4575-bbb8-2af7e8a2c31e': + ''' + '''} + monitor = perfmon.SRMonitor(uuid) + rrd_updates = perfmon.RRDUpdates() + obj_report = perfmon.ObjectReport("vm", uuid) + obj_report.vars = { + 'size': [100, 200, 300, 400, 500], + 'physical_utilisation': [2000, 3000, 4000, 5000, 6000], + } + rrd_updates.report.obj_reports[uuid] = obj_report + rrd_updates.report.rows = 5 + session = mock_xapisession() + + monitor.process_rrd_updates(rrd_updates, session) + # get_percent_sr_usage([500, 6000]) + self.assertAlmostEqual(monitor.variables[0].value, 0.08333333333333333) + + +class TestRRDUpdates(unittest.TestCase): + '''Test Class RRDUpdates and RRDContentHandler''' + + @patch('time.time', return_value=100000) + def test_init(self, _): + rrd_updates = perfmon.RRDUpdates() + + expected_start = 100000 - perfmon.interval + self.assertEqual(rrd_updates.params['start'], expected_start) + self.assertEqual(rrd_updates.params["host"], "true") + self.assertEqual(rrd_updates.params["sr_uuid"], "all") + self.assertEqual(rrd_updates.params["cf"], "AVERAGE") + self.assertEqual(rrd_updates.params["interval"], str(perfmon.rrd_step)) + + + @patch('time.time', return_value=100000) + @patch("perfmon.XapiSession") + @patch('urllib.request.urlopen') + def test_refresh(self, mock_urlopen, mock_xapisession, _): + rrd_updates = perfmon.RRDUpdates() + + # mock_session + mock_session = mock_xapisession() + mock_session.id.return_value = "mocked_session_id" + + # mock xmlsource + xml = r''' + + 1213578000 + 3600 + 1213617600 + 2 + 12 + + AVERAGE:vm:ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3:cpu1 + AVERAGE:vm:ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3:cpu0 + AVERAGE:vm:ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3:memory + MIN:vm:ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3:cpu1 + MIN:vm:ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3:cpu0 + MIN:vm:ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3:memory + MAX:vm:ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3:cpu1 + MAX:vm:ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3:cpu0 + MAX:vm:ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3:memory + LAST:vm:ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3:cpu1 + 
LAST:vm:ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3:cpu0 + LAST:vm:ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3:memory + + + + + 1213617600 # The first row corresponds to end time + 0.0 + 0.0282 + 209715200.0000 + 0.0 + 0.0201 + 209715200.0000 + 0.0 + 0.0445 + 209715200.0000 + 0.0 + 0.0243 + 209715200.0000 + + + 1213616600 #The last row corresponds to Start time + 0.0 + 0.0282 + 209715200.0000 + 0.0 + 0.0201 + 209715200.0000 + 0.0 + 0.0445 + 209715200.0000 + 0.0 + 0.0243 + 209715200.0000 + + +''' + xml_rrdupdates = xml.encode(encoding='utf-8') + cm = MagicMock() + cm.read.return_value = xml_rrdupdates + cm.__enter__.return_value = cm + mock_urlopen.return_value = cm + rrd_updates.refresh(mock_session) + + # Test __repr__ + print(rrd_updates) + + self.assertEqual(rrd_updates.get_num_rows(), 2) + self.assertIsNotNone( + rrd_updates.get_obj_report_by_uuid("ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3") + ) + self.assertIsNone( + rrd_updates.get_obj_report_by_uuid("123345") + ) + self.assertEqual(rrd_updates.get_uuid_list_by_objtype("vm"), + ["ecd8d7a0-1be3-4d91-bd0e-4888c0e30ab3"]) + + +class TestVariable(unittest.TestCase): + '''Test Class Varible''' + + def test_set_active(self): + # Construct varible node for VaribleConfig + # Not used, just for input + xmlconfig = b'' \ + b'' + xmldoc = perfmon.minidom.parseString(xmlconfig) + variable_nodes = xmldoc.getElementsByTagName("variable") + node = variable_nodes[0] + + # Construct function alarm_create and mock_get_default_varible_config + # Not used, just for input + uuid = 'e1ae3f5d-4c8b-4575-bbb8-2af7e8a2c31e' + monitor = perfmon.VMMonitor(uuid) + var = perfmon.Variable(node, monitor.alarm_create, + monitor.get_default_variable_config) + + # Call set_active with active=True + var.set_active(True) + self.assertTrue(var.active) + + # Call set_active with active=False + var.set_active(False) + self.assertFalse(var.active) + + @patch("perfmon.XapiSession") + def test_update(self, mock_xapisession): + xmlconfig = b'' \ + b'' \ + b'' \ + b'' \ + b'' \ + b'' \ + b'' \ + b'' \ + b'' \ + b'' \ + b'' \ + b'' \ + b'' + xmldoc = perfmon.minidom.parseString(xmlconfig) + variable_nodes = xmldoc.getElementsByTagName("variable") + node = variable_nodes[0] + + uuid = 'e1ae3f5d-4c8b-4575-bbb8-2af7e8a2c31e' + monitor = perfmon.VMMonitor(uuid) + var = perfmon.Variable(node, monitor.alarm_create, + monitor.get_default_variable_config) + + session = mock_xapisession() + + # Trigger alarm + var.trigger_down_counter = 50 + var.update(0.95,session) + self.assertEqual(var.trigger_down_counter, 60) + + # Not trigger alarm - time isn't up + var.trigger_down_counter = 100 + var.update(0.95,session) + self.assertEqual(var.trigger_down_counter, 40) + + # Not trigger alarm - level good + var.trigger_down_counter = 50 + var.update(0.8,session) + self.assertEqual(var.trigger_down_counter, 60) + +if __name__ == '__main__': + unittest.main() From 99391dd71dd8db0e5baaaed9aeb91134bef97ca0 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 25 Mar 2024 05:11:12 +0000 Subject: [PATCH 049/222] CI configuration change for python unit test coverage Signed-off-by: Stephen Cheng --- .codecov.yml | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/.codecov.yml b/.codecov.yml index 47ef46ac090..79d69aa0b14 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -177,10 +177,6 @@ coverage: # threshold: 20% - # Checks each Python version separately: - python-3.11: - flags: ["python3.11"] - # # Project limits # -------------- @@ -235,12 +231,12 @@ component_management: - type: project # 
`auto` will use the coverage from the base commit (pull request base # or parent commit) coverage to compare against. - target: auto + target: 48 threshold: 2% - type: patch - target: auto - threshold: 10% + target: 80 + threshold: 5% individual_components: From 6acbf1aab83fc3fb44bf17d58c55d43313222f1e Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Tue, 26 Mar 2024 01:09:18 +0000 Subject: [PATCH 050/222] CP-47653: Explicit exception chaining Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 669182f5ec4..4d1244856a7 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -533,8 +533,8 @@ class VariableConfig: def __init__(self, xmldoc, alarm_create_callback, get_default_variable_config): try: name = xmldoc.getElementsByTagName("name")[0].getAttribute("value") - except IndexError: - raise XmlConfigException("variable missing 'name' tag") from None + except IndexError as e: + raise XmlConfigException("variable missing 'name' tag") from e def get_value(tag): try: @@ -556,10 +556,10 @@ class VariableConfig: self.name = name try: self.rrd_regex = re.compile("^%s$" % rrd_regex) - except: + except Exception as e: raise XmlConfigException( "variable %s: regex %s does not compile" % (name, rrd_regex) - ) from None + ) from e if consolidation_fn not in supported_consolidation_functions: raise XmlConfigException( @@ -572,26 +572,26 @@ class VariableConfig: try: self.alarm_trigger_period = int(alarm_trigger_period) - except: + except Exception as e: raise XmlConfigException( "variable %s: alarm_trigger_period %s not an int" % (name, alarm_trigger_period) - ) from None + ) from e try: self.alarm_auto_inhibit_period = int(alarm_auto_inhibit_period) - except: + except Exception as e: raise XmlConfigException( "variable %s: alarm_auto_inhibit_period %s not an int" % (name, alarm_auto_inhibit_period) - ) from None + ) from e try: trigger_level = float(alarm_trigger_level) - except: + except Exception as e: raise XmlConfigException( "variable %s: alarm_trigger_level %s not a float" % (name, alarm_trigger_level) - ) from None + ) from e self.alarm_priority = alarm_priority @@ -1329,8 +1329,8 @@ def main(): # pragma: no cover "interval_percent_dither=", ], ) - except getopt.GetoptError: - raise UsageException from None + except getopt.GetoptError as e: + raise UsageException from e for opt, arg in opts: if opt in ("-i", "--interval"): @@ -1599,8 +1599,7 @@ if __name__ == "__main__": # pragma: no cover ex = sys.exc_info() err = traceback.format_exception(*ex) - # Python built-in Exception has args, - # but XenAPI.Failure has details instead. Sigh. + # XenAPI.Failure has `details`. 
try: # print the exception args nicely log_err(str(exp)) From 88c9dfcb09c28bd4b46d09e3c794dfa7339a323a Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Wed, 27 Mar 2024 08:44:31 +0000 Subject: [PATCH 051/222] CP-47653: Fix a `bytes-str` bug Also add the fix for scripts/plugins/perfmon Signed-off-by: Stephen Cheng --- python3/bin/perfmon | 21 ++++++++------------- scripts/plugins/perfmon | 5 +++-- 2 files changed, 11 insertions(+), 15 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index 4d1244856a7..e5c6741b2d3 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -358,9 +358,9 @@ class RRDUpdates: def refresh(self, session, override_params=None): "reread the rrd_updates over CGI and parse" - if override_params is None: - override_params = {} - params = override_params + params = {} + if override_params is not None: + params = override_params params["session_id"] = session.id() params.update(self.params) paramstr = "&".join(["%s=%s" % (k, params[k]) for k in params]) @@ -426,12 +426,11 @@ def average(mylist): return sum(mylist) / float(len(mylist)) -def get_percent_log_fs_usage(ignored): +def get_percent_log_fs_usage(_): ''' Get the percent usage of the host filesystem for logs partition. Input list is ignored and should be empty ''' - _ = ignored # unused: not sure if it'll be used later, passing pylint fs_output = subprocess.getoutput("df /etc/passwd") log_fs_output = subprocess.getoutput("df /var/log") fs_output = " ".join(fs_output.splitlines()[1:]) @@ -445,13 +444,11 @@ def get_percent_log_fs_usage(ignored): return float("NaN") -def get_percent_fs_usage(ignored): +def get_percent_fs_usage(_): ''' Get the percent usage of the host filesystem. Input list is ignored and should be empty ''' - _ = ignored # unused: not sure if it'll be used later, passing pylint - # this file is on the filesystem of interest in both OEM and Retail output = subprocess.getoutput("df /etc/passwd") output = " ".join( @@ -462,12 +459,11 @@ def get_percent_fs_usage(ignored): return float(percentage[0:-1]) / 100.0 -def get_percent_mem_usage(ignored): +def get_percent_mem_usage(_): ''' Get the percent usage of Dom0 memory/swap. 
Input list is ignored and should be empty ''' - _ = ignored # unused: not sure if it'll be used later, passing pylint try: with open("/proc/meminfo", "r", encoding="utf-8") as memfd: memlist = memfd.readlines() @@ -1509,7 +1505,7 @@ def main(): # pragma: no cover timeout = rand(interval, interval + dither) cmdsock.settimeout(timeout) try: - cmd = cmdsock.recv(cmdmaxlen) + cmd = cmdsock.recv(cmdmaxlen).decode() except socket.timeout: pass else: @@ -1526,8 +1522,7 @@ def main(): # pragma: no cover return 0 -def sigterm_handler(sig, stack_frame): # pragma: no cover - _ = stack_frame # unused: not sure if it'll be used later, passing pylint +def sigterm_handler(sig, _): # pragma: no cover log_err("Caught signal %d - exiting" % sig) sys.exit(1) diff --git a/scripts/plugins/perfmon b/scripts/plugins/perfmon index 2186c938938..e3dc2452691 100644 --- a/scripts/plugins/perfmon +++ b/scripts/plugins/perfmon @@ -14,16 +14,17 @@ def send_perfmon_cmd(cmd): "Return True for success, or ERROR_%d: otherwise" if len(cmd) >= cmdmaxlen: return "ERROR_0: command too long" + cmd_bytes = cmd.encode() try: sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) - rc = sock.sendto(cmd, cmdsockname) + rc = sock.sendto(cmd_bytes, cmdsockname) except socket.error as e: err, msg = e.args return "ERROR_%d: %s" % (err, msg) except Exception: return "ERROR_1: unknown error" - return str(rc == len(cmd)) + return str(rc == len(cmd_bytes)) def stop(session, args): From 1ade72d198f6d89ccd9ac3bcde70403667bd4335 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Wed, 17 Apr 2024 12:00:00 +0200 Subject: [PATCH 052/222] CA-390883: Move usb_reset.py to python3, test mount() in a namespace Use a rootless container (like unshare --map-root-user --mount) to test the correct calling convention for mount()/umount(). Use a context manager test fixture to temporarily mock module imports: This allows to mock global modules only temporary for importing the testee without affecting other tests. - Add a sufficient testcase to test usb_reset.py: mount() and umount() without mocking the system or library calls in any way. - Use python3/tests a Python tests package to allow for non-deprecated relative imports: Absolute imports within a module are deprecated. 
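For illustration, the namespace helpers introduced below compose as follows. This is a minimal sketch only: the tmpfs target path is arbitrary, and the python3.tests.rootless_container module path assumes the tests are run from the repository root.

    from python3.tests.rootless_container import (
        enter_private_mount_namespace, mount, umount)

    # Enter a new user+mount namespace in which the current uid is mapped to 0,
    # so mount(2)/umount(2) are permitted without real root privileges.
    enter_private_mount_namespace()
    # Mounts made here are private to this process and disappear with it.
    mount(source="tmpfs", target="/tmp", fs="tmpfs")
    umount("/tmp")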
Signed-off-by: Bernhard Kaindl --- python3/Makefile | 1 + {scripts => python3/libexec}/usb_reset.py | 0 python3/tests/conftest.py | 10 +++ python3/tests/import_helper.py | 70 +++++++++++++++++++ python3/tests/rootless_container.py | 83 +++++++++++++++++++++++ python3/tests/test_usb_reset_mount.py | 14 ++++ scripts/Makefile | 1 - 7 files changed, 178 insertions(+), 1 deletion(-) rename {scripts => python3/libexec}/usb_reset.py (100%) create mode 100644 python3/tests/conftest.py create mode 100644 python3/tests/import_helper.py create mode 100644 python3/tests/rootless_container.py create mode 100644 python3/tests/test_usb_reset_mount.py diff --git a/python3/Makefile b/python3/Makefile index 26e2bdfa943..1384df9284c 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -13,6 +13,7 @@ install: $(IDATA) packages/observer.py $(DESTDIR)$(SITE3_DIR)/ + $(IPROG) libexec/usb_reset.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/usb_scan.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/nbd_client_manager.py $(DESTDIR)$(LIBEXECDIR) diff --git a/scripts/usb_reset.py b/python3/libexec/usb_reset.py similarity index 100% rename from scripts/usb_reset.py rename to python3/libexec/usb_reset.py diff --git a/python3/tests/conftest.py b/python3/tests/conftest.py new file mode 100644 index 00000000000..d0a4777e1f1 --- /dev/null +++ b/python3/tests/conftest.py @@ -0,0 +1,10 @@ +"""scripts/unit_test/conftest.py: Common pytest module for shared pytest fixtures""" +import pytest + +from .rootless_container import enter_private_mount_namespace + + +@pytest.fixture(scope="session") +def private_mount_namespace(): + """Enter a private mount namespace that allows us to test mount and unmount""" + return enter_private_mount_namespace() diff --git a/python3/tests/import_helper.py b/python3/tests/import_helper.py new file mode 100644 index 00000000000..87541c9b6cf --- /dev/null +++ b/python3/tests/import_helper.py @@ -0,0 +1,70 @@ +"""helpers for unit-testing functions in scripts without permanent global mocks""" +import os +import sys +from contextlib import contextmanager +from types import ModuleType + +from typing import Generator +from mock import Mock + + +@contextmanager +def mocked_modules(*module_names): # type:(str) -> Generator[None, None, None] + """Context manager that temporarily mocks the specified modules. + + :param module_names: Variable number of names of the modules to be mocked. + :yields: None + + During the context, the specified modules are added to the sys.modules + dictionary as instances of the ModuleType class. + This effectively mocks the modules, allowing them to be imported and used + within the context. After the context, the mocked modules are removed + from the sys.modules dictionary. + + Example usage: + ```python + with mocked_modules("module1", "module2"): + # Code that uses the mocked modules + ``` + """ + for module_name in module_names: + sys.modules[module_name] = Mock() + yield + for module_name in module_names: + sys.modules.pop(module_name) + + +def import_file_as_module(relative_script_path): # type:(str) -> ModuleType + """Import a Python script without the .py extension as a python module. + + :param relative_script_path (str): The relative path of the script to import. + :returns module: The imported module. + :raises: AssertionError: If the spec or loader is not available. + + Note: + - This function uses different methods depending on the Python version. + - For Python 2, it uses the imp module. + - For Python 3, it uses the importlib module. 
+ + Example: + - import_script_as_module('scripts/mail-alarm') # Returns the imported module. + """ + script_path = os.path.dirname(__file__) + "/../../" + relative_script_path + module_name = os.path.basename(script_path.replace(".py", "")) + + # For Python 3.11+: Import Python script without the .py extension: + # https://gist.github.com/bernhardkaindl/1aaa04ea925fdc36c40d031491957fd3: + # pylint: disable-next=import-outside-toplevel + from importlib import ( # pylint: disable=no-name-in-module + machinery, + util, + ) + + loader = machinery.SourceFileLoader(module_name, script_path) + spec = util.spec_from_loader(module_name, loader) + assert spec + assert spec.loader + module = util.module_from_spec(spec) + sys.modules[module_name] = module + spec.loader.exec_module(module) + return module diff --git a/python3/tests/rootless_container.py b/python3/tests/rootless_container.py new file mode 100644 index 00000000000..30ff364ace3 --- /dev/null +++ b/python3/tests/rootless_container.py @@ -0,0 +1,83 @@ +"""rootless_container.py: Create a rootless container on any Linux and GitHub CI""" +import ctypes +import os + +# Unshare the user namespace, so that the calling process is moved into a new +# user namespace which is not shared with any previously existing process. +# Needed so that the current user id can be mapped to 0 for getting a new +# mount namespace. +CLONE_NEWUSER = 0x10000000 +# Unshare the mount namespace, so that the calling process has a private copy +# of its root directory namespace which is not shared with any other process: +CLONE_NEWNS = 0x00020000 +# Flags for mount(2): +MS_BIND = 4096 +MS_REC = 16384 +MS_PRIVATE = 1 << 18 + + +def unshare(flags): # type:(int) -> None + """Wrapper for the library call to unshare Linux kernel namespaces""" + lib = ctypes.CDLL(None, use_errno=True) + lib.unshare.argtypes = [ctypes.c_int] + rc = lib.unshare(flags) + if rc != 0: # pragma: no cover + errno = ctypes.get_errno() + raise OSError(errno, os.strerror(errno), flags) + + +def mount(source="none", target="", fs="", flags=0, options=""): + # type:(str, str, str, int, str) -> None + """Wrapper for the library call mount(). Supports Python2.7 and Python3.x""" + lib = ctypes.CDLL(None, use_errno=True) + lib.mount.argtypes = ( + ctypes.c_char_p, + ctypes.c_char_p, + ctypes.c_char_p, + ctypes.c_ulong, + ctypes.c_char_p, + ) + result = lib.mount( + source.encode(), target.encode(), fs.encode(), flags, options.encode() + ) + if result < 0: # pragma: no cover + errno = ctypes.get_errno() + raise OSError( + errno, + "mount " + target + " (" + options + "): " + os.strerror(errno), + ) + + +def umount(target): # type:(str) -> None + """Wrapper for the Linux umount system call, supports Python2.7 and Python3.x""" + lib = ctypes.CDLL(None, use_errno=True) + result = lib.umount(ctypes.c_char_p(target.encode())) + if result < 0: # pragma: no cover + errno = ctypes.get_errno() + raise OSError(errno, "umount " + target + ": " + os.strerror(errno)) + + +def enter_private_mount_namespace(): + """Enter a private mount and user namespace with the user and simulate uid 0 + + Some code like mount() requires to be run as root. The container simulates + root-like privileges and a new mount namespace that allows mount() in it. 
+ + Implements the equivalent of `/usr/bin/unshare --map-root-user --mount` + """ + + # Read the actual user and group ids before entering the new user namespace: + real_uid = os.getuid() + real_gid = os.getgid() + unshare(CLONE_NEWUSER | CLONE_NEWNS) + # Setup user map to map the user id to behave like uid 0: + with open("/proc/self/uid_map", "wb") as proc_self_user_map: + proc_self_user_map.write(b"0 %d 1" % real_uid) + with open("/proc/self/setgroups", "wb") as proc_self_set_groups: + proc_self_set_groups.write(b"deny") + # Setup group map for the user's gid to behave like gid 0: + with open("/proc/self/gid_map", "wb") as proc_self_group_map: + proc_self_group_map.write(b"0 %d 1" % real_gid) + # Private root mount in the mount namespace top support mounting a private tmpfs: + mount(target="/", flags=MS_REC | MS_PRIVATE) + return True diff --git a/python3/tests/test_usb_reset_mount.py b/python3/tests/test_usb_reset_mount.py new file mode 100644 index 00000000000..9cfe3b5b804 --- /dev/null +++ b/python3/tests/test_usb_reset_mount.py @@ -0,0 +1,14 @@ +"""scripts/unit_test/test_usb_reset_mount.py: Test usb_reset.mount and .umount""" +from __future__ import print_function + +from .import_helper import import_file_as_module, mocked_modules + + +def test_usb_reset_mount_umount(private_mount_namespace): + """Test usb_reset.mount and .umount""" + assert private_mount_namespace + with mocked_modules("xcp", "xcp.logger"): + usb_reset = import_file_as_module("python3/libexec/usb_reset.py") + usb_reset.log.error = print + usb_reset.mount(source="tmpfs", target="/tmp", fs="tmpfs") + usb_reset.umount("/tmp") diff --git a/scripts/Makefile b/scripts/Makefile index 6a850199ba6..38459115396 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -108,7 +108,6 @@ install: $(IPROG) pam.d-xapi $(DESTDIR)/etc/pam.d/xapi $(IPROG) upload-wrapper logs-download $(DESTDIR)$(LIBEXECDIR) $(IDATA) usb-policy.conf $(DESTDIR)$(ETCXENDIR) - $(IPROG) usb_reset.py $(DESTDIR)$(LIBEXECDIR) mkdir -p $(DESTDIR)$(OPTDIR)/packages/iso #omg XXX $(IPROG) xapi-rolling-upgrade-miami $(DESTDIR)$(LIBEXECDIR)/xapi-rolling-upgrade $(IPROG) set-hostname $(DESTDIR)$(LIBEXECDIR) From 2bf2a44cc29740e2bcc50d9625fbb35eaf9ad93e Mon Sep 17 00:00:00 2001 From: Pau Ruiz Safont Date: Wed, 24 Apr 2024 12:00:00 +0200 Subject: [PATCH 053/222] python3/unittest: Replace import_file with import_helper Bernhard Kaindl: Resolved a conflict with one of my other upcoming changes which removes the need to a pytype ignore by using a cast() instead, which fixes pyright/pylance/vscode. Also keep import_helper in python3/tests as the the name "unittest" sould likely avoided as it clashes with the unittest module. The tests in python3/tests are written for pytest instead and not all of them will classify as unit tests, so using the name unittest for them would also be a misnomer. Using the name tests is shorter and more generic, and we can also use the separation beween tests and unittest to differentiate between modern pytest tests and legact unittest-based tests which should possibly be better migrated to pytest at some point for the benefits that pytest gives: For example with pytest, you can use use just assert and you do not need to use self.assert...(), because pytest implements the proper assert matching diagnostics. In the long run, the classic unittest tests should no longer be used. 
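To make the pytest point above concrete, here is the same check written both ways; the add() helper is hypothetical and purely illustrative, not part of this tree.

    import unittest

    def add(a, b):
        return a + b

    class TestAddLegacy(unittest.TestCase):
        # classic unittest style: a dedicated assert method is needed
        def test_add(self):
            self.assertEqual(add(2, 2), 4)

    # pytest style: a plain assert is enough; on failure, pytest's assertion
    # rewriting still reports the values on both sides of the comparison
    def test_add_pytest_style():
        assert add(2, 2) == 4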
Co-authored-by: Bernhard Kaindl Signed-off-by: Pau Ruiz Safont --- python3/__init__.py | 0 python3/tests/test_observer.py | 4 +-- python3/tests/test_usb_reset_mount.py | 2 +- python3/unittest/import_file.py | 25 --------------- python3/unittest/test_hfx_filename.py | 8 ++--- python3/unittest/test_nbd_client_manager.py | 5 ++- python3/unittest/test_perfmon.py | 34 ++++++++++----------- python3/unittest/test_usb_scan.py | 11 ++++--- 8 files changed, 33 insertions(+), 56 deletions(-) create mode 100644 python3/__init__.py delete mode 100644 python3/unittest/import_file.py diff --git a/python3/__init__.py b/python3/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/python3/tests/test_observer.py b/python3/tests/test_observer.py index 53944d97ca9..9464efbc3e3 100644 --- a/python3/tests/test_observer.py +++ b/python3/tests/test_observer.py @@ -10,7 +10,7 @@ with patch("os.listdir") as mock_listdir: # Prevent it finding an observer.conf mock_listdir.return_value = [] - from packages import observer + from python3.packages import observer # mock modules to avoid dependencies sys.modules["opentelemetry"] = MagicMock() @@ -29,7 +29,7 @@ OTEL_RESOURCE_ATTRIBUTES='service.name=sm' """ TEST_OBSERVER_CONF = "test-observer.conf" -OBSERVER_OPEN = "packages.observer.open" +OBSERVER_OPEN = "python3.packages.observer.open" # pylint: disable=missing-function-docstring,protected-access diff --git a/python3/tests/test_usb_reset_mount.py b/python3/tests/test_usb_reset_mount.py index 9cfe3b5b804..e9d432742f6 100644 --- a/python3/tests/test_usb_reset_mount.py +++ b/python3/tests/test_usb_reset_mount.py @@ -1,7 +1,7 @@ """scripts/unit_test/test_usb_reset_mount.py: Test usb_reset.mount and .umount""" from __future__ import print_function -from .import_helper import import_file_as_module, mocked_modules +from python3.tests.import_helper import import_file_as_module, mocked_modules def test_usb_reset_mount_umount(private_mount_namespace): diff --git a/python3/unittest/import_file.py b/python3/unittest/import_file.py deleted file mode 100644 index 581f8f4b401..00000000000 --- a/python3/unittest/import_file.py +++ /dev/null @@ -1,25 +0,0 @@ -""" -This file is used for importing a non-".py" file as a module in unit test. 
-It never runs directly, so no shebang and no main() -""" -import sys -import os -from importlib import machinery, util - -def import_from_file(module_name, file_path): - """Import a file as a module""" - loader = machinery.SourceFileLoader(module_name, file_path) - spec = util.spec_from_loader(module_name, loader) - assert spec - assert spec.loader - module = util.module_from_spec(spec) - # Probably a good idea to add manually imported module stored in sys.modules - sys.modules[module_name] = module - spec.loader.exec_module(module) - return module - -def get_module(module_name, file_path): - """get the module from a file""" - testdir = os.path.dirname(__file__) - print(testdir) - return import_from_file(module_name, "{}/{}".format(testdir, file_path)) diff --git a/python3/unittest/test_hfx_filename.py b/python3/unittest/test_hfx_filename.py index 0fc4f5abba3..ca3618f38c2 100644 --- a/python3/unittest/test_hfx_filename.py +++ b/python3/unittest/test_hfx_filename.py @@ -7,12 +7,12 @@ import sys import unittest from mock import MagicMock, patch, call -from import_file import get_module +from python3.tests.import_helper import import_file_as_module # mock modules to avoid dependencies sys.modules["XenAPI"] = MagicMock() -hfx_filename = get_module("hfx_filename", "../bin/hfx_filename") +hfx_filename = import_file_as_module("python3/bin/hfx_filename") @patch("socket.socket") @@ -82,7 +82,7 @@ def test_rpc_international_character(self, mock_socket): def test_db_get_uuid(self, mock_socket): """ - Tests db_get_uuid + Tests db_get_uuid """ mock_connected_socket = MagicMock() mock_socket.return_value = mock_connected_socket @@ -100,7 +100,7 @@ def test_db_get_uuid(self, mock_socket): def test_read_field(self, mock_socket): """ - Tests read_field + Tests read_field """ mock_connected_socket = MagicMock() mock_socket.return_value = mock_connected_socket diff --git a/python3/unittest/test_nbd_client_manager.py b/python3/unittest/test_nbd_client_manager.py index 48ca22be297..224a1c3e2ea 100644 --- a/python3/unittest/test_nbd_client_manager.py +++ b/python3/unittest/test_nbd_client_manager.py @@ -3,13 +3,12 @@ This module provides unittest for nbd_client_manager.py """ -import sys import unittest import subprocess from mock import MagicMock, patch, mock_open, call -from import_file import get_module +from python3.tests.import_helper import import_file_as_module -nbd_client_manager = get_module("nbd_client_manager", "../libexec/nbd_client_manager.py") +nbd_client_manager = import_file_as_module("python3/libexec/nbd_client_manager.py") @patch('subprocess.Popen') class TestCallFunction(unittest.TestCase): diff --git a/python3/unittest/test_perfmon.py b/python3/unittest/test_perfmon.py index a61e66aeaa4..9d638f4fab4 100644 --- a/python3/unittest/test_perfmon.py +++ b/python3/unittest/test_perfmon.py @@ -7,12 +7,12 @@ import math import unittest from mock import MagicMock, patch, mock_open -from import_file import get_module +from python3.tests.import_helper import import_file_as_module # mock modules to avoid dependencies sys.modules["XenAPI"] = MagicMock() -perfmon = get_module("perfmon", "../bin/perfmon") +perfmon = import_file_as_module("python3/bin/perfmon") @patch("subprocess.getoutput") @@ -292,13 +292,13 @@ def test_process_rrd_updates(self, mock_get_percent_mem_usage, obj_report = perfmon.ObjectReport("vm", uuid) obj_report.vars = { 'cpu0': [0.0063071, 0.0048038, 0.0045862, 0.0048865, 0.0048923], - 'cpu1': [0.0067629, 0.0055811, 0.0058988, 0.0058809, 0.0053645], - 'cpu2': [0.0088599, 0.0078701, 
0.0058573, 0.0063993, 0.0056833], - 'cpu3': [0.0085826, 0.0056874, 0.005697, 0.0061155, 0.0048769], - 'cpu4': [0.0051265, 0.0045452, 0.0046137, 0.0066399, 0.0050993], - 'cpu5': [0.0062369, 0.0053982, 0.0056624, 0.00606, 0.0062017], - 'cpu6': [0.006235, 0.0041764, 0.0048101, 0.0053798, 0.0050934], - 'cpu7': [0.0050709, 0.005482, 0.0058926, 0.0052934, 0.0049544], + 'cpu1': [0.0067629, 0.0055811, 0.0058988, 0.0058809, 0.0053645], + 'cpu2': [0.0088599, 0.0078701, 0.0058573, 0.0063993, 0.0056833], + 'cpu3': [0.0085826, 0.0056874, 0.005697, 0.0061155, 0.0048769], + 'cpu4': [0.0051265, 0.0045452, 0.0046137, 0.0066399, 0.0050993], + 'cpu5': [0.0062369, 0.0053982, 0.0056624, 0.00606, 0.0062017], + 'cpu6': [0.006235, 0.0041764, 0.0048101, 0.0053798, 0.0050934], + 'cpu7': [0.0050709, 0.005482, 0.0058926, 0.0052934, 0.0049544], 'memory': [2785000000.0, 2785000000.0, 2785000000.0, 2785000000.0, 2785000000.0] } @@ -332,13 +332,13 @@ def test_process_rrd_updates(self, mock_xapisession): obj_report = perfmon.ObjectReport("vm", uuid) obj_report.vars = { 'cpu0': [0.0063071, 0.0048038, 0.0045862, 0.0048865, 0.0048923], - 'cpu1': [0.0067629, 0.0055811, 0.0058988, 0.0058809, 0.0053645], - 'cpu2': [0.0088599, 0.0078701, 0.0058573, 0.0063993, 0.0056833], - 'cpu3': [0.0085826, 0.0056874, 0.005697, 0.0061155, 0.0048769], - 'cpu4': [0.0051265, 0.0045452, 0.0046137, 0.0066399, 0.0050993], - 'cpu5': [0.0062369, 0.0053982, 0.0056624, 0.00606, 0.0062017], - 'cpu6': [0.006235, 0.0041764, 0.0048101, 0.0053798, 0.0050934], - 'cpu7': [0.0050709, 0.005482, 0.0058926, 0.0052934, 0.0049544], + 'cpu1': [0.0067629, 0.0055811, 0.0058988, 0.0058809, 0.0053645], + 'cpu2': [0.0088599, 0.0078701, 0.0058573, 0.0063993, 0.0056833], + 'cpu3': [0.0085826, 0.0056874, 0.005697, 0.0061155, 0.0048769], + 'cpu4': [0.0051265, 0.0045452, 0.0046137, 0.0066399, 0.0050993], + 'cpu5': [0.0062369, 0.0053982, 0.0056624, 0.00606, 0.0062017], + 'cpu6': [0.006235, 0.0041764, 0.0048101, 0.0053798, 0.0050934], + 'cpu7': [0.0050709, 0.005482, 0.0058926, 0.0052934, 0.0049544], 'memory': [2785000000.0, 2785000000.0, 2785000000.0, 2785000000.0, 2785000000.0] } @@ -415,7 +415,7 @@ def test_process_rrd_updates(self, mock_xapisession): obj_report = perfmon.ObjectReport("vm", uuid) obj_report.vars = { 'size': [100, 200, 300, 400, 500], - 'physical_utilisation': [2000, 3000, 4000, 5000, 6000], + 'physical_utilisation': [2000, 3000, 4000, 5000, 6000], } rrd_updates.report.obj_reports[uuid] = obj_report rrd_updates.report.rows = 5 diff --git a/python3/unittest/test_usb_scan.py b/python3/unittest/test_usb_scan.py index d87f9b12b27..e5ee00a253a 100644 --- a/python3/unittest/test_usb_scan.py +++ b/python3/unittest/test_usb_scan.py @@ -8,9 +8,11 @@ import tempfile import unittest from collections.abc import Mapping +from typing import cast import mock -from import_file import get_module + +from python3.tests.import_helper import import_file_as_module sys.modules["xcp"] = mock.Mock() sys.modules["xcp.logger"] = mock.Mock() @@ -107,9 +109,10 @@ def verify_usb_common( self, moc_devices, moc_interfaces, moc_results, - path="./scripts/usb-policy.conf" + # Use relative path to allow tests to be started in subdirectories + path = os.path.dirname(__file__) + "/../../scripts/usb-policy.conf" ): - usb_scan = get_module("usb_scan", "../libexec/usb_scan.py") + usb_scan = import_file_as_module("python3/libexec/usb_scan.py") mock_setup(usb_scan, moc_devices, moc_interfaces, path) @@ -134,7 +137,7 @@ def verify_usb_exit( # cm.exception.code is int type whose format # looks like 
"duplicated tag'vid' found, # malformed line ALLOW:vid=056a vid=0314 class=03" - self.assertIn(msg, cm.exception.code) # pytype: disable=wrong-arg-types + self.assertIn(msg, cast(str, cm.exception.code)) # code is a str def test_usb_dongle(self): devices = [ From fdc9c0812cb33bce30dbcec2039879cc44bd27b3 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Sun, 28 Apr 2024 02:06:09 +0100 Subject: [PATCH 054/222] Merge python3/unittest into python3/tests Signed-off-by: Stephen Cheng --- .codecov.yml | 4 ++-- .github/workflows/main.yml | 2 +- python3/{unittest => tests}/test_hfx_filename.py | 0 python3/{unittest => tests}/test_nbd_client_manager.py | 0 python3/{unittest => tests}/test_perfmon.py | 0 python3/{unittest => tests}/test_usb_scan.py | 4 ++-- 6 files changed, 5 insertions(+), 5 deletions(-) rename python3/{unittest => tests}/test_hfx_filename.py (100%) rename python3/{unittest => tests}/test_nbd_client_manager.py (100%) rename python3/{unittest => tests}/test_perfmon.py (100%) rename python3/{unittest => tests}/test_usb_scan.py (99%) diff --git a/.codecov.yml b/.codecov.yml index 79d69aa0b14..f67f6913dc8 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -214,7 +214,7 @@ coverage: tests: # Ensure that all tests are executed (tests themselves must be 100% covered) target: 98% - paths: ["python3/unittest/test_*.py"] + paths: ["python3/tests/test_*.py"] # @@ -266,5 +266,5 @@ component_management: - component_id: test_cases name: test_cases - paths: ["python3/unittest/test_*.py"] + paths: ["python3/tests/test_*.py"] diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 9d55ec60312..da0e2bd35a2 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -73,7 +73,7 @@ jobs: run: > pytest --cov=python3/ - python3/unittest python3/tests -vv -rA + python3/tests -vv -rA --junitxml=.git/pytest${{matrix.python-version}}.xml --cov-report term-missing --cov-report xml:.git/coverage${{matrix.python-version}}.xml diff --git a/python3/unittest/test_hfx_filename.py b/python3/tests/test_hfx_filename.py similarity index 100% rename from python3/unittest/test_hfx_filename.py rename to python3/tests/test_hfx_filename.py diff --git a/python3/unittest/test_nbd_client_manager.py b/python3/tests/test_nbd_client_manager.py similarity index 100% rename from python3/unittest/test_nbd_client_manager.py rename to python3/tests/test_nbd_client_manager.py diff --git a/python3/unittest/test_perfmon.py b/python3/tests/test_perfmon.py similarity index 100% rename from python3/unittest/test_perfmon.py rename to python3/tests/test_perfmon.py diff --git a/python3/unittest/test_usb_scan.py b/python3/tests/test_usb_scan.py similarity index 99% rename from python3/unittest/test_usb_scan.py rename to python3/tests/test_usb_scan.py index e5ee00a253a..f63e4bb8f10 100644 --- a/python3/unittest/test_usb_scan.py +++ b/python3/tests/test_usb_scan.py @@ -13,10 +13,11 @@ import mock from python3.tests.import_helper import import_file_as_module - +# mock modules to avoid dependencies sys.modules["xcp"] = mock.Mock() sys.modules["xcp.logger"] = mock.Mock() sys.modules["pyudev"] = mock.Mock() +usb_scan = import_file_as_module("python3/libexec/usb_scan.py") class MocDeviceAttrs(Mapping): @@ -112,7 +113,6 @@ def verify_usb_common( # Use relative path to allow tests to be started in subdirectories path = os.path.dirname(__file__) + "/../../scripts/usb-policy.conf" ): - usb_scan = import_file_as_module("python3/libexec/usb_scan.py") mock_setup(usb_scan, moc_devices, moc_interfaces, path) From 
0c96f1d4d68ad6070b2464b3ce970500487cb423 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 6 May 2024 12:00:00 +0200 Subject: [PATCH 055/222] .codecov.yml: Update Coverity config to cleanup obsolete python2 components Signed-off-by: Bernhard Kaindl --- .codecov.yml | 17 ----------------- 1 file changed, 17 deletions(-) diff --git a/.codecov.yml b/.codecov.yml index f67f6913dc8..c0092974257 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -184,14 +184,6 @@ coverage: # project: - # - # Python modules and scripts below scripts/ (excluding tests) - # - scripts: - paths: ["scripts/**", "!**/test_*.py"] - target: 48% - threshold: 2% - # # Python modules and scripts below ocaml/ (excluding tests) # @@ -240,15 +232,6 @@ component_management: individual_components: - - component_id: scripts # this is an identifier that should not be changed - name: scripts # this is a display name, and can be changed freely - # The list of paths that should be in- and excluded in this component: - paths: ["scripts/**", "!scripts/examples/**", "!**/test_*.py"] - - - component_id: scripts/examples - name: scripts/examples - paths: ["scripts/examples/**", "!scripts/**/test_*.py"] - - component_id: ocaml name: ocaml paths: ["ocaml/**", "!**/test_*.py"] From dcab8cb66f9fa4c5b7fef2e24e4fa7c27d5310ce Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Thu, 9 May 2024 12:00:00 +0200 Subject: [PATCH 056/222] Fixup the merge: fix duplicated pylint config and fix pytype with pyudev Signed-off-by: Bernhard Kaindl --- .github/workflows/other.yml | 2 +- pyproject.toml | 64 +++++++++++++++++----------- python3/libexec/usb_reset.py | 2 +- python3/libexec/usb_scan.py | 4 +- python3/tests/import_helper.py | 2 +- python3/tests/test_observer.py | 2 +- scripts/examples/python/provision.py | 4 +- scripts/mail-alarm | 2 +- 8 files changed, 48 insertions(+), 34 deletions(-) diff --git a/.github/workflows/other.yml b/.github/workflows/other.yml index d65b7abe575..0284e7d7819 100644 --- a/.github/workflows/other.yml +++ b/.github/workflows/other.yml @@ -51,7 +51,7 @@ jobs: - name: Install dependencies only needed for python 3 if: ${{ matrix.python-version != '2.7' }} - run: pip install opentelemetry-api opentelemetry-exporter-zipkin-json opentelemetry-sdk pandas pytype toml wrapt + run: pip install opentelemetry-api opentelemetry-exporter-zipkin-json opentelemetry-sdk pandas pytype toml wrapt pyudev - name: Install common dependencies for Python ${{matrix.python-version}} run: pip install future mock pytest-coverage pytest-mock diff --git a/pyproject.toml b/pyproject.toml index 4c4a855c59e..2eed863da0b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -88,28 +88,6 @@ profile = "black" combine_as_imports = true ensure_newline_before_comments = false -[tool.pylint.messages_control] -disable = [ - "missing-function-docstring", - "missing-module-docstring", - "missing-class-docstring", - "consider-using-f-string", - "too-many-branches", - "too-many-arguments", - "broad-exception-caught", - "no-else-break", - "no-else-return", - "invalid-name", - "import-error", - "unnecessary-pass", - "unspecified-encoding", - "protected-access", - "no-member", # Some mutiple inheritance classes may have this issue - "too-many-locals", # Long functions. 
Need to refine the code - "too-many-statements", - "too-many-return-statements" -] - # ----------------------------------------------------------------------------- # Mypy static analysis - https://mypy.readthedocs.io/en/stable/config_file.html # ----------------------------------------------------------------------------- @@ -257,16 +235,52 @@ discard_messages_matching = [ "No attribute 'group' on None", "No Node.TEXT_NODE in module xml.dom.minidom, referenced from 'xml.dom.expatbuilder'" ] -expected_to_fail = [] - +expected_to_fail = [ + # Need 2to3 -w and maybe a few other minor updates: + "scripts/hatests", + "scripts/backup-sr-metadata.py", + "scripts/restore-sr-metadata.py", + # SSLSocket.send() only accepts bytes, not unicode string as argument: + "scripts/examples/python/exportimport.py", + # Other fixes needed: + "scripts/examples/python/mini-xenrt.py", + "scripts/examples/python/XenAPI/XenAPI.py", + "scripts/examples/python/monitor-unwanted-domains.py", + "scripts/examples/python/shell.py", + "scripts/examples/smapiv2.py", + "scripts/static-vdis", + "scripts/plugins/extauth-hook-AD.py", +] [tool.pytype] inputs = [ + # Python 3 "python3/", "ocaml/xcp-rrdd", + # Python2: These will generate warnings that need to be fixed: + "scripts/static-vdis", + "scripts/generate-iscsi-iqn", + "scripts/hatests", + "scripts/host-display", + "scripts/mail-alarm", + "scripts/print-custom-templates", + "scripts/probe-device-for-file", + "scripts/xe-reset-networking", + "scripts/xe-scsi-dev-map", + "scripts/examples/python", + "scripts/yum-plugins", + "scripts/*.py", + + # To be added later, + # when converted to Python3-compatible syntax: + # "ocaml/message-switch/python", + # "ocaml/idl/ocaml_backend/python", + # "ocaml/xapi-storage/python", ] disable = [ + # Reduce noise from python2 scripts(import yum, xenfsimage, xcp, urlgrabber) + "import-error", ] platform = "linux" # Allow pytype to find the XenAPI module, the rrdd module and python3 modules: -pythonpath = "python3:scripts/examples/python:ocaml/xcp-rrdd/scripts/rrdd" +pythonpath = "scripts/examples/python:.:scripts:scripts/plugins:scripts/examples" diff --git a/python3/libexec/usb_reset.py b/python3/libexec/usb_reset.py index 82a690bcea0..8d96dde369e 100755 --- a/python3/libexec/usb_reset.py +++ b/python3/libexec/usb_reset.py @@ -47,7 +47,7 @@ import errno import fcntl import grp -import xcp.logger as log +import xcp.logger as log # pytype: disable=import-error import logging import os import pwd diff --git a/python3/libexec/usb_scan.py b/python3/libexec/usb_scan.py index e940aa626f5..c45686f6404 100755 --- a/python3/libexec/usb_scan.py +++ b/python3/libexec/usb_scan.py @@ -30,7 +30,7 @@ import sys import pyudev -import xcp.logger as log +import xcp.logger as log # pytype: disable=import-error def log_list(l): @@ -56,7 +56,7 @@ def hex_equal(h1, h2): return False -class UsbObject(dict): +class UsbObject(dict): # pytype: disable=ignored-metaclass """Base class of USB classes, save USB properties in dict node(str): the key, device node diff --git a/python3/tests/import_helper.py b/python3/tests/import_helper.py index 87541c9b6cf..076a24913c7 100644 --- a/python3/tests/import_helper.py +++ b/python3/tests/import_helper.py @@ -9,7 +9,7 @@ @contextmanager -def mocked_modules(*module_names): # type:(str) -> Generator[None, None, None] +def mocked_modules(*module_names: str) -> Generator[None, None, None]: """Context manager that temporarily mocks the specified modules. :param module_names: Variable number of names of the modules to be mocked. 
diff --git a/python3/tests/test_observer.py b/python3/tests/test_observer.py index cdd7f7e143f..a8d6f238eec 100644 --- a/python3/tests/test_observer.py +++ b/python3/tests/test_observer.py @@ -4,7 +4,7 @@ import sys import unittest -from mock import MagicMock, mock_open, patch +from unittest.mock import MagicMock, mock_open, patch # Ensure observer is initialised as noop with patch("os.listdir") as mock_listdir: diff --git a/scripts/examples/python/provision.py b/scripts/examples/python/provision.py index b8aa3f3935f..4c5ab11daef 100644 --- a/scripts/examples/python/provision.py +++ b/scripts/examples/python/provision.py @@ -62,7 +62,7 @@ def setSR(self, sr): def parseProvisionSpec(txt): """Return an instance of type ProvisionSpec given XML text""" - doc = xml.dom.minidom.parseString(txt) + doc = xml.dom.minidom.parseString(txt) # pytype: disable=pyi-error all = doc.getElementsByTagName("provision") if len(all) != 1: raise ValueError("Expected to find exactly one element") @@ -74,7 +74,7 @@ def parseProvisionSpec(txt): def printProvisionSpec(ps): """Return a string containing pretty-printed XML corresponding to the supplied provisioning spec""" - doc = xml.dom.minidom.Document() + doc = xml.dom.minidom.Document() # pytype: disable=pyi-error doc.appendChild(ps.toElement(doc)) return doc.toprettyxml() diff --git a/scripts/mail-alarm b/scripts/mail-alarm index 5fd432339bf..0b41dd5e0e9 100755 --- a/scripts/mail-alarm +++ b/scripts/mail-alarm @@ -21,7 +21,7 @@ import syslog import tempfile import traceback from socket import getfqdn -from xml.dom import minidom +from xml.dom import minidom # pytype: disable=pyi-error import XenAPI from xcp import branding From 6545e1da25b1bedaedde94e8474e719c91dd94d4 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Thu, 9 May 2024 12:00:00 +0200 Subject: [PATCH 057/222] CA-390883: python3/tests: Fix remaining pylint and pyright comments in python3/*.py Signed-off-by: Bernhard Kaindl --- python3/bin/hfx_filename | 8 +++++++- python3/libexec/nbd_client_manager.py | 2 +- python3/libexec/usb_reset.py | 2 +- python3/libexec/usb_scan.py | 9 ++++++--- python3/tests/test_perfmon.py | 2 ++ python3/tests/test_usb_scan.py | 11 +++++------ scripts/test_mail-alarm.py | 4 ++++ 7 files changed, 26 insertions(+), 12 deletions(-) diff --git a/python3/bin/hfx_filename b/python3/bin/hfx_filename index dd8677fc499..28fb05bbc78 100755 --- a/python3/bin/hfx_filename +++ b/python3/bin/hfx_filename @@ -14,8 +14,14 @@ # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+# pylint: disable=redefined-outer-name +# pyright: reportFunctionMemberAccess=false +# pyright: reportOptionalMemberAccess=false, reportAttributeAccessIssue=false -import sys, socket, urllib.request, XenAPI +import sys +import socket + +import XenAPI db_url = "/remote_db_access" diff --git a/python3/libexec/nbd_client_manager.py b/python3/libexec/nbd_client_manager.py index e30477316d8..0f77e69b12e 100644 --- a/python3/libexec/nbd_client_manager.py +++ b/python3/libexec/nbd_client_manager.py @@ -136,7 +136,7 @@ def _find_unused_nbd_device(): return nbd_device # If there are 1000 nbd devices (unlikely) and all are connected - raise NbdDeviceNotFound(nbd_device) + raise NbdDeviceNotFound(nbd_device) # pyright:ignore[reportPossiblyUnboundVariable] def _wait_for_nbd_device(nbd_device, connected): deadline = datetime.now() + timedelta(minutes=MAX_DEVICE_WAIT_MINUTES) diff --git a/python3/libexec/usb_reset.py b/python3/libexec/usb_reset.py index 8d96dde369e..573936ae1c3 100755 --- a/python3/libexec/usb_reset.py +++ b/python3/libexec/usb_reset.py @@ -132,7 +132,7 @@ def load_device_ids(device): # ignore and continue log.warning("Failed to remove device ids: {}".format(str(e))) - return uid, gid + return uid, gid # pyright: ignore[reportPossiblyUnboundVariable] # pragma: no cover # throw IOError, ValueError diff --git a/python3/libexec/usb_scan.py b/python3/libexec/usb_scan.py index c45686f6404..03d89f7baed 100755 --- a/python3/libexec/usb_scan.py +++ b/python3/libexec/usb_scan.py @@ -21,6 +21,8 @@ # 2. check if device can be passed through based on policy file # 3. return the device info to XAPI in json format +# pylint: disable=redefined-outer-name +# pyright: reportPossiblyUnboundVariable=false, reportAttributeAccessIssue=false import abc import argparse @@ -71,7 +73,7 @@ def __init__(self, node): def get_node(self): return self.node - def __hash__(self): + def __hash__(self): # pyright:ignore[reportIncompatibleVariableOverride] return hash(self.node) def __eq__(self, other): @@ -109,14 +111,14 @@ def _is_class_hub(self, key_class): return cls is not None and hex_equal(__VALUE_CLASS_HUB, cls) @abc.abstractmethod - def is_class_hub(self): + def is_class_hub(self) -> bool: """check if this belongs to a hub :return: bool, if this belongs to a hub """ @abc.abstractmethod - def is_child_of(self, parent): + def is_child_of(self, parent) -> bool: """check if this is a child of parent :param parent:(UsbObject) the parent to check against @@ -282,6 +284,7 @@ def __init__(self, node, props): if props.get(p) is not None: self[p] = props.get(p).decode() + # pylint: disable-next=useless-parent-delegation # This parent call is superfluous def debug_str(self, level=0): return super().debug_str(level) diff --git a/python3/tests/test_perfmon.py b/python3/tests/test_perfmon.py index 9d638f4fab4..c133a1171ac 100644 --- a/python3/tests/test_perfmon.py +++ b/python3/tests/test_perfmon.py @@ -3,6 +3,8 @@ This module provides unittest for perfmon """ +# pyright: reportAttributeAccessIssue=false + import sys import math import unittest diff --git a/python3/tests/test_usb_scan.py b/python3/tests/test_usb_scan.py index f63e4bb8f10..bf0bad03fef 100644 --- a/python3/tests/test_usb_scan.py +++ b/python3/tests/test_usb_scan.py @@ -25,8 +25,7 @@ def __init__(self, device): self.d = device.get_attr() def __iter__(self): # pragma: no cover - for name in self.d: - yield name + yield from self.d def __len__(self): # pragma: no cover return len(self.d) @@ -54,8 +53,7 @@ def attributes(self): return MocDeviceAttrs(self) def 
__iter__(self): # pragma: no cover - for name in self.get_prop(): - yield name + yield from self.get_prop() def __len__(self): # pragma: no cover return len(self.get_prop()) @@ -64,7 +62,7 @@ def __getitem__(self, name): return self.get_prop().get(name) -class MocEnumerator(object): +class MocEnumerator(): def __init__(self, ds): self.ds = ds @@ -73,7 +71,7 @@ def __iter__(self): yield MocDevice(d) -class MocContext(object): +class MocContext(): def __init__(self, devices, interfaces): self.devices = devices self.interfaces = interfaces @@ -85,6 +83,7 @@ def list_devices(self, **kwargs): return MocEnumerator(self.devices) elif dev_type == "usb_interface": return MocEnumerator(self.interfaces) + raise AssertionError(f"unexpected {dev_type}") # pragma: no cover def mock_setup(mod, devices, interfaces, path): diff --git a/scripts/test_mail-alarm.py b/scripts/test_mail-alarm.py index 2a918f5edbe..acd5f5f20a5 100644 --- a/scripts/test_mail-alarm.py +++ b/scripts/test_mail-alarm.py @@ -8,6 +8,10 @@ import sys import unittest import mock +import pytest + +if sys.version_info > (2, ): + pytest.skip(allow_module_level=True) def nottest(obj): obj.__test__ = False From 816157241572f7a42bc231601d8cc087b29900b6 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Thu, 9 May 2024 12:00:00 +0200 Subject: [PATCH 058/222] CA-390883: pyproject.toml: Add config for pytest & other local checks Signed-off-by: Bernhard Kaindl --- pyproject.toml | 96 ++++++++++++++++++++--------------- python3/stubs/XenAPI.pyi | 85 +++++++++++++++++++++++++++++++ python3/stubs/xcp/branding.py | 38 ++++++++++++++ python3/stubs/xcp/logger.pyi | 6 +++ pytype_reporter.py | 5 +- 5 files changed, 186 insertions(+), 44 deletions(-) create mode 100644 python3/stubs/XenAPI.pyi create mode 100644 python3/stubs/xcp/branding.py create mode 100644 python3/stubs/xcp/logger.pyi diff --git a/pyproject.toml b/pyproject.toml index 2eed863da0b..5ea22b96551 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,11 +34,15 @@ line-length = 88 [tool.coverage.report] # Here, developers can configure which lines do not need to be covered by tests: +# fail_under: minimum code coverage percentage +fail_under = 50 +# exclude_lines: lines that are not required to be covered exclude_lines = [ "pragma: no cover", # standard pragma for not covering a line or block "if TYPE_CHECKING:", # imports for type checking only "pass", # Other specific lines that do not need to be covered, comment in which file: + "raise NbdDeviceNotFound", # python3/libexec/usb_scan.py ] # precision digits to use when reporting coverage (sub-percent-digits are not reported): precision = 0 @@ -166,69 +170,73 @@ disable = [ # ----------------------------------------------------------------------------- # Pyright is the static analysis behind the VSCode Python extension / Pylance -# https://microsoft.github.io/pyright/#/configuration?id=main-configuration-options +# https://microsoft.github.io/pyright/#/configuration # ----------------------------------------------------------------------------- [tool.pyright] -# Specifies the paths of directories or files that should be included in the -# analysis. If no paths are specified, all files in the workspace are included: -include = ["python3", "ocaml/xcp-rrdd"] - -# Conditionalize the stube files for type definitions based on the platform: -pythonPlatform = "Linux" - -# typeCheckingMode: "off", "basic", "standard" or "strict" -typeCheckingMode = "standard" - -# Specifies the version of Python that will be used to execute the source code. 
-# Generate errors if the source code makes use of language features that are -# not supported in that version. It will also tailor its use of type stub files, -# which conditionalizes type definitions based on the version. If no version is -# specified, pyright will use the version of the current python interpreter, -# if one is present: -pythonVersion = "3.6" - -# Paths of directories or files that should use "strict" analysis if they are -# included. This is the same as manually adding a "# pyright: strict" comment. -# In strict mode, most type-checking rules are enabled, and the type-checker -# will be more aggressive in inferring types. If no paths are specified, strict -# mode is not enabled: -strict = ["python3/tests/test_observer.py"] - -# -# Paths to exclude from analysis. If a file is excluded, it will not be -# analyzed. -# -# FIXME: Some of these may have type errors, so they should be inspected and fixed: -# -exclude = [ +# include: directories to include in checking +# strict: paths for which strict checking works +# typeCheckingMode: set the standard type checking mode +include = ["python3", "ocaml/xcp-rrdd"] +strict = ["python3/tests/observer"] +pythonPlatform = "Linux" +typeCheckingMode = "standard" +reportMissingImports = false +pythonVersion = "3.6" +exclude = [ "ocaml/xcp-rrdd/scripts/rrdd/rrdd.py", "ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py", "python3/packages/observer.py", - "python3/tests/pytype_reporter.py", ] # ----------------------------------------------------------------------------- # Pytest is the test framework, for discovering and running tests, fixtures etc -# https://pytest.readthedocs.io/en/latest/customize.html +# https://pytest.readthedocs.io/en/latest/customize.html, https://docs.pytest.org # ----------------------------------------------------------------------------- - [tool.pytest.ini_options] -addopts = "-ra" # Show the output of all tests, including those that passed -log_cli = true # Capture log messages and show them in the output as well +# ----------------------------------------------------------------------------- +# Options to enable for pytest by default: +# -v show what happens +# -rA show summary after running tests +# --cov=python3 measure coverage of the python3 directory +# --cov-fail-under minimum coverage percentage +# --cov-report=term-missing show missing lines in the coverage report +# --cov-report=html: generate an HTML coverage report(for viewing) +# --cov-report=xml: generate an XML coverage report(for upload) +# ----------------------------------------------------------------------------- +addopts = """ +-v -rA --cov=python3 --cov=scripts --cov-fail-under=50 +--cov-report=html:.git/coverage --cov-report=term-missing +--cov-report=xml:.git/coverage3.11.xml +""" + +# ----------------------------------------------------------------------------- +# Other pytest config options: +# log_cli: show logger messages +# log_cli_level: log level to show +# python_files: pattern for test files +# python_functions: pattern for test functions +# testpaths: directories to search for tests +# minversion: this config requires pytest>=7 to configure pythonpath +# pythonpath: path to stub files and typing stubs for tests +# xfail_strict: require to remove pytext.xfail marker when test is fixed +# required_plugins: require that these plugins are installed before testing +# ----------------------------------------------------------------------------- +testpaths = ["python3", "scripts", "ocaml/xcp-rrdd"] +required_plugins = ["pytest-cov", 
"pytest-mock"] log_cli_level = "INFO" +log_cli = true +minversion = "7.0" +pythonpath = "python3/stubs:scripts/examples/python" # Allow to import the XenAPI module python_files = ["test_*.py", "it_*.py"] python_functions = ["test_", "it_", "when_"] -pythonpath = "scripts/examples/python" # Allows to import the XenAPI module -required_plugins = ["pytest-mock"] -testpaths = ["python3", "scripts", "ocaml/xcp-rrdd"] xfail_strict = true # is used to fail tests that are marked as xfail but pass(for TDD) [tool.pytype_reporter] -default_branch = "master" +default_branch = "feature/py3" discard_messages_matching = [ "Couldn't import pyi for 'xml.dom.minidom'", "No attribute '.*' on RRDContentHandler", @@ -252,6 +260,10 @@ expected_to_fail = [ "scripts/plugins/extauth-hook-AD.py", ] +# ----------------------------------------------------------------------------- +# pytype: Google's static type analyzer - https://google.github.io/pytype/ +# ----------------------------------------------------------------------------- + [tool.pytype] inputs = [ # Python 3 diff --git a/python3/stubs/XenAPI.pyi b/python3/stubs/XenAPI.pyi new file mode 100644 index 00000000000..4590e614814 --- /dev/null +++ b/python3/stubs/XenAPI.pyi @@ -0,0 +1,85 @@ +""" +Stub for the XenAPI module: https://xapi-project.github.io/xen-api/overview.html +""" + + +import http.client as httplib +import xmlrpc.client as xmlrpclib +from _typeshed import Incomplete as Incomplete + +translation: Incomplete +API_VERSION_1_1: str +API_VERSION_1_2: str + + +class Failure(Exception): + details: Incomplete + + def __init__(self, details) -> None: ... + + +class UDSHTTPConnection(httplib.HTTPConnection): + sock: Incomplete + + def connect(self) -> None: ... + + +class UDSTransport(xmlrpclib.Transport): + def add_extra_header(self, key, value) -> None: ... + + # def make_connection(self, host) -> None: ... + + +def notimplemented(name, *args, **kwargs) -> None: ... + + +class _Dispatcher: + """A dispatcher for the Xen-API. It is used to call methods on the server""" + def __init__(self, API_version, send, name) -> None: ... + def __getattr__(self, name) -> None: ... + def __call__(self, *args) -> None: ... + def login_with_password(self, username, password, version, client_name) -> None: + """Authenticate the session with the XenAPI server.""" + def logout(self) -> None: + """End the session with the XenAPI server.""" + session: Incomplete + secret: Incomplete + SR: Incomplete + PBD: Incomplete + pool: Incomplete + VM: Incomplete + + +class Session(xmlrpclib.ServerProxy): + """A server proxy and session manager for communicating with xapi using + the Xen-API. + + Example: + + session = Session('http://localhost/') + session.login_with_password('me', 'mypassword', '1.0', 'xen-api-scripts-xenapi.py') + session.xenapi.VM.start(vm_uuid) + session.xenapi.session.logout() + """ + + transport: Incomplete + last_login_method: Incomplete + last_login_params: Incomplete + API_version: Incomplete + xenapi: _Dispatcher + + def __init__( + self, + uri, + transport: Incomplete | None = ..., + encoding: Incomplete | None = ..., + verbose: int = ..., + allow_none: int = ..., + ignore_ssl: bool = ..., + ) -> None: ... + def xenapi_request(self, methodname, params) -> None: ... + + # def __getattr__(self, name) -> None: ... + + +def xapi_local() -> Session: ... 
diff --git a/python3/stubs/xcp/branding.py b/python3/stubs/xcp/branding.py new file mode 100644 index 00000000000..30ff69600bf --- /dev/null +++ b/python3/stubs/xcp/branding.py @@ -0,0 +1,38 @@ +# Example xcp.branding module as test stub for test mail-alarm. +# python3/stubs is added to PYTHONPATH by pyproject.toml +COPYRIGHT_YEARS = '2009-2024' +PRODUCT_BRAND = 'XenServer' +PRODUCT_BRAND_DASHED = 'xenserver' +PRODUCT_NAME = 'xenenterprise' +COMPANY_NAME_LEGAL = 'Cloud Software Group, Inc.' +COMPANY_NAME_SHORT = 'Cloud Software Group' +COMPANY_DOMAIN = 'xenserver.com' +COMPANY_PRODUCT_BRAND = 'XenServer' +BRAND_CONSOLE = 'XenCenter' +BRAND_SERVER = 'XenServer Host' +BRAND_VDI = 'Virtual Desktops' +BRAND_CONSOLE_URL = 'https://www.xenserver.com/downloads' +ISO_PV_TOOLS_COPYRIGHT = 'Cloud Software Group, Inc. 2009-2024' +ISO_PV_TOOLS_LABEL = 'XenServer VM Tools' +COMPANY_NAME = 'Cloud Software Group, Inc.' +COMPANY = 'Cloud Software Group' +COMPANY_WEBSITE = 'www.xenserver.com' +PLATFORM_NAME = 'XCP' +PLATFORM_ORGANISATION = 'xen.org' +PLATFORM_WEBSITE = 'www.xen.org' +BRAND_GUEST = 'Virtual Machine' +BRAND_GUESTS = 'Virtual Machines' +BRAND_GUEST_SHORT = 'VM' +BRAND_GUESTS_SHORT = 'VMs' +BRAND_SERVERS = 'XenServer Hosts' +ISO_PV_TOOLS_PUBLISHER = 'Cloud Software Group, Inc.' +PRODUCT_MAJOR_VERSION = '8' +PRODUCT_MINOR_VERSION = '4' +PRODUCT_MICRO_VERSION = '0' +PRODUCT_VERSION_TEXT = '8' +PRODUCT_VERSION_TEXT_SHORT = '8' +PLATFORM_MAJOR_VERSION = '3' +PLATFORM_MINOR_VERSION = '4' +PLATFORM_MICRO_VERSION = '0' +PLATFORM_VERSION = '3.4.0' +PRODUCT_VERSION = '8.4.0' diff --git a/python3/stubs/xcp/logger.pyi b/python3/stubs/xcp/logger.pyi new file mode 100644 index 00000000000..f4aa2dab371 --- /dev/null +++ b/python3/stubs/xcp/logger.pyi @@ -0,0 +1,6 @@ +# Minimal stub for xcp.logger module +def debug(*al, **ad) -> None: ... +def error(*al, **ad) -> None: ... +def warning(*al, **ad) -> None: ... +def logToStdout(level) -> bool: ... +def logToSyslog(level) -> bool: ... 
diff --git a/pytype_reporter.py b/pytype_reporter.py index 877dc29c9d8..4e7d91f172b 100755 --- a/pytype_reporter.py +++ b/pytype_reporter.py @@ -599,10 +599,11 @@ def main(): config_file = "pyproject.toml" config = load_config(config_file, basename(__file__)) config.setdefault("expected_to_fail", []) - debug("Expected to fail: %s", ", ".join(config["expected_to_fail"])) changed_but_in_expected_to_fail = [] - if config["expected_to_fail"] != []: + if config["expected_to_fail"]: + debug("Expected to fail: %s", ", ".join(config["expected_to_fail"])) + changed_but_in_expected_to_fail = git_diff( "--name-only", find_branch_point(config), From c44c4a472ceb5ab5459ba139ec102a881b5ef8af Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Wed, 8 May 2024 12:00:00 +0200 Subject: [PATCH 059/222] CA-390883: .pre-commit-config.yaml: Add venvs for running local tests Signed-off-by: Bernhard Kaindl --- .pre-commit-config.yaml | 101 +++++++++++++++++++++++++++++++++++++++- 1 file changed, 100 insertions(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0ca5ef37fee..668b4190ce1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,6 +14,8 @@ # pre-commit run -av --hook-stage pre-push # default_stages: [commit, push] +default_language_version: + python: python3.11 repos: # Recommendation for a minimal git pre-commit hook: # https://github.com/pre-commit/pre-commit-hooks/blob/main/README.md: @@ -29,6 +31,95 @@ repos: - id: check-executables-have-shebangs exclude: ocaml + +# Improve Python formatting incrementally: +# https://dev.to/akaihola/improving-python-code-incrementally-3f7a +# +# darker checks if staged python changes are formatted according using +# the PEP8-aligned black formatter. It also checks if the imports are sorted. +# +# It is a good idea to run this before committing, and it is also run in the +# GitHub Workflow. +# +# Note: darker only checks the changes in files ending in .py! +# Python scripts that don't end in .py should be renamed to have the .py extension +# when moving them to python3/bin. +# (remove the .py extension in the Makefile when installing the file) +# +- repo: https://github.com/akaihola/darker + rev: 1.7.3 + hooks: + - id: darker + files: python3/ + name: check changes in Python3 tree using darker and isort + args: [--diff, --skip-string-normalization, --isort, -tpy36] + additional_dependencies: [isort] + +# +# Run pytest and diff-cover to check that the new /python3 test suite in passes. +# This hook uses a local venv containing the required dependencies. When adding +# new dependencies, they should be added to the additional_dependencies below. 
+# +- repo: local + hooks: + - id: pytest + files: python3/ + name: check that the Python3 test suite in passes + entry: env PYTHONDEVMODE=yes sh -c 'python3 -m pytest -vv && + diff-cover --ignore-whitespace --compare-branch=origin/feature/py3 + --show-uncovered --html-report .git/coverage-diff.html + --fail-under 50 .git/coverage3.11.xml' + require_serial: true + pass_filenames: false + language: python + types: [python] + additional_dependencies: + - coverage + - diff-cover + - future + - opentelemetry-api + - opentelemetry-exporter-zipkin-json + - opentelemetry-sdk + - pytest-coverage + - pytest-mock + - mock + - wrapt + - XenAPI + + +- repo: https://github.com/RobertCraigie/pyright-python + rev: v1.1.361 + hooks: + - id: pyright + name: check that python3 tree passes pyright/VSCode check + files: python3/ + additional_dependencies: + - mock + - opentelemetry-api + - opentelemetry-exporter-zipkin-json + - opentelemetry-sdk + - pytest + - pyudev + - XenAPI + + +# Check that pylint passes for the changes in new /python3 code. +- repo: local + hooks: + - id: pylint + files: python3/ + stages: [push] + name: check that changes to python3 tree pass pylint + entry: diff-quality --violations=pylint + --ignore-whitespace --compare-branch=origin/feature/py3 + pass_filenames: false + language: python + types: [python] + additional_dependencies: [diff-cover, pylint, pytest] + + +# pre-push hook (it only runs if you install pre-commit as a pre-push hook): +# It can be manually tested using: `pre-commit run -av --hook-stage push` # Recommendation for a minimal git pre-push hook: # While using pre-commit yields great results, it # is "not fast". Therefore only run it pre-push, @@ -53,4 +144,12 @@ repos: # developers have such version installed, it can be configured here: # language_version: python3.11 require_serial: true - additional_dependencies: [pandas, pytype] + additional_dependencies: + - future + - opentelemetry-api + - opentelemetry-exporter-zipkin-json + - opentelemetry-sdk + - pandas + - pytest + - pytype + files: python3/ From 9b128cda5e0024e4d9133a6fdcdb2c77cdb6f0cc Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 7 May 2024 12:00:00 +0200 Subject: [PATCH 060/222] CA-390883: Simplify GitHub Workflow to re-use venvs from pre-commit Signed-off-by: Bernhard Kaindl --- .github/workflows/other.yml | 32 +++----------------------------- 1 file changed, 3 insertions(+), 29 deletions(-) diff --git a/.github/workflows/other.yml b/.github/workflows/other.yml index 0284e7d7819..7c00b893e4a 100644 --- a/.github/workflows/other.yml +++ b/.github/workflows/other.yml @@ -45,40 +45,14 @@ jobs: env: SKIP: no-commit-to-branch - - name: Install dependencies only needed for python 2 - if: ${{ matrix.python-version == '2.7' }} - run: pip install enum - - - name: Install dependencies only needed for python 3 - if: ${{ matrix.python-version != '2.7' }} - run: pip install opentelemetry-api opentelemetry-exporter-zipkin-json opentelemetry-sdk pandas pytype toml wrapt pyudev - - - name: Install common dependencies for Python ${{matrix.python-version}} - run: pip install future mock pytest-coverage pytest-mock - - name: Run Pytest for python 2 and get code coverage for Codecov if: ${{ matrix.python-version == '2.7' }} run: > + pip install enum future mock pytest-coverage pytest-mock && pytest - --cov=scripts --cov=ocaml/xcp-rrdd - scripts/ ocaml/xcp-rrdd -vv -rA - --junitxml=.git/pytest${{matrix.python-version}}.xml + --cov=scripts scripts --cov-fail-under 45 -vv -rA --cov-report term-missing --cov-report 
xml:.git/coverage${{matrix.python-version}}.xml - env: - PYTHONDEVMODE: yes - - - name: Run Pytest for python 3 and get code coverage for Codecov - if: ${{ matrix.python-version != '2.7' }} - run: > - pytest - --cov=scripts --cov=ocaml/xcp-rrdd --cov=python3/ - scripts/ ocaml/xcp-rrdd python3/ -vv -rA - --junitxml=.git/pytest${{matrix.python-version}}.xml - --cov-report term-missing - --cov-report xml:.git/coverage${{matrix.python-version}}.xml - env: - PYTHONDEVMODE: yes - name: Upload Python ${{matrix.python-version}} coverage report to Codecov uses: codecov/codecov-action@v3 @@ -102,7 +76,7 @@ jobs: - name: Run pytype checks if: ${{ matrix.python-version != '2.7' }} - run: ./pytype_reporter.py + run: pip install pandas pytype toml && ./pytype_reporter.py env: PR_NUMBER: ${{ github.event.number }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} From 384b30bd5e577c85e91962461170314c24ec8cb6 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Wed, 6 Mar 2024 12:00:00 +0100 Subject: [PATCH 061/222] tests/observer: Cover the changed lines of the merge from master Signed-off-by: Bernhard Kaindl --- python3/packages/observer.py | 2 +- python3/tests/observer/__init__.py | 36 +++++ python3/tests/observer/all.conf | 1 + python3/tests/observer/it_handles_errors.py | 147 ++++++++++++++++++++ python3/tests/observer/it_traces.py | 104 ++++++++++++++ python3/tests/observer/observer.conf | 1 + python3/tests/observer/traced_script.py | 36 +++++ 7 files changed, 326 insertions(+), 1 deletion(-) create mode 100644 python3/tests/observer/__init__.py create mode 100644 python3/tests/observer/all.conf create mode 100644 python3/tests/observer/it_handles_errors.py create mode 100644 python3/tests/observer/it_traces.py create mode 100644 python3/tests/observer/observer.conf create mode 100755 python3/tests/observer/traced_script.py diff --git a/python3/packages/observer.py b/python3/packages/observer.py index 3419c89cd36..8742d21fd97 100644 --- a/python3/packages/observer.py +++ b/python3/packages/observer.py @@ -41,7 +41,7 @@ # We only want to import opentelemetry libraries if instrumentation is enabled # pylint: disable=import-outside-toplevel -DEBUG_ENABLED = False +DEBUG_ENABLED = os.getenv("XAPI_TEST") DEFAULT_MODULES = "LVHDSR,XenAPI,SR,SRCommand,util" FORMAT = "observer.py: %(message)s" handler = SysLogHandler(facility="local5", address="/dev/log") diff --git a/python3/tests/observer/__init__.py b/python3/tests/observer/__init__.py new file mode 100644 index 00000000000..dbdea4ed0d7 --- /dev/null +++ b/python3/tests/observer/__init__.py @@ -0,0 +1,36 @@ +""" +Package providing helper definitions and functions like call_observer() +to run python3/packages/observer.py as a script using runpy.run_path(). +""" + +import os +import runpy +import sys + +from typing import Any, Dict + +testdir = os.path.dirname(__file__) +OBSERVER_PY = os.path.relpath(testdir + "/../../packages/observer.py") +TRACED_SCRIPT = os.path.relpath(testdir + "/traced_script.py") +TRACED_SCRIPT_PRINT = "Hello, I am a print() in traced_script.py.\n" + + +def call_observer(*args: str) -> Dict[str, Any]: + """ + Call the observer.py script and return its globals dictionary for checking it + + Note: This is only possible when the script is run using runpy.run_path() + and the script exits normally (does not raise and Exception like SystemExit). + + Features: + - __name__ is set to "__main__", so the module is run as a script. + - sys.argv is set to the passed arguments + - no mocks are used, so the actual observer.py script is run. 
+ - sets os.environ["OBSERVER_DEBUG"] = "True" to enable debug logging + to let the tests check the debug messages for checking the reading + of the configuration files and setting up tracing. + """ + + os.environ["XAPI_TEST"] = "True" # Enable printing debug messages in observer.py + sys.argv = [OBSERVER_PY, *args] + return runpy.run_path(OBSERVER_PY, run_name="__main__") diff --git a/python3/tests/observer/all.conf b/python3/tests/observer/all.conf new file mode 100644 index 00000000000..843d5d7cc72 --- /dev/null +++ b/python3/tests/observer/all.conf @@ -0,0 +1 @@ +module_names=XenAPI,tests.observer.traced_script \ No newline at end of file diff --git a/python3/tests/observer/it_handles_errors.py b/python3/tests/observer/it_handles_errors.py new file mode 100644 index 00000000000..efe58c56c76 --- /dev/null +++ b/python3/tests/observer/it_handles_errors.py @@ -0,0 +1,147 @@ +""" +Test error handing of python3/packages/observer.py, calling it using call_observer() + +This module contains tests for the error handling functionality of the observer.py +script in the python3/packages directory. + +The tests are executed by calling the observer.py script via the call_observer() +function. The primary focus of these tests is to verify the behavior of the observer.py +script when various errors occur. + +The tests included in this module are: + +1. `it_handles_not_finding_the_script`: + + This test verifies that when the observer.py does not find the script to trace + is not found, it exits with the correct exit code and produces the expected output. + +2. `it_prints_exception_traceback`: + + This test verifies that when the traced script raises an exception, the observer.py + script captures the exception traceback and exits with the correct exit code. + +3. `it_shows_the_usage_message`: + + This test verifies that when the observer.py script is called without any arguments, + it exits with the correct exit code and produces the expected output. + +4. `it_handles_error_exit`: + + This test verifies that when the traced script exits with a non-zero exit code, the + observer.py script captures the exit code and produces the expected output. + +5. `it_does_not_trace_without_config`: + + This test verifies that when observer.py is called without a configuration + file, it does not trace the traced script and produces the expected output. + +The tests are run using the pytest framework and are executed by calling the +call_observer() function, which simulates running the observer.py script from the +command line. +""" + +import os + +import pytest +from pytest import CaptureFixture + +from . import OBSERVER_PY, TRACED_SCRIPT, TRACED_SCRIPT_PRINT, call_observer + + +def it_handles_not_finding_the_script(capsys: CaptureFixture[str]): + """ + Given that packages/observer.py is started with a configuration file, + and the traced script is not found: + - The test checks that the exit code and the captured output are as expected. 
+ """ + nonexisting_script = "nonexisting_traced_script.py" + with pytest.raises(SystemExit) as exc_info: + call_observer(nonexisting_script, "arg") + + assert exc_info.value.code == 2 # Set as the exit code for a missing script + + # Check that the error message is as expected + with capsys.disabled(): + stderr = capsys.readouterr().err.splitlines() + assert stderr[0] == f"{OBSERVER_PY} {nonexisting_script} arg:" + assert stderr[1] == f"Script not found: {os.getcwd()}/{nonexisting_script}" + + +def it_prints_exception_traceback(capsys: CaptureFixture[str]): + """ + Given that packages/observer.py is started with a configuration file, + and an invalid argument is passed to to the traced script as its argument: + + - The traced script should raise an exception and exit with 139 + - The test checks that the exit code and the captured output are as expected. + """ + with pytest.raises(SystemExit) as exc_info: + call_observer(TRACED_SCRIPT, "not_an_int") + + # 139 is used as the exit code when an Exception in the traced script was caught + assert exc_info.value.code == 139 + + # Check that the error message is as expected + with capsys.disabled(): + stderr = capsys.readouterr().err.splitlines() + assert stderr[0] == f"{OBSERVER_PY} {TRACED_SCRIPT} not_an_int:" + assert stderr[1] == "Exception in the traced script:" + assert stderr[2] == "invalid literal for int() with base 10: 'not_an_int'" + assert stderr[3] == "Traceback (most recent call last):" + + +def it_shows_the_usage_message(capsys: CaptureFixture[str]): + """ + Given that packages/observer.py is started as a script without any arguments: + - The test checks that the exit code and the captured output are as expected. + """ + + with pytest.raises(SystemExit) as exc_info: + call_observer() + assert exc_info.value.code == 31 + with capsys.disabled(): + stderr = capsys.readouterr().err + assert stderr == f"{OBSERVER_PY}: usage: command argument list\n" + + +def it_handles_error_exit(capsys: CaptureFixture[str]): + """ + Given that packages/observer.py is started with a configuration file, + and the traced script exits with a non-zero exit code: + - The expected exit code is passed to to the traced script as its argument. + - The traced script should print a message and exit with the given exit code. + - The test checks that the exit code and the captured output are as expected. + """ + + # Passing 1 to the traced script will make it print() and exit with code 1 + with pytest.raises(SystemExit) as exc_info: + call_observer(TRACED_SCRIPT, "1") + assert exc_info.value.code == 1 + with capsys.disabled(): + assert capsys.readouterr().out == TRACED_SCRIPT_PRINT + + +def it_does_not_trace_without_config(capsys: CaptureFixture[str]): + """ + Given that packages/observer.py is started without a configuration file: + + - The expected exit code is passed to to the traced script as its argument. + - The traced script should print a message and exit with 0 + - The test checks that the exit code and the captured output are as expected. 
+ """ + + # Prepare the environment and run the observer.py script + os.environ["OBSERVER_CONFIG_DIR"] = "nonexisting_config_directory" + + # Passing 0 to the traced script will make it print() and exit with code 0 + globs = call_observer(TRACED_SCRIPT, "0") + + with capsys.disabled(): + assert capsys.readouterr().out == TRACED_SCRIPT_PRINT + + # Check that the observer.py script didn't install the tracing functions + span = globs.get("span") + patch_module = globs.get("patch_module") + assert span and patch_module + assert span.__name__ == "_span_noop" + assert patch_module.__name__ == "_patch_module_noop" diff --git a/python3/tests/observer/it_traces.py b/python3/tests/observer/it_traces.py new file mode 100644 index 00000000000..99179c85a93 --- /dev/null +++ b/python3/tests/observer/it_traces.py @@ -0,0 +1,104 @@ +""" +Test that packages/observer.py, creates a tracer, calling it using call_observer() + +The tests included in this module are: + +1. `it_creates_a_tracer`: + + This test verifies that when the observer.py script is called with a configuration + file, it creates a tracer and sets the span and patch_module functions as expected. + +The tests are run using the pytest framework and are executed by calling the +call_observer() function, which simulates running the observer.py script from the +command line. + +The test directory contains a dummy `observer.conf` (currently empty) configuration +file that is used to enable tracing for the test. +""" + +import os +import types +from typing import Any, Dict + +from pytest import CaptureFixture, LogCaptureFixture + +from . import TRACED_SCRIPT, TRACED_SCRIPT_PRINT, call_observer, testdir + + +def assert_imported_modules(globals_dict_of_observer: Dict[str, Any]): + """Assert that the expected modules were imported by observer.py""" + + observer_modules = globals_dict_of_observer["sys"].modules + imported_modules = [ + "opentelemetry.baggage.propagation", + "opentelemetry.context", + "opentelemetry.exporter.zipkin.json", + "opentelemetry.sdk.resources", + "opentelemetry.sdk.trace.export", + "opentelemetry.trace", + ] + assert all(mod in observer_modules for mod in imported_modules) + + +def it_creates_a_tracer(caplog: LogCaptureFixture, capsys: CaptureFixture[str]): + """ + Given that packages/observer.py is started with a configuration file, it: + - imports the opentelemetry packages [checked by this test] + - reads the configuration file [checked by this test] + - creates a tracer [checked by this test (using caplog)] + - sets the span() and patch_module() [checked by this test] + - runs the traced script [checked by this test] + - traces the script [not yet checked by this test] + """ + os.environ["OBSERVER_CONFIG_DIR"] = os.path.dirname(__file__) + + # Passing 0 to the traced script will make it print() and exit with code 0 + globals_dict_of_observer = call_observer(TRACED_SCRIPT, "0") + + with capsys.disabled(): + # If this test fails in your environment without any changes to the repo, + # check for import errors from observer.py:_init_tracing() in the pytest logs. 
+ + # Get the span and patch_module functions from the module's globals + span = globals_dict_of_observer.get("span") + patch_module = globals_dict_of_observer.get("patch_module") + + # Assert that the span and patch_module are functions + assert callable(span) + assert callable(patch_module) + assert isinstance(span, types.FunctionType) + assert isinstance(patch_module, types.FunctionType) + + # Assert that span and patch_module are the expected tracing functions + assert span.__name__ == "span_of_tracers" + assert span.__qualname__ == "_init_tracing..span_of_tracers" + assert patch_module.__name__ == "_patch_module" + assert patch_module.__qualname__ == "_init_tracing.._patch_module" + + # Assert that the captured output is as expected + assert capsys.readouterr().out == TRACED_SCRIPT_PRINT + + assert_imported_modules(globals_dict_of_observer) + assert_debug_logs(caplog) + + +def assert_debug_logs(caplog: LogCaptureFixture): + """ + Assert that the observer.py script read the configuration file all.conf + by expecting the configuration file and its content in the log messages. + """ + + msg = caplog.messages + if not msg: # pragma: no cover + print("No logs found in caplog, check that debug logging is enabled!") + expected_modules = "{'module_names': 'XenAPI,tests.observer.traced_script'}" + assert msg[1] == f"{testdir}/all.conf: {expected_modules}" + assert msg[2] == "module_names: ['XenAPI', 'tests.observer.traced_script']" + + # Assert that the observer.py script red the observer.conf configuration file + config = """{'otel_resource_attributes': '"key1=value1,key2=value2"'}""" + assert msg[0] == f"configs = ['{testdir}/observer.conf']" + assert msg[3] == f"{testdir}/observer.conf: {config}" + + # Assert that the observer.py script created a tracer + assert msg[4].startswith("tracers=[ "InstrumentMe": + """A method to be traced by packages/observer.py as part of tests""" + + print("Hello, I am a print() in traced_script.py.") + return self + + def return_int(self, return_int: str) -> int: + """A method to be traced by packages/observer.py as part of tests""" + return int(return_int) + + +def main(return_code_string: str) -> int: + """Main of the tested script, to be traced by packages/observer.py.""" + + return InstrumentMe().print().return_int(return_code_string) + + +if __name__ == "__main__": + # Only use sys.exit(ret) raising SystemExit if the return code is not 0 + # to allow test_observer_as_script() to get the globals of observer.py: + ret = main(sys.argv[-1]) + if ret: + sys.exit(ret) From b9735568920272e38fbac137ccde42f61fd1f76f Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Tue, 21 May 2024 04:26:21 +0100 Subject: [PATCH 062/222] Disable pylint warnings Signed-off-by: Stephen Cheng --- python3/tests/test_usb_scan.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python3/tests/test_usb_scan.py b/python3/tests/test_usb_scan.py index bf0bad03fef..ad72c0cd928 100644 --- a/python3/tests/test_usb_scan.py +++ b/python3/tests/test_usb_scan.py @@ -61,7 +61,7 @@ def __len__(self): # pragma: no cover def __getitem__(self, name): return self.get_prop().get(name) - +# pylint: disable=too-few-public-methods class MocEnumerator(): def __init__(self, ds): self.ds = ds @@ -70,7 +70,7 @@ def __iter__(self): for d in self.ds: yield MocDevice(d) - +# pylint: disable=too-few-public-methods class MocContext(): def __init__(self, devices, interfaces): self.devices = devices From 256fe98b61d79010e459a0d581966294996ed054 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Fri, 24 
May 2024 12:24:11 +0000 Subject: [PATCH 063/222] CP-47869: Updated documentation to include python3 /doc/content/xapi/storage/_index.md b/doc/content/xapi/storage/_index.md Signed-off-by: Ashwinh --- doc/content/xapi/storage/_index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/content/xapi/storage/_index.md b/doc/content/xapi/storage/_index.md index c265353869a..009ceabd4bd 100644 --- a/doc/content/xapi/storage/_index.md +++ b/doc/content/xapi/storage/_index.md @@ -245,7 +245,7 @@ From this interface we generate and appear in the` _build/default/python/xapi/storage/api/v5` directory. - On a XenServer host, they are stored in the - `/usr/lib/python2.7/site-packages/xapi/storage/api/v5/` + `/usr/lib/python3.6/site-packages/xapi/storage/api/v5/` directory ### SMAPIv3 Plugins From f785993f244c0b2234f68338a3478d908bc4a9f4 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Fri, 24 May 2024 12:26:48 +0000 Subject: [PATCH 064/222] CP-47869: Updated documentation to include python3 /ocaml/doc/wire-protocol.md b/ocaml/doc/wire-protocol.md Signed-off-by: Ashwinh --- ocaml/doc/wire-protocol.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocaml/doc/wire-protocol.md b/ocaml/doc/wire-protocol.md index 20e39627cc3..cc5734e76b8 100644 --- a/ocaml/doc/wire-protocol.md +++ b/ocaml/doc/wire-protocol.md @@ -463,7 +463,7 @@ XML-RPC and JSON-RPC client libraries. First, initialise python: ```bash -$ python2.7 +$ python3 >>> ``` From c3d96a2819abe1b089df15d54c984bbcb9978195 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Fri, 24 May 2024 12:55:45 +0000 Subject: [PATCH 065/222] CP-47869: Modified code using 2to3 /ocaml/xapi-storage-script/examples/datapath/block/Datapath.activate Signed-off-by: Ashwinh --- .../examples/datapath/block/Datapath.activate | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ocaml/xapi-storage-script/examples/datapath/block/Datapath.activate b/ocaml/xapi-storage-script/examples/datapath/block/Datapath.activate index 3115f233480..9cda8c0cf23 100755 --- a/ocaml/xapi-storage-script/examples/datapath/block/Datapath.activate +++ b/ocaml/xapi-storage-script/examples/datapath/block/Datapath.activate @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 import sys sys.path.append("/home/vagrant/djs55/dbus-test/python") @@ -17,10 +17,10 @@ if __name__ == "__main__": args = vars(parser.parse_args()) if not(args['json']): - print "Not implemented" + print("Not implemented") sys.exit(1) dispatcher = d.Datapath_server_dispatcher(Implementation()) - request = json.loads(sys.stdin.readline(),) + request = json.loads(sys.stdin.readline()) results = dispatcher.activate(request) - print json.dumps(results) + print(json.dumps(results)) From 8e4b50d36fe07aea0f7d0a82196becd461e34187 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Fri, 24 May 2024 13:01:21 +0000 Subject: [PATCH 066/222] CP-47869: Modified code using 2to3 ocaml/xapi-storage-script/examples/datapath/block/Datapath.deactivate Signed-off-by: Ashwinh --- .../examples/datapath/block/Datapath.deactivate | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ocaml/xapi-storage-script/examples/datapath/block/Datapath.deactivate b/ocaml/xapi-storage-script/examples/datapath/block/Datapath.deactivate index 48240856deb..1585a267eb0 100755 --- a/ocaml/xapi-storage-script/examples/datapath/block/Datapath.deactivate +++ b/ocaml/xapi-storage-script/examples/datapath/block/Datapath.deactivate @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 import sys 
sys.path.append("/home/vagrant/djs55/dbus-test/python") @@ -17,10 +17,10 @@ if __name__ == "__main__": args = vars(parser.parse_args()) if not(args['json']): - print "Not implemented" + print("Not implemented") sys.exit(1) dispatcher = d.Datapath_server_dispatcher(Implementation()) - request = json.loads(sys.stdin.readline(),) + request = json.loads(sys.stdin.readline()) results = dispatcher.deactivate(request) - print json.dumps(results) + print(json.dumps(results)) From b734b7267d45fe0329aca1a77c2962c1f226a1fa Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Fri, 24 May 2024 13:05:02 +0000 Subject: [PATCH 067/222] CP-47869: Modified code using 2to3 /ocaml/xapi-storage-script/examples/datapath/block/Datapath.attach Signed-off-by: Ashwinh --- .../examples/datapath/block/Datapath.attach | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/ocaml/xapi-storage-script/examples/datapath/block/Datapath.attach b/ocaml/xapi-storage-script/examples/datapath/block/Datapath.attach index db6eb6de2eb..6a2ec399460 100755 --- a/ocaml/xapi-storage-script/examples/datapath/block/Datapath.attach +++ b/ocaml/xapi-storage-script/examples/datapath/block/Datapath.attach @@ -1,15 +1,15 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 import sys sys.path.append("/home/vagrant/djs55/dbus-test/python") import xapi, d -import argparse, json, urlparse +import argparse, json, urllib.parse class Implementation(d.Datapath_skeleton): def attach(self, dbg, uri, domain): - u = urlparse.urlparse(uri) + u = urllib.parse.urlparse(uri) return { 'implementations': [ ['XenDisk', {"backend_type":"vbd", "extra":{}, "params":u.path}], ['BlockDevice', {"path":u.path}] ] } @@ -20,10 +20,10 @@ if __name__ == "__main__": args = vars(parser.parse_args()) if not(args['json']): - print "Not implemented" + print("Not implemented") sys.exit(1) dispatcher = d.Datapath_server_dispatcher(Implementation()) - request = json.loads(sys.stdin.readline(),) + request = json.loads(sys.stdin.readline()) results = dispatcher.attach(request) - print json.dumps(results) + print(json.dumps(results)) From ddcda0b14b22c215741ef2333dc823273e5cee14 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Fri, 24 May 2024 13:08:38 +0000 Subject: [PATCH 068/222] CP-47869: Modified code using 2to3 /ocaml/xapi-storage-script/examples/datapath/block/Datapath.detach Signed-off-by: Ashwinh --- .../examples/datapath/block/Datapath.detach | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ocaml/xapi-storage-script/examples/datapath/block/Datapath.detach b/ocaml/xapi-storage-script/examples/datapath/block/Datapath.detach index aac2e9d3773..5e42f252943 100755 --- a/ocaml/xapi-storage-script/examples/datapath/block/Datapath.detach +++ b/ocaml/xapi-storage-script/examples/datapath/block/Datapath.detach @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 import sys sys.path.append("/home/vagrant/djs55/dbus-test/python") @@ -17,10 +17,10 @@ if __name__ == "__main__": args = vars(parser.parse_args()) if not(args['json']): - print "Not implemented" + print("Not implemented") sys.exit(1) dispatcher = d.Datapath_server_dispatcher(Implementation()) - request = json.loads(sys.stdin.readline(),) + request = json.loads(sys.stdin.readline()) results = dispatcher.detach(request) - print json.dumps(results) + print(json.dumps(results)) From b4c5764f78ab40162ee8c4478ce22e3e94904564 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 08:46:26 +0000 Subject: [PATCH 069/222] CP-47869: Modified code using 2to3 
/ocaml/xapi-storage/python/examples/datapath/loop+blkback/datapath.py Signed-off-by: Ashwinh --- .../examples/datapath/loop+blkback/datapath.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/ocaml/xapi-storage/python/examples/datapath/loop+blkback/datapath.py b/ocaml/xapi-storage/python/examples/datapath/loop+blkback/datapath.py index ed65d595477..f076b700a6f 100755 --- a/ocaml/xapi-storage/python/examples/datapath/loop+blkback/datapath.py +++ b/ocaml/xapi-storage/python/examples/datapath/loop+blkback/datapath.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -# Copyright (C) Citrix Systems Inc. +# Copyright (C) Cloud Software Group. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published @@ -15,10 +15,10 @@ # along with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA -from __future__ import print_function + import os import sys -import urlparse +import urllib.parse import xapi.storage.api.v5.datapath from xapi.storage.common import call @@ -64,8 +64,8 @@ def activate(self, dbg, uri, domain): pass def attach(self, dbg, uri, domain): - parsed_url = urlparse.urlparse(uri) - query = urlparse.parse_qs(parsed_url.query) + parsed_url = urllib.parse.urlparse(uri) + query = urllib.parse.parse_qs(parsed_url.query) file_path = os.path.realpath(parsed_url.path) @@ -97,7 +97,7 @@ def deactivate(self, dbg, uri, domain): pass def detach(self, dbg, uri, domain): - parsed_url = urlparse.urlparse(uri) + parsed_url = urllib.parse.urlparse(uri) file_path = os.path.realpath(parsed_url.path) From 051f3e3cd3a86b5dc65bc8f03c6b828089659368 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 08:51:48 +0000 Subject: [PATCH 070/222] CP-47869: Update code to be compatible with python3 /ocaml/xapi-storage/python/examples/datapath/loop+blkback/plugin.py Signed-off-by: Ashwinh --- .../python/examples/datapath/loop+blkback/plugin.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ocaml/xapi-storage/python/examples/datapath/loop+blkback/plugin.py b/ocaml/xapi-storage/python/examples/datapath/loop+blkback/plugin.py index e16a53794a7..4cbc9939fbd 100755 --- a/ocaml/xapi-storage/python/examples/datapath/loop+blkback/plugin.py +++ b/ocaml/xapi-storage/python/examples/datapath/loop+blkback/plugin.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -# Copyright (C) Citrix Systems Inc. +# Copyright (C) Cloud Software Group,Inc. 
# # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published From 93f5fee835966f25e7339b0b82750b5879d8b2e7 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 08:56:57 +0000 Subject: [PATCH 071/222] CP-47869: Update code to be compatible with python3 ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/sr.py - Changed shebhang and copyright in /ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/plugin.py Signed-off-by: Ashwinh --- .../org.xen.xapi.storage.simple-file/plugin.py | 4 ++-- .../org.xen.xapi.storage.simple-file/sr.py | 16 ++++++++-------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/plugin.py b/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/plugin.py index 61a41db978f..583043015ed 100755 --- a/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/plugin.py +++ b/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/plugin.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -# Copyright (C) Citrix Systems Inc. +# Copyright (C) Cloud Software Group, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published diff --git a/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/sr.py b/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/sr.py index 35e96b6ab83..8f2f5ca3942 100755 --- a/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/sr.py +++ b/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/sr.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -# Copyright (C) Citrix Systems Inc. +# Copyright (C) Cloud Software Group, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published @@ -18,8 +18,8 @@ from __future__ import print_function import os import sys -import urllib -import urlparse +import urllib.request, urllib.parse, urllib.error +import urllib.parse import xapi.storage.api.v5.volume from xapi import InternalError @@ -66,12 +66,12 @@ def attach(self, dbg, configuration): # As a simple "stateless" implementation, encode all the # configuration into the URI returned. This is passed back # into volume interface APIs and the stat and ls operations. - return urlparse.urlunparse(( + return urllib.parse.urlunparse(( 'file', '', configuration['path'], '', - urllib.urlencode(configuration, True), + urllib.parse.urlencode(configuration, True), None)) def detach(self, dbg, sr): @@ -96,8 +96,8 @@ def stat(self, dbg, sr): [stat sr] returns summary metadata associated with [sr]. Note this call does not return details of sub-volumes, see SR.ls. 
""" - parsed_url = urlparse.urlparse(sr) - config = urlparse.parse_qs(parsed_url.query) + parsed_url = urllib.parse.urlparse(sr) + config = urllib.parse.parse_qs(parsed_url.query) description = (config['description'][0] if 'description' in config From 647dc6a6eb66edff1acc96e7b22f2bf25720f4a4 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 10:18:29 +0000 Subject: [PATCH 072/222] CP-47869: Removed looper2.py from ocaml/tests/tests/ Signed-off-by: Ashwinh --- ocaml/tests/tests/looper2.py | 52 ------------------------------------ 1 file changed, 52 deletions(-) delete mode 100755 ocaml/tests/tests/looper2.py diff --git a/ocaml/tests/tests/looper2.py b/ocaml/tests/tests/looper2.py deleted file mode 100755 index 3e3395653ac..00000000000 --- a/ocaml/tests/tests/looper2.py +++ /dev/null @@ -1,52 +0,0 @@ -#!/usr/bin/python - -print "Program attempts to log into an XAPI server to fetch a list of VMs and" -print "a list of debug objects. It then chooses the first debug object, " -print "queries the int->float map and then calls the 'recycle' message using" -print "that map as an argument" -print - -import getopt, sys, xapi - -url = "http://localhost:8086" #default -parsed = getopt.getopt(sys.argv[1:], "u:url") -if len(parsed[0]) == 1: - url = parsed[0][0][1] - -print "Connecting to server on URL: ", url -print "(change with -u argument)" - -# Create an object to represent our server. -server = xapi.Server(url); - -# Call the server and get our result. -print "Logging in... ", -session = server.Session.login_with_password("user", "passwd") -print "OK" -print "Session ID: \""+session+"\"" -vm_list = server.VM.get_all(session) - -print "VM list = " + repr(vm_list) - -for vm in vm_list: - print "VM ", vm, " in state: ", server.VM.get_power_state(session, vm) - -first_vm = vm_list[0] - -debug_objs = server.Debug.get_all(session) -debug = debug_objs[0] -ifm = server.Debug.get_int_float_map(session, debug) -print "Got an int->float map: " + repr(ifm) - -print "doing the int_float_map recycle thing" - -attempt = 0 -while 1: - this = server.Debug.recycle_int_float_map(ifm) - if ifm <> this: - print "Got a different response!" - print "this = ", repr(this) - print "ifm = ", repr(ifm) - raise "Failed" - attempt = attempt + 1 - print attempt From 5a3a533f898eeb359b063a3a238e12903748f0b3 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 10:20:20 +0000 Subject: [PATCH 073/222] CP-47869: Removed looper.py from ocaml/tests/tests/ Signed-off-by: Ashwinh --- ocaml/tests/tests/looper.py | 44 ------------------------------------- 1 file changed, 44 deletions(-) delete mode 100755 ocaml/tests/tests/looper.py diff --git a/ocaml/tests/tests/looper.py b/ocaml/tests/tests/looper.py deleted file mode 100755 index 8977fc6efec..00000000000 --- a/ocaml/tests/tests/looper.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/python - -print "Program attempts to log into an XAPI server, fetch a list of VMs and" -print "then calls VM.get_otherConfig on the first one in a loop" -print -import getopt, sys, xapi - -url = "http://localhost:8086" #default -parsed = getopt.getopt(sys.argv[1:], "u:url") -if len(parsed[0]) == 1: - url = parsed[0][0][1] -print "Connecting to server on URL: ", url -print "(change with -u argument)" - -# Create an object to represent our server. -server = xapi.Server(url); - -# Call the server and get our result. -print "Logging in... 
", -session = server.Session.login_with_password("user", "passwd", "1.0", "xen-api-tests-looper") -print "OK" -print "Session ID: \""+session+"\"" -vm_list = server.VM.get_all(session) - -print "VM list = " + repr(vm_list) - -for vm in vm_list: - print "VM ", vm, " in state: ", server.VM.get_power_state(session, vm) - -first_vm = vm_list[0] - -print "Getting the otherConfig of " + first_vm - -attempt = 0 -last = server.VM.get_otherConfig(session, first_vm) -while 1: - this = server.VM.get_otherConfig(session, first_vm) - if last <> this: - print "Got a different response!" - print "this = ", repr(this) - print "last = ", repr(last) - raise "Failed" - attempt = attempt + 1 - print attempt From 88f5fd8306b16efb96e757b11b78e61797550864 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 09:08:03 +0000 Subject: [PATCH 074/222] CP-47869: Modified code using 2to3 /ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/volume.py Signed-off-by: Ashwinh --- .../volume.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/volume.py b/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/volume.py index d97ceb4ab5d..6593a8fd536 100755 --- a/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/volume.py +++ b/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/volume.py @@ -1,6 +1,6 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 # -# Copyright (C) Citrix Systems Inc. +# Copyright (C) Cloud Software Group, Inc. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License as published @@ -21,8 +21,9 @@ import os import sys import uuid -import urllib -import urlparse +import urllib.request +import urllib.parse +import urllib.error import xapi.storage.api.v5.volume from xapi.storage import log @@ -31,8 +32,8 @@ class Implementation(xapi.storage.api.v5.volume.Volume_skeleton): def parse_sr(self, sr_uri): - parsed_url = urlparse.urlparse(sr_uri) - config = urlparse.parse_qs(parsed_url.query) + parsed_url = urllib.parse.urlparse(sr_uri) + config = urllib.parse.parse_qs(parsed_url.query) return parsed_url, config def create_volume_data(self, name, description, size, uris, uuid): @@ -50,8 +51,8 @@ def create_volume_data(self, name, description, size, uris, uuid): } def volume_uris(self, sr_path, name, size): - query = urllib.urlencode({'size': size}, True) - return [urlparse.urlunparse( + query = urllib.parse.urlencode({'size': size}, True) + return [urllib.parse.urlunparse( ('loop+blkback', None, os.path.join(sr_path, name), None, query, None))] @@ -187,7 +188,7 @@ def ls(self, dbg, sr): """ [ls sr] lists the volumes from [sr] """ - parsed_url = urlparse.urlparse(sr) + parsed_url = urllib.parse.urlparse(sr) sr_path = parsed_url.path files = glob.glob(os.path.join(sr_path, '*.inf')) log.debug('files to list {}'.format(files)) From 655ca9a0cd94e89953be922709da322362447032 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 09:12:27 +0000 Subject: [PATCH 075/222] CP-47869: Changed shebang to python3 /ocaml/xapi-storage/python/xapi/storage/__init__.py Signed-off-by: Ashwinh --- ocaml/xapi-storage/python/xapi/storage/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocaml/xapi-storage/python/xapi/storage/__init__.py b/ocaml/xapi-storage/python/xapi/storage/__init__.py index 
18ff5363796..e5a0d9b4834 100644 --- a/ocaml/xapi-storage/python/xapi/storage/__init__.py +++ b/ocaml/xapi-storage/python/xapi/storage/__init__.py @@ -1 +1 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 From 981f142bebb5e08158c3931cb7dd5be67989b91f Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 09:13:28 +0000 Subject: [PATCH 076/222] CP-47869: Changed shebang to python3 /ocaml/xapi-storage/python/xapi/storage/common.py Signed-off-by: Ashwinh --- ocaml/xapi-storage/python/xapi/storage/common.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocaml/xapi-storage/python/xapi/storage/common.py b/ocaml/xapi-storage/python/xapi/storage/common.py index a311446a416..e8d34869277 100644 --- a/ocaml/xapi-storage/python/xapi/storage/common.py +++ b/ocaml/xapi-storage/python/xapi/storage/common.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 from xapi.storage import log import xapi From 26acd4817e6fc41b6c2af87a7decf3efa4751b7d Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 09:16:55 +0000 Subject: [PATCH 077/222] CP-47869: Changed shebang to python3 /ocaml/xapi-storage/python/xapi/storage/api/v5/__init__.py Signed-off-by: Ashwinh --- ocaml/xapi-storage/python/xapi/storage/api/v5/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocaml/xapi-storage/python/xapi/storage/api/v5/__init__.py b/ocaml/xapi-storage/python/xapi/storage/api/v5/__init__.py index 18ff5363796..e5a0d9b4834 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/v5/__init__.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/v5/__init__.py @@ -1 +1 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 From 333bec8a575271088fa94c532b7640561552b131 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 09:42:19 +0000 Subject: [PATCH 078/222] CP-47869: Replaced <> with != to support python3 /scripts/examples/python/provision.py Signed-off-by: Ashwinh --- scripts/examples/python/provision.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/examples/python/provision.py b/scripts/examples/python/provision.py index 4c5ab11daef..3b8a224ffae 100644 --- a/scripts/examples/python/provision.py +++ b/scripts/examples/python/provision.py @@ -107,5 +107,5 @@ def setProvisionSpec(session, vm, ps): txt2 = printProvisionSpec(ps) print(txt2) if txt != txt2: - raise AssertionError("Sanity-check failed: print(parse(print(x))) <> print(x)") + raise AssertionError("Sanity-check failed: print(parse(print(x))) != print(x)") print("* OK: print(parse(print(x))) == print(x)") From d73690d63928748de4e54cb1c494d289d3ec1de9 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 10:38:03 +0000 Subject: [PATCH 079/222] CP-47869: Modified code using 2to3 /ocaml/xapi-storage/python/xapi/__init__.py Signed-off-by: Ashwinh --- ocaml/xapi-storage/python/xapi/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ocaml/xapi-storage/python/xapi/__init__.py b/ocaml/xapi-storage/python/xapi/__init__.py index d2a0eed3f94..1960e549d46 100644 --- a/ocaml/xapi-storage/python/xapi/__init__.py +++ b/ocaml/xapi-storage/python/xapi/__init__.py @@ -1,7 +1,7 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 """ -Copyright (c) 2013-2018, Citrix Inc. +Copyright (c) 2013-2024, Cloud Software Group,Inc. All rights reserved. 
Redistribution and use in source and binary forms, with or without @@ -72,7 +72,7 @@ class XenAPIException(Exception): def __init__(self, code, params): Exception.__init__(self) - if not isinstance(code, str) and not isinstance(code, unicode): + if not isinstance(code, str) and not isinstance(code, str): raise TypeError("string", repr(code)) if not isinstance(params, list): raise TypeError("list", repr(params)) @@ -139,7 +139,7 @@ def __init__(self, name): def is_long(x): try: - long(x) + int(x) return True except ValueError: return False From 2954895720742164af3a2984ca929cf85f76aef7 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 10:39:58 +0000 Subject: [PATCH 080/222] CP-47869: Changed shebang to python3 ocaml/xapi-storage/python/xapi/storage/api/__init__.py Signed-off-by: Ashwinh --- ocaml/xapi-storage/python/xapi/storage/api/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocaml/xapi-storage/python/xapi/storage/api/__init__.py b/ocaml/xapi-storage/python/xapi/storage/api/__init__.py index 18ff5363796..e5a0d9b4834 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/__init__.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/__init__.py @@ -1 +1 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 From 756b9f25c5e19231c1731212186dce2080c060f1 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 10:43:02 +0000 Subject: [PATCH 081/222] CP-47869: Modified to python3 /ocaml/xapi-storage/python/Makefile Signed-off-by: Ashwinh --- ocaml/xapi-storage/python/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ocaml/xapi-storage/python/Makefile b/ocaml/xapi-storage/python/Makefile index bc8eff9b851..a2ccad97c8c 100644 --- a/ocaml/xapi-storage/python/Makefile +++ b/ocaml/xapi-storage/python/Makefile @@ -1,5 +1,5 @@ PREFIX?=/usr -PYTHON?=python2 +PYTHON?=python3 .PHONY: build release clean install uninstall From 99293187a4a1bcd3a29dc0842a78c171d4d20bb6 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 28 May 2024 09:38:39 +0000 Subject: [PATCH 082/222] CP-47869: Updated to python3 .github/workflows/setup-xapi-environment/action.yml Signed-off-by: Ashwinh --- .github/workflows/setup-xapi-environment/action.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/setup-xapi-environment/action.yml b/.github/workflows/setup-xapi-environment/action.yml index d46ae3a5b96..e32110ad977 100644 --- a/.github/workflows/setup-xapi-environment/action.yml +++ b/.github/workflows/setup-xapi-environment/action.yml @@ -27,9 +27,9 @@ runs: shell: bash run: sudo apt-get update - - name: Install python2 + - name: Install python3 shell: bash - run: sudo apt-get install python2 + run: sudo apt-get install python3 - name: Use disk with more space for TMPDIR and XDG_CACHE_HOME shell: bash From 5d0a5a687ace12454712fced475ec0e3f362eabf Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 3 Jun 2024 09:25:04 +0000 Subject: [PATCH 083/222] CP-47869: Fixed No attribute 'server' on module 'xmlrpc.client' [module-attr] in /scripts/examples/python/mini-xenrt.py Signed-off-by: Ashwinh --- scripts/examples/python/mini-xenrt.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/examples/python/mini-xenrt.py b/scripts/examples/python/mini-xenrt.py index 0907132da80..b30e9d9973c 100644 --- a/scripts/examples/python/mini-xenrt.py +++ b/scripts/examples/python/mini-xenrt.py @@ -109,7 +109,7 @@ def make_operation_list(vm): print(" -- performs parallel operations on VMs with the specified other-config key") 
sys.exit(1) - x = xmlrpc.client.server(sys.argv[1]) + x = xmlrpc.client.ServerProxy(sys.argv[1]) key = sys.argv[2] session = x.session.login_with_password("root", "xenroot", "1.0", "xen-api-scripts-minixenrt.py")["Value"] vms = x.VM.get_all_records(session)["Value"] From e061a29549144f8fab3adf6b4170df6b0918590b Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 3 Jun 2024 11:18:22 +0000 Subject: [PATCH 084/222] CP-47869: fixed Name 'FilenotFoundError' is not defined [name-error] in scripts/examples/smapiv2.py Signed-off-by: Ashwinh --- scripts/examples/smapiv2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/examples/smapiv2.py b/scripts/examples/smapiv2.py index cc990dcadf2..1047f57825c 100644 --- a/scripts/examples/smapiv2.py +++ b/scripts/examples/smapiv2.py @@ -13,7 +13,7 @@ def reopenlog(log_file): if log_file: try: log_f = open(log_file, "a") - except FilenotFoundError: + except FileNotFoundError: log_f = open(log_file, "w") else: log_f = open(os.dup(sys.stdout.fileno()), "a") From 43d9130ed4daca656781f128e64b73ae9525863c Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 3 Jun 2024 11:40:43 +0000 Subject: [PATCH 085/222] CP-47869: removed smapiv2.py and mini-xenrt.py from expected_to_fail in pyproject.toml Signed-off-by: Ashwinh --- pyproject.toml | 2 -- 1 file changed, 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5ea22b96551..2fb086f0b11 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -251,11 +251,9 @@ expected_to_fail = [ # SSLSocket.send() only accepts bytes, not unicode string as argument: "scripts/examples/python/exportimport.py", # Other fixes needed: - "scripts/examples/python/mini-xenrt.py", "scripts/examples/python/XenAPI/XenAPI.py", "scripts/examples/python/monitor-unwanted-domains.py", "scripts/examples/python/shell.py", - "scripts/examples/smapiv2.py", "scripts/static-vdis", "scripts/plugins/extauth-hook-AD.py", ] From 0e72cb68f537c6947076b0b68dffae202b1ff561 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Fri, 24 May 2024 12:00:00 +0200 Subject: [PATCH 086/222] CI/coverage: Fix addopts, migrate away from pytest-cov pyproject.toml: - Don't use [tool.pytest.ini_options].addopts to pass test paths: addopts forces those options to be used every time pytest is run, which is very restrictive. Instead, use `coverage run` to configure coverage options, and support running specific tests by passing them as arguments to pytest: coverage run -m pytest python3/tests/test_xenapi.py .pre-commit-config.yaml: - No longer rely on [tool.pytest.ini_options].addopts to use pytest-cov. Instead, use `coverage run` to run pytest with coverage, and then `coverage xml` to get an xml coverage dump of the coverage, as well as `coverage html` to generate html for viewing coverage locally, and `coverage report` to generate a brief textual report of the coverage. This also improves the configuration to show the coverage report after the test results have been shown and not in the middle of it. 
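As a rough sketch of the resulting workflow (based only on the [tool.coverage.run], [tool.coverage.xml] and pre-commit settings changed in this patch), the coverage collection that the old pytest addopts performed implicitly, roughly

    pytest --cov=python3 --cov=scripts --cov-report=xml:.git/coverage3.11.xml

now corresponds approximately to:

    coverage run     # runs "-m pytest -v -ra" via [tool.coverage.run] command_line
    coverage xml     # writes .git/coverage3.11.xml, which diff-cover reads
    coverage html && coverage report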
Signed-off-by: Bernhard Kaindl --- .pre-commit-config.yaml | 3 ++- pyproject.toml | 31 ++++++++++++++++++------------- 2 files changed, 20 insertions(+), 14 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 668b4190ce1..d714b01cd6e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -65,7 +65,8 @@ repos: - id: pytest files: python3/ name: check that the Python3 test suite in passes - entry: env PYTHONDEVMODE=yes sh -c 'python3 -m pytest -vv && + entry: env PYTHONDEVMODE=yes sh -c 'coverage run && coverage xml && + coverage html && coverage report && diff-cover --ignore-whitespace --compare-branch=origin/feature/py3 --show-uncovered --html-report .git/coverage-diff.html --fail-under 50 .git/coverage3.11.xml' diff --git a/pyproject.toml b/pyproject.toml index 5ea22b96551..abcdd512aab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,7 +52,8 @@ skip_covered = true [tool.coverage.run] # Default command line for "coverage run": Run pytest in non-verbose mode -command_line = "-m pytest -p no:logging -p no:warnings" +command_line = "-m pytest -v -ra" + # Default data file for "coverage run": Store coverage data in .git/.coverage data_file = ".git/.coverage" # Default context for "coverage run": Use the name of the test function @@ -75,7 +76,7 @@ relative_files = true # Default output when writing "coveragle xml" data. This needs to match what # diff-cover and coverage upload to Codecov expect [tool.coverage.xml] -output = ".git/coverage.xml" +output = ".git/coverage3.11.xml" # Default output directory for writing "coverage html" data. @@ -199,18 +200,22 @@ exclude = [ # ----------------------------------------------------------------------------- # Options to enable for pytest by default: # -v show what happens -# -rA show summary after running tests -# --cov=python3 measure coverage of the python3 directory -# --cov-fail-under minimum coverage percentage -# --cov-report=term-missing show missing lines in the coverage report -# --cov-report=html: generate an HTML coverage report(for viewing) -# --cov-report=xml: generate an XML coverage report(for upload) +# -ra show short summary after running tests +# Other options should not be passed using addopts, as addopts forces those +# options to be used every time pytest is run, which is very restrictive. +# Instead, use `coverage run` to configure coverage options, and support +# running specific tests by passing them as arguments to pytest: +# For example: +# coverage run -m pytest python3/tests/test_xenapi.py +# Adding specific --cov options using addopts is not recommended as it would +# require to use the pytest-cov plugin, which would conflict with the use of +# `coverage run`. Instead, use `coverage` to configure coverage options. 
+# Specifying directories to test is better done using the testpaths option, +# as testpaths sets the default directories to search for tests, but does not +# force them to be run, so you can still run specific tests files by just +# passing them as arguments to pytest: pytest python3/tests/test_xenapi.py # ----------------------------------------------------------------------------- -addopts = """ --v -rA --cov=python3 --cov=scripts --cov-fail-under=50 ---cov-report=html:.git/coverage --cov-report=term-missing ---cov-report=xml:.git/coverage3.11.xml -""" +addopts = "-v -ra" # ----------------------------------------------------------------------------- # Other pytest config options: From ed86b8f1d3234cf4e42a0fff8a1f9a62b1f0855a Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 11 Jun 2024 08:14:39 +0000 Subject: [PATCH 087/222] Revert "CP-47869: Modified code using 2to3 /ocaml/xapi-storage/python/xapi/__init__.py" This reverts commit d73690d63928748de4e54cb1c494d289d3ec1de9. Signed-off-by: Ashwinh --- ocaml/xapi-storage/python/xapi/__init__.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/ocaml/xapi-storage/python/xapi/__init__.py b/ocaml/xapi-storage/python/xapi/__init__.py index 1960e549d46..d2a0eed3f94 100644 --- a/ocaml/xapi-storage/python/xapi/__init__.py +++ b/ocaml/xapi-storage/python/xapi/__init__.py @@ -1,7 +1,7 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python2 """ -Copyright (c) 2013-2024, Cloud Software Group,Inc. +Copyright (c) 2013-2018, Citrix Inc. All rights reserved. Redistribution and use in source and binary forms, with or without @@ -72,7 +72,7 @@ class XenAPIException(Exception): def __init__(self, code, params): Exception.__init__(self) - if not isinstance(code, str) and not isinstance(code, str): + if not isinstance(code, str) and not isinstance(code, unicode): raise TypeError("string", repr(code)) if not isinstance(params, list): raise TypeError("list", repr(params)) @@ -139,7 +139,7 @@ def __init__(self, name): def is_long(x): try: - int(x) + long(x) return True except ValueError: return False From 789b9e490022489eb83ee1095b4df15d882fe3e0 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 11 Jun 2024 08:21:19 +0000 Subject: [PATCH 088/222] CP-47869: Changed shebang to python3 /ocaml/xapi-storage/python/xapi/__init__.py Signed-off-by: Ashwinh --- ocaml/xapi-storage/python/xapi/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/ocaml/xapi-storage/python/xapi/__init__.py b/ocaml/xapi-storage/python/xapi/__init__.py index d2a0eed3f94..57a7c0c9f2d 100644 --- a/ocaml/xapi-storage/python/xapi/__init__.py +++ b/ocaml/xapi-storage/python/xapi/__init__.py @@ -1,7 +1,7 @@ -#!/usr/bin/env python2 +#!/usr/bin/env python3 """ -Copyright (c) 2013-2018, Citrix Inc. +Copyright (c) 2013-2024, Cloud Software Group,Inc. All rights reserved. 
Redistribution and use in source and binary forms, with or without From 23ed63da3766b4a0372ce34787d4189d5e23165b Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 11 Jun 2024 08:58:15 +0000 Subject: [PATCH 089/222] CP-47869: Fix pylint reimport issue ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/sr.py Signed-off-by: Ashwinh --- .../examples/volume/org.xen.xapi.storage.simple-file/sr.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/sr.py b/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/sr.py index 8f2f5ca3942..07f4f9c0436 100755 --- a/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/sr.py +++ b/ocaml/xapi-storage/python/examples/volume/org.xen.xapi.storage.simple-file/sr.py @@ -18,8 +18,9 @@ from __future__ import print_function import os import sys -import urllib.request, urllib.parse, urllib.error +import urllib.request import urllib.parse +import urllib.error import xapi.storage.api.v5.volume from xapi import InternalError From 4e4b35f9db4091a74a0ae9b7bb6e49cd8e8ef79e Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 10:25:28 +0000 Subject: [PATCH 090/222] CP-47869: Deleted message_switch.py ocaml/message-switch/python/message_switch.py Signed-off-by: Ashwinh --- ocaml/message-switch/python/message_switch.py | 414 ------------------ 1 file changed, 414 deletions(-) delete mode 100755 ocaml/message-switch/python/message_switch.py diff --git a/ocaml/message-switch/python/message_switch.py b/ocaml/message-switch/python/message_switch.py deleted file mode 100755 index 460d4ee2e04..00000000000 --- a/ocaml/message-switch/python/message_switch.py +++ /dev/null @@ -1,414 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Citrix Systems Inc -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
- -import json - -class Http_request: - def __init__(self, method, uri, body = None): - self.method = method - self.uri = uri - self.body = body - - def to_string(self): - body = "" - if self.body: - body = self.body - lines = [ - "%s %s HTTP/1.1" % (self.method, self.uri), - "Content-Length: %d" % len(body), - "", - body - ] - return "\r\n".join(lines) - -class Http_response: - def __init__(self, body): - self.body = body - - def to_string(self): - lines = [ - "HTTP/1.1 200 OK", - "Content-Length: %d" % len(self.body), - "", - self.body - ] - return "\r\n".join(lines) - - @classmethod - def of_string(cls, txt): - lines = txt.split("\r\n") - if lines[0] <> "HTTP/1.1 200 OK": - raise "Unexpected status line: %s" % lines[0] - rest = "\r\n".join(lines[3:]) - return cls(rest) - -class Message: - def __init__(self, payload, correlation_id, reply_to = None): - self.payload = payload - self.correlation_id = correlation_id - self.reply_to = reply_to - - def save(self): - result = { - "payload": self.payload, - "correlation_id": self.correlation_id - } - if self.reply_to: - result["reply_to"] = self.reply_to - return result - - @classmethod - def load(cls, x): - payload = x["payload"] - correlation_id = x["correlation_id"] - reply_to = None - if "reply_to" in x: - reply_to = x["reply_to"] - return cls(payload, correlation_id, reply_to) - - def __str__(self): - return json.dumps(self.save()) - -class Login: - def __init__(self, some_credential): - self.some_credential = some_credential - - def to_request(self): - return Http_request("GET", "/login/%s" % self.some_credential) - -class Create_request: - def __init__(self, name = None): - self.name = name - - def to_request(self): - uri = "/create" - if self.name: - uri = uri + "/" + self.name - return Http_request("GET", uri) - -class Create_response: - def __init__(self, name = None): - self.name = name - - @classmethod - def of_response(cls, response): - return cls(response.body) - - def to_response(self): - return Http_response(self.name) - -class Subscribe: - def __init__(self, name): - self.name = name - - def to_request(self): - return Http_request("GET", "/subscribe/%s" % self.name) - -class Send: - def __init__(self, name, message): - self.name = name - self.message = message - def to_request(self): - if self.message.reply_to: - return Http_request("POST", "/send/%s/%d/%s" % (self.name, self.message.correlation_id, self.message.reply_to), self.message.payload) - else: - return Http_request("POST", "/send/%s/%d" % (self.name, self.message.correlation_id), self.message.payload) - -class Transfer_request: - def __init__(self, ack_to, timeout): - self.ack_to = ack_to - self.timeout = timeout - - def to_request(self): - return Http_request("GET", "/transfer/%Ld/%.16g" % (self.ack_to, self.timeout)) - -class Transfer_response: - def __init__(self, messages): - self.messages = messages - - @classmethod - def of_response(cls, response): - x = json.loads(response.body) - result = {} - for (k, v) in x["messages"]: - result[long(k)] = Message.load(v) - return Transfer_response(result) - -class Ack: - def __init__(self, ack): - self.ack = ack - - def to_request(self): - return Http_request("GET", "/ack/%Ld" % self.ack) - -import string, socket - -default_config = { - "ip": "169.254.0.1", # HIMN IP of dom0 - "port": 8080, # default for xenswitch -} - -class End_of_file(Exception): - def __init__(self): - pass -class Bad_status(Exception): - def __init__(self, status): - self.status = status -class Missing_content_length(Exception): - def __init__(self): - 
pass -class StreamReader: - def __init__(self, sock): - self.sock = sock - self.buffered = "" - def read_fragment(self, n): - if len(self.buffered) > 0: - num_available = min(n, len(self.buffered)) - fragment = self.buffered[0:num_available] - self.buffered = self.buffered[num_available:] - return fragment - else: - self.buffered = self.sock.recv(16384) - if len(self.buffered) == 0: - raise End_of_file() - return self.read_fragment(n) - def read(self, n): - results = "" - while n > 0: - fragment = self.read_fragment(n) - n = n - len(fragment) - results = results + fragment - return results - - def readline(self): - results = "" - eol = False - while not eol: - byte = self.read(1) - if byte == "\n": - eol = True - else: - results = results + byte - return results - -def link_send(sock, m): - sock.sendall(m.to_request().to_string()) - -def link_recv(reader): - status = reader.readline() - if not(status.startswith("HTTP/1.1 200 OK")): - raise Bad_status(status) - content_length = None - eoh = False - while not eoh: - header = reader.readline().strip() - if header == "": - eoh = True - else: - bits = header.split(":") - key = string.lower(bits[0]) - if key == "content-length": - content_length = int(bits[1]) - if content_length == None: - raise Missing_content_length() - body = reader.read(content_length) - return Http_response(body) - -def login(sock, reader, some_credential): - link_send(sock, Login(some_credential)) - link_recv(reader) - -def create(sock, reader, name = None): - link_send(sock, Create_request(name)) - return Create_response.of_response(link_recv(reader)).name - -def subscribe(sock, reader, name): - link_send(sock, Subscribe(name)) - link_recv(reader) - -def send(sock, reader, name, msg): - link_send(sock, Send(name, msg)) - link_recv(reader) - -def transfer(sock, reader, ack_to, timeout): - link_send(sock, Transfer_request(ack_to, timeout)) - return Transfer_response.of_response(link_recv(reader)).messages - -def ack(sock, reader, id): - link_send(sock, Ack(id)) - link_recv(reader) - -from threading import Thread, Event, Lock - -class Receiver(Thread): - def __init__(self, sock, reader, server): - Thread.__init__(self) - self.daemon = True - self.sock = sock - self.reader = reader - self.server = server - self.events = {} - self.replies = {} - def register_correlation_id(self, correlation_id): - event = Event() - self.events[correlation_id] = event - return event - def get_reply(self, correlation_id): - reply = self.replies[correlation_id] - del self.replies[correlation_id] - return reply - def set_listen_callback(self, listen_callback): - self.listen_callback = listen_callback - def run(self): - ack_to = -1L - timeout = 5.0 - while True: - messages = transfer(self.sock, self.reader, ack_to, timeout) - for id in messages.keys(): - ack_to = max(ack_to, id) - m = messages[id] - reply_to = m.reply_to - if reply_to: - reply = self.server.dispatch(m) - send(self.sock, self.reader, reply_to, reply) - ack(self.sock, self.reader, id) - else: - if m.correlation_id not in self.events: - print >>sys.stderr, "Unknown correlation_id: %d" % m.correlation_id - else: - self.replies[m.correlation_id] = m.payload - event = self.events[m.correlation_id] - del self.events[m.correlation_id] - event.set() - -class Connection: - def __init__(self, client, name): - self.client = client - self.name = name - def rpc(self, request): - return self.client.rpc(self.name, request) - -class Server: - def __init__(self): - pass - def dispatch(self, request): - # echo the request back - request.reply_to = 
None - return request - -class Switch: - def __init__(self, some_credential, config = default_config, server = Server()): - self.some_credential = some_credential - self.config = config - self.server = server - - # Open a connection for requests and one for events - self.request_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.request_sock.connect((config["ip"], config["port"])) - self.request_stream_reader = StreamReader(self.request_sock) - self.request_mutex = Lock() - login(self.request_sock, self.request_stream_reader, some_credential) - - self.event_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - self.event_sock.connect((config["ip"], config["port"])) - self.event_stream_reader = StreamReader(self.event_sock) - login(self.event_sock, self.event_stream_reader, some_credential) - - self.receiver_thread = Receiver(self.event_sock, self.event_stream_reader, self.server) - self.receiver_thread.start() - self.next_correlation_id = 0 - self.next_correlation_id_mutex = Lock() - - def correlation_id(self): - self.next_correlation_id_mutex.acquire() - try: - correlation_id = self.next_correlation_id - self.next_correlation_id = self.next_correlation_id + 1 - return correlation_id - finally: - self.next_correlation_id_mutex.release() - - def rpc(self, name, request): - correlation_id = self.correlation_id() - event = self.receiver_thread.register_correlation_id(correlation_id) - - self.request_mutex.acquire() - try: - reply_queue = create(self.request_sock, self.request_stream_reader) - subscribe(self.request_sock, self.request_stream_reader, reply_queue) - send(self.request_sock, self.request_stream_reader, name, Message(request, correlation_id, reply_queue)) - finally: - self.request_mutex.release() - - event.wait() - return self.receiver_thread.get_reply(correlation_id) - - def connect(self, service): - self.request_mutex.acquire() - try: - create(self.request_sock, self.request_stream_reader, service) - finally: - self.request_mutex.release() - - return Connection(self, service) - - def listen(self, service): - self.request_mutex.acquire() - try: - create(self.request_sock, self.request_stream_reader, service) - subscribe(self.request_sock, self.request_stream_reader, service) - finally: - self.request_mutex.release() - - -if __name__ == "__main__": - from optparse import OptionParser - import sys, time - - parser = OptionParser() - parser.add_option("-x", "--switch", dest="switch", type="string", - help="address of message switch", metavar="SWITCH") - parser.add_option("-l", "--listen", dest="listen", action="store_true", - help="listen for RPCs, instead of sending them") - parser.add_option("-s", "--service", dest="service", type="string", - help="name of the remote service") - parser.add_option("-c", "--client", dest="client_name", type="string", - help="name which identifies this client") - - (options, args) = parser.parse_args() - config = default_config - if options.switch: - bits = options.switch.split(":") - config["ip"] = bits[0] - if len(bits) == 2: - config["port"] = int(bits[1]) - - client_name = "test_python" - if options.client_name: - client_name = options.client_name - if not options.service: - print >> sys.stderr, "Must provide a --service name" - sys.exit(1) - - if options.listen: - s = Switch(client_name, server = Server()) - s.listen(options.service) - while True: - time.sleep(5) - else: - s = Switch(client_name) - c = s.connect(options.service) - print c.rpc("hello") From 9c066f4680986b71f9eee456a49f84550bdc81a4 Mon Sep 17 00:00:00 2001 From: 
Ashwinh Date: Mon, 27 May 2024 10:29:16 +0000 Subject: [PATCH 091/222] CP-47869: Removed message_switch_test.py ocaml/message-switch/core_test Signed-off-by: Ashwinh --- .../core_test/message_switch_test.py | 98 ------------------- 1 file changed, 98 deletions(-) delete mode 100644 ocaml/message-switch/core_test/message_switch_test.py diff --git a/ocaml/message-switch/core_test/message_switch_test.py b/ocaml/message-switch/core_test/message_switch_test.py deleted file mode 100644 index 5566adf8a08..00000000000 --- a/ocaml/message-switch/core_test/message_switch_test.py +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2012 Citrix Systems Inc -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -import unittest, os -from message_switch import * - -try: - tmpdir = os.environ["TMPDIR"] -except KeyError: - tmpdir = "/tmp" - -basedir = os.path.join(tmpdir, "link_test") - -rpc_req = Message("hello", 1L, "reply_to") -rpc_res = Message("hello", 1L) - -class Internal_invariants(unittest.TestCase): - def test_Message_save_load(self): - for m in [rpc_req, rpc_res]: - n = Message.load(m.save()) - assert m.payload == n.payload - assert m.correlation_id == n.correlation_id - assert m.reply_to == n.reply_to - -def load(x): - path = os.path.join(basedir, x) - f = open(path, "r") - try: - return f.read() - finally: - f.close() - -class Ocaml_interop(unittest.TestCase): - def test_login(self): - py = Login("hello").to_request().to_string() - ocaml = load("login") - assert py == ocaml - def test_create_named(self): - py = Create_request("service").to_request().to_string() - ocaml = load("create") - assert py == ocaml - def test_create_anon(self): - py = Create_request().to_request().to_string() - ocaml = load("create.anon") - assert py == ocaml - def test_subscribe(self): - py = Subscribe("service").to_request().to_string() - ocaml = load("subscribe") - assert py == ocaml - def test_request(self): - py = Send("service", rpc_req).to_request().to_string() - ocaml = load("request") - assert py == ocaml - def test_response(self): - py = Send("service", rpc_res).to_request().to_string() - ocaml = load("reply") - assert py == ocaml - def test_transfer(self): - py = Transfer_request(3, 5.0).to_request().to_string() - ocaml = load("transfer") - assert py == ocaml - def test_ack(self): - py = Ack(3).to_request().to_string() - ocaml = load("ack") - assert py == ocaml - - def test_create_reply(self): - ocaml = Create_response.of_response(Http_response.of_string(load("create.reply"))) - assert ocaml.name == "service" - def test_transfer_reply(self): - ocaml = Transfer_response.of_response(Http_response.of_string(load("transfer.reply"))) - m = { - 1L: rpc_req, - 2L: rpc_res, - } - py = Transfer_response(m) - for k in py.messages: - assert k in ocaml.messages - assert str(py.messages[k]) == str(ocaml.messages[k]) - for k in 
ocaml.messages: - assert k in py.messages - assert str(py.messages[k]) == str(ocaml.messages[k]) - -if __name__ == "__main__": - unittest.main() From 5ab6877ae9d2f01a13e3b08f4df062e17d03fd9d Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 10:31:47 +0000 Subject: [PATCH 092/222] CP-47869: Removed rrdd-example.py ocaml/xcp-rrdd/scripts/rrdd/ Signed-off-by: Ashwinh --- ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py | 18 ------------------ 1 file changed, 18 deletions(-) delete mode 100755 ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py diff --git a/ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py b/ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py deleted file mode 100755 index e25e0ddf016..00000000000 --- a/ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python - -import rrdd, os - -if __name__ == "__main__": - # Create a proxy for communicating with xcp-rrdd. - api = rrdd.API(plugin_id="host_mem") - while True: - # Wait until 0.5 seconds before xcp-rrdd is going to read the output file. - api.wait_until_next_reading(neg_shift=.5) - # Collect measurements. - cmd = "free -k | grep Mem | awk '{print $2, $3, $4}'" - vs = os.popen(cmd).read().strip().split() - # Tell the proxy which datasources should be exposed in this iteration. - api.set_datasource("used_mem", vs[1], min_val=0, max_val=vs[0], units="KB") - api.set_datasource("free_mem", vs[2], min_val=0, max_val=vs[0], units="KB") - # Write all required information into a file about to be read by xcp-rrdd. - api.update() From 5cf6693306792f8b861dd7ffef1a1186cbb2335c Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 10:33:31 +0000 Subject: [PATCH 093/222] CP-47869: Removed has_vendor_device_test.py ocaml/tests/ Signed-off-by: Ashwinh --- ocaml/tests/has_vendor_device_test.py | 159 -------------------------- 1 file changed, 159 deletions(-) delete mode 100644 ocaml/tests/has_vendor_device_test.py diff --git a/ocaml/tests/has_vendor_device_test.py b/ocaml/tests/has_vendor_device_test.py deleted file mode 100644 index 5d5ceaf542d..00000000000 --- a/ocaml/tests/has_vendor_device_test.py +++ /dev/null @@ -1,159 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function -import xmlrpclib -import sys - -s=xmlrpclib.Server("http://localhost/") -sess=s.session.login_with_password("root","xenroot")['Value'] - -pool = s.pool.get_all(sess)['Value'][0] -restrictions = s.pool.get_restrictions(sess,pool)['Value'] - -base_request = {'user_version':'1', 'is_a_template':False, 'affinity':'', 'memory_static_max':'4', 'memory_static_min':'1', 'memory_dynamic_max':'3', 'memory_dynamic_min':'2', 'VCPUs_params':{}, 'VCPUs_max':'1', 'VCPUs_at_startup':'1', 'name_label':'hello', 'name_description':'hi', 'memory_target':'2', 'actions_after_shutdown':'destroy', 'actions_after_reboot':'restart', 'actions_after_crash':'destroy', 'PV_bootloader':'', 'PV_kernel':'', 'PV_ramdisk':'', 'PV_args':'', 'PV_bootloader_args':'', 'PV_legacy_args':'', 'HVM_boot_policy':'', 'HVM_boot_params':{}, 'HVM_shadow_multiplier':1.0, 'platform':{}, 'PCI_bus':'', 'other_config':{}, 'recommendations':'', 'xenstore_data':{}, 'ha_always_run':False, 'ha_restart_priority':'1', 'tags':[], 'blocked_operations':{}, 'protection_policy':'', 'is_snapshot_from_vmpp':False, 'appliance':'', 'start_delay':'0', 'shutdown_delay':'0', 'order':'0', 'suspend_SR':'', 'version':'0', 'generation_id':'', 'hardware_platform_version':'0'} - -# - -def create(): - res = s.VM.create(sess, base_request) - return res - -def create_with_vd(b): - request = 
base_request.copy() - request['has_vendor_device']=b - return s.VM.create(sess,request) - -# VD in request | OK by license | pool.policy_no_vendor_device | resulting VM.has_vendor_device -# - | False | False | False -# False | False | False | False -# True | False | False | Failure -# - | False | True | False -# False | False | True | False -# True | False | True | Failure - - -def test_with_restriction(): # OK by license column above - # Expect this to be successful on an unlicensed host, and for the field to be 'false' - print("running restricted tests (license says you're not allowed the vendor device)") - - s.pool.set_policy_no_vendor_device(sess,pool,False) - -# - | False | False | False - res = create() - vm = res['Value'] - expected = False - found = s.VM.get_has_vendor_device(sess,vm)['Value'] - print("Expecting has-vendor-device to be %s: got %s" % (expected,found)) - assert(expected == found) - -# False | False | False | False - res = create_with_vd(False) - vm = res['Value'] - expected = False - found = s.VM.get_has_vendor_device(sess,vm)['Value'] - print("Expecting has-vendor-device to be %s: got %s" % (expected,found)) - assert(expected == found) - -# True | False | False | Failure - res = create_with_vd(True) - print("Expecting failure: got %s" % res['Status']) - assert(res['Status']=='Failure') - - s.pool.set_policy_no_vendor_device(sess,pool,True) - -# - | False | True | False - res = create() - vm = res['Value'] - expected = False - found = s.VM.get_has_vendor_device(sess,vm)['Value'] - print("Expecting has-vendor-device to be %s: got %s" % (expected,found)) - assert(expected == found) - -# False | False | True | False - res = create_with_vd(False) - vm = res['Value'] - expected = False - found = s.VM.get_has_vendor_device(sess,vm)['Value'] - print("Expecting has-vendor-device to be %s: got %s" % (expected,found)) - assert(expected == found) - -# True | False | True | Failure - res = create_with_vd(True) - print("Expecting failure: got %s" % res['Status']) - assert(res['Status']=='Failure') - - - -def test_no_restriction(): - print("running unrestricted tests") - -# - | True | False | True -# False | True | False | False -# True | True | False | True -# - | True | True | False -# False | True | True | False -# True | True | True | True - - s.pool.set_policy_no_vendor_device(sess,pool,False) - -# - | True | False | True - res = create() - vm = res['Value'] - expected = True - found = s.VM.get_has_vendor_device(sess,vm)['Value'] - print("Expecting has-vendor-device to be %s: got %s" % (expected,found)) - assert(expected == found) - -# False | True | False | False - res = create_with_vd(False) - vm = res['Value'] - expected = False - found = s.VM.get_has_vendor_device(sess,vm)['Value'] - print("Expecting has-vendor-device to be %s: got %s" % (expected,found)) - assert(expected == found) - -# True | True | False | True - res = create_with_vd(True) - vm = res['Value'] - expected = True - found = s.VM.get_has_vendor_device(sess,vm)['Value'] - print("Expecting has-vendor-device to be %s: got %s" % (expected,found)) - assert(expected == found) - - s.pool.set_policy_no_vendor_device(sess,pool,True) - -# - | True | True | False - res = create() - vm = res['Value'] - expected = False - found = s.VM.get_has_vendor_device(sess,vm)['Value'] - print("Expecting has-vendor-device to be %s: got %s" % (expected,found)) - assert(expected == found) - -# False | True | True | False - res = create_with_vd(False) - vm = res['Value'] - expected = False - found = 
s.VM.get_has_vendor_device(sess,vm)['Value'] - print("Expecting has-vendor-device to be %s: got %s" % (expected,found)) - assert(expected == found) - -# True | True | True | True - res = create_with_vd(True) - vm = res['Value'] - expected = True - found = s.VM.get_has_vendor_device(sess,vm)['Value'] - print("Expecting has-vendor-device to be %s: got %s" % (expected,found)) - assert(expected == found) - - - -if restrictions['restrict_pci_device_for_auto_update'] == "true": - test_with_restriction() -else: - test_no_restriction() - - - - - From 1bebb13bb6728f1e11587818a1842885efea6524 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 11:04:07 +0000 Subject: [PATCH 094/222] CP-47869: Removed mtcerrno-to-ocaml.py scripts/ Signed-off-by: Ashwinh --- scripts/mtcerrno-to-ocaml.py | 63 ------------------------------------ 1 file changed, 63 deletions(-) delete mode 100755 scripts/mtcerrno-to-ocaml.py diff --git a/scripts/mtcerrno-to-ocaml.py b/scripts/mtcerrno-to-ocaml.py deleted file mode 100755 index 399d265f724..00000000000 --- a/scripts/mtcerrno-to-ocaml.py +++ /dev/null @@ -1,63 +0,0 @@ -#!/usr/bin/env python - -# Convert the MTC exit codes into a disjoint union type. Each line in the file looks like: - -# errdef, MTC_EXIT_SUCCESS, 0, 0, "", - -# Usage: -# cat ../xha.hg/include/mtcerrno.def | ./scripts/mtcerrno-to-ocaml.py > ocaml/xapi/xha_errno.ml - -from __future__ import print_function -import sys - -def parse(file): - all = [] - while True: - line = file.readline() - if line == "": - return all - if line.startswith("errdef, MTC_EXIT"): - bits = line.split(",") - name = bits[1].strip() - code = bits[2].strip() - desc = bits[4].strip() - this = { "name": name, "code": code, "desc": desc } - all.append(this) - -def ctor_name(x): - ctor = x['name'] - return ctor[0].upper() + ctor[1:].lower() - -def make_datatype(all): - print("type code = ") - for x in all: - print("| %s" % ctor_name(x)) - -def to_string(all): - print("let to_string : code -> string = function") - for x in all: - print("| %s -> \"%s\"" % (ctor_name(x), x['name'])) - -def to_description_string(all): - print("let to_description_string : code -> string = function") - for x in all: - print("| %s -> %s" % (ctor_name(x), x['desc'])) - -def of_int(all): - print("let of_int : int -> code = function") - for x in all: - print("| %s -> %s" % (x['code'], ctor_name(x))) - print("| x -> failwith (Printf.sprintf \"Unrecognised MTC exit code: %d\" x)") - -if __name__ == "__main__": - all = parse(sys.stdin) - print("(* Autogenerated by %s -- do not edit *)" % (sys.argv[0])) - make_datatype(all) - to_string(all) - to_description_string(all) - of_int(all) - - - - - From 016d56fa589db52436c374ffa16faac78345b3ad Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 11:04:53 +0000 Subject: [PATCH 095/222] CP-47869: Removed hatests from scripts/ Signed-off-by: Ashwinh --- scripts/hatests | 260 ------------------------------------------------ 1 file changed, 260 deletions(-) delete mode 100755 scripts/hatests diff --git a/scripts/hatests b/scripts/hatests deleted file mode 100755 index 8828820ecb3..00000000000 --- a/scripts/hatests +++ /dev/null @@ -1,260 +0,0 @@ -#!/usr/bin/env python - -from __future__ import print_function -import XenAPI -import getopt -import sys -import os -import commands -import random -import time -import httplib -import urllib - -def check(svm, ip): - """ - checking that the pool is in the same condition as before - """ - global master - global masterref - global hosts - global vmrunning - flag = True - 
masterref2 = svm.xenapi.pool.get_all_records().values()[0]['master'] - if masterref2 != masterref : - print("From " + ip + " point of view the pool master is " + svm.xenapi.host.get_record(masterref2)["address"]) - flag = False - hosts2 = svm.xenapi.host.get_all_records() - if len(hosts) != len(hosts2) : - print("From " + ip + " point of view the number of hosts is changed.") - flag = False - for k in hosts.keys() : - if k not in hosts2 : - print("From " + ip + " point of view " + hosts[k]["address"] + " is not present any more.") - vmrecords2 = svm.xenapi.VM.get_all_records() - vmrunning2 = {} - for k, v in vmrecords2.iteritems() : - if v['power_state'] == 'Running' and int(v['domid']) == 0: - vmrunning2[k] = v - if len(vmrunning) != len(vmrunning2) : - print("From " + ip + " point of view some VMs have changed state.") - flag = False - for k, v in vmrunning.iteritems() : - if k not in vmrunning2 : - print("From " + ip + " point of view " + v['name_label'] + " is not online any more.") - if flag : - print("On %s everything is consistent." % ip) - -def help() : - print(""" - Usage: hatests - - where options can be: - -w, --wait wait time between stopping an host and restarting it - (default 120) - - where test can be: - master_hard_failure - master_soft_failure - slave_hard_failure - slave_soft_failure - master_vif_unplug - """) - -###### START ###### - -secs = 120 - -optlist, args = getopt.getopt(sys.argv[1:],"w:h", ["wait=", "help"]) -for o, a in optlist: - if o == "-w" or o == "--wait": - secs = int(a) - elif o == "-h" or o == "--help" : - help() - sys.exit(0) - -if len(args) != 1 : - help() - sys.exit(1) - -##read config file -#config = open(sys.args[1], "r") -#slave = [] -#for line in config : -# type, ip = line.lstrip().split() -# if type == "master" : -# master = ip -# else : -# slave.append(ip) - -#connection -s = XenAPI.Session('http://localhost') -s.login_with_password('root', 'xenroot', '1.0', 'xen-api-scripts-hatest') - -#Getting all the installed and running VMs with dom-id > 0 -slaves = [] -master = None -vmrecords = s.xenapi.VM.get_all_records() -for k, v in vmrecords.iteritems() : - if v['power_state'] == 'Running' and int(v['domid']) > 0: - ip = commands.getoutput("xenstore-ls /local/domain/" + v['domid'] + " | grep ip") - try: - ip = ip.split()[2] - ip = ip[1:-1] - slaves.append((k, ip)) - except: - print("VM in dom" + v['domid'] + " doesn't have an IP address") - -#finding out which one is the master -svm = XenAPI.Session("http://" + slaves[0][1]) -try : - svm.login_with_password('root', 'xenroot', '1.0', 'xen-api-scripts-hatest') - masterref = svm.xenapi.pool.get_all_records().values()[0]['master'] - masterrecord = svm.xenapi.host.get_record(masterref) - masterip = masterrecord['address'] -except XenAPI.Failure as inst: - masterip = inst.details[1] - svm = XenAPI.Session("http://" + masterip) - svm.login_with_password('root', 'xenroot', '1.0', 'xen-api-scripts-hatest') - masterref = svm.xenapi.pool.get_all_records().values()[0]['master'] -for i in slaves : - if masterip == i[1] : - master = i - slaves.remove(i) - break -print("Master ip address is " + master[1]) - -#getting ip -> hostref references -hosts = {} -hostsrecs = svm.xenapi.host.get_all_records() -for k, v in hostsrecs.iteritems() : - hosts[v['address']] = k - -#getting the VM running -vmrunning = {} -vmrecords = svm.xenapi.VM.get_all_records() -for k, v in vmrecords.iteritems() : - if v['power_state'] == 'Running' and int(v['domid']) == 0: - vmrunning[k] = v - -bringup = None -vifbringup = None -if 
sys.argv[-1] == "master_hard_failure" : - print("Shutting down the master") - s.xenapi.VM.hard_shutdown(master[0]) - bringup = master[0] -elif sys.argv[-1] == "master_soft_failure" : - print("Shutting down the master") - s.xenapi.VM.clean_shutdown(master[0]) - bringup = master[0] -elif sys.argv[-1] == "slave_hard_failure" : - r = random.randint(0, len(slaves) - 1) - print("Shutting down slave " + slaves[r][1]) - s.xenapi.VM.hard_shutdown(slaves[r][0]) - bringup = slaves[r][0] -elif sys.argv[-1] == "slave_hard_failure" : - r = random.randint(0, len(slaves) - 1) - print("Shutting down slave " + slaves[r][1]) - s.xenapi.VM.clean_shutdown(slaves[r][0]) - bringup = slaves[r][0] -elif sys.argv[-1] == "master_vif_unplug" : - print("Unplugging the first found attached VIF in the master") - allvifs = s.xenapi.VIF.get_all_records() - for k, v in allvifs.iteritems() : - if v['currently_attached'] and v['VM'] == master[0]: - vifbringup = k - s.xenapi.VIF.unplug(vifbringup) - break - - -print("Waiting " + str(secs) + " seconds") -count = 0 -while count < secs : - time.sleep(1) - sys.stdout.write(".") - sys.stdout.flush() - count = count + 1 -sys.stdout.write("\n") - -if bringup is not None : - print("Bringing the host up again") - s.xenapi.VM.start(bringup, False, True) -if vifbringup is not None : - print("Plugging the VIF back again") - s.xenapi.VIF.plug(vifbringup) - -print("Waiting " + str(secs) + " seconds") -count = 0 -while count < secs : - time.sleep(1) - sys.stdout.write(".") - sys.stdout.flush() - count = count + 1 -sys.stdout.write("\n") - -print("Collecting logs now...") -try : - fileout = open("master-" + master[1] + "-log.tar.bz2", "w") - f = urllib.urlopen("http://root:xenroot@" + master[1] + "/system-status?host_id=" + hosts[master[1]]) - buf = f.read(50) - if len(buf) == 0 : - print(master[1] + " returned an empty log.") - else : - print("Wrote master log to master-" + master[1] + "-log.tar.bz2") - while len(buf) > 0 : - fileout.write(buf) - buf = f.read(50) -except IOError: - print("Unable to connect to %s: network error." % master[1]) -try: - fileout.close() - f.close() -except: - pass - -for k, ip in slaves : - try : - fileout = open("slave-" + ip + "-log.tar.bz2", "w") - f = urllib.urlopen("http://root:xenroot@" + ip + "/system-status?host_id=" + hosts[ip]) - buf = f.read(50) - if len(buf) == 0 : - print(ip + " returned an empty log.") - else : - print("Wrote slave " + ip + " log to slave-" + ip + "-log.tar.bz2") - while len(buf) > 0 : - fileout.write(buf) - buf = f.read(50) - except IOError: - print("Unable to connect to %s: network error." % ip) - try: - fileout.close() - f.close() - except: - pass - -#checking if everything is still OK -print("Connecting to " + master[1] + "...") -svm = XenAPI.Session("http://" + master[1]) -try : - svm.login_with_password('root', 'xenroot', '1.0', 'xen-api-scripts-hatest') - check(svm, master[1]) -except XenAPI.Failure as inst: - if inst.details[0] == "HOST_IS_SLAVE" : - print(master[0] + " is not master any more") -except IOError: - print("Unable to connect to %s: network error." % master[1]) - -for slave in slaves : - print("Connecting to " + slave[1] + "...") - svm = XenAPI.Session("http://" + slave[1]) - try: - svm.login_with_password('root', 'xenroot', '1.0', 'xen-api-scripts-hatest') - print("Connection succeeded! Is %s still a slave?" % slave[1]) - check(svm, slave[1]) - except XenAPI.Failure as inst: - if inst.details[0] == "HOST_IS_SLAVE" : - print("Connection failed because %s is still a slave." 
% slave[1]) - else : - print("Unable to connect to %s: XenAPI failure." % slave[1]) - except IOError: - print("Unable to connect to %s: network error." % slave[1]) From 6a38aaa2c8f86f29424a771936738944e73f2df9 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 11:05:34 +0000 Subject: [PATCH 096/222] CP-47869: Removed time-vm-boots.py from scripts/ Signed-off-by: Ashwinh --- scripts/time-vm-boots.py | 168 --------------------------------------- 1 file changed, 168 deletions(-) delete mode 100755 scripts/time-vm-boots.py diff --git a/scripts/time-vm-boots.py b/scripts/time-vm-boots.py deleted file mode 100755 index 85ec19f20f8..00000000000 --- a/scripts/time-vm-boots.py +++ /dev/null @@ -1,168 +0,0 @@ -#!/usr/bin/env python - -# Copyright (c) 2006-2007 XenSource, Inc. -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - - -# Simple python example to demonstrate the event system. Logs into the server, -# registers for events on the VM_guest_metrics and computes the time taken for -# the guest agent to report an IP address. - -from __future__ import print_function -import XenAPI -import sys -import time - -vgm_to_vm = {} - - -def register_vm_metrics(session, vm_ref, vgm): - global vgm_to_vm - - try: - # avoid putting invalid references in the cache - tmp = session.xenapi.VM_guest_metrics.get_other(vgm) - vgm_to_vm[vgm] = vm_ref - except: - pass - - -def vm_of_metrics(ref): - global vgm_to_vm - if not(ref in vgm_to_vm.keys()): - return None - return vgm_to_vm[ref] - -interesting_vms = [] -vm_boot_times = {} -boots_seen = 0 - - -def dump_table(session): - global vm_boot_times - for vm_ref in vm_boot_times.keys(): - name = session.xenapi.VM.get_name_label(vm_ref) - print("%s %s" % (name, vm_boot_times[vm_ref])) - - -def seen_possible_boot(session, vm_ref): - global vm_boot_times - global interesting_vms - global boots_seen - if not(vm_ref in vm_boot_times.keys()) and vm_ref in interesting_vms: - t = time.strftime( "%Y%m%dT%H:%M:%SZ", time.gmtime()) - vm_boot_times[vm_ref] = t - boots_seen += 1 - - name = session.xenapi.VM.get_name_label(vm) - print("%d %s %s" % (boots_seen, name, t), file=sys.stdout) - print("%d %s %s" % (boots_seen, name, t), file=sys.stderr) - sys.stderr.flush() - - -def process_guest_metrics(session, ref, snapshot): - if "other" in snapshot.keys(): - other = snapshot["other"] - if "feature-shutdown" in other.keys(): - the_vm = vm_of_metrics(ref) - seen_possible_boot(session, the_vm) - - -def poll_metrics(session): - while True: - time.sleep(10) - all_recs = session.xenapi.VM_guest_metrics.get_all_records() - for ref in all_recs.keys(): - snapshot = all_recs[ref] - process_guest_metrics(session, ref, snapshot) - - -def process_metrics_event(session, ref): - vm_ref = vm_of_metrics(ref) - if vm_ref is None: - return - if session.xenapi.VM.get_power_state(vm_ref) != "Running": - return - other = {} - try: 
- other=session.xenapi.VM_guest_metrics.get_other(ref) - except Exception as e: - print(repr(e)) - - if "feature-shutdown" in other.keys(): - seen_possible_boot(session, vm_ref) - - -def watch_events_on_vm(session): - try: - token = '' - call_timeout = 30.0 - while True: - output = session.xenapi.event_from(["VM", "VM_guest_metrics"], token, call_timeout) - events = output['events'] - token = output['token'] - - for event in events: - if event['operation'] == 'del': - continue - if event['class'] == 'vm' and event['operation'] == 'mod': - register_vm_metrics(session, event['ref'], event['snapshot']['guest_metrics']) - continue - if event['class'] == 'vm_guest_metrics': - process_metrics_event(session, event['ref']) - continue - - except XenAPI.Failure as e: - print(e.details) - sys.exit(1) - finally: - session.xenapi.session.logout() - - -if __name__ == "__main__": - if len(sys.argv) > 4 or len(sys.argv) < 2: - print(""" -Watches all offline VMs for boots -Usage: - %s -or - %s [http://]localhost [] [] -""" % (sys.argv[0], sys.argv[0])) - sys.exit(1) - - url = sys.argv[1] - username = sys.argv[2] if len(sys.argv) > 2 else "" - password = sys.argv[3] if len(sys.argv) > 3 else "" - - if url == "http://localhost" or url == "localhost": - new_session = XenAPI.xapi_local() - else: - new_session = XenAPI.Session(url) - - # First acquire a valid session by logging in - try: - new_session.xenapi.login_with_password(username, password, "1.0", "xen-api-scripts-timevmboots.py") - except XenAPI.Failure as f: - print("Failed to acquire a session: %s" % f.details) - sys.exit(1) - - # We start watching all Halted VMs - all_halted_vms = new_session.xenapi.VM.get_all_records() - for vm in all_halted_vms.keys(): - vm_rec = all_halted_vms[vm] - if vm_rec["power_state"] == "Halted" and not vm_rec["is_a_template"]: - interesting_vms.append(vm) - print("Watching %d offline VMs" % (len(interesting_vms)), file=sys.stderr) - - watch_events_on_vm(new_session) From 5a310c00f232352eaf32cd6d6199163ff8705b0e Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 11:07:12 +0000 Subject: [PATCH 097/222] CP-47869: Removed debian from scripts/templates/ Signed-off-by: Ashwinh --- scripts/templates/debian | 144 --------------------------------------- 1 file changed, 144 deletions(-) delete mode 100644 scripts/templates/debian diff --git a/scripts/templates/debian b/scripts/templates/debian deleted file mode 100644 index 9350a40a57d..00000000000 --- a/scripts/templates/debian +++ /dev/null @@ -1,144 +0,0 @@ -#!/usr/bin/env python -# Copyright (c) 2005-2007 XenSource, Inc - -# Code ripped out of 'xgt' script for now -from __future__ import print_function -import commands, xmlrpclib, os, sys, httplib, socket, urllib2, signal - -verbose = True - -##### begin hack. Provide xmlrpc over UNIX domain socket (cut+pasted from eliloader): -class UDSHTTPConnection(httplib.HTTPConnection): - """ Stupid hacked up HTTPConnection subclass to allow HTTP over Unix domain - sockets. """ - def connect(self): - path = self.host.replace("_", "/") - self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - self.sock.connect(path) - -class UDSHTTP(httplib.HTTP): - _connection_class = UDSHTTPConnection - -class UDSTransport(xmlrpclib.Transport): - def make_connection(self, host): - return UDSHTTP(host) - -def xapi_local(): - return xmlrpclib.Server("http://_var_xapi_xapi/", transport=UDSTransport()) -##### end hack. 
- - -class CommandException(Exception): - pass - - -def run(cmd, *args): - debug("+ " + cmd % args) - (ret, out) = commands.getstatusoutput(cmd % args) - if verbose: - try: - for line in out.split("\n"): - log("| " + line) - except TypeError as e: - pass - if ret != 0: - debug ("run - command %s failed with %d" , cmd, ret) - raise CommandException(out) - return out - -def log(fmt, *args): - print(fmt % args) - -def debug(msg, *args): - if verbose: - print(msg % args) - -def create_partition(lvpath): - # 1. write a partition table: - pipe = os.popen('/sbin/fdisk %s' % lvpath, 'w') - - pipe.write('n\n') # new partition - pipe.write('p\n') # primary - pipe.write("1\n") # 1st partition - pipe.write('\n') # default start cylinder - pipe.write('\n') # size: as big as image - pipe.write('w\n') # write partition table - - # XXX we must ignore certain errors here as fdisk will - # sometimes return non-zero signalling error conditions - # we don't care about. Should fix to detect these cases - # specifically. - rc = pipe.close() - if rc == None: - rc = 0 - log("fdisk exited with rc %d (some non-zero exits can be ignored safely)." % rc) - -def map_partitions(lvpath): - run("/sbin/kpartx -a %s", lvpath) - ps = [] - for line in run("/sbin/kpartx -l %s" % lvpath).split("\n"): - ps.append("/dev/mapper/" + line.split()[0]) - return ps - -def unmap_partitions(lvpath): - run("/sbin/kpartx -d %s", lvpath) - -def umount(mountpoint): - run("umount -l %s",mountpoint) - -if __name__ == "__main__": - #os.setpgrp() - xvda = os.getenv("xvda") - xvdb = os.getenv("xvdb") - debug("Guest's xvda is on %s" % xvda) - debug("Guest's xvdb is on %s" % xvdb) - if xvda == None or xvdb == None: - raise "Need to pass in device names for xvda and xvdb through the environment" - - vm = os.getenv("vm") - - server = xapi_local () - try: - session_id = server.session.login_with_password('','','1.0','xen-api-scripts-debian')['Value'] - uuid = server.VM.get_uuid(session_id, vm)['Value'] - mountpoint = "/tmp/installer/%s" % (uuid) - finally: - server.session.logout(session_id) - - def sighandler(signum, frame): - umount(mountpoint) - os.killpg(0,signal.SIGKILL) - exit(1) - - signal.signal(signal.SIGTERM,sighandler) - - create_partition(xvda) - create_partition(xvdb) - - try: - xvda_parts = map_partitions(xvda) - - run("/sbin/mkfs.ext3 %s", xvda_parts[0]) - - xgt = "@SHAREDIR@/packages/xgt/%s.xgt" % os.path.basename(sys.argv[0]) - - run("/bin/mkdir -p %s", mountpoint) - try: - run("/bin/mount %s %s", xvda_parts[0], mountpoint) - run("/usr/bin/unzip -p %s root.tar.bz2 | tar -C %s -jx", xgt, mountpoint) - finally: - run("/bin/umount %s", mountpoint) - run("/bin/rmdir %s", mountpoint) - run("/usr/bin/unzip -p %s swap.img | dd of=%s oflag=direct bs=1M", xgt, xvdb) - - try: - session_id = server.session.login_with_password('','','1.0','xen-api-scripts-debian')['Value'] - vbds = server.VM.get_VBDs(session_id, vm)['Value'] - for i in vbds: - dev = server.VBD.get_userdevice(session_id, i)['Value'] - if dev == "0": - server.VBD.set_bootable(session_id, i, True) - finally: - server.session.logout(session_id) - finally: - unmap_partitions(xvda) From 7fa1739a023a353b9809914db12a01ce1731cd88 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 27 May 2024 11:08:24 +0000 Subject: [PATCH 098/222] CP-47869: Removed ping-master.py from scripts/scalability-tests/ Signed-off-by: Ashwinh --- scripts/scalability-tests/ping-master.py | 42 ------------------------ 1 file changed, 42 deletions(-) delete mode 100755 scripts/scalability-tests/ping-master.py diff 
--git a/scripts/scalability-tests/ping-master.py b/scripts/scalability-tests/ping-master.py deleted file mode 100755 index 048c5d4c938..00000000000 --- a/scripts/scalability-tests/ping-master.py +++ /dev/null @@ -1,42 +0,0 @@ -#!/usr/bin/env python - -# Send back-to-back 'Host.get_servertime' calls to simulate the GUI's heartbeat and record latency. - -from __future__ import print_function -import XenAPI, sys, time - -iso8601 = "%Y%m%dT%H:%M:%SZ" - -def main(session): - global iso8601 - pool = session.xenapi.pool.get_all()[0] - host = session.xenapi.pool.get_master(pool) - while True: - start = time.time() - session.xenapi.host.get_servertime(host) - latency = time.time() - start - date = time.strftime(iso8601, time.gmtime(start)) - print("%s %.2f" % (date, latency)) - sys.stdout.flush() - time.sleep(5) - - -if __name__ == "__main__": - if len(sys.argv) != 4: - print("Usage:") - print(sys.argv[0], " ") - sys.exit(1) - url = sys.argv[1] - if url[:5] != "https": - raise "Must use SSL for a realistic test" - - username = sys.argv[2] - password = sys.argv[3] - - session = XenAPI.Session(url) - session.xenapi.login_with_password(username, password, "1.0", "xen-api-scripts-pingmaster.py") - try: - main(session) - finally: - session.xenapi.logout() - From 264b414f34de6e66bb25bd2f82a3c7434a6c4bdf Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 28 May 2024 10:56:25 +0000 Subject: [PATCH 099/222] CP-47869: removed scripts/hatests from expected_to_fail in pyproject.toml Signed-off-by: Ashwinh --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index abcdd512aab..764ff6e60e1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -250,7 +250,6 @@ discard_messages_matching = [ ] expected_to_fail = [ # Need 2to3 -w and maybe a few other minor updates: - "scripts/hatests", "scripts/backup-sr-metadata.py", "scripts/restore-sr-metadata.py", # SSLSocket.send() only accepts bytes, not unicode string as argument: From 1aedb6a5adce6e6a35897d6f9dc0f5a3839a90bb Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 29 May 2024 08:51:08 +0000 Subject: [PATCH 100/222] Revert "CP-47869: Removed debian from scripts/templates/" This reverts commit 515a8b2e3da21e584a123960d14601ea69538a92. Signed-off-by: Ashwinh --- scripts/templates/debian | 144 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 144 insertions(+) create mode 100644 scripts/templates/debian diff --git a/scripts/templates/debian b/scripts/templates/debian new file mode 100644 index 00000000000..9350a40a57d --- /dev/null +++ b/scripts/templates/debian @@ -0,0 +1,144 @@ +#!/usr/bin/env python +# Copyright (c) 2005-2007 XenSource, Inc + +# Code ripped out of 'xgt' script for now +from __future__ import print_function +import commands, xmlrpclib, os, sys, httplib, socket, urllib2, signal + +verbose = True + +##### begin hack. Provide xmlrpc over UNIX domain socket (cut+pasted from eliloader): +class UDSHTTPConnection(httplib.HTTPConnection): + """ Stupid hacked up HTTPConnection subclass to allow HTTP over Unix domain + sockets. """ + def connect(self): + path = self.host.replace("_", "/") + self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + self.sock.connect(path) + +class UDSHTTP(httplib.HTTP): + _connection_class = UDSHTTPConnection + +class UDSTransport(xmlrpclib.Transport): + def make_connection(self, host): + return UDSHTTP(host) + +def xapi_local(): + return xmlrpclib.Server("http://_var_xapi_xapi/", transport=UDSTransport()) +##### end hack. 
+ + +class CommandException(Exception): + pass + + +def run(cmd, *args): + debug("+ " + cmd % args) + (ret, out) = commands.getstatusoutput(cmd % args) + if verbose: + try: + for line in out.split("\n"): + log("| " + line) + except TypeError as e: + pass + if ret != 0: + debug ("run - command %s failed with %d" , cmd, ret) + raise CommandException(out) + return out + +def log(fmt, *args): + print(fmt % args) + +def debug(msg, *args): + if verbose: + print(msg % args) + +def create_partition(lvpath): + # 1. write a partition table: + pipe = os.popen('/sbin/fdisk %s' % lvpath, 'w') + + pipe.write('n\n') # new partition + pipe.write('p\n') # primary + pipe.write("1\n") # 1st partition + pipe.write('\n') # default start cylinder + pipe.write('\n') # size: as big as image + pipe.write('w\n') # write partition table + + # XXX we must ignore certain errors here as fdisk will + # sometimes return non-zero signalling error conditions + # we don't care about. Should fix to detect these cases + # specifically. + rc = pipe.close() + if rc == None: + rc = 0 + log("fdisk exited with rc %d (some non-zero exits can be ignored safely)." % rc) + +def map_partitions(lvpath): + run("/sbin/kpartx -a %s", lvpath) + ps = [] + for line in run("/sbin/kpartx -l %s" % lvpath).split("\n"): + ps.append("/dev/mapper/" + line.split()[0]) + return ps + +def unmap_partitions(lvpath): + run("/sbin/kpartx -d %s", lvpath) + +def umount(mountpoint): + run("umount -l %s",mountpoint) + +if __name__ == "__main__": + #os.setpgrp() + xvda = os.getenv("xvda") + xvdb = os.getenv("xvdb") + debug("Guest's xvda is on %s" % xvda) + debug("Guest's xvdb is on %s" % xvdb) + if xvda == None or xvdb == None: + raise "Need to pass in device names for xvda and xvdb through the environment" + + vm = os.getenv("vm") + + server = xapi_local () + try: + session_id = server.session.login_with_password('','','1.0','xen-api-scripts-debian')['Value'] + uuid = server.VM.get_uuid(session_id, vm)['Value'] + mountpoint = "/tmp/installer/%s" % (uuid) + finally: + server.session.logout(session_id) + + def sighandler(signum, frame): + umount(mountpoint) + os.killpg(0,signal.SIGKILL) + exit(1) + + signal.signal(signal.SIGTERM,sighandler) + + create_partition(xvda) + create_partition(xvdb) + + try: + xvda_parts = map_partitions(xvda) + + run("/sbin/mkfs.ext3 %s", xvda_parts[0]) + + xgt = "@SHAREDIR@/packages/xgt/%s.xgt" % os.path.basename(sys.argv[0]) + + run("/bin/mkdir -p %s", mountpoint) + try: + run("/bin/mount %s %s", xvda_parts[0], mountpoint) + run("/usr/bin/unzip -p %s root.tar.bz2 | tar -C %s -jx", xgt, mountpoint) + finally: + run("/bin/umount %s", mountpoint) + run("/bin/rmdir %s", mountpoint) + run("/usr/bin/unzip -p %s swap.img | dd of=%s oflag=direct bs=1M", xgt, xvdb) + + try: + session_id = server.session.login_with_password('','','1.0','xen-api-scripts-debian')['Value'] + vbds = server.VM.get_VBDs(session_id, vm)['Value'] + for i in vbds: + dev = server.VBD.get_userdevice(session_id, i)['Value'] + if dev == "0": + server.VBD.set_bootable(session_id, i, True) + finally: + server.session.logout(session_id) + finally: + unmap_partitions(xvda) From a8338242853a4e3acc5fef991f0894409c56eabd Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 29 May 2024 10:34:44 +0000 Subject: [PATCH 101/222] CP-47869: removed plot-result under /scripts/scalability-tests/ Signed-off-by: Ashwinh --- scripts/scalability-tests/plot-result | 38 --------------------------- 1 file changed, 38 deletions(-) delete mode 100755 scripts/scalability-tests/plot-result diff --git 
a/scripts/scalability-tests/plot-result b/scripts/scalability-tests/plot-result deleted file mode 100755 index 830590c306b..00000000000 --- a/scripts/scalability-tests/plot-result +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -# -# Copyright (c) Citrix Systems 2008. All rights reserved. -# -# ./plot-result vm_per_host host1 ... hostN -# - -if [ $# -le 1 ]; then - echo "Usage: $0 vm_per_host host1 [host2 ... hostN]" - echo "${0} plot the result of ./stress-tests. Need to have all the resulting .dat files of the test in the current directory. Results are .ps files." - exit 1 -fi - -VM_PER_HOST=$1 - -shift -HOSTS=$@ -MASTER=$1 - -for OP in "start-shutdown" "suspend-resume" "reboot" "live-migrate" "non-live-migrate"; do - STR="" - for HOST in $HOSTS; do - for i in `seq 1 ${VM_PER_HOST}`; do - if [ "${STR}" == "" ] - then - STR="'debian-etch-${HOST}-${i}.${OP}.dat' title '${HOST}-${i}' with lines" - else - STR+=", 'debian-etch-${HOST}-${i}.${OP}.dat' title '${HOST}-${i}' with lines" - fi - done - done - echo "set terminal postscript color eps" > tmp.conf - echo "set output '${OP}.ps'" >> tmp.conf - echo "plot ${STR}" >> tmp.conf - gnuplot tmp.conf -done - - From 554b439e0b06018fc58e726ddd7540f9b4b7c0d4 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 29 May 2024 10:35:13 +0000 Subject: [PATCH 102/222] CP-47869: removed pool-size-tests under /scripts/scalability-tests/ Signed-off-by: Ashwinh --- scripts/scalability-tests/pool-size-tests | 43 ----------------------- 1 file changed, 43 deletions(-) delete mode 100755 scripts/scalability-tests/pool-size-tests diff --git a/scripts/scalability-tests/pool-size-tests b/scripts/scalability-tests/pool-size-tests deleted file mode 100755 index b3ea46eb9c7..00000000000 --- a/scripts/scalability-tests/pool-size-tests +++ /dev/null @@ -1,43 +0,0 @@ -#!/bin/bash -# -# Copyright (c) Citrix Systems 2008. All rights reserved. -# -# ./test-pool-size n -# -# Host1 will become the master of the pool, with host2 ... hostN as slaves. -# Then, on each host, vm_per_host VMs are created, with names debian-etch-HOST_NAME-i (for i in 1..vm_per_host) - -if [ $# -ne 1 ]; then - echo "Usage: $0 number_of_vm" - echo "Need :" - echo " * ./repeat, ./repeat-clone, ./repeat-start and ./repeat-destroy scripts to be in the same directory that ${0};" - echo " * a pool already set up with a shared NFS storage and a HVM VM called dsl;" - echo " * ${0} must be started on the master of this pool;" - echo "${0} clones , then starts them all, then shutdown them all, then destroy them all. Then it ejects one host of the pool, and do the same tests again until the master remains the last host in the pool. Each operation is recoreded into a .dat file." - exit 1 -fi - -N=${1} -IFS=:',' -HOSTS=`xe host-list --minimal` -MASTER=`xe pool-list params=master --minimal` - -c=`xe host-list --minimal | sed -e 's/,/\n/g' | wc -l` - - -#main loop -for HOST in $HOSTS; -do - if [ ${HOST} != ${MASTER} ]; then - ./repeat-clone ${N} dsl > clone-${c}.dat - ./repeat-start ${N} dsl > start-${c}.dat - ./repeat ${N} shutdown dsl --force > shutdown-${c}.dat - ./repeat-destroy ${N} dsl > destroy-${c}.dat - - echo "Ejecting ${HOST}." - xe pool-eject host-uuid=${HOST} --force - #xe host-forget uuid=${HOST} - ((c--)) - echo "Ejected." 
- fi -done From b5a0d554548cb4be824c8f10deb94045dd198daa Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 29 May 2024 10:35:49 +0000 Subject: [PATCH 103/222] CP-47869: removed provision-vm under /scripts/scalability-tests/ Signed-off-by: Ashwinh --- scripts/scalability-tests/provision-vm | 153 ------------------------- 1 file changed, 153 deletions(-) delete mode 100755 scripts/scalability-tests/provision-vm diff --git a/scripts/scalability-tests/provision-vm b/scripts/scalability-tests/provision-vm deleted file mode 100755 index 03fa99663e3..00000000000 --- a/scripts/scalability-tests/provision-vm +++ /dev/null @@ -1,153 +0,0 @@ -#!/bin/bash -# -# Copyright (c) Citrix Systems 2008. All rights reserved. -# -# ./provision-vm vm_per_host host1 host2 ... hostN -# -# Host1 will become the master of the pool, with host2 ... hostN as slaves. -# Then, on each host, vm_per_host VMs are created, with names debian-etch-HOST_NAME-i (for i in 1..vm_per_host) - -if [ $# -le 1 ]; then - echo "Usage: ${0} vm_per_host host1 [host2 ... hostN]" - echo "${0} provisions debiant-etch VMs on each host and installs them on a local VHD disk. Moreover, all the hosts join a common pool." - echo "if PROVISION_VM_WITH_CD is set to 1, then attach guest tools ISO CD-ROM to the initial Debian Etch VM before cloning it." - exit 1 -fi - -VM_PER_HOST=$1 - -shift -HOSTS=$@ -MASTER=$1 - -if [ "${PROVISION_VM_WITH_CD}" == "1" ]; then - DEB="debian-etch-withCD" -else - DEB="debian-etch" -fi - -install-vhd () { - HOST=$1 - XE="xe -u root -pw xenroot -s ${HOST}" - SR=`${XE} sr-list name-label='Local storage' --minimal` - if [ $SR ] - then - -# forget the local storage - echo "[${HOST}] Forgeting local storage." - PBD=`${XE} sr-list uuid=$SR params=PBDs --minimal` - ${XE} pbd-unplug uuid=${PBD} - ${XE} sr-forget uuid=${SR} - echo "[${HOST}] Forgotten." - -# build a local VHD storage - echo "[${HOST}] Creating a local VHD storage." - SR=`${XE} sr-create type=ext name-label=localvhd device-config:device=/dev/sda3` - ${XE} pool-param-set uuid=$(${XE} pool-list params=uuid --minimal) default-SR=${SR} crash-dump-SR=${SR} suspend-image-SR=${SR} - echo "[${HOST}] Created." - - fi -} - -install () { - HOST=$1 - XE="xe -u root -pw xenroot -s ${HOST}" - - echo "[${HOST}] Installing the Debian Etch VM." - UUID=`${XE} vm-install new-name-label=${DEB} template="Debian Etch 4.0"` - echo "[${HOST}] Installed." - - echo "[${HOST}] Setting the IP address and the memory size of the VM." - NETWORK=`${XE} network-list bridge=xenbr0 --minimal` - VIF=`${XE} vif-create vm-uuid=${UUID} network-uuid=${NETWORK} device=0` - ${XE} vm-param-set uuid=${UUID} PV-args="noninteractive" - ${XE} vm-param-set uuid=${UUID} memory-static-max="50MiB" - ${XE} vm-param-set uuid=${UUID} memory-static-min="50MiB" - ${XE} vm-param-set uuid=${UUID} memory-dynamic-max="50MiB" - ${XE} vm-param-set uuid=${UUID} memory-dynamic-min="50MiB" - echo "[${HOST}] Set." - - if [ "${PROVISION_VM_WITH_CD}" == "1" ]; then - echo "[${HOST}] Attaching a CD-ROM." - TOOLS_ISO=`${XE} vdi-list is-tools-iso=ture params=name-label --minimal` - ${XE} vm-cd-add vm=${DEB} cd-name=${TOOLS_ISO} device=3 - echo "[${HOST}] Attached." - fi - -} - -#start () { -# HOST=$1 -# XE="xe -u root -pw xenroot -s ${HOST}" -# -# echo "[${HOST}] Starting VM." -# ${XE} vm-start vm="${DEB}" -# UUID=`${XE} vm-list name-label=${DEB} params=uuid --minimal` -# -# echo "[${HOST}] Waiting for the IP address of the VM to appear. This can take a minute or so." 
-# RC=1 -# while [ ${RC} -ne 0 ] -# do -# sleep 10 -# IP=`${XE} vm-param-get uuid=${UUID} param-name=networks param-key="0/ip"` -# RC=$? -# done -# -# echo "[${HOST}] Debian Etch VM installed (IP=${IP})." -#} - -#shutdown () { -# HOST=$1 -# XE="xe -u root -pw xenroot -s ${HOST}" -# -# echo "[${HOST}] Shutting down the VM." -# ${XE} vm-shutdown vm=${DEB} -# echo "[${HOST}] Shut down." -#} - -clone () { - HOST=$1 - XE="xe -u root -pw xenroot -s ${HOST}" - - echo "# vm_number cumulative_time load_average vhd_size" > clone-${DEB}-${HOST}.dat - SR=`${XE} sr-list --minimal name-label=localvhd` - START=$(date +%s) - - for i in `seq 1 ${VM_PER_HOST}`; do - echo "[${HOST}] Cloning VM ${i}/${VM_PER_HOST}." - TMP=`${XE} vm-clone vm=${DEB} new-name-label=${DEB}-${HOST}-${i}` - CURR=$(date +%s) - DIFF=$(( ${CURR} - ${START} )) - LOADAVG=`${XE} host-data-source-query data-source=loadavg host=${HOST}` - VHDSIZE=`${XE} vdi-list --minimal sr-uuid=${SR} | sed -e 's/,/\n/g' | wc -l` - echo "${i} ${DIFF} ${LOADAVG} ${VHDSIZE}" >> clone-${DEB}-${HOST}.dat - echo "[${HOST}] Done." - done -} - -uninstall () { - HOST=$1 - XE="xe -u root -pw xenroot -s ${HOST}" - - echo "[{$HOST}] Uninstalling the Debian Etch initial VM." - ${XE} vm-uninstall force=true vm=${DEB} - echo "[${HOST}] Uninstalled." -} - -join-master () { - HOST=$1 - if [ ${HOST} != ${MASTER} ] - then - XE="xe -u root -pw xenroot -s ${HOST}" - echo "[${HOST}] Joining ${MASTER} pool." - ${XE} pool-join master-address=${MASTER} master-username=root master-password=xenroot; - echo "[${HOST}] Joined." - fi -} - -#main loop -echo "Provisioning ${VM_PER_HOST} VMs on hosts: ${HOSTS} (master is ${MASTER})." -for HOST in $HOSTS; -do - (install-vhd $HOST; install $HOST; clone $HOST; uninstall $HOST; join-master $HOST) & -done From 2efa58ea88d1ba0b4432f405148e4d0f481c598e Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 29 May 2024 10:36:22 +0000 Subject: [PATCH 104/222] CP-47869: removed repeat under /scripts/scalability-tests/ Signed-off-by: Ashwinh --- scripts/scalability-tests/repeat | 33 -------------------------------- 1 file changed, 33 deletions(-) delete mode 100755 scripts/scalability-tests/repeat diff --git a/scripts/scalability-tests/repeat b/scripts/scalability-tests/repeat deleted file mode 100755 index c2990a2d171..00000000000 --- a/scripts/scalability-tests/repeat +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -# -# Copyright (c) Citrix Systems 2008. All rights reserved. 
-# -# ./repeat n operation vm_name optional_args -# - -if [ $# -le 2 ]; then - echo "usage: $0 n operation vm_name [optional arguments]" - exit 1 -fi -N=$1 -OP=$2 -VM=$3 -EXTRA=$4 - -MASTER=`xe pool-list params=master --minimal` -START=$(date +%s) - -echo "# vm_number cumulative_time load_average" - -perform () { - i=$1 - TMP=`xe vm-${OP} ${EXTRA} vm=${VM}${i}` - CURR=$(date +%s) - DIFF=$(( ${CURR} - ${START} )) - LOADAVG=`xe host-data-source-query data-source=loadavg host=${MASTER}` - echo "${i} ${DIFF} ${LOADAVG}"; -} - -for i in `seq 1 ${N}`; do - perform $i -done From ed370b80a4485150ce4c748b2c60af2231a7cc21 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 29 May 2024 10:37:09 +0000 Subject: [PATCH 105/222] CP-47869: removed repeat-clone under /scripts/scalability-tests/ Signed-off-by: Ashwinh --- scripts/scalability-tests/repeat-clone | 33 -------------------------- 1 file changed, 33 deletions(-) delete mode 100755 scripts/scalability-tests/repeat-clone diff --git a/scripts/scalability-tests/repeat-clone b/scripts/scalability-tests/repeat-clone deleted file mode 100755 index f293465b605..00000000000 --- a/scripts/scalability-tests/repeat-clone +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -# -# Copyright (c) Citrix Systems 2008. All rights reserved. -# -# ./repeat-clone n vm_name -# - -if [ $# -ne 2 ]; then - echo "usage: $0 n vm_name" - exit 1 -fi -N=$1 -VM=$2 - -SR=`xe sr-list --minimal name-label='NFS virtual disk storage'` -MASTER=`xe pool-list params=master --minimal` -START=$(date +%s) - -echo "# vm_number cumulative_time load_average vhd_size" - -perform () { - i=$1 - TMP=`xe vm-clone vm=${VM} new-name-label=${VM}${i}` - CURR=$(date +%s) - DIFF=$(( ${CURR} - ${START} )) - LOADAVG=`xe host-data-source-query data-source=loadavg host=${MASTER}` - VHDSIZE=` xe vdi-list --minimal sr-uuid=${SR} | sed -e 's/,/\n/g' | wc -l` - echo "${i} ${DIFF} ${LOADAVG} ${VHDSIZE}" -} - -for i in `seq 1 ${N}`; do - perform $i -done From 60215d1f9284ce1061a1dc228f958893f5ead413 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 29 May 2024 10:37:39 +0000 Subject: [PATCH 106/222] CP-47869: removed repeat-destroy under /scripts/scalability-tests/ Signed-off-by: Ashwinh --- scripts/scalability-tests/repeat-destroy | 33 ------------------------ 1 file changed, 33 deletions(-) delete mode 100755 scripts/scalability-tests/repeat-destroy diff --git a/scripts/scalability-tests/repeat-destroy b/scripts/scalability-tests/repeat-destroy deleted file mode 100755 index b8031e781e4..00000000000 --- a/scripts/scalability-tests/repeat-destroy +++ /dev/null @@ -1,33 +0,0 @@ -#!/bin/bash -# -# Copyright (c) Citrix Systems 2008. All rights reserved. 
-# -# ./repeat n operation vm_name optional_args -# - -if [ $# -ne 2 ]; then - echo "usage: $0 n vm_name" - exit 1 -fi -N=$1 -VM=$2 - -MASTER=`xe pool-list params=master --minimal` -START=$(date +%s) - -echo "# vm_number cumulative_time load_average" -perform () { - i=$1 - VM_UUID=`xe vm-list name-label=${VM}${i} params=uuid --minimal` - if [ "${VM_UUID}" != "" ]; then - TMP=`xe vm-destroy uuid=${VM_UUID}` - fi - CURR=$(date +%s); - DIFF=$(( ${CURR} - ${START} )); - LOADAVG=`xe host-data-source-query data-source=loadavg host=${MASTER}` - echo "${i} ${DIFF} ${LOADAVG}"; -} - -for i in `seq 1 ${N}`; do - perform $i; -done From ec051ed903b178d5d0426730f30251dcf9df39de Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 29 May 2024 10:38:20 +0000 Subject: [PATCH 107/222] CP-47869: removed repeat-start under /scripts/scalability-tests/ Signed-off-by: Ashwinh --- scripts/scalability-tests/repeat-start | 46 -------------------------- 1 file changed, 46 deletions(-) delete mode 100755 scripts/scalability-tests/repeat-start diff --git a/scripts/scalability-tests/repeat-start b/scripts/scalability-tests/repeat-start deleted file mode 100755 index a439b7ac8b9..00000000000 --- a/scripts/scalability-tests/repeat-start +++ /dev/null @@ -1,46 +0,0 @@ -#!/bin/bash -# -# Copyright (c) Citrix Systems 2008. All rights reserved. -# -# ./repeat n operation vm_name optional_args -# - -if [ $# -ne 2 ]; then - echo "Usage: $0 n vm_name" - echo "Starts VMs nammed vm_name<1> .. vm_name and output the time taken and the load average." - echo "if WAIT_FOR_IP is set to 1, then wait the IP address to appear before starting the next VM. need xgetip executable to be in the current directory." - exit 1 -fi - -N=$1 -VM_NAME=$2 - -MASTER=`xe pool-list params=master --minimal` -START=$(date +%s) - -wait_IP () { - i=$1 - VM_UUID=`xe vm-list name-label=${VM_NAME}${i} params=uuid --minimal` - MAC=`xe vif-list vm-uuid=${VM_UUID} params=MAC --minimal` - echo "Waiting for the IP address of ${VM_NAME}${i} to appear." - IP=`./xgetip xenbr0 ${MAC} &> /dev/null` - echo "IP address of ${VM_NAME}${i} is ${IP}." -} - -echo "# vm_number cumulative_time load_average" - -perform () { - i=$1 - TMP=`xe vm-start vm=${VM_NAME}${i}` - if [ "${WAIT_FOR_IP}" == "1" ]; then - wait_IP ${i} - fi - CURR=$(date +%s) - DIFF=$(( ${CURR} - ${START} )) - LOADAVG=`xe host-data-source-query data-source=loadavg host=${MASTER}` - echo "${i} ${DIFF} ${LOADAVG}" -} - -for i in `seq 1 ${N}`; do - perform $i -done From de7e31e56e272a90976708a3a0d093ab1691e6fc Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 29 May 2024 10:39:16 +0000 Subject: [PATCH 108/222] CP-47869: removed start-tests under /scripts/scalability-tests/ Signed-off-by: Ashwinh --- scripts/scalability-tests/start-tests | 27 --------------------------- 1 file changed, 27 deletions(-) delete mode 100755 scripts/scalability-tests/start-tests diff --git a/scripts/scalability-tests/start-tests b/scripts/scalability-tests/start-tests deleted file mode 100755 index 06fc671f135..00000000000 --- a/scripts/scalability-tests/start-tests +++ /dev/null @@ -1,27 +0,0 @@ -#!/bin/bash -# -# Copyright (c) Citrix Systems 2008. All rights reserved. -# -# ./test-pool-size n vm_name -# -# Host1 will become the master of the pool, with host2 ... hostN as slaves. 
-# Then, on each host, vm_per_host VMs are created, with names debian-etch-HOST_NAME-i (for i in 1..vm_per_host) - -if [ $# -ne 2 ]; then - echo "Usage: $0 number_of_vm initial_vm_name" - echo "Need :" - echo " * ./repeat, ./repeat-clone, ./repeat-start and ./repeat-destroy scripts to be in the same directory that ${0};" - echo " * a pool already set up with a shared NFS storage and a HVM VM called dsl;" - echo " * ${0} must be started on the master of this pool;" - echo "${0} clones , then starts them all, then shutdown them all, then destroy them all." - echo "If WAIT_FOR_IP is set to 1, the script waits for the IP address of the VM to appear before starting the next VM." - exit 1 -fi - -N=${1} -VM=${2} - -./repeat-clone ${N} ${VM} > clone-${VM}.dat -./repeat-start ${N} ${VM} > start-${VM}.dat -./repeat ${N} shutdown ${VM} --force > shutdown-${VM}.dat -./repeat-destroy ${N} ${VM} > destroy-${VM}.dat \ No newline at end of file From fd0e921f741a221341430790547624475f371fba Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 29 May 2024 10:39:42 +0000 Subject: [PATCH 109/222] CP-47869: removed stress-tests under /scripts/scalability-tests/ Signed-off-by: Ashwinh --- scripts/scalability-tests/stress-tests | 121 ------------------------- 1 file changed, 121 deletions(-) delete mode 100755 scripts/scalability-tests/stress-tests diff --git a/scripts/scalability-tests/stress-tests b/scripts/scalability-tests/stress-tests deleted file mode 100755 index e193728c9e7..00000000000 --- a/scripts/scalability-tests/stress-tests +++ /dev/null @@ -1,121 +0,0 @@ -#!/bin/bash -# -# Copyright (c) Citrix Systems 2008. All rights reserved. -# -# ./stress-tests number_of_tests vm_per_host master slave1 slave2 ... slaveN -# - -if [ $# -le 2 ]; then - echo "Usage: $0 number_of_tests vm_per_host master [slave1 ... slaveN]" - echo "You need debian-etch--<1..vm_per_host> VMs installed in each host of the pool (use ./provision-vm to set them up)." - echo "${0} is a XenRT-like script. It performs: " - echo " for each VM, do sequentialy:" - echo " start/wait IP/shutdown" - echo " suspend/resume" - echo " reboot" - echo " live migrate" - echo " non-live migrate" - exit 1 -fi - -N=$1 -VM_PER_HOST=$2 - -shift -shift -HOSTS=$@ -MASTER=$1 - -XE="xe -u root -pw xenroot -s ${MASTER}" - -wait_IP () { - VM=$1 - UUID=`${XE} vm-list name-label=${VM} params=uuid --minimal` - RC=1 - while [ ${RC} -ne 0 ] - do - sleep 2 - IP=`${XE} vm-param-get uuid=${UUID} param-name=networks param-key="0/ip" &> /dev/null` - RC=$? - done -} - -start () { - VM=$1 - - ${XE} vm-start vm=${VM} - wait_IP ${VM} -} - -perform () { - OP=$1 - VM=$2 - EXTRA=$3 - - ${XE} vm-${OP} vm=${VM} $EXTRA -} - -tests () { - HOST=$1 - VM=$2 - - echo "[${VM}] start/stop tests." - START=$(date +%s) - for i in `seq 1 ${N}`; do - start ${VM}; - perform shutdown ${VM}; - CURR=$(date +%s); - DIFF=$(( ${CURR} - ${START} )); - echo "${i} ${DIFF}" >> ${VM}.start-shutdown.dat - done - - echo "[${VM}] suspend/resume tests." - start ${VM} - START=$(date +%s) - for i in `seq 1 ${N}`; do - perform suspend ${VM} - perform resume ${VM} - CURR=$(date +%s); - DIFF=$(( ${CURR} - ${START} )); - echo "${i} ${DIFF}" >> ${VM}.suspend-resume.dat - done - - echo "[${VM}] reboot tests." - START=$(date +%s) - for i in `seq 1 ${N}`; do - perform reboot ${VM} - CURR=$(date +%s); - DIFF=$(( ${CURR} - ${START} )); - echo "${i} ${DIFF}" >> ${VM}.reboot.dat - done - - wait_IP ${VM} - - echo "[${VM}] live migrate tests." 
- START=$(date +%s) - for i in `seq 1 ${N}`; do - perform migrate ${VM} "live=true host=${HOST}" - CURR=$(date +%s); - DIFF=$(( ${CURR} - ${START} )); - echo "${i} ${DIFF}" >> ${VM}.live-migrate.dat - done - - echo "[${VM}] non-live migrate tests." - START=$(date +%s) - for i in `seq 1 ${N}`; do - perform migrate ${VM} "live=false host=${HOST}" - CURR=$(date +%s); - DIFF=$(( ${CURR} - ${START} )); - echo "${i} ${DIFF}" >> ${VM}.non-live-migrate.dat - done - - perform shutdown ${VM} -} - -for HOST in ${HOSTS}; do - for i in `seq 1 ${VM_PER_HOST}`; do - VM="debian-etch-${HOST}-$i" - echo "Starting tests on ${VM}." - tests ${HOST} ${VM} & - done -done From 1afc9082a9efd7ee6a7c032db5fae4272b046fcb Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 29 May 2024 10:41:36 +0000 Subject: [PATCH 110/222] CP-47869: removed scalability-tests/event-count.py under /scripts/ Signed-off-by: Ashwinh --- scripts/scalability-tests/event-count.py | 61 ------------------------ 1 file changed, 61 deletions(-) delete mode 100644 scripts/scalability-tests/event-count.py diff --git a/scripts/scalability-tests/event-count.py b/scripts/scalability-tests/event-count.py deleted file mode 100644 index 24f3c0b5354..00000000000 --- a/scripts/scalability-tests/event-count.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python - -# Count the number of events received from the master - -from __future__ import print_function -import XenAPI, sys, time - -iso8601 = "%Y-%m-%dT%H:%M:%SZ" - - -def main(session): - global iso8601 - - token = '' - call_timeout = 30.0 - - while True: - sys.stdout.flush() - - now = time.time() - now_string = time.strftime(iso8601, time.gmtime(now)) - - try: - output = session.xenapi.event_from(["*"], token, call_timeout) - events = output['events'] - token = output['token'] - print("%s %10d 0" % (now_string, len(events))) - time.sleep(5) - - except KeyboardInterrupt: - break - - except XenAPI.Failure as e: - print(e.details) - sys.exit(1) - - -if __name__ == "__main__": - if len(sys.argv) != 4: - print("Usage:") - print(sys.argv[0], " ") - sys.exit(1) - - url = sys.argv[1] - if url[:5] != "https": - raise Exception("Must use SSL for a realistic test") - - username = sys.argv[2] - password = sys.argv[3] - - new_session = XenAPI.Session(url) - try: - new_session.xenapi.login_with_password(username, password, "1.0", "xen-api-scripts-eventcount.py") - except XenAPI.Failure as f: - print("Failed to acquire a session: %s" % f.details) - sys.exit(1) - - try: - main(new_session) - finally: - new_session.xenapi.logout() From 05b6741d052b178654ed6c6d0548b05923b04b9f Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 3 Jun 2024 10:41:50 +0000 Subject: [PATCH 111/222] Revert "CP-47869: Removed rrdd-example.py ocaml/xcp-rrdd/scripts/rrdd/" Signed-off-by: Ashwinh This reverts commit a1b06ecc238fcd474eba2fb37a1cf2b83f78d0bb. --- ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100755 ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py diff --git a/ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py b/ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py new file mode 100755 index 00000000000..e25e0ddf016 --- /dev/null +++ b/ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py @@ -0,0 +1,18 @@ +#!/usr/bin/env python + +import rrdd, os + +if __name__ == "__main__": + # Create a proxy for communicating with xcp-rrdd. + api = rrdd.API(plugin_id="host_mem") + while True: + # Wait until 0.5 seconds before xcp-rrdd is going to read the output file. 
+ api.wait_until_next_reading(neg_shift=.5) + # Collect measurements. + cmd = "free -k | grep Mem | awk '{print $2, $3, $4}'" + vs = os.popen(cmd).read().strip().split() + # Tell the proxy which datasources should be exposed in this iteration. + api.set_datasource("used_mem", vs[1], min_val=0, max_val=vs[0], units="KB") + api.set_datasource("free_mem", vs[2], min_val=0, max_val=vs[0], units="KB") + # Write all required information into a file about to be read by xcp-rrdd. + api.update() From 3885e39568406b0d29e05b65cc6f7a3c5dd49df5 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 5 Jun 2024 07:56:18 +0000 Subject: [PATCH 112/222] CP-47869: Removed event_listen.py under /ocaml/events/event_listen.py Signed-off-by: Ashwinh --- ocaml/events/event_listen.py | 20 -------------------- 1 file changed, 20 deletions(-) delete mode 100755 ocaml/events/event_listen.py diff --git a/ocaml/events/event_listen.py b/ocaml/events/event_listen.py deleted file mode 100755 index 79c0f8c4735..00000000000 --- a/ocaml/events/event_listen.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/python - -import xmlrpclib, sys - -# Don't forget to include the port in the url (eg http://melton:8086/) -if len(sys.argv) <> 4: - raise "Expected arguments: " - -server = xmlrpclib.Server(sys.argv[1]); -session = server.session.login_with_password(sys.argv[2], sys.argv[3], "1.0", "xen-api-event-listen.py")['Value'] - -server.event.register(session, ["*"]) -while True: - events = server.event.next(session)['Value'] - for event in events: - print event['id'], " ", event['class'], " ", event['operation'], " ",event['ref'], " ", - if "snapshot" in event.keys(): - print "OK" - else: - print "(no snapshot)" From aac3a262b96a64dd80b1ad5073166761461f02a7 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Fri, 14 Jun 2024 13:57:15 +0000 Subject: [PATCH 113/222] CP-49934: Disabled upload coverage report for python2.7 in /.github/workflows/other.yml Signed-off-by: Ashwinh --- .github/workflows/other.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/other.yml b/.github/workflows/other.yml index 7c00b893e4a..fc4be895fd1 100644 --- a/.github/workflows/other.yml +++ b/.github/workflows/other.yml @@ -55,6 +55,7 @@ jobs: --cov-report xml:.git/coverage${{matrix.python-version}}.xml - name: Upload Python ${{matrix.python-version}} coverage report to Codecov + if: ${{ matrix.python-version != '2.7' }} uses: codecov/codecov-action@v3 with: directory: .git From 597e50ccf058aa0590e35aec944539a9d4f6b61c Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Mon, 17 Jun 2024 05:52:23 +0100 Subject: [PATCH 114/222] Fix pytype errors Signed-off-by: Stephen Cheng --- pyproject.toml | 1 - scripts/examples/python/XenAPI/XenAPI.py | 9 +++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f9b701e4ed6..2730c0ac018 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -255,7 +255,6 @@ expected_to_fail = [ # SSLSocket.send() only accepts bytes, not unicode string as argument: "scripts/examples/python/exportimport.py", # Other fixes needed: - "scripts/examples/python/XenAPI/XenAPI.py", "scripts/examples/python/monitor-unwanted-domains.py", "scripts/examples/python/shell.py", "scripts/static-vdis", diff --git a/scripts/examples/python/XenAPI/XenAPI.py b/scripts/examples/python/XenAPI/XenAPI.py index 0211fe5e9c8..c4c71e4445e 100644 --- a/scripts/examples/python/XenAPI/XenAPI.py +++ b/scripts/examples/python/XenAPI/XenAPI.py @@ -54,6 +54,7 @@ # OF THIS SOFTWARE. 
# -------------------------------------------------------------------- +import errno import gettext import os import socket @@ -141,8 +142,8 @@ class Session(xmlrpclib.ServerProxy): session.xenapi.session.logout() """ - def __init__(self, uri, transport=None, encoding=None, verbose=0, - allow_none=1, ignore_ssl=False): + def __init__(self, uri, transport=None, encoding=None, verbose=False, + allow_none=True, ignore_ssl=False): # Fix for CA-172901 (+ Python 2.4 compatibility) # Fix for context=ctx ( < Python 2.7.9 compatibility) @@ -198,7 +199,7 @@ def _login(self, method, params): self.last_login_params = params self.API_version = self._get_api_version() except socket.error as e: - if e.errno == socket.errno.ETIMEDOUT: + if e.errno == errno.ETIMEDOUT: raise xmlrpclib.Fault(504, 'The connection timed out') else: raise e @@ -206,7 +207,7 @@ def _login(self, method, params): def _logout(self): try: if self.last_login_method.startswith("slave_local"): - return _parse_result(self.session.local_logout(self._session)) + return _parse_result(self.session.local_logout(self._session)) # pytype: disable=attribute-error else: return _parse_result(self.session.logout(self._session)) finally: From a79ce2a2a5db0ecfb741d3a3a570b562f525a5ef Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Thu, 13 Jun 2024 15:07:47 +0000 Subject: [PATCH 115/222] CP-49896: Moved xe-scsi-dev-map to bin directory under python3 - Modified Makefile to include xe-scsi-dev-map - Removed xe-scsi-dev-map from scripts/Makefile Signed-off-by: Ashwinh --- python3/Makefile | 1 + {scripts => python3/bin}/xe-scsi-dev-map | 0 scripts/Makefile | 1 - 3 files changed, 1 insertion(+), 1 deletion(-) rename {scripts => python3/bin}/xe-scsi-dev-map (100%) diff --git a/python3/Makefile b/python3/Makefile index 1384df9284c..d781ec27bd8 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -19,3 +19,4 @@ install: $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin + $(IPROG) bin/xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin diff --git a/scripts/xe-scsi-dev-map b/python3/bin/xe-scsi-dev-map similarity index 100% rename from scripts/xe-scsi-dev-map rename to python3/bin/xe-scsi-dev-map diff --git a/scripts/Makefile b/scripts/Makefile index 3c3ce93b22b..dbac54dc8b5 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -90,7 +90,6 @@ install: $(IPROG) update-ca-bundle.sh $(DESTDIR)$(OPTDIR)/bin mkdir -p $(DESTDIR)$(OPTDIR)/debug $(IPROG) debug_ha_query_liveset $(DESTDIR)$(OPTDIR)/debug - $(IPROG) xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-mount-iso-sr $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-reset-networking $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-toolstack-restart $(DESTDIR)$(OPTDIR)/bin From 56932500b2be96cd2015af56e6d7d79a2f935a25 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 17 Jun 2024 12:00:00 +0200 Subject: [PATCH 116/222] CA-390883: Add docs and improve comments on pytest and Coverage.py Signed-off-by: Bernhard Kaindl --- doc/content/python/_index.md | 129 +++++++++++++++++++++++++++++++++++ pyproject.toml | 36 ++++++---- 2 files changed, 150 insertions(+), 15 deletions(-) create mode 100644 doc/content/python/_index.md diff --git a/doc/content/python/_index.md b/doc/content/python/_index.md new file mode 100644 index 00000000000..773f02ce38c --- /dev/null +++ b/doc/content/python/_index.md @@ -0,0 +1,129 @@ +--- +title: "Python" +--- + +Introduction +------------ + +Most Python3 scripts and plugins shall be located below the `python3` directory. 
+The structure of the directory is as follows: + +- `python3/bin`: This contains files installed in `/opt/xensource/bin` + and are meant to be run by users +- `python3/libexec`: This contains files installed in `/opt/xensource/libexec` + and are meant to only be run by `xapi` and other daemons. +- `python3/packages`: Contains files to be installed in python's `site-packages` + are meant to be modules and packages to be imported by other scripts + or executed via `python3 -m` +- `python3/plugins`: This contains files that + are meant to be `xapi` plugins +- `python3/tests`: Tests for testing and covering the Python scripts and plugins + +Dependencies for development and testing +---------------------------------------- + +In GitHub CI and local testing, we can use [pre-commit] to execute the tests. +It provides a dedicated, clearly defined and always consistent Python environment. +The easiest way to run all tests and checks is to simply run [pre-commit]. +The example commands below assume that you have Python3 in your PATH. +Currently, Python 3.11 is required for it: + +```bash { title="Installing and running pre-commit" } +pip3 install pre-commit +pre-commit run -av +# Or, to just run the pytest hook: +pre-commit run -av pytest +``` + +> Note: By default, CentOS 8 provides Python 3.6, whereas some tests need Python >= 3.7 + +Alternatively, you can of course tests in any suitable environment, +given that you install the supported versions of all dependencies. +You can find the dependencies in the list [additional_dependencies] of the [pytest] hook +in the [pre-commit] configuration file [.pre-commit-config.yaml]. +{{% expand title= +"Example `pytest` hook from `.pre-commit-config.yaml` (expand)" %}} + +```yaml + hooks: + - id: pytest + files: python3/ + name: check that the Python3 test suite in passes + entry: sh -c 'coverage run && coverage xml && + coverage html && coverage report && + diff-cover --ignore-whitespace --compare-branch=origin/master + --show-uncovered --html-report .git/coverage-diff.html + --fail-under 50 .git/coverage3.11.xml' + require_serial: true + pass_filenames: false + language: python + types: [python] + additional_dependencies: + - coverage + - diff-cover + - future + - opentelemetry-api + - opentelemetry-exporter-zipkin-json + - opentelemetry-sdk + - pytest-mock + - mock + - wrapt + - XenAPI +``` + +{{% /expand %}} + +Coverage +-------- + +Code moved to the python3 directory tree shall have good code coverage using +tests that are executed, verified and covered using [pytest] and [Coverage.py]. +The `coverage` tool and [pytest] are configured in `pyproject.toml` and +`coverage run` is configured to run [pytest] by default. + +`coverage run` collects coverage from the run and stores it in its database. +The most simple command line to run and report coverage to stdout is: +`coverage run && coverage report` + +{{% expand title="Other commands also used in the pytest hook example above (expand)" %}} + +- `coverage xml`: Generates an XML report from the coverage database to + `.git/coverage3.11.xml`. It is needed for upload to +- `coverage html`: Generates an HTML report from the coverage database to + `.git/coverage_html/` +{{% /expand %}} + +We configure the file paths used for the generated database and other coverage +configuration in the sections `[tool.coverage.run]` and `[tool.coverage.report]` +of `pyproject.toml`. 
+ +Pytest +------ + +If your Python environment has the [dependencies for the tests] installed, you +can run [pytest] in this environment without any arguments to use the defaults. + +{{% expand title="For development, pytest can also only run one test (expand)" %}} + +To run a specific pytest command, run pytest and pass the test case to it (example): + +```bash { title="Example for running only one specific test" } +pytest python3/tests/test_perfmon.py +``` + +```bash { title="Running only one test and reporting the code coverage of it" } +coverage run -m pytest python3/tests/test_perfmon.py && coverage report +``` + +{{% /expand %}} + +[coverage.py]: https://coverage.readthedocs.io +"coverage.py is the coverage collector for Python" +[dependencies for the tests]: #dependencies-for-development-and-testing +"Installation of the dependencies for development and testing" +[pytest]: https://docs.pytest.org "Pytest documentation" +[pre-commit]: https://pre-commit.com "pre-commit commit hook framework" +[.pre-commit-config.yaml]: https://pre-commit.com/#adding-pre-commit-plugins-to-your-project +"project-specific configuration file of pre-commit, found in the project's top directory" +[additional_dependencies]: https://pre-commit.com/#pre-commit-configyaml---hooks +"dependencies that will be installed in the environment where this hook gets to run" diff --git a/pyproject.toml b/pyproject.toml index f9b701e4ed6..2cee87645a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,6 +30,20 @@ line-length = 88 # ----------------------------------------------------------------------------- # Coverage.py - https://coverage.readthedocs.io/en/coverage-5.5/config.html +# +# [tool.coverage.run] and [tool.coverage.report] configure these commands: +# coverage run && coverage report +# +# These work in conjunction with [tool.pytest.ini_options] to set defaults +# for running pytest (on its own) and for running Coverage.py with pytest: +# +# Examples for Python test development with Coverage.py: +# +# Run the default tests and check coverage: +# coverage run && coverage report +# +# Run a custom set of tests and check coverage: +# coverage run -m pytest python3/tests/test_*.py && coverage report # ----------------------------------------------------------------------------- [tool.coverage.report] @@ -198,22 +212,12 @@ exclude = [ [tool.pytest.ini_options] # ----------------------------------------------------------------------------- -# Options to enable for pytest by default: +# addopts: Options to add to all pytest calls: # -v show what happens # -ra show short summary after running tests -# Other options should not be passed using addopts, as addopts forces those -# options to be used every time pytest is run, which is very restrictive. -# Instead, use `coverage run` to configure coverage options, and support -# running specific tests by passing them as arguments to pytest: -# For example: -# coverage run -m pytest python3/tests/test_xenapi.py -# Adding specific --cov options using addopts is not recommended as it would -# require to use the pytest-cov plugin, which would conflict with the use of -# `coverage run`. Instead, use `coverage` to configure coverage options. 
-# Specifying directories to test is better done using the testpaths option, -# as testpaths sets the default directories to search for tests, but does not -# force them to be run, so you can still run specific tests files by just -# passing them as arguments to pytest: pytest python3/tests/test_xenapi.py +# +# addopts are added to all pytest calls. We don't add options that would force +# testing specific paths. To be flexible, we use use testpaths instead(see below) # ----------------------------------------------------------------------------- addopts = "-v -ra" @@ -223,7 +227,9 @@ addopts = "-v -ra" # log_cli_level: log level to show # python_files: pattern for test files # python_functions: pattern for test functions -# testpaths: directories to search for tests +# testpaths: directories to search for tests(by default, used for CI) +# For development, developers can test only specific files: +# Example: pytest python3/tests/test_perfmon.py # minversion: this config requires pytest>=7 to configure pythonpath # pythonpath: path to stub files and typing stubs for tests # xfail_strict: require to remove pytext.xfail marker when test is fixed From 1ee2b79257a52ebbfd70f193fc518c3a9089af63 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 17 Jun 2024 12:00:00 +0200 Subject: [PATCH 117/222] shell.py: Fix warning caused by overwriting cmd with str and whitespace Signed-off-by: Bernhard Kaindl --- pyproject.toml | 1 - scripts/examples/python/shell.py | 10 +++++----- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2730c0ac018..6912a211e27 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -256,7 +256,6 @@ expected_to_fail = [ "scripts/examples/python/exportimport.py", # Other fixes needed: "scripts/examples/python/monitor-unwanted-domains.py", - "scripts/examples/python/shell.py", "scripts/static-vdis", "scripts/plugins/extauth-hook-AD.py", ] diff --git a/scripts/examples/python/shell.py b/scripts/examples/python/shell.py index 6e5e4f8ff27..0fa226d798f 100644 --- a/scripts/examples/python/shell.py +++ b/scripts/examples/python/shell.py @@ -71,7 +71,7 @@ def munge_types (str): return True elif str == "False": return False - + try: return int(str) except: @@ -81,12 +81,12 @@ def munge_types (str): if len(sys.argv) < 2: print("Usage:") print(sys.argv[0], " ") - sys.exit(1) + sys.exit(1) if sys.argv[1] != "-" and len(sys.argv) < 4: print("Usage:") print(sys.argv[0], " ") - sys.exit(1) + sys.exit(1) if sys.argv[1] != "-": url = sys.argv[1] @@ -103,10 +103,10 @@ def munge_types (str): # We want to support directly executing the cmd line, # where appropriate if len(sys.argv) > cmdAt: - cmd = sys.argv[cmdAt] + command = sys.argv[cmdAt] params = [munge_types(x) for x in sys.argv[(cmdAt + 1):]] try: - print(session.xenapi_request(cmd, tuple(params)), file=sys.stdout) + print(session.xenapi_request(command, tuple(params)), file=sys.stdout) except XenAPI.Failure as x: print(x, file=sys.stderr) sys.exit(2) From c2b2cf9dfb39e4aa12b9b3311a62065b32b14dd2 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 17 Jun 2024 12:00:00 +0200 Subject: [PATCH 118/222] shell.py: Fix warning caused by overwriting built-in str Signed-off-by: Bernhard Kaindl --- scripts/examples/python/shell.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/scripts/examples/python/shell.py b/scripts/examples/python/shell.py index 0fa226d798f..3cfdde757db 100644 --- a/scripts/examples/python/shell.py +++ b/scripts/examples/python/shell.py @@ 
-66,16 +66,18 @@ def do_EOF(self, line): print() sys.exit(0) -def munge_types (str): - if str == "True": + +def munge_types(var): + if var == "True": return True - elif str == "False": + if var == "False": return False try: - return int(str) + return int(var) except: - return str + return var + if __name__ == "__main__": if len(sys.argv) < 2: From 1f2dacf3766e3b8bd1deed9e8252a1f5d38901c2 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 17 Jun 2024 11:40:58 +0000 Subject: [PATCH 119/222] CP-49901: Moved disk-space from scripts/plugins to python3/plugins - Modified Makefile to include disk-space in python3/plugins directory - Removed disk-space from scripts/Makefile Signed-off-by: Ashwinh --- python3/Makefile | 2 ++ {scripts => python3}/plugins/disk-space | 0 scripts/Makefile | 1 - 3 files changed, 2 insertions(+), 1 deletion(-) rename {scripts => python3}/plugins/disk-space (100%) diff --git a/python3/Makefile b/python3/Makefile index d781ec27bd8..f901767ec64 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -20,3 +20,5 @@ install: $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin + install -d -m 755 $(DESTDIR)$(PLUGINDIR) + $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) diff --git a/scripts/plugins/disk-space b/python3/plugins/disk-space similarity index 100% rename from scripts/plugins/disk-space rename to python3/plugins/disk-space diff --git a/scripts/Makefile b/scripts/Makefile index dbac54dc8b5..cfe52c6a7b9 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -131,7 +131,6 @@ install: $(IPROG) plugins/extauth-hook $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/extauth-hook-AD.py $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/install-supp-pack $(DESTDIR)$(PLUGINDIR) - $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/firewall-port $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/openvswitch-config-update $(DESTDIR)$(PLUGINDIR) mkdir -p $(DESTDIR)$(HOOKSDIR)/host-post-declare-dead From cbaab651737b27614c25d960da59c9a05456c34d Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 18 Jun 2024 10:15:55 +0000 Subject: [PATCH 120/222] CP-49930: Moved wlan.py from scripts/poweron to python3/poweron - Modified Makefile in python3 directory to include wlan.py - Fixed pytest error - Removed wlan.py from scripts/Makefile Signed-off-by: Ashwinh --- python3/Makefile | 2 ++ {scripts => python3}/poweron/wlan.py | 7 ++++--- scripts/Makefile | 2 -- 3 files changed, 6 insertions(+), 5 deletions(-) rename {scripts => python3}/poweron/wlan.py (96%) diff --git a/python3/Makefile b/python3/Makefile index f901767ec64..1965b241ff5 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -22,3 +22,5 @@ install: $(IPROG) bin/xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin install -d -m 755 $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) + $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wlan.py + $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wake-on-lan diff --git a/scripts/poweron/wlan.py b/python3/poweron/wlan.py similarity index 96% rename from scripts/poweron/wlan.py rename to python3/poweron/wlan.py index 948ba9a5433..1506968c2bd 100755 --- a/scripts/poweron/wlan.py +++ b/python3/poweron/wlan.py @@ -65,6 +65,10 @@ def get_physical_pif(session, pif_ref): def wake_on_lan(session, host, remote_host_uuid): + """ + Attempt to wake up a machine by sending Wake-On-Lan packets encapsulated within UDP datagrams + sent to the broadcast_addr. 
+ """ # Find this Host's management interface: this_pif = find_host_mgmt_pif(session, inventory.get_localhost_uuid()) # Find the name of the bridge to which it is connected: @@ -79,9 +83,6 @@ def wake_on_lan(session, host, remote_host_uuid): remote_pif = get_physical_pif(session, mgmt_pif) # Find the MAC address of the management interface: mac = session.xenapi.PIF.get_MAC(remote_pif) - - """Attempt to wake up a machine by sending Wake-On-Lan packets encapsulated within UDP datagrams - sent to the broadcast_addr.""" # A Wake-On-LAN packet contains FF:FF:FF:FF:FF:FF followed by 16 repetitions of the target MAC address bin_payload = bytes.fromhex("F" * 12 + mac.replace(":", "") * 16) diff --git a/scripts/Makefile b/scripts/Makefile index cfe52c6a7b9..d7de936178f 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -169,8 +169,6 @@ endif $(IPROG) examples/python/echo.py $(DESTDIR)$(PLUGINDIR)/echo $(IPROG) examples/python/shell.py $(DESTDIR)$(LIBEXECDIR)/shell.py # poweron - $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wlan.py - $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wake-on-lan $(IPROG) poweron/DRAC.py $(DESTDIR)$(PLUGINDIR)/DRAC.py $(IPROG) poweron/power-on.py $(DESTDIR)$(PLUGINDIR)/power-on-host # YUM plugins From 438d753dfc7c1d183e9a07ee4e0d5459119ba9fe Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 19 Jun 2024 09:03:38 +0000 Subject: [PATCH 121/222] CP-49902: Moved DRAC.py from scripts/poweron to python3/poweron - Modified Makefile in python3 directory to include DRAC.py - Removed DRAC.PY from scripts/poweron - Removed DRAC.py from scripts/Makefile - Fixed pytlint issue by using sys.exit() Signed-off-by: Ashwinh --- python3/Makefile | 1 + {scripts => python3}/poweron/DRAC.py | 2 +- scripts/Makefile | 1 - 3 files changed, 2 insertions(+), 2 deletions(-) rename {scripts => python3}/poweron/DRAC.py (98%) diff --git a/python3/Makefile b/python3/Makefile index 1965b241ff5..44300c307e7 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -24,3 +24,4 @@ install: $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wlan.py $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wake-on-lan + $(IPROG) poweron/DRAC.py $(DESTDIR)$(PLUGINDIR)/DRAC.py diff --git a/scripts/poweron/DRAC.py b/python3/poweron/DRAC.py similarity index 98% rename from scripts/poweron/DRAC.py rename to python3/poweron/DRAC.py index bace3a177a4..4493d8d6c6a 100644 --- a/scripts/poweron/DRAC.py +++ b/python3/poweron/DRAC.py @@ -49,7 +49,7 @@ def DRAC(power_on_ip, user, password): def main(): if len(sys.argv) < 3: - exit(0) + sys.exit(0) ip = sys.argv[1] user = sys.argv[2] password = sys.argv[3] diff --git a/scripts/Makefile b/scripts/Makefile index d7de936178f..5700d4bd879 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -169,7 +169,6 @@ endif $(IPROG) examples/python/echo.py $(DESTDIR)$(PLUGINDIR)/echo $(IPROG) examples/python/shell.py $(DESTDIR)$(LIBEXECDIR)/shell.py # poweron - $(IPROG) poweron/DRAC.py $(DESTDIR)$(PLUGINDIR)/DRAC.py $(IPROG) poweron/power-on.py $(DESTDIR)$(PLUGINDIR)/power-on-host # YUM plugins $(IPROG) yum-plugins/accesstoken.py $(DESTDIR)$(YUMPLUGINDIR) From ebf54eb1e12ea0dd1de3b8a77e15e3a95668b59c Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 19 Jun 2024 11:35:24 +0000 Subject: [PATCH 122/222] CP-49975: Replaced mkdir -p with \\\$(IPROG) -d for directory creation in install target Signed-off-by: Ashwinh --- python3/Makefile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/python3/Makefile b/python3/Makefile 
index 44300c307e7..e86d5c683c3 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -6,9 +6,10 @@ IDATA=install -m 644 SITE3_DIR=$(shell python3 -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())") install: - mkdir -p $(DESTDIR)$(OPTDIR)/bin - mkdir -p $(DESTDIR)$(SITE3_DIR) - mkdir -p $(DESTDIR)$(LIBEXECDIR) + $(IPROG) -d $(DESTDIR)$(OPTDIR)/bin + $(IPROG) -d $(DESTDIR)$(SITE3_DIR) + $(IPROG) -d $(DESTDIR)$(LIBEXECDIR) + $(IPROG) -d $(DESTDIR)$(PLUGINDIR) $(IDATA) packages/observer.py $(DESTDIR)$(SITE3_DIR)/ @@ -20,7 +21,6 @@ install: $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin - install -d -m 755 $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wlan.py $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wake-on-lan From 6b70aad0877cfe4606bc9ed4501bd2ad59fa70c0 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 19 Jun 2024 12:44:41 +0000 Subject: [PATCH 123/222] CP-49904: Removed exportimport.py from scripts/examples/python/ - Removed exportimport.py from expected_to_fail in pyproject.toml Signed-off-by: Ashwinh --- pyproject.toml | 2 - scripts/examples/python/exportimport.py | 142 ------------------------ 2 files changed, 144 deletions(-) delete mode 100755 scripts/examples/python/exportimport.py diff --git a/pyproject.toml b/pyproject.toml index cb2be7ffbb5..b41deb50bf4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -258,8 +258,6 @@ expected_to_fail = [ # Need 2to3 -w and maybe a few other minor updates: "scripts/backup-sr-metadata.py", "scripts/restore-sr-metadata.py", - # SSLSocket.send() only accepts bytes, not unicode string as argument: - "scripts/examples/python/exportimport.py", # Other fixes needed: "scripts/examples/python/monitor-unwanted-domains.py", "scripts/examples/python/shell.py", diff --git a/scripts/examples/python/exportimport.py b/scripts/examples/python/exportimport.py deleted file mode 100755 index bc72580659b..00000000000 --- a/scripts/examples/python/exportimport.py +++ /dev/null @@ -1,142 +0,0 @@ -#!/usr/bin/env python3 - -# Copyright (c) 2014 Citrix, Inc. -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -# Demonstrate how to -# - export raw disk images -# - import raw disk images -# - connect an export to an import to copy a raw disk image - -from __future__ import print_function -import sys, os, socket, urllib.request, urllib.error, urllib.parse, XenAPI, traceback, ssl, time - -def exportimport(url, xapi, session, src_vdi, dst_vdi): - # If an HTTP operation fails then it will record the error on the task - # object. Note you can't use the HTTP response code for this because - # it must be sent *before* the stream is processed. 
- import_task = xapi.xenapi.task.create("import " + dst_vdi, "") - export_task = xapi.xenapi.task.create("export " + src_vdi, "") - try: - # an HTTP GET of this will export a disk: - get_url = "/export_raw_vdi?session_id=%s&vdi=%s&task_id=%s" % (session, src_vdi, export_task) - # an HTTP PUT to this will import a disk: - put_url = "/import_raw_vdi?session_id=%s&vdi=%s&task_id=%s" % (session, dst_vdi, import_task) - - # 'data' is the stream of raw data: - data = urllib.request.urlopen(url + get_url) - - # python's builtin library doesn't support HTTP PUT very well - # so we do it manually. Note xapi doesn't support Transfer-encoding: - # chunked so we must send the data raw. - url = urllib.parse.urlparse(url) - host = url.netloc.split(":")[0] # assume port 443 - if url.scheme != "https": - print("Sorry, this example only supports HTTPS (not HTTP)", file=sys.stderr) - print("Plaintext HTTP has the following problems:", file=sys.stderr) - print(" - the data can be captured by other programs on the network", file=sys.stderr) - print(" - some network middleboxes will mangle the data", file=sys.stderr) - # time wasted debugging a problem caused by a middlebox: 3hrs - # Just use HTTPS! - return - - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - output = ssl.wrap_socket(s) - output.connect((host, 443)) - - # HTTP/1.0 with no transfer-encoding - headers = [ - "PUT %s HTTP/1.0" % put_url, - "Connection:close", - "" - ] - print("Sending HTTP request:") - for h in headers: - output.send("%s\r\n" % h) - print("%s\r\n" % h) - result = output.recv(1024) - print("Received HTTP response:") - print(result) - if "200 OK" not in result: - print("Expected an HTTP 200, got %s" % result, file=sys.stderr) - return - - # Copy the raw bytes, signal completion by closing the socket - virtual_size = long(xapi.xenapi.VDI.get_virtual_size(src_vdi)) - print("Copying %Ld bytes" % virtual_size) - left = virtual_size - while left > 0: - block = data.read(min(65536, left)) - if block is None: - break - output.send(block) - left = left - len(block) - output.close() - - # Wait for the tasks to complete and check whether they both - # succeeded. It takes a few seconds to detach the disk etc. 
- finished = False - while not finished: - import_status = xapi.xenapi.task.get_status(import_task) - export_status = xapi.xenapi.task.get_status(export_task) - finished = import_status != "pending" and export_task != "pending" - time.sleep(1) - if import_status == "success" and export_status == "success": - print("OK") - else: - print("FAILED") - if import_status != "success": - print("The import task failed with: ", " ".join(xapi.xenapi.task.get_error_info(import_task))) - if export_status != "success": - print("The export task failed with: ", " ".join(xapi.xenapi.task.get_error_info(export_task))) - - finally: - # The task creator has to destroy them at the end: - xapi.xenapi.task.destroy(import_task) - xapi.xenapi.task.destroy(export_task) - -if __name__ == "__main__": - if len(sys.argv) != 5: - print("Usage:") - print(sys.argv[0], " ") - print(" -- creates a fresh VDI and streams the contents of into it.") - print() - print("Example:") - print("SR=$(xe pool-list params=default-SR --minimal)") - print("VDI=$(xe vdi-create sr-uuid=$SR name-label=test virtual-size=128MiB type=user)") - print(sys.argv[0], "https://localhost password $VDI") - sys.exit(1) - url = sys.argv[1] - username = sys.argv[2] - password = sys.argv[3] - vdi_uuid = sys.argv[4] - # First acquire a valid session by logging in: - xapi = XenAPI.Session(url) - xapi.xenapi.login_with_password(username, password, '1.0', 'xen-api-scripts-exportimport.py') - dst_vdi = None - try: - src_vdi = xapi.xenapi.VDI.get_by_uuid(vdi_uuid) - sr = xapi.xenapi.VDI.get_SR(src_vdi) - # Create an empty VDI with the same initial parameters (e.g. size) - # to upload into - vdi_args = xapi.xenapi.VDI.get_record(src_vdi) - dst_vdi = xapi.xenapi.VDI.create(vdi_args) - exportimport(url, xapi, xapi._session, src_vdi, dst_vdi) - except Exception as e: - print("Caught %s: trying to clean up" % str(e)) - traceback.print_exc() - if dst_vdi: - xapi.xenapi.VDI.destroy(dst_vdi) - finally: - xapi.xenapi.logout() From 9319f5edfd057e57bc0206fe17c56ca12d275628 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 19 Jun 2024 13:52:58 +0000 Subject: [PATCH 124/222] CP-49914: Removed monitor-unwanted-domains.py from scripts/examples/python/ - Removed monitor-unwanted-domains.py from expected_to_fail in pyproject.toml Signed-off-by: Ashwinh --- pyproject.toml | 1 - .../python/monitor-unwanted-domains.py | 89 ------------------- 2 files changed, 90 deletions(-) delete mode 100644 scripts/examples/python/monitor-unwanted-domains.py diff --git a/pyproject.toml b/pyproject.toml index cb2be7ffbb5..7fa401d14a3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -261,7 +261,6 @@ expected_to_fail = [ # SSLSocket.send() only accepts bytes, not unicode string as argument: "scripts/examples/python/exportimport.py", # Other fixes needed: - "scripts/examples/python/monitor-unwanted-domains.py", "scripts/examples/python/shell.py", "scripts/static-vdis", "scripts/plugins/extauth-hook-AD.py", diff --git a/scripts/examples/python/monitor-unwanted-domains.py b/scripts/examples/python/monitor-unwanted-domains.py deleted file mode 100644 index 317725288e2..00000000000 --- a/scripts/examples/python/monitor-unwanted-domains.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python3 - -from __future__ import print_function -import os, subprocess, XenAPI, inventory, time, sys - -# Script which monitors the domains running on a host, looks for -# paused domains which don't correspond to VMs which are running here -# or are about to run here, logs them and optionally destroys them. 
- -# Return a list of (domid, uuid) tuples, one per paused domain on this host -def list_paused_domains(): - results = [] - all = subprocess.Popen(["@OPTDIR@/bin/list_domains"], stdout=subprocess.PIPE).communicate()[0] - lines = all.split("\n") - for domain in lines[1:]: - bits = domain.split() - if bits != []: - domid = bits[0] - uuid = bits[2] - state = bits[4] - if 'P' in state: - results.append( (domid, uuid) ) - return results - -# Given localhost's uuid and a (domid, uuid) tuple, return True if the domain -# be somewhere else i.e. we think it may have leaked here -def should_domain_be_somewhere_else(localhost_uuid, domain): - (domid, uuid) = domain - try: - x = XenAPI.xapi_local() - x.xenapi.login_with_password("root", "", "1.0", "xen-api-scripts-monitor-unwanted-domains.py") - try: - try: - vm = x.xenapi.VM.get_by_uuid(uuid) - resident_on = x.xenapi.VM.get_resident_on(vm) - current_operations = x.xenapi.VM.get_current_operations(vm) - result = current_operations == {} and resident_on != localhost_uuid - if result: - log("domid %s uuid %s: is not being operated on and is not resident here" % (domid, uuid)) - return result - except XenAPI.Failure as e: - if e.details[0] == "UUID_INVALID": - # VM is totally bogus - log("domid %s uuid %s: is not in the xapi database" % (domid, uuid)) - return True - # fail safe for now - return False - finally: - x.xenapi.logout() - except: - return False - -def log(str): - print(str) - -# Destroy the given domain -def destroy_domain(domain): - (domid, uuid) = domain - log("destroying domid %s uuid %s" % (domid, uuid)) - all = subprocess.Popen(["@OPTDIR@/debug/destroy_domain", "-domid", domid], stdout=subprocess.PIPE).communicate()[0] - -# Keep track of when a domain first looked like it should be here -domain_first_noticed = {} - -# Number of seconds after which we conclude that a domain really shouldn't be here -threshold = 60 - -if __name__ == "__main__": - localhost_uuid = inventory.get_localhost_uuid () - while True: - time.sleep(1) - paused = list_paused_domains () - # GC the domain_first_noticed map - for d in domain_first_noticed.keys(): - if d not in paused: - log("domid %s uuid %s: looks ok now, forgetting about it" % d) - del domain_first_noticed[d] - - for d in list_paused_domains(): - if should_domain_be_somewhere_else(localhost_uuid, d): - if d not in domain_first_noticed: - domain_first_noticed[d] = time.time() - noticed_for = time.time() - domain_first_noticed[d] - if noticed_for > threshold: - log("domid %s uuid %s: has been in bad state for over threshold" % d) - if "-destroy" in sys.argv: - destroy_domain(d) - - From d9e81653e79c6c9857004a705de7ca1b8ae73ca7 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 19 Jun 2024 14:56:12 +0000 Subject: [PATCH 125/222] CP-49913: Removed mini-xenrt.py from scripts/examples/python/ Signed-off-by: Ashwinh --- scripts/examples/python/mini-xenrt.py | 141 -------------------------- 1 file changed, 141 deletions(-) delete mode 100644 scripts/examples/python/mini-xenrt.py diff --git a/scripts/examples/python/mini-xenrt.py b/scripts/examples/python/mini-xenrt.py deleted file mode 100644 index b30e9d9973c..00000000000 --- a/scripts/examples/python/mini-xenrt.py +++ /dev/null @@ -1,141 +0,0 @@ -#!/usr/bin/env python3 - -# Receive multiple VMs -# Issue parallel loops of: reboot, suspend/resume, migrate - -from __future__ import print_function -import xmlrpc.client -from threading import Thread -import time, sys - -iso8601 = "%Y%m%dT%H:%M:%SZ" - -stop_on_first_failure = True -stop = False - -class Operation: 
- def __init__(self): - raise NotImplementedError - def execute(self, server, session_id): - raise NotImplementedError - -class Reboot(Operation): - def __init__(self, vm): - self.vm = vm - def execute(self, server, session_id): - return server.VM.clean_reboot(session_id, self.vm) - def __str__(self): - return "clean_reboot(%s)" % self.vm - -class SuspendResume(Operation): - def __init__(self, vm): - self.vm = vm - def execute(self, server, session_id): - x = { "ErrorDescription": [ "VM_MISSING_PV_DRIVERS" ] } - while "ErrorDescription" in x and x["ErrorDescription"][0] == "VM_MISSING_PV_DRIVERS": - x = server.VM.suspend(session_id, self.vm) - if "ErrorDescription" in x: - time.sleep(1) - if x["Status"] != "Success": - return x - return server.VM.resume(session_id, self.vm, False, False) - def __str__(self): - return "suspendresume(%s)" % self.vm - -class ShutdownStart(Operation): - def __init__(self, vm): - self.vm = vm - def execute(self, server, session_id): - x = server.VM.clean_shutdown(session_id, self.vm) - if x["Status"] != "Success": - return x - return server.VM.start(session_id, self.vm, False, False) - #return { "Status": "bad", "ErrorDescription": "foo" } - def __str__(self): - return "shutdownstart(%s)" % self.vm - -class LocalhostMigrate(Operation): - def __init__(self, vm): - self.vm = vm - def execute(self, server, session_id): - return server.VM.pool_migrate(session_id, self.vm, server.VM.get_resident_on(session_id, self.vm)["Value"], { "live": "true" } ) - def __str__(self): - return "localhostmigrate(%s)" % self.vm - -# Use this to give each thread a different ID -worker_count = 0 - -class Worker(Thread): - def __init__(self, server, session_id, operations): - Thread.__init__(self) - self.server = server - self.session_id = session_id - self.operations = operations - self.num_successes = 0 - self.num_failures = 0 - global worker_count - self.id = worker_count - worker_count = worker_count + 1 - def run(self): - global iso8601 - global stop_on_first_failure, stop - for op in self.operations: - description = str(op) - - if stop: - return - - start = time.strftime(iso8601, time.gmtime(time.time ())) - result = op.execute(self.server, self.session_id) - end = time.strftime(iso8601, time.gmtime(time.time ())) - - if result["Status"] == "Success": - print("SUCCESS %d %s %s %s" % (self.id, start, end, description)) - self.num_successes = self.num_successes + 1 - else: - error_descr = result["ErrorDescription"] - print("FAILURE %d %s %s %s %s" % (self.id, start, end, error_descr[0], description)) - self.num_failures = self.num_failures + 1 - if stop_on_first_failure: - stop = True - -def make_operation_list(vm): - return [ Reboot(vm), SuspendResume(vm), LocalhostMigrate(vm) ] * 100 - -if __name__ == "__main__": - if len(sys.argv) != 3: - print("Usage:") - print(" %s " % (sys.argv[0])) - print(" -- performs parallel operations on VMs with the specified other-config key") - sys.exit(1) - - x = xmlrpc.client.ServerProxy(sys.argv[1]) - key = sys.argv[2] - session = x.session.login_with_password("root", "xenroot", "1.0", "xen-api-scripts-minixenrt.py")["Value"] - vms = x.VM.get_all_records(session)["Value"] - - workers = [] - for vm in vms.keys(): - if key in vms[vm]["other_config"]: - allowed_ops = vms[vm]["allowed_operations"] - for op in [ "clean_reboot", "suspend", "pool_migrate" ]: - if op not in allowed_ops: - raise RuntimeError("VM %s is not in a state where it can %s" % (vms[vm]["name_label"], op)) - workers.append(Worker(x, session, make_operation_list(vm))) - for w in 
workers: - w.start() - for w in workers: - w.join() - successes = 0 - failures = 0 - for w in workers: - successes = successes + w.num_successes - failures = failures + w.num_failures - print("Total successes = %d" % successes) - print("Total failures = %d" % failures) - if failures == 0: - print("PASS") - sys.exit(0) - else: - print("FAIL") - sys.exit(1) From 798fa8979a991e277db1030584eb6c1339dacf0f Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 19 Jun 2024 15:06:48 +0000 Subject: [PATCH 126/222] CP-49923: Removed provision.py from scripts/examples/python/ Signed-off-by: Ashwinh --- scripts/examples/python/provision.py | 111 --------------------------- 1 file changed, 111 deletions(-) delete mode 100644 scripts/examples/python/provision.py diff --git a/scripts/examples/python/provision.py b/scripts/examples/python/provision.py deleted file mode 100644 index 3b8a224ffae..00000000000 --- a/scripts/examples/python/provision.py +++ /dev/null @@ -1,111 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2007 XenSource, Inc. -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -# Parse/regenerate the "disk provisioning" XML contained within templates -# NB this provisioning XML refers to disks which should be created when -# a VM is installed from this template. It does not apply to templates -# which have been created from real VMs -- they have their own disks. - -from __future__ import print_function -import XenAPI -import xml.dom.minidom - -class Disk: - """Represents a disk which should be created for this VM""" - def __init__(self, device, size, sr, bootable): - self.device = device # 0, 1, 2, ... 
- self.size = size # in bytes - self.sr = sr # uuid of SR - self.bootable = bootable - def toElement(self, doc): - disk = doc.createElement("disk") - disk.setAttribute("device", self.device) - disk.setAttribute("size", self.size) - disk.setAttribute("sr", self.sr) - b = "false" - if self.bootable: b = "true" - disk.setAttribute("bootable", b) - return disk - -def parseDisk(element): - device = element.getAttribute("device") - size = element.getAttribute("size") - sr = element.getAttribute("sr") - b = element.getAttribute("bootable") == "true" - return Disk(device, size, sr, b) - -class ProvisionSpec: - """Represents a provisioning specification: currently a list of required disks""" - def __init__(self): - self.disks = [] - def toElement(self, doc): - element = doc.createElement("provision") - for disk in self.disks: - element.appendChild(disk.toElement(doc)) - return element - def setSR(self, sr): - """Set the requested SR for each disk""" - for disk in self.disks: - disk.sr = sr - -def parseProvisionSpec(txt): - """Return an instance of type ProvisionSpec given XML text""" - doc = xml.dom.minidom.parseString(txt) # pytype: disable=pyi-error - all = doc.getElementsByTagName("provision") - if len(all) != 1: - raise ValueError("Expected to find exactly one element") - ps = ProvisionSpec() - disks = all[0].getElementsByTagName("disk") - for disk in disks: - ps.disks.append(parseDisk(disk)) - return ps - -def printProvisionSpec(ps): - """Return a string containing pretty-printed XML corresponding to the supplied provisioning spec""" - doc = xml.dom.minidom.Document() # pytype: disable=pyi-error - doc.appendChild(ps.toElement(doc)) - return doc.toprettyxml() - -def getProvisionSpec(session, vm): - """Read the provision spec of a template/VM""" - other_config = session.xenapi.VM.get_other_config(vm) - return parseProvisionSpec(other_config['disks']) - -def setProvisionSpec(session, vm, ps): - """Set the provision spec of a template/VM""" - txt = printProvisionSpec(ps) - try: - session.xenapi.VM.remove_from_other_config(vm, "disks") - except: - pass - session.xenapi.VM.add_to_other_config(vm, "disks", txt) - -if __name__ == "__main__": - print("Unit test of provision XML spec module") - print("--------------------------------------") - ps = ProvisionSpec() - ps.disks.append(Disk("0", "1024", "0000-0000", True)) - ps.disks.append(Disk("1", "2048", "1111-1111", False)) - print("* Pretty-printing spec") - txt = printProvisionSpec(ps) - print(txt) - print("* Re-parsing output") - ps2 = parseProvisionSpec(txt) - print("* Pretty-printing spec") - txt2 = printProvisionSpec(ps) - print(txt2) - if txt != txt2: - raise AssertionError("Sanity-check failed: print(parse(print(x))) != print(x)") - print("* OK: print(parse(print(x))) == print(x)") From 920194697d64fc0ffb51354b98083c708f39b82f Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 19 Jun 2024 15:13:20 +0000 Subject: [PATCH 127/222] CP-49925: Removed renameif.py from scripts/examples/python/ Signed-off-by: Ashwinh --- scripts/examples/python/renameif.py | 167 ---------------------------- 1 file changed, 167 deletions(-) delete mode 100755 scripts/examples/python/renameif.py diff --git a/scripts/examples/python/renameif.py b/scripts/examples/python/renameif.py deleted file mode 100755 index 4a3d796e1da..00000000000 --- a/scripts/examples/python/renameif.py +++ /dev/null @@ -1,167 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2008 XenSource, Inc. 
-# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -# Allow the user to change the MAC address -> interface mapping - -from __future__ import print_function -import XenAPI, inventory, sys - -def warn(txt): - print(txt, file=sys.stderr) - -def show_pifs(pifs): - print("NIC MAC Notes") - print("----------------------------------------------") - for ref in pifs.keys(): - notes = [] - if pifs[ref]['management']: - notes.append("management interface") - nic = pifs[ref]['device'][3:] - try: - metrics = session.xenapi.PIF_metrics.get_record(session.xenapi.PIF.get_metrics(ref)) - if metrics['carrier']: - notes.append("carrier detected") - else: - notes.append("no carrier detected") - except: - pass - - print("%3s %s %s" % (nic, pifs[ref]['MAC'], ", ".join(notes))) - -def select(pifs, key): - """Select a PIF by device name or MAC""" - for ref in pifs.keys(): - if pifs[ref]['device'][3:] == key: - return ref - if pifs[ref]['MAC'].upper() == key.upper(): - return ref - return None - -def save(session, host, pifs): - """Commit changes""" - # Check that device names are unique - devices = [] - for ref in pifs.keys(): - devices.append(pifs[ref]['device'][3:]) - for i in set(devices): - devices.remove(i) - if devices != []: - print("ERROR: cannot assign two interfaces the same NIC number (%s)" % (", ".join(i))) - print("Aborted.") - sys.exit(1) - vifs = [] - for ref in pifs.keys(): - net = pifs[ref]['network'] - for vif in session.xenapi.network.get_VIFs(net): - if session.xenapi.VIF.get_currently_attached(vif): - vifs.append(vif) - if len(vifs) > 0: - plural = "" - if len(vifs) > 1: - plural = "s" - print("WARNING: this operation requires unplugging %d guest network interface%s" % (len(vifs), plural)) - print("Are you sure you want to continue? 
(yes/no) > ", end=' ') - if sys.stdin.readline().strip().lower() != "yes": - print("Aborted.") - sys.exit(1) - for vif in vifs: - dev = session.xenapi.VIF.get_device(vif) - vm = session.xenapi.VIF.get_VM(vif) - uuid = session.xenapi.VM.get_uuid(vm) - print("Hot-unplugging interface %s on VM %s" % (dev, uuid)) - session.xenapi.VIF.unplug(vif) - - for ref in pifs.keys(): - mac = pifs[ref]['MAC'] - if pifs[ref]['management']: - print("Disabling management NIC (%s)" % mac) - session.xenapi.host.management_disable() - session.xenapi.PIF.forget(ref) - for ref in pifs.keys(): - mac = pifs[ref]['MAC'] - device = pifs[ref]['device'] - mode = pifs[ref]['ip_configuration_mode'] - IP = pifs[ref]['IP'] - netmask = pifs[ref]['IP'] - gateway = pifs[ref]['gateway'] - DNS = pifs[ref]['DNS'] - new_ref = session.xenapi.PIF.introduce(host, mac, device) - session.xenapi.PIF.reconfigure_ip(new_ref, mode, IP, netmask, gateway, DNS) - if pifs[ref]['management']: - print("Re-enabling management NIC (%s)" % mac) - session.xenapi.host.management_reconfigure(new_ref) - - for vif in vifs: - dev = session.xenapi.VIF.get_device(vif) - vm = session.xenapi.VIF.get_VM(vif) - uuid = session.xenapi.VM.get_uuid(vm) - print("Hot-plugging interface %s on VM %s" % (dev, uuid)) - session.xenapi.VIF.plug(vif) - -def renameif(session): - uuid = inventory.get_localhost_uuid () - host = session.xenapi.host.get_by_uuid(uuid) - pool = session.xenapi.pool.get_all()[0] - master = session.xenapi.pool.get_master(pool) - if host != master: - warn("This host is a slave; it is not possible to rename the management interface") - - pifs = session.xenapi.PIF.get_all_records() - for ref in pifs.keys(): - if pifs[ref]['host'] != host or pifs[ref]['physical'] != True: - del pifs[ref] - - while True: - print("Current mappings:") - show_pifs(pifs) - print() - print("Type 'quit' to quit; 'save' to save; or a NIC number or MAC address to edit") - print("> ", end=' ') - x = sys.stdin.readline().strip() - if x.lower() == 'quit': - sys.exit(0) - if x.lower() == 'save': - # If a slave, filter out the management PIF - if host != master: - for ref in pifs.keys(): - if pifs[ref]['management']: - del pifs[ref] - save(session, host, pifs) - sys.exit(0) - pif = select(pifs, x) - if pif != None: - # Make sure this is not a slave's management PIF - if host != master and pifs[pif]['management']: - print("ERROR: cannot modify the management interface of a slave.") - else: - print("Selected NIC with MAC '%s'. Enter new NIC number:" % pifs[pif]['MAC']) - print("> ", end=' ') - nic = sys.stdin.readline().strip() - if not(nic.isdigit()): - print("ERROR: must enter a number (e.g. 
0, 1, 2, 3, ...)") - else: - pifs[pif]['device'] = "eth" + nic - else: - print("NIC '%s' not found" % (x)) - print() - - -if __name__ == "__main__": - session = XenAPI.xapi_local() - session.login_with_password("", "", "1.0", "xen-api-scripts-renameifs.py") - try: - renameif(session) - finally: - session.logout() From d305227c7d14d1125c281ec202370b80073b2d40 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Thu, 20 Jun 2024 07:19:57 +0000 Subject: [PATCH 128/222] CP-49927: Removed smapiv2.py from scripts/examples Signed-off-by: Ashwinh --- scripts/examples/smapiv2.py | 293 ------------------------------------ 1 file changed, 293 deletions(-) delete mode 100644 scripts/examples/smapiv2.py diff --git a/scripts/examples/smapiv2.py b/scripts/examples/smapiv2.py deleted file mode 100644 index 1047f57825c..00000000000 --- a/scripts/examples/smapiv2.py +++ /dev/null @@ -1,293 +0,0 @@ -#!/usr/bin/env python3 - -from __future__ import print_function -import os, sys, time, socket, traceback - -log_f = open(os.dup(sys.stdout.fileno()), "w") -pid = None - -def reopenlog(log_file): - global log_f - if log_f: - log_f.close() - if log_file: - try: - log_f = open(log_file, "a") - except FileNotFoundError: - log_f = open(log_file, "w") - else: - log_f = open(os.dup(sys.stdout.fileno()), "a") - -def log(txt): - global log_f, pid - if not pid: - pid = os.getpid() - t = time.strftime("%Y%m%dT%H:%M:%SZ", time.gmtime()) - print("%s [%d] %s" % (t, pid, txt), file=log_f) - log_f.flush() - -# Functions to construct SMAPI return types ################################# - -unit = [ "Success", "Unit" ] - -# Throw this to return an SR_BACKEND_FAILURE to the caller ################## - -class BackendError(Exception): - def __init__(self, code, params): - self.code = code - self.params = params - def __str__(self): - return "BackendError(%s, %s)" % (self.code, ", ".join(self.params)) - -class Vdi_does_not_exist(Exception): - def __init__(self, vdi): - self.vdi = vdi - def __str__(self): - return "Vdi_does_not_exist(%s)" % self.vdi - -def vdi(vdi_info): -# return ['Success', ['Vdi', {'vdi': location, 'virtual_size': str(virtual_size) }]] - return ['Success', ['Vdi', vdi_info]] - -def vdis(vis): - return ['Success', ['Vdis', vis]] - -def params(params): - return ['Success', ['Params', params ]] - -def value(result): - return { "Status": "Success", "Value": result } - -def backend_error(code, params): - return [ "Failure", [ "Backend_error", code, params ] ] - -def internal_error(txt): - return [ "Failure", "Internal_error", txt ] - -def vdi_does_not_exist(): - return [ "Failure", "Vdi_does_not_exist" ] - -# Type-checking helper functions ############################################ - -vdi_info_types = { - "vdi": type(""), - "name_label": type(""), - "name_description": type(""), - "ty": type(""), - "metadata_of_pool": type(""), - "is_a_snapshot": type(True), - "snapshot_time": type(""), - "snapshot_of": type(""), - "read_only": type(True), - "cbt_enabled": type(True), - "virtual_size": type(""), - "physical_utilisation": type("") -} - -def make_vdi_info(v): - global vdi_info_types - for k in vdi_info_types: - t = vdi_info_types[k] - if t == type(""): - v[k] = str(v[k]) - elif t == type(True): - v[k] = str(v[k]).lower() == "true" - else: - raise BackendError("make_vdi_info unknown type", [ str(t) ]) - return v - -def vdi_info(v): - global vdi_info_types - for k in vdi_info_types: - if k not in v: - raise BackendError("vdi_info missing key", [ k, repr(v) ]) - t = vdi_info_types[k] - if type(v[k]) != t: - raise 
BackendError("vdi_info key has wrong type", [ k, str(t), str(type(v[k])) ]) - return v - -def expect_none(x): - if x != None: - raise BackendError("type error", [ "None", repr(x) ]) - -def expect_long(x): - if type(x) != type(0): - raise BackendError("type error", [ "long int", repr(x) ]) - -def expect_string(x): - if type(x) != type(""): - raise BackendError("type error", [ "string", repr(x) ]) - -# Well-known feature flags understood by xapi ############################## - -feature_sr_probe = "SR_PROBE" -feature_sr_update = "SR_UPDATE" -feature_sr_supports_local_caching = "SR_SUPPORTS_LOCAL_CACHING" -feature_vdi_create = "VDI_CREATE" -feature_vdi_destroy = "VDI_DESTROY" -feature_vdi_attach = "VDI_ATTACH" -feature_vdi_detach = "VDI_DETACH" -feature_vdi_resize = "VDI_RESIZE" -feature_vdi_resize_online = "VDI_RESIZE_ONLINE" -feature_vdi_clone = "VDI_CLONE" -feature_vdi_snapshot = "VDI_SNAPSHOT" -feature_vdi_activate = "VDI_ACTIVATE" -feature_vdi_deactivate = "VDI_DEACTIVATE" -feature_vdi_update = "VDI_UPDATE" -feature_vdi_introduce = "VDI_INTRODUCE" -feature_vdi_generate_config = "VDI_GENERATE_CONFIG" -feature_vdi_reset_on_boot = "VDI_RESET_ON_BOOT" - -# Unmarshals arguments and marshals results (including exceptions) ########## - -class Marshall: - def __init__(self, x): - self.x = x - - def query(self, args): - result = self.x.query() - return value(result) - - def sr_attach(self, args): - result = self.x.sr_attach(args["task"], args["sr"], args["device_config"]) - expect_none(result) - return value(unit) - def sr_detach(self, args): - result = self.x.sr_detach(args["task"], args["sr"]) - expect_none(result) - return value(unit) - def sr_destroy(self, args): - result = self.x.sr_destroy(args["task"], args["sr"]) - expect_none(result) - return value(unit) - def sr_scan(self, args): - vis = self.x.sr_scan(args["task"], args["sr"]) - result = [vdi_info(vi) for vi in vis] - return value(vdis(result)) - - def vdi_create(self, args): - vi = self.x.vdi_create(args["task"], args["sr"], vdi_info(args["vdi_info"]), args["params"]) - return value(vdi(vdi_info(vi))) - def vdi_destroy(self, args): - result = self.x.vdi_destroy(args["task"], args["sr"], args["vdi"]) - expect_none(result) - return value(unit) - - def vdi_attach(self, args): - result = self.x.vdi_attach(args["task"], args["dp"], args["sr"], args["vdi"], args["read_write"]) - expect_string(result) - return value(params(result)) - def vdi_activate(self, args): - result = self.x.vdi_activate(args["task"], args["dp"], args["sr"], args["vdi"]) - expect_none(result) - return value(unit) - def vdi_deactivate(self, args): - result = self.x.vdi_deactivate(args["task"], args["dp"], args["sr"], args["vdi"]) - expect_none(result) - return value(unit) - def vdi_detach(self, args): - result = self.x.vdi_detach(args["task"], args["dp"], args["sr"], args["vdi"]) - expect_none(result) - return value(unit) - - - def _dispatch(self, method, params): - try: - log("method = %s params = %s" % (method, repr(params))) - args = params[0] - if method == "query": - return self.query(args) - elif method == "SR.attach": - return self.sr_attach(args) - elif method == "SR.detach": - return self.sr_detach(args) - elif method == "SR.scan": - return self.sr_scan(args) - elif method == "VDI.create": - return self.vdi_create(args) - elif method == "VDI.destroy": - return self.vdi_destroy(args) - elif method == "VDI.attach": - return self.vdi_attach(args) - elif method == "VDI.activate": - return self.vdi_activate(args) - elif method == "VDI.deactivate": - return 
self.vdi_deactivate(args) - elif method == "VDI.detach": - return self.vdi_detach(args) - except BackendError as e: - log("caught %s" % e) - traceback.print_exc() - return value(backend_error(e.code, e.params)) - except Vdi_does_not_exist as e: - log("caught %s" %e) - return value(vdi_does_not_exist()) - except Exception as e: - log("caught %s" % e) - traceback.print_exc() - return value(internal_error(str(e))) - -# Helper function to daemonise ############################################## -def daemonize(): - def fork(): - try: - if os.fork() > 0: - # parent - sys.exit(0) - except Exception as e: - print("fork() failed: %s" % e, file=sys.stderr) - traceback.print_exc() - raise - fork() - os.umask(0) - os.chdir("/") - os.setsid() - fork() - devnull = open("/dev/null", "r") - os.dup2(devnull.fileno(), sys.stdin.fileno()) - devnull = open("/dev/null", "aw") - os.dup2(devnull.fileno(), sys.stdout.fileno()) - os.dup2(devnull.fileno(), sys.stderr.fileno()) - -from xmlrpc.server import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler - -# Server XMLRPC from any HTTP POST path ##################################### - -class RequestHandler(SimpleXMLRPCRequestHandler): - rpc_paths = [] - -# SimpleXMLRPCServer with SO_REUSEADDR ###################################### - -class Server(SimpleXMLRPCServer): - def __init__(self, ip, port): - SimpleXMLRPCServer.__init__(self, (ip, port), requestHandler=RequestHandler) - def server_bind(self): - self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - SimpleXMLRPCServer.server_bind(self) - -# This is a hack to patch slow socket.getfqdn calls that -# BaseHTTPServer (and its subclasses) make. -# See: http://bugs.python.org/issue6085 -# See: http://www.answermysearches.com/xmlrpc-server-slow-in-python-how-to-fix/2140/ -import http.server - -def _bare_address_string(self): - host, port = self.client_address[:2] - return '%s' % host - -http.server.BaseHTTPRequestHandler.address_string = \ - _bare_address_string - -# Given an implementation, serve requests forever ########################### - -def start(impl, ip, port, daemon): - if daemon: - log("daemonising") - daemonize() - log("will listen on %s:%d" % (ip, port)) - server = Server(ip, port) - log("server registered on %s:%d" % (ip, port)) - server.register_introspection_functions() # for debugging - server.register_instance(Marshall(impl)) - log("serving requests forever") - server.serve_forever() From bda644f95cc19d577290936a459c58aa8c0cd3ed Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Thu, 20 Jun 2024 07:20:52 +0000 Subject: [PATCH 129/222] CP-49927: Removed storage.py from scripts/examples Signed-off-by: Ashwinh --- scripts/examples/storage.py | 187 ------------------------------------ 1 file changed, 187 deletions(-) delete mode 100755 scripts/examples/storage.py diff --git a/scripts/examples/storage.py b/scripts/examples/storage.py deleted file mode 100755 index 91214a84db4..00000000000 --- a/scripts/examples/storage.py +++ /dev/null @@ -1,187 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright (C) Citrix Inc -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -# Example storage backend using SMAPIv2 using raw files and Linux losetup - -# WARNING: this API is considered to be unstable and may be changed at-will - -from __future__ import print_function -import os, sys, subprocess, json - -import smapiv2 -from smapiv2 import log, start, BackendError, Vdi_does_not_exist - -root = "/sr/" - -# [run task cmd] executes [cmd], throwing a BackendError if exits with -# a non-zero exit code. -def run(task, cmd): - code, output = subprocess.getstatusoutput(cmd) - if code != 0: - log("%s: %s exitted with code %d: %s" % (task, cmd, code, output)) - raise BackendError - log("%s: %s" % (task, cmd)) - return output - -# Use Linux "losetup" to create block devices from files -class Loop: - # [_find task path] returns the loop device associated with [path] - def _find(self, task, path): - global root - for line in run(task, "losetup -a").split("\n"): - line = line.strip() - if line != "": - bits = line.split() - loop = bits[0][0:-1] - this_path = bits[2][1:-1] - if this_path == path: - return loop - return None - # [add task path] creates a new loop device for [path] and returns it - def add(self, task, path): - run(task, "losetup -f %s" % path) - return self._find(task, path) - # [remove task path] removes the loop device associated with [path] - def remove(self, task, path): - loop = self._find(task, path) - run(task, "losetup -d %s" % loop) - -# Use FreeBSD "mdconfig" to create block devices from files -class Mdconfig: - # [_find task path] returns the unit (mdX) associated with [path] - def _find(self, task, path): - # md0 vnode 1024M /root/big.img - for line in run(task, "mdconfig -l -v").split("\n"): - if line == "": - continue - bits = line.split() - this_path = bits[3] - if this_path == path: - return bits[0] # md0 - return None - # [add task path] returns a block device associated with [path] - def add(self, task, path): - return "/dev/" + run(task, "mdconfig -a -t vnode -f %s" % path) - # [remove task path] removes the block device associated with [path] - def remove(self, task, path): - md = self._find(task, path) - if md: - run(task, "mdconfig -d -u %s" % md) - -# [path_of_vdi vdi] returns the path in the local filesystem corresponding -# to vdi location [vdi] -def path_of_vdi(vdi): - global root - return root + vdi - -disk_suffix = ".raw" -metadata_suffix = ".json" - -class RawFiles: - def __init__(self, device): - self.device = device - - def query(self): - return { "name": "RawFiles", - "vendor": "XCP", - "version": "0.1", - "features": [ smapiv2.feature_vdi_create, - smapiv2.feature_vdi_destroy, - smapiv2.feature_vdi_attach, - smapiv2.feature_vdi_detach, - smapiv2.feature_vdi_activate, - smapiv2.feature_vdi_deactivate ] } - - def sr_attach(self, task, sr, device_config): - if not(os.path.exists(root)): - raise BackendError("SR directory doesn't exist", [ root ]) - def sr_detach(self, task, sr): - pass - def sr_destroy(self, task, sr): - pass - def sr_scan(self, task, sr): - global root - log("scanning") - results = [] - for name in os.listdir(root): - if name.endswith(metadata_suffix): - path = root + "/" + name - f = open(path, "r") - try: - vdi_info = json.loads(f.read()) - 
results.append(smapiv2.make_vdi_info(vdi_info)) - finally: - f.close() - return results - - def vdi_create(self, task, sr, vdi_info, params): - filename = run(task, "uuidgen") - run(task, "dd if=/dev/zero of=%s%s bs=1 count=0 seek=%s" % (path_of_vdi(filename), disk_suffix, vdi_info["virtual_size"])) - vdi_info["vdi"] = filename - f = open(path_of_vdi(filename) + metadata_suffix, "w") - try: - f.write(json.dumps(vdi_info)) - finally: - f.close() - return vdi_info - def vdi_destroy(self, task, sr, vdi): - if not (os.path.exists(path_of_vdi(vdi) + disk_suffix)): - raise Vdi_does_not_exist(vdi) - run(task, "rm -f %s%s" % (path_of_vdi(vdi), disk_suffix)) - run(task, "rm -f %s%s" % (path_of_vdi(vdi), metadata_suffix)) - - def vdi_attach(self, task, dp, sr, vdi, read_write): - path = path_of_vdi(vdi) + disk_suffix - loop = self.device.add(task, path) - log("loop = %s" % repr(loop)) - return loop - - def vdi_activate(self, task, dp, sr, vdi): - pass - def vdi_deactivate(self, task, dp, sr, vdi): - pass - def vdi_detach(self, task, dp, sr, vdi): - path = path_of_vdi(vdi) + disk_suffix - self.device.remove(task, path) - -if __name__ == "__main__": - from optparse import OptionParser - - parser = OptionParser() - parser.add_option("-l", "--log", dest="logfile", help="log to LOG", metavar="LOG") - parser.add_option("-p", "--port", dest="port", help="listen on PORT", metavar="PORT") - parser.add_option("-i", "--ip-addr", dest="ip", help="listen on IP", metavar="IP") - parser.add_option("-d", "--daemon", action="store_true", dest="daemon", help="run as a background daemon", metavar="DAEMON") - (options, args) = parser.parse_args() - if options.logfile: - from smapiv2 import reopenlog - reopenlog(options.logfile) - if not options.ip and not options.ip: - print("Need an --ip-addr and --port. Use -h for help", file=sys.stderr) - sys.exit(1) - - ip = options.ip - port = int(options.port) - - arch = run("startup", "uname") - if arch == "Linux": - log("startup: Using loop devices") - start(RawFiles(Loop()), ip, port, options.daemon) - elif arch == "FreeBSD": - log("startup: Using mdconfig devices") - start(RawFiles(Mdconfig()), ip, port, options.daemon) - else: - log("startup: Unknown architecture: %s" % arch) From f387691bee60c8d6789109a95198318f223bff86 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Thu, 20 Jun 2024 07:23:54 +0000 Subject: [PATCH 130/222] CP-49927: Removed storage-server from scripts/examples Signed-off-by: Ashwinh --- scripts/examples/storage-server | 18 ------------------ 1 file changed, 18 deletions(-) delete mode 100755 scripts/examples/storage-server diff --git a/scripts/examples/storage-server b/scripts/examples/storage-server deleted file mode 100755 index d5d859d9f14..00000000000 --- a/scripts/examples/storage-server +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/sh - -. /etc/rc.subr - -name="storageserver" -start_cmd="${name}_start" -stop_cmd=":" - -storageserver_start() -{ - ip=$(ifconfig xn0 | grep inet | cut -f 2 -d " ") - cd /root - /usr/local/bin/python storage.py --ip-addr ${ip} --port 8080 --log /var/log/SMlog --daemon - echo "storageserver started on ${ip}." -} - -load_rc_config $name -run_rc_command "$1" From e95a92bbdbeba4f5dd6adf3cbc62e255eb2a4939 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Thu, 20 Jun 2024 13:54:21 +0000 Subject: [PATCH 131/222] CP-49922: Move scripts/probe-device-for-file to python3/libexec Also fix multiple-import on same line and indentation/spacing. 
Signed-off-by: Ashwinh --- python3/Makefile | 1 + .../libexec}/probe-device-for-file | 15 +++++++++------ scripts/Makefile | 1 - 3 files changed, 10 insertions(+), 7 deletions(-) rename {scripts => python3/libexec}/probe-device-for-file (87%) diff --git a/python3/Makefile b/python3/Makefile index e86d5c683c3..2d27b6694fb 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -17,6 +17,7 @@ install: $(IPROG) libexec/usb_reset.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/usb_scan.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/nbd_client_manager.py $(DESTDIR)$(LIBEXECDIR) + $(IPROG) libexec/probe-device-for-file $(DESTDIR)$(LIBEXECDIR) $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin diff --git a/scripts/probe-device-for-file b/python3/libexec/probe-device-for-file similarity index 87% rename from scripts/probe-device-for-file rename to python3/libexec/probe-device-for-file index be07f40758f..46882c2cbec 100755 --- a/scripts/probe-device-for-file +++ b/python3/libexec/probe-device-for-file @@ -2,13 +2,16 @@ # (c) Anil Madhavapeddy, Citrix Systems Inc, 2008 # Checks for the existence of a file on a device -import os, sys +import os +import sys + try: - import xenfsimage + import xenfsimage except ImportError: - import fsimage as xenfsimage + import fsimage as xenfsimage from contextlib import contextmanager + # https://stackoverflow.com/a/17954769 @contextmanager def stderr_redirected(to=os.devnull): @@ -33,7 +36,7 @@ def stderr_redirected(to=os.devnull): with open(to, 'w') as file: _redirect_stderr(to=file) try: - yield # allow code to be run with the redirected stderr + yield # allow code to be run with the redirected stderr finally: _redirect_stderr(to=old_stderr) # restore stderr. # buffering and flags such as @@ -41,8 +44,8 @@ def stderr_redirected(to=os.devnull): if __name__ == "__main__": if len(sys.argv) != 3: - print("Usage: %s " % sys.argv[0]) - sys.exit(2) + print("Usage: %s " % sys.argv[0]) + sys.exit(2) device = sys.argv[1] file = sys.argv[2] try: diff --git a/scripts/Makefile b/scripts/Makefile index 5700d4bd879..5e00c11dfea 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -117,7 +117,6 @@ install: $(IPROG) print-custom-templates $(DESTDIR)$(LIBEXECDIR) $(IPROG) backup-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) restore-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) - $(IPROG) probe-device-for-file $(DESTDIR)$(LIBEXECDIR) $(IPROG) backup-metadata-cron $(DESTDIR)$(LIBEXECDIR) $(IPROG) pbis-force-domain-leave $(DESTDIR)$(LIBEXECDIR) mkdir -p $(DESTDIR)/etc/sysconfig From d4524d07d1d3713ab6df2478405147740ba2160b Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 18 Jun 2024 13:04:30 +0000 Subject: [PATCH 132/222] CP-49910: Move scripts link-vms-by-sr.py from to python3/libexec Signed-off-by: Ashwinh --- python3/Makefile | 1 + {scripts => python3/libexec}/link-vms-by-sr.py | 0 scripts/Makefile | 1 - 3 files changed, 1 insertion(+), 1 deletion(-) rename {scripts => python3/libexec}/link-vms-by-sr.py (100%) diff --git a/python3/Makefile b/python3/Makefile index e86d5c683c3..f05b186d148 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -14,6 +14,7 @@ install: $(IDATA) packages/observer.py $(DESTDIR)$(SITE3_DIR)/ + $(IPROG) libexec/link-vms-by-sr.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/usb_reset.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/usb_scan.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/nbd_client_manager.py $(DESTDIR)$(LIBEXECDIR) diff --git a/scripts/link-vms-by-sr.py b/python3/libexec/link-vms-by-sr.py similarity index 100% 
rename from scripts/link-vms-by-sr.py rename to python3/libexec/link-vms-by-sr.py diff --git a/scripts/Makefile b/scripts/Makefile index 5700d4bd879..3b468e4e591 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -113,7 +113,6 @@ install: $(IPROG) host-display $(DESTDIR)$(LIBEXECDIR) $(IPROG) xe-backup-metadata $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-restore-metadata $(DESTDIR)$(OPTDIR)/bin - $(IPROG) link-vms-by-sr.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) print-custom-templates $(DESTDIR)$(LIBEXECDIR) $(IPROG) backup-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) restore-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) From 5102a23534aa4cd9da82640a7cbb0aa75e5c3b3f Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 24 Jun 2024 12:00:00 +0200 Subject: [PATCH 133/222] Fix pyright: Set stubPath and fix hfx_filename to work with it Signed-off-by: Bernhard Kaindl --- pyproject.toml | 2 ++ python3/bin/hfx_filename | 2 +- python3/libexec/link-vms-by-sr.py | 4 ++-- python3/stubs/xcp/logger.pyi | 2 ++ 4 files changed, 7 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index c711c24c652..74f4adf981e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -194,9 +194,11 @@ disable = [ # typeCheckingMode: set the standard type checking mode include = ["python3", "ocaml/xcp-rrdd"] strict = ["python3/tests/observer"] +stubPath = "python3/stubs" pythonPlatform = "Linux" typeCheckingMode = "standard" reportMissingImports = false +reportMissingModuleSource = false pythonVersion = "3.6" exclude = [ "ocaml/xcp-rrdd/scripts/rrdd/rrdd.py", diff --git a/python3/bin/hfx_filename b/python3/bin/hfx_filename index 28fb05bbc78..616e5921abb 100755 --- a/python3/bin/hfx_filename +++ b/python3/bin/hfx_filename @@ -87,7 +87,7 @@ def read_field(session_id, table, fld, rf): if __name__ == "__main__": xapi = XenAPI.xapi_local() - xapi.xenapi.login_with_password('root', '') + xapi.xenapi.login_with_password("root", "", "1.0", "hfx_filename") session_id = xapi._session try: rf = db_get_by_uuid(session_id, "pool_patch", sys.argv[1]) diff --git a/python3/libexec/link-vms-by-sr.py b/python3/libexec/link-vms-by-sr.py index 98fcfa587ed..e845dd12bfe 100755 --- a/python3/libexec/link-vms-by-sr.py +++ b/python3/libexec/link-vms-by-sr.py @@ -86,9 +86,9 @@ def main(): print("Failed to create directory: %s" % linkdir, file=sys.stderr) for vmuuid in list(vms_in_sr[sruuid].keys()): + src = "../../all/{}.vmmeta".format(vmuuid) + targ = "{}/{}.vmmeta".format(linkdir, vmuuid) try: - src = "../../all/{}.vmmeta".format(vmuuid) - targ = "{}/{}.vmmeta".format(linkdir, vmuuid) os.symlink(src, targ) except: print("Failed to create symlink: %s -> %s" % (src, targ), file=sys.stderr) diff --git a/python3/stubs/xcp/logger.pyi b/python3/stubs/xcp/logger.pyi index f4aa2dab371..0b42b05eb47 100644 --- a/python3/stubs/xcp/logger.pyi +++ b/python3/stubs/xcp/logger.pyi @@ -1,6 +1,8 @@ # Minimal stub for xcp.logger module def debug(*al, **ad) -> None: ... +def info(*al, **ad) -> None: ... def error(*al, **ad) -> None: ... def warning(*al, **ad) -> None: ... +def critical(*al, **ad) -> None: ... def logToStdout(level) -> bool: ... def logToSyslog(level) -> bool: ... 
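For context on the hfx_filename fix above: the four-argument login_with_password(user, password, version, originator) call it adopts is the same local-session pattern the other Python scripts touched by this series use together with XenAPI.xapi_local(). A minimal sketch of that shared pattern follows (illustrative only, not code from any patch here; "example-originator" is a placeholder originator string):

    #!/usr/bin/env python3
    import XenAPI

    session = XenAPI.xapi_local()
    # Empty user/password are what the local-session scripts in this series pass.
    session.xenapi.login_with_password("", "", "1.0", "example-originator")
    try:
        # Any read-only call works here; pool.get_all() is only an illustration.
        pool = session.xenapi.pool.get_all()[0]
        print(session.xenapi.pool.get_uuid(pool))
    finally:
        # Always release the session, mirroring the try/finally and atexit logout
        # handling used by renameif.py and link-vms-by-sr.py.
        session.xenapi.session.logout()
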
From 9f9511884812a05db647764513dd92f86558e5d0 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 24 Jun 2024 12:00:00 +0200 Subject: [PATCH 134/222] CP-49910/pylint: Add docstrings and comments for link-vms-by-sr.py Signed-off-by: Bernhard Kaindl --- python3/Makefile | 1 + python3/libexec/link-vms-by-sr.py | 34 ++++++++++++++++++++++++++----- 2 files changed, 30 insertions(+), 5 deletions(-) diff --git a/python3/Makefile b/python3/Makefile index f05b186d148..01735511460 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -6,6 +6,7 @@ IDATA=install -m 644 SITE3_DIR=$(shell python3 -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())") install: + # Create destination directories using install -m 755 -d: $(IPROG) -d $(DESTDIR)$(OPTDIR)/bin $(IPROG) -d $(DESTDIR)$(SITE3_DIR) $(IPROG) -d $(DESTDIR)$(LIBEXECDIR) diff --git a/python3/libexec/link-vms-by-sr.py b/python3/libexec/link-vms-by-sr.py index e845dd12bfe..2b5a37a8ba9 100755 --- a/python3/libexec/link-vms-by-sr.py +++ b/python3/libexec/link-vms-by-sr.py @@ -1,5 +1,21 @@ #!/usr/bin/env python3 -# Populate a directory of symlinks partitioning VMs by SR +""" +link-vms-by-sr.py - Populate the given input_directory with VM metadata files, + and create a directory structure of + symlinks to the metadata files, partitioning VMs by SR UUID. + +Usage: + link-vms-by-sr.py -d + +The script uses the XenAPI to get a list of VMs in each SR +and get the metadata for each VM, writing the metadata to the input_dir, +and creating symlink directories in the input_dir/by-sr directory. + +Below the input_dir, given by -d : +- In the /all/ directory, store all VM metadata files. +- In the /by-sr/ directory, create symlinks to the VM metadata files, + partitioned by a directory structure of SR UUIDs. 
+""" # (c) Anil Madhavapeddy, Citrix Systems Inc, 2008 from __future__ import print_function @@ -15,18 +31,21 @@ def logout(session): + """atexit handler to logout of the xapi session, ignoring any exceptions""" with contextlib.suppress(Exception): session.xenapi.session.logout() -def get_input_dir(): +def get_input_dir_from_argparser(): + """Parse command line arguments (-d input_dir) and return the input directory""" parser = argparse.ArgumentParser() parser.add_argument("-d", dest="input_dir", required=True, help="Specify the input directory") args = parser.parse_args() return args.input_dir -def get_vms_in_sr(session): +def get_vms_in_sr_from_xapi(session): + """Return a dictionary of SR UUIDs to VM UUIDs""" vms = session.xenapi.VM.get_all_records() vbds = session.xenapi.VBD.get_all_records() vdis = session.xenapi.VDI.get_all_records() @@ -67,13 +86,18 @@ def get_vms_in_sr(session): def main(): + """Main function to save VM metadata files and link them by SR UUID""" + + # Get a session for the local host, login and register a logout handler session = XenAPI.xapi_local() session.xenapi.login_with_password("", "", "1.0", "xen-api-scripts-linkvmsbysr.py") atexit.register(logout, session) - input_dir = get_input_dir() - vms_in_sr = get_vms_in_sr(session) + # Parse the input directory and get the VMs in each SR + input_dir = get_input_dir_from_argparser() + vms_in_sr = get_vms_in_sr_from_xapi(session) + # Create the directory structure and populate it with symlinks for sruuid in list(vms_in_sr.keys()): linkdir = "{}/by-sr/{}".format(input_dir, sruuid) if Path(linkdir).is_dir(): From 1139c5b840f2b3a898f151c56d7946d907dd1c0a Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 24 Jun 2024 12:00:00 +0200 Subject: [PATCH 135/222] CP-49910/pylint: Fix bare-except: mkdir/symlink raise OSErrors Signed-off-by: Bernhard Kaindl --- python3/libexec/link-vms-by-sr.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python3/libexec/link-vms-by-sr.py b/python3/libexec/link-vms-by-sr.py index 2b5a37a8ba9..0df20b4bf97 100755 --- a/python3/libexec/link-vms-by-sr.py +++ b/python3/libexec/link-vms-by-sr.py @@ -106,7 +106,7 @@ def main(): try: Path(linkdir).mkdir(parents=True) - except: + except OSError: print("Failed to create directory: %s" % linkdir, file=sys.stderr) for vmuuid in list(vms_in_sr[sruuid].keys()): @@ -114,7 +114,7 @@ def main(): targ = "{}/{}.vmmeta".format(linkdir, vmuuid) try: os.symlink(src, targ) - except: + except OSError: print("Failed to create symlink: %s -> %s" % (src, targ), file=sys.stderr) session.xenapi.logout() From 35724f7be4b5b32115e42836aaa00766a394a9f1 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 25 Jun 2024 10:35:03 +0000 Subject: [PATCH 136/222] CP-49907: Moved host-display to python3/libexec directory - Modified Makefile to include host-display in python3 directory Signed-off-by: Ashwinh --- python3/Makefile | 1 + {scripts => python3/libexec}/host-display | 0 scripts/Makefile | 1 - 3 files changed, 1 insertion(+), 1 deletion(-) rename {scripts => python3/libexec}/host-display (100%) diff --git a/python3/Makefile b/python3/Makefile index 52e8eec787e..115933e8d4d 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -15,6 +15,7 @@ install: $(IDATA) packages/observer.py $(DESTDIR)$(SITE3_DIR)/ + $(IPROG) libexec/host-display $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/link-vms-by-sr.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/usb_reset.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/usb_scan.py $(DESTDIR)$(LIBEXECDIR) diff --git 
a/scripts/host-display b/python3/libexec/host-display similarity index 100% rename from scripts/host-display rename to python3/libexec/host-display diff --git a/scripts/Makefile b/scripts/Makefile index ed91ab5b866..145ab1fe4cf 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -110,7 +110,6 @@ install: $(IPROG) set-hostname $(DESTDIR)$(LIBEXECDIR) $(IPROG) update-mh-info $(DESTDIR)$(LIBEXECDIR) $(IPROG) host-bugreport-upload $(DESTDIR)$(LIBEXECDIR)/host-bugreport-upload - $(IPROG) host-display $(DESTDIR)$(LIBEXECDIR) $(IPROG) xe-backup-metadata $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-restore-metadata $(DESTDIR)$(OPTDIR)/bin $(IPROG) print-custom-templates $(DESTDIR)$(LIBEXECDIR) From c351b3e7aa44372c1aa4a44664250f545d46034f Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 25 Jun 2024 13:51:45 +0000 Subject: [PATCH 137/222] CP-49921: Moved print-custom-templates from scripts to python3/libexec directory - Modified python3/Makefile to include this change. Signed-off-by: Ashwinh --- python3/Makefile | 3 ++- {scripts => python3/libexec}/print-custom-templates | 4 ++-- scripts/Makefile | 1 - 3 files changed, 4 insertions(+), 4 deletions(-) rename {scripts => python3/libexec}/print-custom-templates (90%) diff --git a/python3/Makefile b/python3/Makefile index 52e8eec787e..24a8d7dfbee 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -20,7 +20,8 @@ install: $(IPROG) libexec/usb_scan.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/nbd_client_manager.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/probe-device-for-file $(DESTDIR)$(LIBEXECDIR) - + $(IPROG) libexec/print-custom-templates $(DESTDIR)$(LIBEXECDIR) + $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin diff --git a/scripts/print-custom-templates b/python3/libexec/print-custom-templates similarity index 90% rename from scripts/print-custom-templates rename to python3/libexec/print-custom-templates index 4ae15250951..882dc068732 100755 --- a/scripts/print-custom-templates +++ b/python3/libexec/print-custom-templates @@ -20,8 +20,8 @@ def main(argv): atexit.register(logout, session) templates = session.xenapi.VM.get_all_records_where('field "is_a_template" = "true" and field "is_a_snapshot" = "false"' ) - except: - print("Error retrieving template list", file=sys.stderr) + except Exception as e: + print(type(e).__name__, "retrieving template list:", e, file=sys.stderr) sys.exit(1) output=[] diff --git a/scripts/Makefile b/scripts/Makefile index ed91ab5b866..23cde5f3c65 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -113,7 +113,6 @@ install: $(IPROG) host-display $(DESTDIR)$(LIBEXECDIR) $(IPROG) xe-backup-metadata $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-restore-metadata $(DESTDIR)$(OPTDIR)/bin - $(IPROG) print-custom-templates $(DESTDIR)$(LIBEXECDIR) $(IPROG) backup-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) restore-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) backup-metadata-cron $(DESTDIR)$(LIBEXECDIR) From ced62bd4c52f6ee2a6121c97b40b66db1ecedc7f Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 18 Jun 2024 10:48:53 +0000 Subject: [PATCH 138/222] CP-49920: Moved power-on.py from scripts/poweron to python3/poweron - Modified Makefile to include power-on.py under python3 directory - Removed power-on.py from scripts/Makefile Signed-off-by: Ashwinh --- python3/Makefile | 2 ++ {scripts => python3}/poweron/power-on.py | 5 +++-- scripts/Makefile | 2 -- 3 files changed, 5 insertions(+), 4 deletions(-) rename {scripts => 
python3}/poweron/power-on.py (95%) diff --git a/python3/Makefile b/python3/Makefile index 52e8eec787e..c9311858196 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -25,6 +25,8 @@ install: $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) +# poweron $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wlan.py $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wake-on-lan $(IPROG) poweron/DRAC.py $(DESTDIR)$(PLUGINDIR)/DRAC.py + $(IPROG) poweron/power-on.py $(DESTDIR)$(PLUGINDIR)/power-on-host diff --git a/scripts/poweron/power-on.py b/python3/poweron/power-on.py similarity index 95% rename from scripts/poweron/power-on.py rename to python3/poweron/power-on.py index 34fec2f1e60..bad899c343d 100644 --- a/scripts/poweron/power-on.py +++ b/python3/poweron/power-on.py @@ -3,6 +3,7 @@ # Example script which shows how to use the XenAPI to find a particular Host's management interface # and send it a wake-on-LAN packet. +import sys import syslog import time @@ -26,8 +27,8 @@ def waitForXapi(session, host): metrics = session.xenapi.host.get_metrics(host) try: finished = session.xenapi.host_metrics.get_live(metrics) - except: - pass + except Exception as e: + print(type(e).__name__, "occurred:", e, file=sys.stderr) return str(finished) diff --git a/scripts/Makefile b/scripts/Makefile index ed91ab5b866..726c4a664ce 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -166,8 +166,6 @@ endif $(IDATA) examples/python/inventory.py $(DESTDIR)$(SITE3_DIR)/ $(IPROG) examples/python/echo.py $(DESTDIR)$(PLUGINDIR)/echo $(IPROG) examples/python/shell.py $(DESTDIR)$(LIBEXECDIR)/shell.py -# poweron - $(IPROG) poweron/power-on.py $(DESTDIR)$(PLUGINDIR)/power-on-host # YUM plugins $(IPROG) yum-plugins/accesstoken.py $(DESTDIR)$(YUMPLUGINDIR) $(IDATA) yum-plugins/accesstoken.conf $(DESTDIR)$(YUMPLUGINCONFDIR) From ef9f1f04bc084647c6e15f939ea07b09a52ce510 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 25 Jun 2024 15:53:28 +0000 Subject: [PATCH 139/222] CP-49909: Moved install-sup-pack from scripts/plugins to python3/plugins directory - fixed bare-except and raise-missing errors Signed-off-by: Ashwinh --- python3/Makefile | 2 ++ {scripts => python3}/plugins/install-supp-pack | 12 ++++++------ scripts/Makefile | 1 - 3 files changed, 8 insertions(+), 7 deletions(-) rename {scripts => python3}/plugins/install-supp-pack (90%) diff --git a/python3/Makefile b/python3/Makefile index 4f625d264be..4117ad4fc35 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -27,6 +27,8 @@ install: $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) + $(IPROG) plugins/install-supp-pack $(DESTDIR)$(PLUGINDIR) + # poweron $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wlan.py $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wake-on-lan diff --git a/scripts/plugins/install-supp-pack b/python3/plugins/install-supp-pack similarity index 90% rename from scripts/plugins/install-supp-pack rename to python3/plugins/install-supp-pack index 8143215c4b2..83db0303186 100755 --- a/scripts/plugins/install-supp-pack +++ b/python3/plugins/install-supp-pack @@ -36,8 +36,8 @@ def install(session, args): vdi_ref = None try: vdi_ref = session.xenapi.VDI.get_by_uuid(vdi) - except: - raise ArgumentError("VDI parameter invalid") + except Exception as exc: + raise ArgumentError("VDI parameter invalid") from exc inventory = xcp.environ.readInventory() this_host_uuid 
= inventory["INSTALLATION_UUID"] @@ -46,8 +46,8 @@ def install(session, args): update_ref = None try: update_ref = session.xenapi.pool_update.introduce(vdi_ref) - except: - raise ArgumentError("VDI contains invalid update package") + except Exception as exc: + raise ArgumentError("VDI contains invalid update package") from exc try: session.xenapi.pool_update.apply(update_ref, this_host_ref) @@ -57,9 +57,9 @@ def install(session, args): # "['ERRORCODE', 'error_message']" # fetch the error_message and display it. error = json.loads(str(e))[1].encode("utf8") - except: + except Exception: error = str(e) - raise InstallFailure("Failed to install the supplemental pack", error) + raise InstallFailure("Failed to install the supplemental pack", error) from e return "OK" diff --git a/scripts/Makefile b/scripts/Makefile index ffcee71e5d5..434d0819de1 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -126,7 +126,6 @@ install: $(IPROG) plugins/perfmon $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/extauth-hook $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/extauth-hook-AD.py $(DESTDIR)$(PLUGINDIR) - $(IPROG) plugins/install-supp-pack $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/firewall-port $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/openvswitch-config-update $(DESTDIR)$(PLUGINDIR) mkdir -p $(DESTDIR)$(HOOKSDIR)/host-post-declare-dead From f80b1260de9a2ddd15b22b1bbf627e0950365946 Mon Sep 17 00:00:00 2001 From: Ashwin Date: Thu, 27 Jun 2024 04:29:48 +0530 Subject: [PATCH 140/222] CP-49903: mv echo plugin from scripts/examples/python/echo.py to python3/plugins (#5742) Signed-off-by: Ashwinh Co-authored-by: Bernhard Kaindl --- python3/Makefile | 1 + {scripts/examples/python => python3/plugins}/echo.py | 4 ++-- scripts/Makefile | 1 - 3 files changed, 3 insertions(+), 3 deletions(-) rename {scripts/examples/python => python3/plugins}/echo.py (74%) diff --git a/python3/Makefile b/python3/Makefile index 4117ad4fc35..8f34cb8e107 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -28,6 +28,7 @@ install: $(IPROG) bin/xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/install-supp-pack $(DESTDIR)$(PLUGINDIR) + $(IPROG) plugins/echo.py $(DESTDIR)$(PLUGINDIR)/echo # poweron $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wlan.py diff --git a/scripts/examples/python/echo.py b/python3/plugins/echo.py similarity index 74% rename from scripts/examples/python/echo.py rename to python3/plugins/echo.py index 57f70492c6c..27020e17065 100644 --- a/scripts/examples/python/echo.py +++ b/python3/plugins/echo.py @@ -5,8 +5,8 @@ import XenAPIPlugin - -def main(session, args): +# The 1st argument is the session. 
This plugin does not use it, hence use _: +def main(_, args): if "sleep" in args: secs = int(args["sleep"]) time.sleep(secs) diff --git a/scripts/Makefile b/scripts/Makefile index 434d0819de1..705b161158a 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -161,7 +161,6 @@ endif sed -i 's/#!\/usr\/bin\/python/#!\/usr\/bin\/python3/' $(DESTDIR)$(SITE3_DIR)/XenAPIPlugin.py $(IDATA) examples/python/XenAPI/XenAPI.py $(DESTDIR)$(SITE3_DIR)/ $(IDATA) examples/python/inventory.py $(DESTDIR)$(SITE3_DIR)/ - $(IPROG) examples/python/echo.py $(DESTDIR)$(PLUGINDIR)/echo $(IPROG) examples/python/shell.py $(DESTDIR)$(LIBEXECDIR)/shell.py # YUM plugins $(IPROG) yum-plugins/accesstoken.py $(DESTDIR)$(YUMPLUGINDIR) From d6ac6a6f85757fc087d2a0863dfcddaa9f29f242 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Wed, 26 Jun 2024 12:00:00 +0200 Subject: [PATCH 141/222] CP-49928: test_static_vids.py: mv to py3, test list_vdis & fresh_name Signed-off-by: Bernhard Kaindl --- python3/tests/test_static_vdis.py | 85 +++++++++++++++++++++++++++++++ scripts/test_static_vdis.py | 56 -------------------- 2 files changed, 85 insertions(+), 56 deletions(-) create mode 100644 python3/tests/test_static_vdis.py delete mode 100644 scripts/test_static_vdis.py diff --git a/python3/tests/test_static_vdis.py b/python3/tests/test_static_vdis.py new file mode 100644 index 00000000000..ee424c157a1 --- /dev/null +++ b/python3/tests/test_static_vdis.py @@ -0,0 +1,85 @@ +"""python3/tests/test_static_vdis.py: Test the static-vdis script""" + +import os +from pathlib import Path +from types import ModuleType + +import pytest + +from python3.tests.import_helper import import_file_as_module, mocked_modules + +# ---------------------------- Test fixtures --------------------------------- + + +@pytest.fixture(scope="function") # function scope: Re-run for each test function +def static_vdis() -> ModuleType: + """Test fixture to return the static-vdis module, mocked to avoid dependencies.""" + with mocked_modules("XenAPI", "inventory"): + return import_file_as_module("scripts/static-vdis") + + +# Hide pylint warnings for redefined-outer-name from using the static_vdis fixture: +# pylint: disable=redefined-outer-name +# Allow to access attributes of the static_vdis module from this test module: +# pyright: reportAttributeAccessIssue=false + +# ----------------------------- Test cases ----------------------------------- + + +def test_whole_file(static_vdis: ModuleType): + """Test read_whole_file() and write_whole_file()""" + + with open(__file__, encoding="utf-8") as data: + contents = data.read().strip() + assert static_vdis.read_whole_file(__file__) == contents + assert static_vdis.write_whole_file(__file__, contents) is None + with open(__file__, encoding="utf-8") as written_data: + assert written_data.read().strip() == contents + + +def test_fresh_name(static_vdis: ModuleType, tmp_path: Path): + """Test fresh_name() and list_vdis() - all code paths""" + + # When the freshly created tmp_path is empty, expect [] and "0": + static_vdis.main_dir = tmp_path.as_posix() + assert static_vdis.list_vdis() == [] + assert static_vdis.fresh_name() == "0" + + # When main_dir contains a directory with name "0", the next name should be "1": + os.mkdir(static_vdis.main_dir + "/0") + assert static_vdis.fresh_name() == "1" + + # When main_dir contains a directory with name "1", the next name should be "2": + os.mkdir(static_vdis.main_dir + "/1") + assert static_vdis.fresh_name() == "2" + + # When main_dir does not exist, an empty list and 0 should be 
returned: + static_vdis.main_dir = tmp_path.as_posix() + "/does-not-exist" + assert static_vdis.list_vdis() == [] + assert static_vdis.fresh_name() == "0" + + + +def test_sr_attach(static_vdis: ModuleType, mocker): + """Test sr_attach()""" + + # We need to mock those as they would attempt to load the volume plugin and + # check the clusterstack, which are not available in the test environment: + static_vdis.call_volume_plugin = mocker.MagicMock() + static_vdis.check_clusterstack = mocker.MagicMock() + + # Set the return value of the mocked functions to success: + static_vdis.call_volume_plugin.return_value = "success" + static_vdis.check_clusterstack.return_value = "success" + + # Call the sr_attach function + device_config = {"key1": "value1", "key2": "value2"} + result = static_vdis.sr_attach("plugin_name", device_config) + + # Assert the expected behavior + assert result == "success" + static_vdis.call_volume_plugin.assert_called_once_with( + "plugin_name", + "SR.attach", + ["--configuration", "key1", "value1", "--configuration", "key2", "value2"], + ) \ No newline at end of file diff --git a/scripts/test_static_vdis.py b/scripts/test_static_vdis.py deleted file mode 100644 index b0ab6ad5939..00000000000 --- a/scripts/test_static_vdis.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python3 -# -# unittest for static-vdis - -import unittest -from mock import MagicMock -import sys -import os -import subprocess -import tempfile - -# mock modules to avoid dependencies -sys.modules["XenAPI"] = MagicMock() -sys.modules["inventory"] = MagicMock() - -def import_from_file(module_name, file_path): - """Import a file as a module""" - if sys.version_info.major == 2: - return None - else: - from importlib import machinery, util - loader = machinery.SourceFileLoader(module_name, file_path) - spec = util.spec_from_loader(module_name, loader) - assert spec - assert spec.loader - module = util.module_from_spec(spec) - # Probably a good idea to add manually imported module stored in sys.modules - sys.modules[module_name] = module - spec.loader.exec_module(module) - return module - -def get_module(): - """Import the static-vdis script as a module for executing unit tests on functions""" - testdir = os.path.dirname(__file__) - return import_from_file("static_vdis", testdir + "/static-vdis") - -static_vdis = get_module() - -@unittest.skipIf(sys.version_info < (3, 0), reason="requires python3") -class TestReadWriteFile(unittest.TestCase): - def test_write_and_read_whole_file(self): - """Test read_whole_file and write_whole_file""" - test_file = tempfile.NamedTemporaryFile(delete=True) - filename = str(test_file.name) - content = r"""def read_whole_file(filename): - with open(filename, 'r', encoding='utf-8') as f: - return ''.join(f.readlines()).strip() - -def write_whole_file(filename, contents): - with open(filename, "w", encoding='utf-8') as f: - f.write(contents)""" - static_vdis.write_whole_file(filename, content) - expected_content = static_vdis.read_whole_file(filename) - self.assertEqual(expected_content, content) - - \ No newline at end of file From a02153b6f81500ec26e82d7f615f3d9601764197 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 25 Jun 2024 12:00:00 +0200 Subject: [PATCH 142/222] CP-49928: Fix pytype warnings in scripts/static-vdis Signed-off-by: Bernhard Kaindl --- pyproject.toml | 2 +- scripts/static-vdis | 20 ++++++++++++++++---- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5bf10170401..83a54c6d978 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -57,6 +57,7 @@ exclude_lines = [ "pass", # Other specific lines that do not need to be covered, comment in which file: "raise NbdDeviceNotFound", # python3/libexec/usb_scan.py + "params = xmlrpc.client.loads", # static-vdis ] # precision digits to use when reporting coverage (sub-percent-digits are not reported): precision = 0 @@ -261,7 +262,6 @@ expected_to_fail = [ "scripts/backup-sr-metadata.py", "scripts/restore-sr-metadata.py", # Other fixes needed: - "scripts/static-vdis", "scripts/plugins/extauth-hook-AD.py", ] diff --git a/scripts/static-vdis b/scripts/static-vdis index 77c9790b71e..9ca8b1d352a 100755 --- a/scripts/static-vdis +++ b/scripts/static-vdis @@ -3,10 +3,22 @@ # Common functions for managing statically-attached (ie onboot, without xapi) VDIs -import sys, os, subprocess, json, urllib.parse +import json +import os import os.path +import subprocess +import sys import time -import XenAPI, inventory, xmlrpc.client +import urllib.parse +import xmlrpc.client +from typing import TYPE_CHECKING + +import XenAPI + +import inventory + +if TYPE_CHECKING: + from typing import Any, Dict main_dir = "/etc/xensource/static-vdis" @@ -77,6 +89,7 @@ def check_clusterstack(ty): wait_for_corosync_quorum() def sr_attach(ty, device_config): + # type: (str, Dict[str, object]) -> str check_clusterstack(ty) args = [arg for (k,v) in device_config.items() @@ -238,7 +251,7 @@ def call_backend_attach(driver, config): return path def call_backend_detach(driver, config): - params = xmlrpc.client.loads(config)[0][0] + params = xmlrpc.client.loads(config)[0][0] # type: Any params['command'] = 'vdi_detach_from_config' config = xmlrpc.client.dumps(tuple([params]), params['command']) xml = doexec([ driver, config ]) @@ -388,4 +401,3 @@ if __name__ == "__main__": detach(sys.argv[2]) else: usage() - From 49ed5e62028befa407e7d88387b50eae5bf544ab Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Thu, 27 Jun 2024 09:28:56 +0000 Subject: [PATCH 143/222] CP-49911: Removed lvhd-api-test.py from scripts/examples/python/ Signed-off-by: Ashwinh --- scripts/examples/python/lvhd-api-test.py | 29 ------------------------ 1 file changed, 29 deletions(-) delete mode 100644 scripts/examples/python/lvhd-api-test.py diff --git a/scripts/examples/python/lvhd-api-test.py b/scripts/examples/python/lvhd-api-test.py deleted file mode 100644 index 4b7786d3f27..00000000000 --- a/scripts/examples/python/lvhd-api-test.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python3 - -from __future__ import print_function -import XenAPI, sys - -def go(x, name): - vm = x.xenapi.VM.get_by_name_label(name)[0] - vbds = x.xenapi.VM.get_VBDs(vm) - non_empty = filter(lambda y:not(x.xenapi.VBD.get_empty(y)), vbds) - vdis = map(lambda y:x.xenapi.VBD.get_VDI(y), non_empty) - - print("Calling API call on %s" % (repr(vdis))) - result = x.xenapi.SR.lvhd_stop_using_these_vdis_and_call_script(vdis, "echo", "main", { "hello": "there", "sleep": "10" }) - print(repr(result)) - - -if __name__ == "__main__": - if len(sys.argv) != 2: - print("Usage:", file=sys.stderr) - print(" %s " % (sys.argv[0]), file=sys.stderr) - print(" -- Call SR.lvhd_stop_using_these_vdis_and_call_script with all VDIs with VBDs (attached or not) linking to specified VM", file=sys.stderr) - sys.exit(1) - name = sys.argv[1] - x = XenAPI.xapi_local() - x.xenapi.login_with_password("root", "", "1.0", "xen-api-scripts-lvhd-api-test.py") - try: - go(x, name) - finally: - x.xenapi.logout() From 39a5384c7f7cc5a62c8c9beef9348aebc8c8c49c Mon Sep 17 00:00:00 2001 
From: Ashwinh Date: Thu, 27 Jun 2024 10:58:42 +0000 Subject: [PATCH 144/222] CP-50091: Moved inventory.py from scripts/examples/python/ to python3/packages/inventory.py - Fixed bare-except, unspecified-encoding and indentation issue Signed-off-by: Ashwinh --- python3/Makefile | 1 + python3/packages/inventory.py | 37 ++++++++++++++++++++++++++++ scripts/Makefile | 2 -- scripts/examples/python/inventory.py | 32 ------------------------ 4 files changed, 38 insertions(+), 34 deletions(-) create mode 100644 python3/packages/inventory.py delete mode 100644 scripts/examples/python/inventory.py diff --git a/python3/Makefile b/python3/Makefile index 8f34cb8e107..91479a31d8d 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -13,6 +13,7 @@ install: $(IPROG) -d $(DESTDIR)$(PLUGINDIR) + $(IDATA) packages/inventory.py $(DESTDIR)$(SITE3_DIR)/ $(IDATA) packages/observer.py $(DESTDIR)$(SITE3_DIR)/ $(IPROG) libexec/host-display $(DESTDIR)$(LIBEXECDIR) diff --git a/python3/packages/inventory.py b/python3/packages/inventory.py new file mode 100644 index 00000000000..87847cf5cde --- /dev/null +++ b/python3/packages/inventory.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 + +""" +inventory.py + +This module defines functions to read and parse constants from the xensource-inventory file. +""" +import sys + +INVENTORY = "@INVENTORY@" +INSTALLATION_UUID = "INSTALLATION_UUID" + + +def read_kvpairs(filename): + """Read in a file of key-value pairs in the format used by the inventory file""" + all_entries = {} + with open(filename, 'r', encoding='utf-8') as f: + for line in f: + equals = line.index("=") + key = line[:equals] + value = line[equals+1:].strip().strip("'") + all_entries[key] = value + return all_entries + + +def parse(): + """Return the contents of the xensource inventory file as a dictionary""" + try: + return read_kvpairs(INVENTORY) + except FileNotFoundError as e: + print("Error: File '{}' not found. 
{}".format(INVENTORY, e), file=sys.stderr) + return {} + + +def get_localhost_uuid(): + """Return the UUID of the local host""" + return parse()[INSTALLATION_UUID] diff --git a/scripts/Makefile b/scripts/Makefile index 705b161158a..8204a2a0e66 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -155,12 +155,10 @@ install: ifneq ($(BUILD_PY2), NO) $(IDATA) examples/python/XenAPIPlugin.py $(DESTDIR)$(SITE_DIR)/ $(IDATA) examples/python/XenAPI/XenAPI.py $(DESTDIR)$(SITE_DIR)/ - $(IDATA) examples/python/inventory.py $(DESTDIR)$(SITE_DIR)/ endif $(IDATA) examples/python/XenAPIPlugin.py $(DESTDIR)$(SITE3_DIR)/ sed -i 's/#!\/usr\/bin\/python/#!\/usr\/bin\/python3/' $(DESTDIR)$(SITE3_DIR)/XenAPIPlugin.py $(IDATA) examples/python/XenAPI/XenAPI.py $(DESTDIR)$(SITE3_DIR)/ - $(IDATA) examples/python/inventory.py $(DESTDIR)$(SITE3_DIR)/ $(IPROG) examples/python/shell.py $(DESTDIR)$(LIBEXECDIR)/shell.py # YUM plugins $(IPROG) yum-plugins/accesstoken.py $(DESTDIR)$(YUMPLUGINDIR) diff --git a/scripts/examples/python/inventory.py b/scripts/examples/python/inventory.py deleted file mode 100644 index 9fd645b5d32..00000000000 --- a/scripts/examples/python/inventory.py +++ /dev/null @@ -1,32 +0,0 @@ -# Simple functions to read the constants from the xensource-inventory file - -INVENTORY="@INVENTORY@" -INSTALLATION_UUID="INSTALLATION_UUID" - - -def read_kvpairs(filename): - """Read in a file of key-value pairs in the format used by the inventory file""" - f = open(filename) - all_entries = {} - try: - for line in f.readlines(): - equals = line.index("=") - key = line[0:equals] - value = line[equals+1:].strip().strip("'") - all_entries[key] = value - finally: - f.close() - return all_entries - - -def parse(): - """Return the contents of the xensource inventory file as a dictionary""" - try: - return read_kvpairs(INVENTORY) - except: - return {} - - -def get_localhost_uuid(): - """Return the UUID of the local host""" - return parse()[INSTALLATION_UUID] From 1cd2fa5857acbf2bbd1e929385fefd2b779785c8 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 1 Jul 2024 10:01:54 +0000 Subject: [PATCH 145/222] CP-49900: Moved scripts/templates to python3/templates directory - Modified code to using 2to3 - Fixed except issue - Removed templates from scripts/Makefile Signed-off-by: Ashwinh --- python3/Makefile | 3 + {scripts => python3}/templates/debian | 83 ++++++++++++++++++--------- {scripts => python3}/templates/debug | 0 scripts/Makefile | 3 - 4 files changed, 58 insertions(+), 31 deletions(-) rename {scripts => python3}/templates/debian (69%) rename {scripts => python3}/templates/debug (100%) diff --git a/python3/Makefile b/python3/Makefile index 8f34cb8e107..3f724d972c7 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -29,6 +29,9 @@ install: $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/install-supp-pack $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/echo.py $(DESTDIR)$(PLUGINDIR)/echo +# templates + $(IPROG) templates/debian $(DESTDIR)$(OPTDIR)/packages/post-install-scripts/debian-etch + $(IPROG) templates/debug $(DESTDIR)$(OPTDIR)/packages/post-install-scripts # poweron $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wlan.py diff --git a/scripts/templates/debian b/python3/templates/debian similarity index 69% rename from scripts/templates/debian rename to python3/templates/debian index 9350a40a57d..4e9b12a8714 100644 --- a/scripts/templates/debian +++ b/python3/templates/debian @@ -1,30 +1,46 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # Copyright (c) 2005-2007 XenSource, Inc # Code 
ripped out of 'xgt' script for now from __future__ import print_function -import commands, xmlrpclib, os, sys, httplib, socket, urllib2, signal + +import os +import signal +import socket +import sys + +import commands +import httplib +import urllib2 +import xmlrpclib verbose = True + ##### begin hack. Provide xmlrpc over UNIX domain socket (cut+pasted from eliloader): class UDSHTTPConnection(httplib.HTTPConnection): - """ Stupid hacked up HTTPConnection subclass to allow HTTP over Unix domain - sockets. """ + """Stupid hacked up HTTPConnection subclass to allow HTTP over Unix domain + sockets.""" + def connect(self): path = self.host.replace("_", "/") self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) self.sock.connect(path) + class UDSHTTP(httplib.HTTP): _connection_class = UDSHTTPConnection + class UDSTransport(xmlrpclib.Transport): def make_connection(self, host): return UDSHTTP(host) + def xapi_local(): return xmlrpclib.Server("http://_var_xapi_xapi/", transport=UDSTransport()) + + ##### end hack. @@ -36,43 +52,47 @@ def run(cmd, *args): debug("+ " + cmd % args) (ret, out) = commands.getstatusoutput(cmd % args) if verbose: - try: + try: for line in out.split("\n"): log("| " + line) except TypeError as e: pass if ret != 0: - debug ("run - command %s failed with %d" , cmd, ret) + debug("run - command %s failed with %d", cmd, ret) raise CommandException(out) return out + def log(fmt, *args): print(fmt % args) + def debug(msg, *args): if verbose: print(msg % args) + def create_partition(lvpath): # 1. write a partition table: - pipe = os.popen('/sbin/fdisk %s' % lvpath, 'w') + pipe = os.popen("/sbin/fdisk %s" % lvpath, "w") - pipe.write('n\n') # new partition - pipe.write('p\n') # primary - pipe.write("1\n") # 1st partition - pipe.write('\n') # default start cylinder - pipe.write('\n') # size: as big as image - pipe.write('w\n') # write partition table + pipe.write("n\n") # new partition + pipe.write("p\n") # primary + pipe.write("1\n") # 1st partition + pipe.write("\n") # default start cylinder + pipe.write("\n") # size: as big as image + pipe.write("w\n") # write partition table # XXX we must ignore certain errors here as fdisk will # sometimes return non-zero signalling error conditions # we don't care about. Should fix to detect these cases # specifically. rc = pipe.close() - if rc == None: + if rc == None: rc = 0 log("fdisk exited with rc %d (some non-zero exits can be ignored safely)." 
% rc) + def map_partitions(lvpath): run("/sbin/kpartx -a %s", lvpath) ps = [] @@ -80,37 +100,42 @@ def map_partitions(lvpath): ps.append("/dev/mapper/" + line.split()[0]) return ps + def unmap_partitions(lvpath): run("/sbin/kpartx -d %s", lvpath) + def umount(mountpoint): - run("umount -l %s",mountpoint) + run("umount -l %s", mountpoint) + if __name__ == "__main__": - #os.setpgrp() + # os.setpgrp() xvda = os.getenv("xvda") xvdb = os.getenv("xvdb") debug("Guest's xvda is on %s" % xvda) debug("Guest's xvdb is on %s" % xvdb) if xvda == None or xvdb == None: - raise "Need to pass in device names for xvda and xvdb through the environment" - + raise ValueError ("Need to pass in device names for xvda and xvdb through the environment") + vm = os.getenv("vm") - server = xapi_local () + server = xapi_local() try: - session_id = server.session.login_with_password('','','1.0','xen-api-scripts-debian')['Value'] - uuid = server.VM.get_uuid(session_id, vm)['Value'] + session_id = server.session.login_with_password( + "", "", "1.0", "xen-api-scripts-debian" + )["Value"] + uuid = server.VM.get_uuid(session_id, vm)["Value"] mountpoint = "/tmp/installer/%s" % (uuid) finally: server.session.logout(session_id) def sighandler(signum, frame): - umount(mountpoint) - os.killpg(0,signal.SIGKILL) - exit(1) + umount(mountpoint) + os.killpg(0, signal.SIGKILL) + exit(1) - signal.signal(signal.SIGTERM,sighandler) + signal.signal(signal.SIGTERM, sighandler) create_partition(xvda) create_partition(xvdb) @@ -132,10 +157,12 @@ if __name__ == "__main__": run("/usr/bin/unzip -p %s swap.img | dd of=%s oflag=direct bs=1M", xgt, xvdb) try: - session_id = server.session.login_with_password('','','1.0','xen-api-scripts-debian')['Value'] - vbds = server.VM.get_VBDs(session_id, vm)['Value'] + session_id = server.session.login_with_password( + "", "", "1.0", "xen-api-scripts-debian" + )["Value"] + vbds = server.VM.get_VBDs(session_id, vm)["Value"] for i in vbds: - dev = server.VBD.get_userdevice(session_id, i)['Value'] + dev = server.VBD.get_userdevice(session_id, i)["Value"] if dev == "0": server.VBD.set_bootable(session_id, i, True) finally: diff --git a/scripts/templates/debug b/python3/templates/debug similarity index 100% rename from scripts/templates/debug rename to python3/templates/debug diff --git a/scripts/Makefile b/scripts/Makefile index 705b161158a..b068e7a8959 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -145,9 +145,6 @@ install: mkdir -p $(DESTDIR)/etc/cron.d $(IDATA) xapi-tracing-log-trim.cron $(DESTDIR)/etc/cron.d/xapi-tracing-log-trim.cron mkdir -p $(DESTDIR)/opt/xensource/gpg -# templates - $(IPROG) templates/debian $(DESTDIR)$(OPTDIR)/packages/post-install-scripts/debian-etch - $(IPROG) templates/debug $(DESTDIR)$(OPTDIR)/packages/post-install-scripts # host-backup-restore $(IPROG) host-backup-restore/host-backup $(DESTDIR)$(LIBEXECDIR) $(IPROG) host-backup-restore/host-restore $(DESTDIR)$(LIBEXECDIR) From 8199f2d3bf1569c651f76c3c6e273869716ca9ba Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 1 Jul 2024 10:54:29 +0000 Subject: [PATCH 146/222] CP-49912: Moved scripts/mail-alarm to python3/libexec directory - Modified python3/Makefile to include these changes Signed-off-by: Ashwinh --- python3/Makefile | 2 ++ {scripts => python3/libexec}/mail-alarm | 0 scripts/Makefile | 1 - 3 files changed, 2 insertions(+), 1 deletion(-) rename {scripts => python3/libexec}/mail-alarm (100%) diff --git a/python3/Makefile b/python3/Makefile index 8f34cb8e107..1d98e2223ba 100644 --- a/python3/Makefile +++ 
b/python3/Makefile @@ -22,6 +22,8 @@ install: $(IPROG) libexec/nbd_client_manager.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/probe-device-for-file $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/print-custom-templates $(DESTDIR)$(LIBEXECDIR) + $(IPROG) libexec/mail-alarm $(DESTDIR)$(LIBEXECDIR) + $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin diff --git a/scripts/mail-alarm b/python3/libexec/mail-alarm similarity index 100% rename from scripts/mail-alarm rename to python3/libexec/mail-alarm diff --git a/scripts/Makefile b/scripts/Makefile index 705b161158a..88d71c98227 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -42,7 +42,6 @@ install: $(IPROG) fence $(DESTDIR)$(LIBEXECDIR) $(IPROG) xha-lc $(DESTDIR)$(LIBEXECDIR) $(IPROG) xapi-health-check $(DESTDIR)$(LIBEXECDIR) - $(IPROG) mail-alarm $(DESTDIR)$(LIBEXECDIR) $(IDATA) audit-logrotate $(DESTDIR)/etc/logrotate.d/audit $(IDATA) xapi-logrotate.conf $(DESTDIR)/etc/logrotate.d/xapi $(IPROG) xapi-tracing-log-trim.sh $(DESTDIR)$(LIBEXECDIR) From 501cc2630dc78b3e00b3f9ee8e38646495974620 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 1 Jul 2024 12:00:00 +0200 Subject: [PATCH 147/222] CP-49928: static-vdis: Fix warnings in def fresh_name() Warning fix (for pyright) in scripts/static-vdis: - fresh_name is covered by github.com/xapi-project/xen-api/pull/5740 Details: Always return a value in fresh_name(): - Was already so, but pyright doesn't "understand" this. - Fix the warning by de-indenting `return 0` of fresh_name() Signed-off-by: Bernhard Kaindl --- scripts/static-vdis | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/static-vdis b/scripts/static-vdis index 9ca8b1d352a..6ea02211e0d 100755 --- a/scripts/static-vdis +++ b/scripts/static-vdis @@ -115,7 +115,7 @@ def fresh_name(): except: # Directory doesn't exist os.mkdir(main_dir) - return "0" + return "0" # Always return a string, fixes pyright error by not returning None def to_string_list(d): From 017ad4939d11d940140706fb8ae20287bfff7d9b Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 1 Jul 2024 12:00:00 +0200 Subject: [PATCH 148/222] CP-49928: static-vdis: Fix pylint warnings in def fresh_name() - all does not need to be initialized to an empty list: It isn't used before it is set by `all = listdir(...)` and not after. - Use `if i not in files:` not some clumsy method that pylint warns on. 
Signed-off-by: Bernhard Kaindl --- scripts/static-vdis | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/static-vdis b/scripts/static-vdis index 6ea02211e0d..fada0e14f1f 100755 --- a/scripts/static-vdis +++ b/scripts/static-vdis @@ -105,12 +105,12 @@ def list_vdis(): return list(map(load, all)) def fresh_name(): - all = [] + """Return a unique name for a new static VDI configuration directory""" try: all = os.listdir(main_dir) for i in range(0, len(all) + 1): # guarantees to find a unique number i = str(i) - if not(i in all): + if i not in all: return i except: # Directory doesn't exist From ef1bb5f9155d15650a16dccf6b4c46440e4677c5 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 1 Jul 2024 12:00:00 +0200 Subject: [PATCH 149/222] CP-49928: Rename all to not have warnings on overriding built-in Signed-off-by: Bernhard Kaindl --- scripts/static-vdis | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/scripts/static-vdis b/scripts/static-vdis index fada0e14f1f..5a9b60b77be 100755 --- a/scripts/static-vdis +++ b/scripts/static-vdis @@ -97,20 +97,20 @@ def sr_attach(ty, device_config): return call_volume_plugin(ty, "SR.attach", args) def list_vdis(): - all = [] + files = [] try: - all = os.listdir(main_dir) + files = os.listdir(main_dir) except: pass - return list(map(load, all)) + return list(map(load, files)) def fresh_name(): """Return a unique name for a new static VDI configuration directory""" try: - all = os.listdir(main_dir) - for i in range(0, len(all) + 1): # guarantees to find a unique number + files = os.listdir(main_dir) + for i in range(0, len(files) + 1): # guarantees to find a unique number i = str(i) - if i not in all: + if i not in files: return i except: # Directory doesn't exist From b606836f9064616984261b3e5c06037ec2399c65 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 1 Jul 2024 12:00:00 +0200 Subject: [PATCH 150/222] CP-49928: listdir: except OSError: Possible errors are subclasses of it Signed-off-by: Bernhard Kaindl --- scripts/static-vdis | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/static-vdis b/scripts/static-vdis index 5a9b60b77be..ec24848e934 100755 --- a/scripts/static-vdis +++ b/scripts/static-vdis @@ -100,7 +100,7 @@ def list_vdis(): files = [] try: files = os.listdir(main_dir) - except: + except OSError: # All possible errors are subclasses of OSError pass return list(map(load, files)) @@ -112,7 +112,7 @@ def fresh_name(): i = str(i) if i not in files: return i - except: + except OSError: # All possible errors are subclasses of OSError # Directory doesn't exist os.mkdir(main_dir) return "0" # Always return a string, fixes pyright error by not returning None From f225cc47da391a5b70a8086f07e8de9b9464b6da Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 2 Jul 2024 12:01:05 +0200 Subject: [PATCH 151/222] CP-49928: test_static_vids.py: mv to py3, test list_vdis & fresh_name (#5740) Signed-off-by: Bernhard Kaindl --- python3/tests/test_static_vdis.py | 85 +++++++++++++++++++++++++++++++ scripts/test_static_vdis.py | 56 -------------------- 2 files changed, 85 insertions(+), 56 deletions(-) create mode 100644 python3/tests/test_static_vdis.py delete mode 100644 scripts/test_static_vdis.py diff --git a/python3/tests/test_static_vdis.py b/python3/tests/test_static_vdis.py new file mode 100644 index 00000000000..ee424c157a1 --- /dev/null +++ b/python3/tests/test_static_vdis.py @@ -0,0 +1,85 @@ +"""python3/tests/test_static_vdis.py: Test the static-vdis 
script""" + +import os +from pathlib import Path +from types import ModuleType + +import pytest + +from python3.tests.import_helper import import_file_as_module, mocked_modules + +# ---------------------------- Test fixtures --------------------------------- + + +@pytest.fixture(scope="function") # function scope: Re-run for each test function +def static_vdis() -> ModuleType: + """Test fixture to return the static-vdis module, mocked to avoid dependencies.""" + with mocked_modules("XenAPI", "inventory"): + return import_file_as_module("scripts/static-vdis") + + +# Hide pylint warnings for redefined-outer-name from using the static_vdis fixture: +# pylint: disable=redefined-outer-name +# Allow to access attributes of the static_vdis module from this test module: +# pyright: reportAttributeAccessIssue=false + +# ----------------------------- Test cases ----------------------------------- + + +def test_whole_file(static_vdis: ModuleType): + """Test read_whole_file() and write_whole_file()""" + + with open(__file__, encoding="utf-8") as data: + contents = data.read().strip() + assert static_vdis.read_whole_file(__file__) == contents + assert static_vdis.write_whole_file(__file__, contents) is None + with open(__file__, encoding="utf-8") as written_data: + assert written_data.read().strip() == contents + + +def test_fresh_name(static_vdis: ModuleType, tmp_path: Path): + """Test fresh_name() and list_vdis() - all code paths""" + + # When the freshly created tmp_path is empty, expect [] and "0": + static_vdis.main_dir = tmp_path.as_posix() + assert static_vdis.list_vdis() == [] + assert static_vdis.fresh_name() == "0" + + # When main_dir contains a directory with name "0", the next name should be "1": + os.mkdir(static_vdis.main_dir + "/0") + assert static_vdis.fresh_name() == "1" + + # When main_dir contains a directory with name "1", the next name should be "2": + os.mkdir(static_vdis.main_dir + "/1") + assert static_vdis.fresh_name() == "2" + + # When main_dir does not exist, an empty list and 0 should be returned: + static_vdis.main_dir = tmp_path.as_posix() + "/does-not-exist" + assert static_vdis.list_vdis() == [] + assert static_vdis.fresh_name() == "0" + + + +def test_sr_attach(static_vdis: ModuleType, mocker): + """Test sr_attach()""" + + # We need to mock those as they would attempt to load the volume plugin and + # check the clusterstack, which are not available in the test environment: + static_vdis.call_volume_plugin = mocker.MagicMock() + static_vdis.check_clusterstack = mocker.MagicMock() + + # Set the return value of the mocked functions to success: + static_vdis.call_volume_plugin.return_value = "success" + static_vdis.check_clusterstack.return_value = "success" + + # Call the sr_attach function + device_config = {"key1": "value1", "key2": "value2"} + result = static_vdis.sr_attach("plugin_name", device_config) + + # Assert the expected behavior + assert result == "success" + static_vdis.call_volume_plugin.assert_called_once_with( + "plugin_name", + "SR.attach", + ["--configuration", "key1", "value1", "--configuration", "key2", "value2"], + ) \ No newline at end of file diff --git a/scripts/test_static_vdis.py b/scripts/test_static_vdis.py deleted file mode 100644 index b0ab6ad5939..00000000000 --- a/scripts/test_static_vdis.py +++ /dev/null @@ -1,56 +0,0 @@ -#!/usr/bin/env python3 -# -# unittest for static-vdis - -import unittest -from mock import MagicMock -import sys -import os -import subprocess -import tempfile - -# mock modules to avoid dependencies -sys.modules["XenAPI"] 
= MagicMock() -sys.modules["inventory"] = MagicMock() - -def import_from_file(module_name, file_path): - """Import a file as a module""" - if sys.version_info.major == 2: - return None - else: - from importlib import machinery, util - loader = machinery.SourceFileLoader(module_name, file_path) - spec = util.spec_from_loader(module_name, loader) - assert spec - assert spec.loader - module = util.module_from_spec(spec) - # Probably a good idea to add manually imported module stored in sys.modules - sys.modules[module_name] = module - spec.loader.exec_module(module) - return module - -def get_module(): - """Import the static-vdis script as a module for executing unit tests on functions""" - testdir = os.path.dirname(__file__) - return import_from_file("static_vdis", testdir + "/static-vdis") - -static_vdis = get_module() - -@unittest.skipIf(sys.version_info < (3, 0), reason="requires python3") -class TestReadWriteFile(unittest.TestCase): - def test_write_and_read_whole_file(self): - """Test read_whole_file and write_whole_file""" - test_file = tempfile.NamedTemporaryFile(delete=True) - filename = str(test_file.name) - content = r"""def read_whole_file(filename): - with open(filename, 'r', encoding='utf-8') as f: - return ''.join(f.readlines()).strip() - -def write_whole_file(filename, contents): - with open(filename, "w", encoding='utf-8') as f: - f.write(contents)""" - static_vdis.write_whole_file(filename, content) - expected_content = static_vdis.read_whole_file(filename) - self.assertEqual(expected_content, content) - - \ No newline at end of file From d73441d9ca89d83937eacf2084a3e850f0ef8f28 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 2 Jul 2024 12:33:42 +0000 Subject: [PATCH 152/222] CP-49916: Moved perfmon, perfmon.service and sysconfig-perfmon from scripts/ to python3/perfmon directory - Modified Makefile to include these changes. 
- Removed perfmon, perfmon.service and sysconfig-perfmon from scripts/Makefile Signed-off-by: Ashwinh --- python3/Makefile | 7 +++++++ {scripts/plugins => python3/perfmon}/perfmon | 2 +- {scripts => python3/perfmon}/perfmon.service | 0 {scripts => python3/perfmon}/sysconfig-perfmon | 0 scripts/Makefile | 4 ---- 5 files changed, 8 insertions(+), 5 deletions(-) rename {scripts/plugins => python3/perfmon}/perfmon (100%) rename {scripts => python3/perfmon}/perfmon.service (100%) rename {scripts => python3/perfmon}/sysconfig-perfmon (100%) diff --git a/python3/Makefile b/python3/Makefile index 4d97bacc1fa..d31aa4f497d 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -11,6 +11,8 @@ install: $(IPROG) -d $(DESTDIR)$(SITE3_DIR) $(IPROG) -d $(DESTDIR)$(LIBEXECDIR) $(IPROG) -d $(DESTDIR)$(PLUGINDIR) + $(IPROG) -d $(DESTDIR)/etc/sysconfig + $(IPROG) -d $(DESTDIR)/usr/lib/systemd/system $(IDATA) packages/inventory.py $(DESTDIR)$(SITE3_DIR)/ @@ -32,6 +34,11 @@ install: $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/install-supp-pack $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/echo.py $(DESTDIR)$(PLUGINDIR)/echo + + $(IPROG) perfmon/perfmon $(DESTDIR)$(PLUGINDIR) + $(IDATA) perfmon/perfmon.service $(DESTDIR)/usr/lib/systemd/system/perfmon.service + $(IPROG) perfmon/sysconfig-perfmon $(DESTDIR)/etc/sysconfig/perfmon + # templates $(IPROG) templates/debian $(DESTDIR)$(OPTDIR)/packages/post-install-scripts/debian-etch $(IPROG) templates/debug $(DESTDIR)$(OPTDIR)/packages/post-install-scripts diff --git a/scripts/plugins/perfmon b/python3/perfmon/perfmon similarity index 100% rename from scripts/plugins/perfmon rename to python3/perfmon/perfmon index e3dc2452691..c40eb659cf6 100644 --- a/scripts/plugins/perfmon +++ b/python3/perfmon/perfmon @@ -2,9 +2,9 @@ # # A plugin for requesting perfmon actions via the xe host-call-plugin mechanism -import XenAPIPlugin import os import socket +import XenAPIPlugin # TODO: put this info plus all the supported cmds in a shared file cmdsockname = "\0perfmon" # an af_unix socket name (the "\0" stops socket.bind() creating a fs node) diff --git a/scripts/perfmon.service b/python3/perfmon/perfmon.service similarity index 100% rename from scripts/perfmon.service rename to python3/perfmon/perfmon.service diff --git a/scripts/sysconfig-perfmon b/python3/perfmon/sysconfig-perfmon similarity index 100% rename from scripts/sysconfig-perfmon rename to python3/perfmon/sysconfig-perfmon diff --git a/scripts/Makefile b/scripts/Makefile index 0234d6ffd1a..91b232a834e 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -61,7 +61,6 @@ install: $(IDATA) cdrommon@.service $(DESTDIR)/usr/lib/systemd/system/cdrommon@.service $(IDATA) gencert.service $(DESTDIR)/usr/lib/systemd/system/gencert.service $(IDATA) xapi-domains.service $(DESTDIR)/usr/lib/systemd/system/xapi-domains.service - $(IDATA) perfmon.service $(DESTDIR)/usr/lib/systemd/system/perfmon.service $(IDATA) generate-iscsi-iqn.service $(DESTDIR)/usr/lib/systemd/system/generate-iscsi-iqn.service $(IDATA) xapi.service $(DESTDIR)/usr/lib/systemd/system/xapi.service $(IDATA) attach-static-vdis.service $(DESTDIR)/usr/lib/systemd/system/attach-static-vdis.service @@ -115,14 +114,11 @@ install: $(IPROG) restore-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) backup-metadata-cron $(DESTDIR)$(LIBEXECDIR) $(IPROG) pbis-force-domain-leave $(DESTDIR)$(LIBEXECDIR) - mkdir -p $(DESTDIR)/etc/sysconfig - $(IPROG) sysconfig-perfmon $(DESTDIR)/etc/sysconfig/perfmon mkdir -p $(DESTDIR)$(EXTENSIONDIR) $(IPROG) 
extensions/Test.test $(DESTDIR)$(EXTENSIONDIR) $(IPROG) extensions/pool_update.precheck $(DESTDIR)$(EXTENSIONDIR) $(IPROG) extensions/pool_update.apply $(DESTDIR)$(EXTENSIONDIR) mkdir -p $(DESTDIR)$(PLUGINDIR) - $(IPROG) plugins/perfmon $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/extauth-hook $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/extauth-hook-AD.py $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/firewall-port $(DESTDIR)$(PLUGINDIR) From f68e89737d82f09e4d27ae2ecf84606c3d019069 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 2 Jul 2024 12:00:00 +0200 Subject: [PATCH 153/222] CP-49928: Add code coverage for static-vdis:attach() to fix warnings Signed-off-by: Bernhard Kaindl --- python3/tests/test_static_vdis.py | 59 ++++++++++++++++++++++++++++--- scripts/static-vdis | 9 +++-- 2 files changed, 61 insertions(+), 7 deletions(-) diff --git a/python3/tests/test_static_vdis.py b/python3/tests/test_static_vdis.py index ee424c157a1..1b7efc0bcf0 100644 --- a/python3/tests/test_static_vdis.py +++ b/python3/tests/test_static_vdis.py @@ -1,6 +1,7 @@ """python3/tests/test_static_vdis.py: Test the static-vdis script""" import os +import sys from pathlib import Path from types import ModuleType @@ -26,17 +27,66 @@ def static_vdis() -> ModuleType: # ----------------------------- Test cases ----------------------------------- -def test_whole_file(static_vdis: ModuleType): +def test_whole_file(static_vdis: ModuleType, tmp_path): """Test read_whole_file() and write_whole_file()""" with open(__file__, encoding="utf-8") as data: contents = data.read().strip() assert static_vdis.read_whole_file(__file__) == contents - assert static_vdis.write_whole_file(__file__, contents) is None - with open(__file__, encoding="utf-8") as written_data: + assert static_vdis.write_whole_file(tmp_path / "temp_file", contents) is None + with open(tmp_path / "temp_file", encoding="utf-8") as written_data: assert written_data.read().strip() == contents +def test_attach(static_vdis: ModuleType, tmpdir, mocker, capsys): + """Test five common and SMAPIv1 code paths in the attach() function""" + + # Path 1: When the VDI is not found, expect attach() to raise an exception: + static_vdis.list_vdis = lambda: [{"vdi-uuid": "existing-uuid"}] + with pytest.raises(Exception) as exc_info: + static_vdis.attach("nonexisting-uuid") + assert exc_info.value.args[0] == "Disk configuration not found" + + # Path 2: When the VDI is already attached, expect main():attach to return None\n: + static_vdis.list_vdis = lambda: [{"vdi-uuid": "attached", "path": "/attached"}] + sys.argv = ["static-vdis", "attach", "attached"] + static_vdis.main() + with capsys.disabled(): + assert capsys.readouterr().out == "None\n" + + # Path 3: When the VDI is not attached, attach() to return "the-id/disk": + vdis: list[dict[str, str]] = [{"vdi-uuid": "attach-uuid", "id": "the-id"}] + static_vdis.list_vdis = lambda: vdis + static_vdis.call_backend_attach = lambda driver, config: "/mock-attached-path" + static_vdis.read_whole_file = lambda path: '{"json":true}' + disk = tmpdir.mkdir(vdis[0]["id"]).join("disk") + static_vdis.main_dir = str(tmpdir) + assert static_vdis.attach("attach-uuid") == disk + assert os.readlink(disk) == "/mock-attached-path" + os.unlink(disk) + + # Path 4: Create the disk file expect it to be deleted and replaced by a symlink: + disk.write("mock-disk-contents-to-delete") + assert static_vdis.attach("attach-uuid") == disk + assert os.readlink(disk) == "/mock-attached-path" + + # Path 5: When the backend call returns None, expect attach() to raise TypeError 
+ static_vdis.call_backend_attach = lambda driver, config: None + with pytest.raises(TypeError) as exc_info: + static_vdis.attach("attach-uuid") + + # Path 6: When the backend returns an empty str, attach() raises FileNotFoundError: + static_vdis.call_backend_attach = lambda driver, config: "" + with pytest.raises(FileNotFoundError) as exc_info: + static_vdis.attach("attach-uuid") + + # Path 7: If the smapiv3_config exists, but not the volume plugin, attach() fails: + with pytest.raises(FileNotFoundError) as exc_info: + mocker.patch("os.path.exists", return_value=True) + static_vdis.MULTIPATH_FLAG = __file__ + static_vdis.attach("attach-uuid") + + def test_fresh_name(static_vdis: ModuleType, tmp_path: Path): """Test fresh_name() and list_vdis() - all code paths""" @@ -59,7 +109,6 @@ def test_fresh_name(static_vdis: ModuleType, tmp_path: Path): assert static_vdis.fresh_name() == "0" - def test_sr_attach(static_vdis: ModuleType, mocker): """Test sr_attach()""" @@ -82,4 +131,4 @@ def test_sr_attach(static_vdis: ModuleType, mocker): "plugin_name", "SR.attach", ["--configuration", "key1", "value1", "--configuration", "key2", "value2"], - ) \ No newline at end of file + ) diff --git a/scripts/static-vdis b/scripts/static-vdis index ec24848e934..d94a5282ac1 100755 --- a/scripts/static-vdis +++ b/scripts/static-vdis @@ -375,8 +375,9 @@ def usage(): print(" %s attach -- attach the VDI immediately" % sys.argv[0]) print(" %s detach -- detach the VDI immediately" % sys.argv[0]) sys.exit(1) - -if __name__ == "__main__": + + +def main(): if len(sys.argv) < 2: usage() @@ -401,3 +402,7 @@ if __name__ == "__main__": detach(sys.argv[2]) else: usage() + + +if __name__ == "__main__": # pragma: no cover + main() From bff2d07b4f3b59739a14dd30bdc55ce7de414393 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 2 Jul 2024 12:00:00 +0200 Subject: [PATCH 154/222] static-vdis: pyright: Add type hints and fix errors from pyright Signed-off-by: Bernhard Kaindl --- pyproject.toml | 2 ++ scripts/static-vdis | 19 ++++++++++++------- 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 83a54c6d978..5dd6d1ee8e5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,6 +58,8 @@ exclude_lines = [ # Other specific lines that do not need to be covered, comment in which file: "raise NbdDeviceNotFound", # python3/libexec/usb_scan.py "params = xmlrpc.client.loads", # static-vdis + "assert.*# must not be None", # static-vdis + "except Exception:", # static-vdis ] # precision digits to use when reporting coverage (sub-percent-digits are not reported): precision = 0 diff --git a/scripts/static-vdis b/scripts/static-vdis index d94a5282ac1..ff3a01da596 100755 --- a/scripts/static-vdis +++ b/scripts/static-vdis @@ -11,7 +11,7 @@ import sys import time import urllib.parse import xmlrpc.client -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast import XenAPI @@ -166,6 +166,7 @@ def add(session, vdi_uuid, reason): sm = None all_sm = session.xenapi.SM.get_all_records() + sm_ref = "" # pragma: no cover for sm_ref in all_sm: if all_sm[sm_ref]['type'] == ty: sm = all_sm[sm_ref] @@ -183,6 +184,7 @@ def add(session, vdi_uuid, reason): if "VDI_ATTACH_OFFLINE" in sm["features"]: data["volume-plugin"] = ty data[smapiv3_config] = json.dumps(device_config) + assert device_config # must not be None sr = sr_attach(ty, device_config) location = session.xenapi.VDI.get_location(vdi) stat = call_volume_plugin(ty, "Volume.stat", [ sr, location ]) @@ -238,7 +240,7 @@ def 
call_backend_attach(driver, config): xml = doexec(args) if xml[0] != 0: raise Exception("SM_BACKEND_FAILURE(%d, %s, %s)" % xml) - xml_rpc = xmlrpc.client.loads(xml[1]) + xml_rpc = xmlrpc.client.loads(xml[1]) # type: Any # pragma: no cover if 'params_nbd' in xml_rpc[0][0]: # Prefer NBD if available @@ -259,8 +261,8 @@ def call_backend_detach(driver, config): raise Exception("SM_BACKEND_FAILURE(%d, %s, %s)" % xml) xml_rpc = xmlrpc.client.loads(xml[1]) try: - res = xml_rpc[0][0]['params'] - except: + res = cast(dict, xml_rpc[0][0])['params'] # pragma: no cover + except Exception: res = xml_rpc[0][0] return res @@ -301,7 +303,7 @@ def attach(vdi_uuid): os.unlink(d + "/disk") except: pass - path = None + path = None # Raise TypeError if path is not set at the end if not (os.path.exists(d + "/" + smapiv3_config)): # SMAPIv1 config = read_whole_file(d + "/config") @@ -333,10 +335,13 @@ def attach(vdi_uuid): (path, exportname) = parse_nbd_uri(uri) path = connect_nbd(path=path, exportname=exportname) + if path is None: + raise TypeError("static-vdis: attach(): path was not set") os.symlink(path, d + "/disk") return d + "/disk" if not found: raise Exception("Disk configuration not found") + return None def detach(vdi_uuid): found = False @@ -396,8 +401,8 @@ def main(): elif sys.argv[1] == "del" and len(sys.argv) == 3: delete(sys.argv[2]) elif sys.argv[1] == "attach" and len(sys.argv) == 3: - path = attach(sys.argv[2]) - print(path) + disk_path = attach(sys.argv[2]) + print(disk_path) elif sys.argv[1] == "detach" and len(sys.argv) == 3: detach(sys.argv[2]) else: From 849a517f3077294c2343c6503d526368bb481d60 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 3 Jul 2024 08:57:09 +0000 Subject: [PATCH 155/222] CP-49915: Moved openvswitch-config-update from scripts/plugins to python3/plugins/ - Modified Makefile to include this change - Removed openvswitch-config-update from scripts/Makefile Signed-off-by: Ashwinh --- python3/Makefile | 1 + {scripts => python3}/plugins/openvswitch-config-update | 0 scripts/Makefile | 1 - 3 files changed, 1 insertion(+), 1 deletion(-) rename {scripts => python3}/plugins/openvswitch-config-update (100%) diff --git a/python3/Makefile b/python3/Makefile index d31aa4f497d..4e65f81ad38 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -34,6 +34,7 @@ install: $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/install-supp-pack $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/echo.py $(DESTDIR)$(PLUGINDIR)/echo + $(IPROG) plugins/openvswitch-config-update $(DESTDIR)$(PLUGINDIR) $(IPROG) perfmon/perfmon $(DESTDIR)$(PLUGINDIR) $(IDATA) perfmon/perfmon.service $(DESTDIR)/usr/lib/systemd/system/perfmon.service diff --git a/scripts/plugins/openvswitch-config-update b/python3/plugins/openvswitch-config-update similarity index 100% rename from scripts/plugins/openvswitch-config-update rename to python3/plugins/openvswitch-config-update diff --git a/scripts/Makefile b/scripts/Makefile index 91b232a834e..08e7d7a517b 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -122,7 +122,6 @@ install: $(IPROG) plugins/extauth-hook $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/extauth-hook-AD.py $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/firewall-port $(DESTDIR)$(PLUGINDIR) - $(IPROG) plugins/openvswitch-config-update $(DESTDIR)$(PLUGINDIR) mkdir -p $(DESTDIR)$(HOOKSDIR)/host-post-declare-dead $(IPROG) 10resetvdis $(DESTDIR)$(HOOKSDIR)/host-post-declare-dead mkdir -p $(DESTDIR)/etc/bash_completion.d From 29e55748cbb96138c862bf4c5e5df141ec9c3046 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: 
Wed, 3 Jul 2024 13:22:24 +0000 Subject: [PATCH 156/222] CP-50172: Moved Test.test from scripts/extensions to python3/extensions - Modified python3/Makefile to include Test.test - Removed Test.test from scripts/Makefile - Fixed import order Signed-off-by: Ashwinh --- python3/Makefile | 4 ++++ python3/extensions/Test.test | 22 ++++++++++++++++++++++ scripts/Makefile | 1 - scripts/extensions/Test.test | 20 -------------------- 4 files changed, 26 insertions(+), 21 deletions(-) create mode 100755 python3/extensions/Test.test delete mode 100755 scripts/extensions/Test.test diff --git a/python3/Makefile b/python3/Makefile index d31aa4f497d..fb24ea25fd2 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -14,6 +14,8 @@ install: $(IPROG) -d $(DESTDIR)/etc/sysconfig $(IPROG) -d $(DESTDIR)/usr/lib/systemd/system + $(IPROG) -d $(DESTDIR)$(EXTENSIONDIR) + $(IDATA) packages/inventory.py $(DESTDIR)$(SITE3_DIR)/ $(IDATA) packages/observer.py $(DESTDIR)$(SITE3_DIR)/ @@ -31,6 +33,8 @@ install: $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin + + $(IPROG) extensions/Test.test $(DESTDIR)$(EXTENSIONDIR) $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/install-supp-pack $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/echo.py $(DESTDIR)$(PLUGINDIR)/echo diff --git a/python3/extensions/Test.test b/python3/extensions/Test.test new file mode 100755 index 00000000000..372de668b8c --- /dev/null +++ b/python3/extensions/Test.test @@ -0,0 +1,22 @@ +#!/usr/bin/env python3 + + +import sys +import xmlrpc.client + + +def success_message(result): + rpcparams = {"Status": "Success", "Value": result} + return xmlrpc.client.dumps((rpcparams,), "", True) + + +def failure_message(code, params): + rpcparams = {"Status": "Failure", "ErrorDescription": [code] + params} + return xmlrpc.client.dumps((rpcparams,), "", True) + + +if __name__ == "__main__": + txt = sys.stdin.read() + req = xmlrpc.client.loads(txt) + print(failure_message("CODE", ["a", "b"])) + # print (success_message("")) diff --git a/scripts/Makefile b/scripts/Makefile index 91b232a834e..8f64e908f70 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -115,7 +115,6 @@ install: $(IPROG) backup-metadata-cron $(DESTDIR)$(LIBEXECDIR) $(IPROG) pbis-force-domain-leave $(DESTDIR)$(LIBEXECDIR) mkdir -p $(DESTDIR)$(EXTENSIONDIR) - $(IPROG) extensions/Test.test $(DESTDIR)$(EXTENSIONDIR) $(IPROG) extensions/pool_update.precheck $(DESTDIR)$(EXTENSIONDIR) $(IPROG) extensions/pool_update.apply $(DESTDIR)$(EXTENSIONDIR) mkdir -p $(DESTDIR)$(PLUGINDIR) diff --git a/scripts/extensions/Test.test b/scripts/extensions/Test.test deleted file mode 100755 index f49f8c22e07..00000000000 --- a/scripts/extensions/Test.test +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python3 - - -import xmlrpc.client, sys - -def success_message(result): - rpcparams = { 'Status': 'Success', 'Value': result } - return xmlrpc.client.dumps((rpcparams, ), '', True) - -def failure_message(code, params): - rpcparams = { 'Status': 'Failure', 'ErrorDescription': [ code ] + params } - return xmlrpc.client.dumps((rpcparams, ), '', True) - -if __name__ == "__main__": - txt = sys.stdin.read() - req = xmlrpc.client.loads(txt) - print (failure_message("CODE", [ "a", "b" ])) - #print (success_message("")) - - From aeae774a41e06efcefc9905f6f9d9904f90cae01 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Thu, 4 Jul 2024 13:50:45 +0000 Subject: [PATCH 157/222] CP-49926: Removed shell.py from scripts/examples/python 
Signed-off-by: Ashwinh --- scripts/Makefile | 1 - scripts/examples/python/shell.py | 120 ------------------------------- 2 files changed, 121 deletions(-) delete mode 100644 scripts/examples/python/shell.py diff --git a/scripts/Makefile b/scripts/Makefile index 7d5e13ce954..6e4fe678471 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -149,7 +149,6 @@ endif $(IDATA) examples/python/XenAPIPlugin.py $(DESTDIR)$(SITE3_DIR)/ sed -i 's/#!\/usr\/bin\/python/#!\/usr\/bin\/python3/' $(DESTDIR)$(SITE3_DIR)/XenAPIPlugin.py $(IDATA) examples/python/XenAPI/XenAPI.py $(DESTDIR)$(SITE3_DIR)/ - $(IPROG) examples/python/shell.py $(DESTDIR)$(LIBEXECDIR)/shell.py # YUM plugins $(IPROG) yum-plugins/accesstoken.py $(DESTDIR)$(YUMPLUGINDIR) $(IDATA) yum-plugins/accesstoken.conf $(DESTDIR)$(YUMPLUGINCONFDIR) diff --git a/scripts/examples/python/shell.py b/scripts/examples/python/shell.py deleted file mode 100644 index 3cfdde757db..00000000000 --- a/scripts/examples/python/shell.py +++ /dev/null @@ -1,120 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2006-2008 Citrix Systems. -# -# Permission to use, copy, modify, and distribute this software for any -# purpose with or without fee is hereby granted, provided that the above -# copyright notice and this permission notice appear in all copies. -# -# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. - -from __future__ import print_function -import atexit -import cmd -import pprint -import readline -import shlex -import string -import sys - -import XenAPI - -def logout(): - try: - session.xenapi.session.logout() - except: - pass -atexit.register(logout) - -class Shell(cmd.Cmd): - def __init__(self): - cmd.Cmd.__init__(self) - self.identchars = string.ascii_letters + string.digits + '_.' 
- self.prompt = "xe> " - - def preloop(self): - cmd.Cmd.preloop(self) - readline.set_completer_delims(' ') - - def default(self, line): - words = shlex.split(line) - if len(words) > 0: - res = session.xenapi_request(words[0], tuple(words[1:])) - if res is not None and res != '': - pprint.pprint(res) - return False - - def completedefault(self, text, line, begidx, endidx): - words = shlex.split(line[:begidx]) - clas, func = words[0].split('.') - if len(words) > 1 or \ - func.startswith('get_by_') or \ - func == 'get_all': - return [] - uuids = session.xenapi_request('%s.get_all' % clas, ()) - return [u + " " for u in uuids if u.startswith(text)] - - def emptyline(self): - pass - - def do_EOF(self, line): - print() - sys.exit(0) - - -def munge_types(var): - if var == "True": - return True - if var == "False": - return False - - try: - return int(var) - except: - return var - - -if __name__ == "__main__": - if len(sys.argv) < 2: - print("Usage:") - print(sys.argv[0], " ") - sys.exit(1) - - if sys.argv[1] != "-" and len(sys.argv) < 4: - print("Usage:") - print(sys.argv[0], " ") - sys.exit(1) - - if sys.argv[1] != "-": - url = sys.argv[1] - username = sys.argv[2] - password = sys.argv[3] - session = XenAPI.Session(url) - session.xenapi.login_with_password(username, password, "1.0", "xen-api-scripts-shell.py") - cmdAt = 4 - else: - session = XenAPI.xapi_local() - session.xenapi.login_with_password("", "", "1.0", "xen-api-scripts-shell.py") - cmdAt = 2 - - # We want to support directly executing the cmd line, - # where appropriate - if len(sys.argv) > cmdAt: - command = sys.argv[cmdAt] - params = [munge_types(x) for x in sys.argv[(cmdAt + 1):]] - try: - print(session.xenapi_request(command, tuple(params)), file=sys.stdout) - except XenAPI.Failure as x: - print(x, file=sys.stderr) - sys.exit(2) - except Exception as e: - print(e, file=sys.stderr) - sys.exit(3) - sys.exit(0) - else: - Shell().cmdloop('Welcome to the XenServer shell. (Try "VM.get_all")') From 3b26dce21028cae3daac1792a22332bc4583693e Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Thu, 4 Jul 2024 12:00:00 +0200 Subject: [PATCH 158/222] Stop testing scripts/plugins/extauth-hook-AD.py with Python2.7 Preparations for cleaning up the checker warnings in extauth-hook-AD.py: 1. The shebang of extauth-hook-AD.py has already been changed to Python3: Thus, stop testing it with Python3. 2. This drops the Python2 code coverage to 28% (below 50%). We need to allow further drops in coverage: Set the limit to 0. 
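For reference, the module-level skip added by this patch follows the standard pytest pattern: pytest.skip(..., allow_module_level=True) runs at import time, so none of the module's tests are collected under Python 2 and the Python 2 job only loses coverage rather than failing. A minimal sketch with a hypothetical test module (the real change is in scripts/plugins/test_extauth_hook_AD.py below):

import sys

import pytest

if sys.version_info < (3,):  # pragma: no cover
    # Raises pytest's Skipped exception during collection, skipping the whole module.
    pytest.skip("extauth-hook-AD.py is Python3-only", allow_module_level=True)


def test_placeholder():
    # Hypothetical test body; only collected and run under Python 3.
    assert sys.version_info >= (3,)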
Signed-off-by: Bernhard Kaindl --- .github/workflows/other.yml | 1 + scripts/plugins/test_extauth_hook_AD.py | 7 +++++++ 2 files changed, 8 insertions(+) diff --git a/.github/workflows/other.yml b/.github/workflows/other.yml index 959679a92b3..17f91991da5 100644 --- a/.github/workflows/other.yml +++ b/.github/workflows/other.yml @@ -55,6 +55,7 @@ jobs: --junitxml=.git/pytest${{matrix.python-version}}.xml --cov-report term-missing --cov-report xml:.git/coverage${{matrix.python-version}}.xml + --cov-fail-under 0 env: PYTHONDEVMODE: yes diff --git a/scripts/plugins/test_extauth_hook_AD.py b/scripts/plugins/test_extauth_hook_AD.py index 71b5b7c95eb..1960072f3f1 100644 --- a/scripts/plugins/test_extauth_hook_AD.py +++ b/scripts/plugins/test_extauth_hook_AD.py @@ -6,6 +6,9 @@ import os from unittest import TestCase from mock import MagicMock, patch + +import pytest + # mock modules to avoid dependencies sys.modules["XenAPIPlugin"] = MagicMock() sys.modules["XenAPI"] = MagicMock() @@ -14,6 +17,10 @@ from extauth_hook_ad import StaticSSHPam, NssConfig, SshdConfig, UsersList, GroupsList +if sys.version_info < (3, ): # pragma: no cover + pytest.skip(allow_module_level=True) + + def line_exists_in_config(lines, line): """ Helper function to detect whether configration match expectation From 44fdc3d67e5d407dd72ee929ee35b15c10688e61 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Thu, 4 Jul 2024 12:00:00 +0200 Subject: [PATCH 159/222] extauth-hook-AD.py: Fix `pytype` warnings, cleanup `pylint` suppressions 1. `pytype` warnings need to be fixed before it can be moved to Python3. - `pytype` reports the uses `@abc.abstractmethod` as stray, disable. - Initialise `upn`: checkers (`pytype`, `pyright`) can't see that it is already handled OK. 2. Clean-up obsolete inheriting from `object`: In Python3, all classes already always inherit from `object`. 
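The `upn` initialisation added below is the usual way to satisfy static checkers that a name assigned inside a try block is bound on every path, including the error paths where it is only logged. A minimal illustration with a hypothetical helper (not the plugin code itself), assuming only that the caller treats an empty string as "no UPN":

def parse_upn(subject_rec):
    upn = ""  # pre-initialise so pytype/pyright see the name bound on every path
    try:
        upn = subject_rec["other_config"]["subject-upn"]
        user, domain = upn.split("@")
        return "{}@{}".format(user, domain.lower())
    except KeyError:
        return upn  # subject has no UPN; empty string thanks to the initialisation
    except ValueError:
        return upn  # UPN did not contain exactly one "@"; name is still safely bound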
Signed-off-by: Bernhard Kaindl --- pyproject.toml | 2 -- scripts/plugins/extauth-hook-AD.py | 8 ++++---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 5dd6d1ee8e5..6f8e1095dc9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -263,8 +263,6 @@ expected_to_fail = [ # Need 2to3 -w and maybe a few other minor updates: "scripts/backup-sr-metadata.py", "scripts/restore-sr-metadata.py", - # Other fixes needed: - "scripts/plugins/extauth-hook-AD.py", ] # ----------------------------------------------------------------------------- diff --git a/scripts/plugins/extauth-hook-AD.py b/scripts/plugins/extauth-hook-AD.py index 98b228c04e5..0474ecacd39 100755 --- a/scripts/plugins/extauth-hook-AD.py +++ b/scripts/plugins/extauth-hook-AD.py @@ -29,7 +29,8 @@ # - /etc/pam.d/hcp_users # - /etc/ssh/ssh_config -# pylint: disable=super-with-arguments +# pylint: disable=too-few-public-methods +# pytype: disable=ignored-abstractmethod HCP_USERS = "/etc/security/hcp_ad_users.conf" @@ -81,10 +82,8 @@ class ADBackend(Enum): BD_WINBIND = 1 -# pylint: disable=useless-object-inheritance, too-few-public-methods -class ADConfig(object): +class ADConfig(): """Base class for AD configuration""" - #pylint: disable=too-many-arguments def __init__(self, path, session, args, ad_enabled=True, load_existing=True, file_mode=0o644): self._file_path = path @@ -257,6 +256,7 @@ def _match_subject(self, subject_rec): def _add_upn(self, subject_rec): sep = "@" + upn = "" try: upn = subject_rec["other_config"]["subject-upn"] user, domain = upn.split(sep) From 231a34e046f8c251ac5b070d199db1a68bde5ef7 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 8 Jul 2024 09:52:16 +0000 Subject: [PATCH 160/222] CP-49918: Moved pool_update.apply from scripts/extensions to python3/extensions - Modified python3/Makefile to include pool_update.apply - Removed pool_update.apply from scripts/Makefile - Used isort to sort import order and used black code formatter Signed-off-by: Ashwinh --- python3/Makefile | 2 +- .../extensions/pool_update.apply | 112 +++++++++++------- scripts/Makefile | 1 - 3 files changed, 68 insertions(+), 47 deletions(-) rename {scripts => python3}/extensions/pool_update.apply (61%) diff --git a/python3/Makefile b/python3/Makefile index ffdbe9c2481..15e0a27b57a 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -13,7 +13,6 @@ install: $(IPROG) -d $(DESTDIR)$(PLUGINDIR) $(IPROG) -d $(DESTDIR)/etc/sysconfig $(IPROG) -d $(DESTDIR)/usr/lib/systemd/system - $(IPROG) -d $(DESTDIR)$(EXTENSIONDIR) @@ -33,6 +32,7 @@ install: $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin + $(IPROG) extensions/pool_update.apply $(DESTDIR)$(EXTENSIONDIR) $(IPROG) extensions/Test.test $(DESTDIR)$(EXTENSIONDIR) $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) diff --git a/scripts/extensions/pool_update.apply b/python3/extensions/pool_update.apply similarity index 61% rename from scripts/extensions/pool_update.apply rename to python3/extensions/pool_update.apply index ab8f49478dc..2bf9e0a5dcc 100644 --- a/scripts/extensions/pool_update.apply +++ b/python3/extensions/pool_update.apply @@ -1,83 +1,103 @@ #!/usr/bin/env python3 -import xmlrpc.client -import sys -import XenAPI +import errno import json -import traceback -import subprocess +import logging import os import re -import fasteners -import errno import shutil -import logging +import subprocess +import sys +import traceback +import xmlrpc.client + 
+import fasteners import xcp.logger +import XenAPI + +TMP_DIR = "/tmp/" +UPDATE_ALREADY_APPLIED = "UPDATE_ALREADY_APPLIED" +UPDATE_APPLY_FAILED = "UPDATE_APPLY_FAILED" +OTHER_OPERATION_IN_PROGRESS = "OTHER_OPERATION_IN_PROGRESS" +UPDATE_PRECHECK_FAILED_UNKNOWN_ERROR = "UPDATE_PRECHECK_FAILED_UNKNOWN_ERROR" +CANNOT_FIND_UPDATE = "CANNOT_FIND_UPDATE" +INVALID_UPDATE = "INVALID_UPDATE" +ERROR_MESSAGE_DOWNLOAD_PACKAGE = "Error downloading packages:\n" +ERROR_MESSAGE_START = "Error: " +ERROR_MESSAGE_END = "You could try " -TMP_DIR = '/tmp/' -UPDATE_ALREADY_APPLIED = 'UPDATE_ALREADY_APPLIED' -UPDATE_APPLY_FAILED = 'UPDATE_APPLY_FAILED' -OTHER_OPERATION_IN_PROGRESS = 'OTHER_OPERATION_IN_PROGRESS' -UPDATE_PRECHECK_FAILED_UNKNOWN_ERROR = 'UPDATE_PRECHECK_FAILED_UNKNOWN_ERROR' -CANNOT_FIND_UPDATE = 'CANNOT_FIND_UPDATE' -INVALID_UPDATE = 'INVALID_UPDATE' -ERROR_MESSAGE_DOWNLOAD_PACKAGE = 'Error downloading packages:\n' -ERROR_MESSAGE_START = 'Error: ' -ERROR_MESSAGE_END = 'You could try ' class EnvironmentFailure(Exception): pass + class ApplyFailure(Exception): pass + class InvalidUpdate(Exception): pass + def success_message(): - rpcparams = {'Status': 'Success', 'Value': ''} - return xmlrpc.client.dumps((rpcparams, ), '', True) + rpcparams = {"Status": "Success", "Value": ""} + return xmlrpc.client.dumps((rpcparams,), "", True) def failure_message(code, params): - rpcparams = { - 'Status': 'Failure', 'ErrorDescription': [code] + params} - return xmlrpc.client.dumps((rpcparams, ), '', True) + rpcparams = {"Status": "Failure", "ErrorDescription": [code] + params} + return xmlrpc.client.dumps((rpcparams,), "", True) def execute_apply(session, update_package, yum_conf_file): yum_env = os.environ.copy() - yum_env['LANG'] = 'C' - - cmd = ['yum', 'clean', 'all', '--noplugins', '-c', yum_conf_file] - p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True, env=yum_env, universal_newlines=True) + yum_env["LANG"] = "C" + + cmd = ["yum", "clean", "all", "--noplugins", "-c", yum_conf_file] + p = subprocess.Popen( + cmd, + shell=False, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + close_fds=True, + env=yum_env, + universal_newlines=True, + ) output, _ = p.communicate() - for line in output.split('\n'): + for line in output.split("\n"): xcp.logger.info(line) if p.returncode != 0: raise EnvironmentFailure("Error cleaning yum cache") - cmd = ['yum', 'upgrade', '-y', '--noplugins', '-c', yum_conf_file, update_package] - p = subprocess.Popen(cmd, shell=False, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, close_fds=True, env=yum_env, universal_newlines=True) + cmd = ["yum", "upgrade", "-y", "--noplugins", "-c", yum_conf_file, update_package] + p = subprocess.Popen( + cmd, + shell=False, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + close_fds=True, + env=yum_env, + universal_newlines=True, + ) output, _ = p.communicate() - xcp.logger.info('pool_update.apply %r returncode=%r output:', cmd, p.returncode) - for line in output.split('\n'): + xcp.logger.info("pool_update.apply %r returncode=%r output:", cmd, p.returncode) + for line in output.split("\n"): xcp.logger.info(line) if p.returncode != 0: if ERROR_MESSAGE_DOWNLOAD_PACKAGE in output: - raise InvalidUpdate('Missing package(s) in the update.') + raise InvalidUpdate("Missing package(s) in the update.") - m = re.search('(?<=' + ERROR_MESSAGE_START + ').+$', output, flags=re.DOTALL) + m = re.search("(?<=" + ERROR_MESSAGE_START + ").+$", output, flags=re.DOTALL) if m: errmsg = m.group() - errmsg = 
re.sub(ERROR_MESSAGE_END + '.+', '', errmsg, flags=re.DOTALL) + errmsg = re.sub(ERROR_MESSAGE_END + ".+", "", errmsg, flags=re.DOTALL) raise ApplyFailure(errmsg) else: raise ApplyFailure(output) -if __name__ == '__main__': +if __name__ == "__main__": xcp.logger.logToSyslog(level=logging.INFO) txt = sys.stdin.read() params, method = xmlrpc.client.loads(txt) @@ -86,27 +106,29 @@ if __name__ == '__main__': lock_acquired = False try: session = XenAPI.xapi_local() - session.xenapi.login_with_password('root', '', '', 'Pool_update') + session.xenapi.login_with_password("root", "", "", "Pool_update") update = params[1] host = params[2] # Check if the update has been applied. if update in session.xenapi.host.get_updates(host): - print(failure_message( - UPDATE_ALREADY_APPLIED, [update])) + print(failure_message(UPDATE_ALREADY_APPLIED, [update])) sys.exit(0) update_uuid = session.xenapi.pool_update.get_uuid(update) - yum_conf_file = os.path.join(TMP_DIR, update_uuid, 'yum.conf') + yum_conf_file = os.path.join(TMP_DIR, update_uuid, "yum.conf") # To prevent the race condition of invoking apply, set a lock. - lock_file = os.path.join(TMP_DIR, update_uuid + '.lck') + lock_file = os.path.join(TMP_DIR, update_uuid + ".lck") lock = fasteners.InterProcessLock(lock_file) lock_acquired = lock.acquire(blocking=False) if not lock_acquired: - print(failure_message( - OTHER_OPERATION_IN_PROGRESS, ['Applying the update', update])) + print( + failure_message( + OTHER_OPERATION_IN_PROGRESS, ["Applying the update", update] + ) + ) sys.exit(0) # Run precheck @@ -136,10 +158,10 @@ if __name__ == '__main__': pass else: raise - with open (yum_conf_file, "w+") as file: + with open(yum_conf_file, "w+") as file: file.write("{0}".format(yum_conf)) - execute_apply(session, '@update', yum_conf_file) + execute_apply(session, "@update", yum_conf_file) session.xenapi.pool_update.resync_host(host) print(success_message()) diff --git a/scripts/Makefile b/scripts/Makefile index 6e4fe678471..8d3196ceece 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -116,7 +116,6 @@ install: $(IPROG) pbis-force-domain-leave $(DESTDIR)$(LIBEXECDIR) mkdir -p $(DESTDIR)$(EXTENSIONDIR) $(IPROG) extensions/pool_update.precheck $(DESTDIR)$(EXTENSIONDIR) - $(IPROG) extensions/pool_update.apply $(DESTDIR)$(EXTENSIONDIR) mkdir -p $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/extauth-hook $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/extauth-hook-AD.py $(DESTDIR)$(PLUGINDIR) From 2a2d233254f24235cd7b38e454f973e2870ca0c5 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 8 Jul 2024 12:00:00 +0200 Subject: [PATCH 161/222] CP-50100: backup-sr-metadata.py: apply 2to3 and change shebang to python3 Signed-off-by: Bernhard Kaindl --- pyproject.toml | 1 - scripts/backup-sr-metadata.py | 22 +++++++++++----------- 2 files changed, 11 insertions(+), 12 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6f8e1095dc9..07515dc95a8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -261,7 +261,6 @@ discard_messages_matching = [ ] expected_to_fail = [ # Need 2to3 -w and maybe a few other minor updates: - "scripts/backup-sr-metadata.py", "scripts/restore-sr-metadata.py", ] diff --git a/scripts/backup-sr-metadata.py b/scripts/backup-sr-metadata.py index 2464d5c8761..346c636b8f0 100644 --- a/scripts/backup-sr-metadata.py +++ b/scripts/backup-sr-metadata.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 # Back up the SR metadata and VDI list into an XML file # (c) Anil Madhavapeddy, Citrix Systems Inc, 2008 @@ -7,7 +7,7 @@ import sys import getopt import 
codecs -from xml.dom.minidom import Document +from xml.dom.minidom import Document # pytype: disable=pyi-error def logout(): try: @@ -17,11 +17,11 @@ def logout(): atexit.register(logout) def usage(): - print >> sys.stderr, "%s [-f ]" % sys.argv[0] + print("%s [-f ]" % sys.argv[0], file=sys.stderr) sys.exit(1) def set_if_exists(xml, record, key): - if record.has_key(key): + if key in record: xml.setAttribute(key, record[key]) else: xml.setAttribute(key, "") @@ -32,8 +32,8 @@ def main(argv): try: opts, args = getopt.getopt(argv, "hf:", []) - except getopt.GetoptError, err: - print str(err) + except getopt.GetoptError as err: + print(str(err)) usage() outfile = None @@ -60,18 +60,18 @@ def main(argv): set_if_exists(srxml, srrec, 'uuid') set_if_exists(srxml, srrec, 'name_label') set_if_exists(srxml, srrec, 'name_description') - + for vdiref in srrec['VDIs']: - try: + try: vdirec = session.xenapi.VDI.get_record(vdiref) vdixml = doc.createElement("vdi") set_if_exists(vdixml, vdirec, 'uuid') set_if_exists(vdixml, vdirec, 'name_label') set_if_exists(vdixml, vdirec, 'name_description') srxml.appendChild(vdixml) - except: - print >> sys.stderr, "Failed to get VDI record for: %s" % vdiref - + except Exception: + print("Failed to get VDI record for: %s" % vdiref, file=sys.stderr) + metaxml.appendChild(srxml) doc.writexml(f, encoding="utf-8") From 8353dae1bdc8c1647ceb7ca482f8afceed04b983 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 8 Jul 2024 12:00:00 +0200 Subject: [PATCH 162/222] CP-50100: backup-sr-metadata.py: Fix pyright and pylint to prepare move Signed-off-by: Bernhard Kaindl --- python3/stubs/XenAPI.pyi | 3 +++ scripts/backup-sr-metadata.py | 38 ++++++++++++++++++----------------- 2 files changed, 23 insertions(+), 18 deletions(-) diff --git a/python3/stubs/XenAPI.pyi b/python3/stubs/XenAPI.pyi index 4590e614814..bde962b0556 100644 --- a/python3/stubs/XenAPI.pyi +++ b/python3/stubs/XenAPI.pyi @@ -42,9 +42,12 @@ class _Dispatcher: """Authenticate the session with the XenAPI server.""" def logout(self) -> None: """End the session with the XenAPI server.""" + + # Dynamic attributes that type checkers like pytype and pyright cannot check: session: Incomplete secret: Incomplete SR: Incomplete + VDI: Incomplete PBD: Incomplete pool: Incomplete VM: Incomplete diff --git a/scripts/backup-sr-metadata.py b/scripts/backup-sr-metadata.py index 346c636b8f0..8f83a9b06cb 100644 --- a/scripts/backup-sr-metadata.py +++ b/scripts/backup-sr-metadata.py @@ -3,52 +3,56 @@ # (c) Anil Madhavapeddy, Citrix Systems Inc, 2008 import atexit -import XenAPI -import sys -import getopt import codecs +import contextlib +import getopt +import sys from xml.dom.minidom import Document # pytype: disable=pyi-error -def logout(): - try: - session.xenapi.session.logout() - except: - pass -atexit.register(logout) +import XenAPI + def usage(): print("%s [-f ]" % sys.argv[0], file=sys.stderr) - sys.exit(1) + def set_if_exists(xml, record, key): if key in record: xml.setAttribute(key, record[key]) else: xml.setAttribute(key, "") - + def main(argv): session = XenAPI.xapi_local() + + def logout(): + with contextlib.suppress(Exception): + session.xenapi.session.logout() + + atexit.register(logout) + session.xenapi.login_with_password("", "", "1.0", "xen-api-scripts-backup-sr-metadata") try: - opts, args = getopt.getopt(argv, "hf:", []) + opts, _ = getopt.getopt(argv, "hf:", []) except getopt.GetoptError as err: - print(str(err)) + print(err) usage() + sys.exit(1) outfile = None for o,a in opts: if o == "-f": outfile = a - if 
outfile == None: + if outfile is None: usage() + sys.exit(1) f = codecs.open(outfile, 'w', encoding="utf-8") srs = session.xenapi.SR.get_all_records() - vdis = session.xenapi.SR.get_all_records() - + doc = Document() metaxml = doc.createElement("meta") @@ -80,5 +84,3 @@ def main(argv): if __name__ == "__main__": main(sys.argv[1:]) - - From ef27a66102f10ca83afee82866cd72c349c16708 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 8 Jul 2024 12:00:00 +0200 Subject: [PATCH 163/222] Improve spelling and fix typos in Python3 scripts Signed-off-by: Bernhard Kaindl --- python3/bin/perfmon | 2 +- python3/libexec/nbd_client_manager.py | 2 +- python3/packages/observer.py | 8 ++++---- python3/perfmon/perfmon | 2 +- scripts/examples/python/XenAPIPlugin.py | 2 +- scripts/plugins/extauth-hook-AD.py | 8 ++++---- 6 files changed, 12 insertions(+), 12 deletions(-) diff --git a/python3/bin/perfmon b/python3/bin/perfmon index e5c6741b2d3..58be93284d7 100644 --- a/python3/bin/perfmon +++ b/python3/bin/perfmon @@ -27,7 +27,7 @@ # based on the "start" CGI param. It will return the highest level of granularity # available for the period requested. # -# The "cf" CGI param specfies the row. (All rows are returned if it's missing.) +# The "cf" CGI param specifies the row. If it is not set, all rows are returned. # pylint: disable=too-many-lines, missing-class-docstring # pytype: disable=attribute-error diff --git a/python3/libexec/nbd_client_manager.py b/python3/libexec/nbd_client_manager.py index 0f77e69b12e..d0655df9756 100644 --- a/python3/libexec/nbd_client_manager.py +++ b/python3/libexec/nbd_client_manager.py @@ -96,7 +96,7 @@ def _call(cmd_args, error=True): if error and proc.returncode != 0: LOGGER.error( - "%s exitted with code %d: %s", " ".join(cmd_args), proc.returncode, stderr + "%s exited with code %d: %s", " ".join(cmd_args), proc.returncode, stderr ) raise subprocess.CalledProcessError( diff --git a/python3/packages/observer.py b/python3/packages/observer.py index 4b3451dbec3..1651eb5b4d8 100644 --- a/python3/packages/observer.py +++ b/python3/packages/observer.py @@ -33,13 +33,13 @@ from logging.handlers import SysLogHandler from typing import List, Sequence -# The opentelemetry library may generate exceptions we aren't expecting, this code +# The OpenTelemetry library may generate exceptions we aren't expecting: This code # must not fail or it will cause the pass-through script to fail when at worst # this script should be a noop. As such, we sometimes need to catch broad exceptions: # pylint: disable=broad-exception-caught, too-many-locals, too-many-statements # wrapt.decorator adds the extra parameters so we shouldn't provide them: # pylint: disable=no-value-for-parameter -# We only want to import opentelemetry libraries if instrumentation is enabled +# We only want to import OpenTelemetry libraries when instrumentation is enabled # pylint: disable=import-outside-toplevel DEBUG_ENABLED = os.getenv("XAPI_TEST") @@ -103,7 +103,7 @@ def _init_tracing(configs: List[str], config_dir: str): If configs is empty, return the noop span and patch_module functions. If configs are passed: - - Import the opentelemetry packages + - Import the OpenTelemetry packages - Read the configuration file - Create a tracer - Trace the script @@ -372,7 +372,7 @@ def _patch_module(module_name): # are not overridden and will be the defined no-op functions. span, patch_module = _init_tracing(observer_configs, observer_config_dir) - # If tracing is now operational, explicity set "OTEL_SDK_DISABLED" to "false". 
+ # If tracing is now operational, explicitly set "OTEL_SDK_DISABLED" to "false". # In our case, different from the standard, we want the tracing disabled by # default, so if the env variable is not set the noop implementation is used. os.environ["OTEL_SDK_DISABLED"] = "false" diff --git a/python3/perfmon/perfmon b/python3/perfmon/perfmon index c40eb659cf6..9f26f998fd4 100644 --- a/python3/perfmon/perfmon +++ b/python3/perfmon/perfmon @@ -6,7 +6,7 @@ import os import socket import XenAPIPlugin -# TODO: put this info plus all the supported cmds in a shared file +# TODO: Document this information and all supported commands cmdsockname = "\0perfmon" # an af_unix socket name (the "\0" stops socket.bind() creating a fs node) cmdmaxlen = 256 diff --git a/scripts/examples/python/XenAPIPlugin.py b/scripts/examples/python/XenAPIPlugin.py index 87d8c23c12b..1d657f065d1 100644 --- a/scripts/examples/python/XenAPIPlugin.py +++ b/scripts/examples/python/XenAPIPlugin.py @@ -16,7 +16,7 @@ import xmlrpc.client as xmlrpclib class Failure(Exception): - """Provide compatibilty with plugins written against XenServer 5.5 API""" + """Provide compatibility with plugins written against the XenServer 5.5 API""" def __init__(self, code, params): Exception.__init__(self) diff --git a/scripts/plugins/extauth-hook-AD.py b/scripts/plugins/extauth-hook-AD.py index 0474ecacd39..38fb0b67329 100755 --- a/scripts/plugins/extauth-hook-AD.py +++ b/scripts/plugins/extauth-hook-AD.py @@ -277,7 +277,7 @@ def _add_subject(self, subject_rec): logger.debug("Permit user %s, Current sid is %s", formatted_name, sid) self._lines.append(formatted_name) - # If ssh key is permittd in authorized_keys, + # If the ssh key is permitted in the authorized_keys file, # The original name is compared, add UPN and original name if self._backend == ADBackend.BD_PBIS and name != formatted_name: self._lines.append(name) @@ -311,7 +311,7 @@ def _add_subject(self, subject_rec): class KeyValueConfig(ADConfig): """ - Only support configure files with key value in each line, seperated by sep + Only support configure files with key value in each line, separated by sep Otherwise, it will be just copied and un-configurable If multiple lines with the same key exists, only the first line will be configured """ @@ -475,7 +475,7 @@ def after_extauth_enable(session, args): def after_xapi_initialize(session, args): - """Callback afer xapi initialize""" + """Callback after xapi initialization""" return refresh_all_configurations(session, args, "after_xapi_initialize") @@ -485,7 +485,7 @@ def after_subject_add(session, args): def after_subject_remove(session, args): - """Callbackk after remove subject""" + """Callback after remove subject""" return refresh_dynamic_pam(session, args, "after_subject_remove") From f588d6a84de553516a448d323476716aeee1a4c5 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 8 Jul 2024 12:00:00 +0200 Subject: [PATCH 164/222] Python3: Cleanup unused imports Signed-off-by: Bernhard Kaindl --- python3/extensions/pool_update.apply | 2 -- scripts/plugins/extauth-hook-AD.py | 2 -- 2 files changed, 4 deletions(-) diff --git a/python3/extensions/pool_update.apply b/python3/extensions/pool_update.apply index 2bf9e0a5dcc..092da42a90b 100644 --- a/python3/extensions/pool_update.apply +++ b/python3/extensions/pool_update.apply @@ -2,14 +2,12 @@ import errno -import json import logging import os import re import shutil import subprocess import sys -import traceback import xmlrpc.client import fasteners diff --git 
a/scripts/plugins/extauth-hook-AD.py b/scripts/plugins/extauth-hook-AD.py index 0474ecacd39..fc359f060c7 100755 --- a/scripts/plugins/extauth-hook-AD.py +++ b/scripts/plugins/extauth-hook-AD.py @@ -10,7 +10,6 @@ # Alternatively, the extauth-hook module can be called, which will # dispatch to the correct extauth-hook-.py module automatically. import abc -import sys import subprocess import os import shutil @@ -20,7 +19,6 @@ from collections import OrderedDict from enum import Enum import XenAPIPlugin -import XenAPI # this plugin manage following configuration files for external auth From 16e37e606bc64692d622e8e3cfe2c49201b06e4a Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 8 Jul 2024 12:00:00 +0200 Subject: [PATCH 165/222] scripts/restore-sr-metadata.py: Apply 2to3, fix pytype(prepare moving it) Signed-off-by: Bernhard Kaindl --- pyproject.toml | 2 -- scripts/restore-sr-metadata.py | 46 ++++++++++++++++++---------------- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 07515dc95a8..2749d69956f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -260,8 +260,6 @@ discard_messages_matching = [ "No Node.TEXT_NODE in module xml.dom.minidom, referenced from 'xml.dom.expatbuilder'" ] expected_to_fail = [ - # Need 2to3 -w and maybe a few other minor updates: - "scripts/restore-sr-metadata.py", ] # ----------------------------------------------------------------------------- diff --git a/scripts/restore-sr-metadata.py b/scripts/restore-sr-metadata.py index 105591a15c5..21214fef3c5 100644 --- a/scripts/restore-sr-metadata.py +++ b/scripts/restore-sr-metadata.py @@ -3,34 +3,36 @@ # (c) Anil Madhavapeddy, Citrix Systems Inc, 2008 import atexit +import contextlib import XenAPI import os, sys, time import getopt -from xml.dom.minidom import parse +from xml.dom.minidom import parse # pytype: disable=pyi-error import codecs sys.stdout = codecs.getwriter("utf-8")(sys.stdout) sys.stderr = codecs.getwriter("utf-8")(sys.stderr) -def logout(): - try: - session.xenapi.session.logout() - except: - pass -atexit.register(logout) def usage(): - print >> sys.stderr, "%s -f -u " % sys.argv[0] + print("%s -f -u " % sys.argv[0], file=sys.stderr) sys.exit(1) def main(argv): session = XenAPI.xapi_local() + + def logout(): + with contextlib.suppress(Exception): + session.xenapi.session.logout() + + atexit.register(logout) + session.xenapi.login_with_password("", "", "1.0", "xen-api-scripts-restore-sr-metadata") try: opts, args = getopt.getopt(argv, "hf:u:", []) - except getopt.GetoptError, err: - print str(err) + except getopt.GetoptError as err: + print(str(err)) usage() infile = None @@ -47,11 +49,11 @@ def main(argv): try: doc = parse(infile) except: - print >> sys.stderr, "Error parsing %s" % infile + print("Error parsing %s" % infile, file=sys.stderr) sys.exit(1) if doc.documentElement.tagName != "meta": - print >> sys.stderr, "Unexpected root element while parsing %s" % infile + print("Unexpected root element while parsing %s" % infile, file=sys.stderr) sys.exit(1) for srxml in doc.documentElement.childNodes: @@ -60,19 +62,19 @@ def main(argv): name_label = srxml.getAttribute("name_label") name_descr = srxml.getAttribute("name_description") except: - print >> sys.stderr, "Error parsing SR tag" + print("Error parsing SR tag", file=sys.stderr) continue # only set attributes on the selected SR passed in on cmd line if sruuid is None or sruuid == "all" or sruuid == uuid: try: srref = session.xenapi.SR.get_by_uuid(uuid) - print "Setting SR (%s):" % uuid + 
print("Setting SR (%s):" % uuid) session.xenapi.SR.set_name_label(srref, name_label) - print " Name: %s " % name_label + print(" Name: %s " % name_label) session.xenapi.SR.set_name_description(srref, name_descr) - print " Description: %s" % name_descr + print(" Description: %s" % name_descr) except: - print >> sys.stderr, "Error setting SR data for: %s (%s)" % (uuid, name_label) + print("Error setting SR data for: %s (%s)" % (uuid, name_label), file=sys.stderr) sys.exit(1) # go through all the SR VDIs and set the name_label and description for vdixml in srxml.childNodes: @@ -81,17 +83,17 @@ def main(argv): vdi_label = vdixml.getAttribute("name_label") vdi_descr = vdixml.getAttribute("name_description") except: - print >> sys.stderr, "Error parsing VDI tag" + print("Error parsing VDI tag", file=sys.stderr) continue try: vdiref = session.xenapi.VDI.get_by_uuid(vdi_uuid) - print "Setting VDI (%s):" % vdi_uuid + print("Setting VDI (%s):" % vdi_uuid) session.xenapi.VDI.set_name_label(vdiref, vdi_label) - print " Name: %s" % vdi_label + print(" Name: %s" % vdi_label) session.xenapi.VDI.set_name_description(vdiref, vdi_descr) - print " Description: %s" % vdi_descr + print(" Description: %s" % vdi_descr) except: - print >> sys.stderr, "Error setting VDI data for: %s (%s)" % (vdi_uuid, name_label) + print("Error setting VDI data for: %s (%s)" % (vdi_uuid, name_label), file=sys.stderr) continue if __name__ == "__main__": From 81fecb602f65a71bc0c4fff1432eae76f7deb407 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 9 Jul 2024 12:00:00 +0200 Subject: [PATCH 166/222] scripts/xe-reset-networking: Convert tabs to spaces and fix whitespace Signed-off-by: Bernhard Kaindl --- scripts/xe-reset-networking | 473 ++++++++++++++++++------------------ 1 file changed, 236 insertions(+), 237 deletions(-) diff --git a/scripts/xe-reset-networking b/scripts/xe-reset-networking index 38f676a5aaa..f26ce9fd7d4 100755 --- a/scripts/xe-reset-networking +++ b/scripts/xe-reset-networking @@ -27,147 +27,147 @@ management_conf = '/etc/firstboot.d/data/management.conf' network_reset = '/tmp/network-reset' def read_dict_file(fname): - f = open(fname, 'r') - d = {} - for l in f.readlines(): - kv = l.split('=') - d[kv[0].strip()] = kv[1].strip().strip("'") - return d + f = open(fname, 'r') + d = {} + for l in f.readlines(): + kv = l.split('=') + d[kv[0].strip()] = kv[1].strip().strip("'") + return d def read_inventory(): - return read_dict_file(inventory_file) + return read_dict_file(inventory_file) def read_management_conf(): - return read_dict_file(management_conf) + return read_dict_file(management_conf) def write_inventory(inventory): - f = open(inventory_file, 'w') - for k in inventory: - f.write(k + "='" + inventory[k] + "'\n") - f.flush() - os.fsync(f.fileno()) - f.close() + f = open(inventory_file, 'w') + for k in inventory: + f.write(k + "='" + inventory[k] + "'\n") + f.flush() + os.fsync(f.fileno()) + f.close() def valid_vlan(vlan): - if not re.match('^\d+$', vlan): - return False - if int(vlan)<0 or int(vlan)>4094: - return False - return True + if not re.match('^\d+$', vlan): + return False + if int(vlan)<0 or int(vlan)>4094: + return False + return True if __name__ == "__main__": - parser = OptionParser() - parser.add_option("-m", "--master", help="Master's address", dest="address", default=None) - parser.add_option("--device", help="Device name of new management interface", dest="device", default=None) - parser.add_option("--mode", help='IP configuration mode for new management interface: "none", "dhcp" or 
"static" (default is dhcp)', dest="mode", default="dhcp") - parser.add_option("--mode-v6", help='IPv6 configuration mode for new management interface: "none", "dhcp", "autoconf" or "static" (default is none)', dest="mode_v6", default="none") - parser.add_option("--novlan", help="no vlan is used for new management interface", dest="novlan", action="store_const", const=True, default=False) - parser.add_option("--vlan", help="vlanID for new management interface to be on vlan network", dest="vlan", default=None) - parser.add_option("--ip", help="IP address for new management interface", dest="ip", default='') - parser.add_option("--ipv6", help="IPv6 address (CIDR format) for new management interface", dest="ipv6", default='') - parser.add_option("--netmask", help="Netmask for new management interface", dest="netmask", default='') - parser.add_option("--gateway", help="Gateway for new management interface", dest="gateway", default='') - parser.add_option("--gateway-v6", help="IPv6 Gateway for new management interface", dest="gateway_v6", default='') - parser.add_option("--dns", help="DNS server for new management interface", dest="dns", default='') - (options, args) = parser.parse_args() - - # Determine pool role - try: - f = open(pool_conf, 'r') - try: - l = f.readline() - ls = l.split(':') - if ls[0].strip() == 'master': - master = True - address = 'localhost' - else: - master = False - if options.address == None: - address = ls[1].strip() - else: - address = options.address - finally: - f.close() - except: - pass - - # Get the management device from the firstboot data if not specified by the user - if options.device == None: - try: - conf = read_management_conf() - device = conf['LABEL'] - except: - print("Could not figure out which interface should become the management interface. 
\ - Please specify one using the --device option.") - sys.exit(1) - else: - device = options.device - - # Get the VLAN if provided in the firstboot data and not specified by the user - vlan = None - if options.vlan: - if options.novlan: - parser.error('"--vlan " and "--novlan" should not be used together') - sys.exit(1) - if not valid_vlan(options.vlan): - print("VLAN tag you gave was invalid, It must be between 0 and 4094") - sys.exit(1) - vlan = options.vlan - elif not options.novlan: - try: - conf = read_management_conf() - vlan = conf['VLAN'] - except KeyError: - pass - - # Determine IP configuration for management interface - options.mode = options.mode.lower() - if options.mode not in ["none", "dhcp", "static"]: - parser.error('mode should be either "none", "dhcp" or "static"') - sys.exit(1) - - options.mode_v6 = options.mode_v6.lower() - if options.mode not in ["none", "autoconf", "dhcp", "static"]: - parser.error('mode-v6 should be either "none", "autoconf", "dhcp" or "static"') - sys.exit(1) - - if options.mode == "none" and options.mode_v6 == "none": - parser.error("Either mode or mode-v6 must be not 'none'") - sys.exit(1) - - if options.mode == 'static' and (options.ip == '' or options.netmask == ''): - parser.error("if static IP mode is selected, an IP address and netmask need to be specified") - sys.exit(1) - - if options.mode_v6 == 'static': - if options.ipv6 == '': - parser.error("if static IPv6 mode is selected, an IPv6 address needs to be specified") - elif options.ipv6.find('/') == -1: - parser.error("Invalid format: IPv6 must be specified with CIDR format: /") - sys.exit(1) - - # Warn user - if not os.access('/tmp/fist_network_reset_no_warning', os.F_OK): - configuration = [] - configuration.append("Management interface: " + device) - configuration.append("IP configuration mode: " + options.mode) - configuration.append("IPv6 configuration mode:" + options.mode_v6) - if vlan != None: - configuration.append("Vlan: " + vlan) - if options.mode == "static": - configuration.append("IP address: " + options.ip) - configuration.append("Netmask: " + options.netmask) - if options.mode_v6 == "static": - configuration.append("IPv6/CIDR: " + options.ipv6) - if options.gateway != '': - configuration.append("Gateway: " + options.gateway) - if options.gateway_v6 != '': - configuration.append("IPv6 gateway: " + options.gateway_v6) - if options.dns != '': - configuration.append("DNS server(s): " + options.dns) - if master == False: - configuration.append("Pool master's address: " + address) - warning = """---------------------------------------------------------------------- + parser = OptionParser() + parser.add_option("-m", "--master", help="Master's address", dest="address", default=None) + parser.add_option("--device", help="Device name of new management interface", dest="device", default=None) + parser.add_option("--mode", help='IP configuration mode for new management interface: "none", "dhcp" or "static" (default is dhcp)', dest="mode", default="dhcp") + parser.add_option("--mode-v6", help='IPv6 configuration mode for new management interface: "none", "dhcp", "autoconf" or "static" (default is none)', dest="mode_v6", default="none") + parser.add_option("--novlan", help="no vlan is used for new management interface", dest="novlan", action="store_const", const=True, default=False) + parser.add_option("--vlan", help="vlanID for new management interface to be on vlan network", dest="vlan", default=None) + parser.add_option("--ip", help="IP address for new management interface", 
dest="ip", default='') + parser.add_option("--ipv6", help="IPv6 address (CIDR format) for new management interface", dest="ipv6", default='') + parser.add_option("--netmask", help="Netmask for new management interface", dest="netmask", default='') + parser.add_option("--gateway", help="Gateway for new management interface", dest="gateway", default='') + parser.add_option("--gateway-v6", help="IPv6 Gateway for new management interface", dest="gateway_v6", default='') + parser.add_option("--dns", help="DNS server for new management interface", dest="dns", default='') + (options, args) = parser.parse_args() + + # Determine pool role + try: + f = open(pool_conf, 'r') + try: + l = f.readline() + ls = l.split(':') + if ls[0].strip() == 'master': + master = True + address = 'localhost' + else: + master = False + if options.address == None: + address = ls[1].strip() + else: + address = options.address + finally: + f.close() + except: + pass + + # Get the management device from the firstboot data if not specified by the user + if options.device == None: + try: + conf = read_management_conf() + device = conf['LABEL'] + except: + print("Could not figure out which interface should become the management interface. \ + Please specify one using the --device option.") + sys.exit(1) + else: + device = options.device + + # Get the VLAN if provided in the firstboot data and not specified by the user + vlan = None + if options.vlan: + if options.novlan: + parser.error('"--vlan " and "--novlan" should not be used together') + sys.exit(1) + if not valid_vlan(options.vlan): + print("VLAN tag you gave was invalid, It must be between 0 and 4094") + sys.exit(1) + vlan = options.vlan + elif not options.novlan: + try: + conf = read_management_conf() + vlan = conf['VLAN'] + except KeyError: + pass + + # Determine IP configuration for management interface + options.mode = options.mode.lower() + if options.mode not in ["none", "dhcp", "static"]: + parser.error('mode should be either "none", "dhcp" or "static"') + sys.exit(1) + + options.mode_v6 = options.mode_v6.lower() + if options.mode not in ["none", "autoconf", "dhcp", "static"]: + parser.error('mode-v6 should be either "none", "autoconf", "dhcp" or "static"') + sys.exit(1) + + if options.mode == "none" and options.mode_v6 == "none": + parser.error("Either mode or mode-v6 must be not 'none'") + sys.exit(1) + + if options.mode == 'static' and (options.ip == '' or options.netmask == ''): + parser.error("if static IP mode is selected, an IP address and netmask need to be specified") + sys.exit(1) + + if options.mode_v6 == 'static': + if options.ipv6 == '': + parser.error("if static IPv6 mode is selected, an IPv6 address needs to be specified") + elif options.ipv6.find('/') == -1: + parser.error("Invalid format: IPv6 must be specified with CIDR format: /") + sys.exit(1) + + # Warn user + if not os.access('/tmp/fist_network_reset_no_warning', os.F_OK): + configuration = [] + configuration.append("Management interface: " + device) + configuration.append("IP configuration mode: " + options.mode) + configuration.append("IPv6 configuration mode:" + options.mode_v6) + if vlan != None: + configuration.append("Vlan: " + vlan) + if options.mode == "static": + configuration.append("IP address: " + options.ip) + configuration.append("Netmask: " + options.netmask) + if options.mode_v6 == "static": + configuration.append("IPv6/CIDR: " + options.ipv6) + if options.gateway != '': + configuration.append("Gateway: " + options.gateway) + if options.gateway_v6 != '': + 
configuration.append("IPv6 gateway: " + options.gateway_v6) + if options.dns != '': + configuration.append("DNS server(s): " + options.dns) + if master == False: + configuration.append("Pool master's address: " + address) + warning = """---------------------------------------------------------------------- !! WARNING !! This command will reboot the host and reset its network configuration. @@ -179,115 +179,114 @@ Before completing this command: ---------------------------------------------------------------------- Your network will be re-configured as follows:\n\n""" - confirmation = """\n\nIf you want to change any of the above settings, type 'no' and re-run + confirmation = """\n\nIf you want to change any of the above settings, type 'no' and re-run the command with appropriate arguments (use --help for a list of options). Type 'yes' to continue. Type 'no' to cancel. """ - res = input(warning + '\n'.join(configuration) + confirmation) - if res != 'yes': - sys.exit(1) - - # Update master's IP, if needed and given - if master == False and options.address != None: - print("Setting master's ip (" + address + ")...") - try: - f = open(pool_conf, 'w') - f.write('slave:' + address) - finally: - f.flush() - os.fsync(f.fileno()) - f.close() - - # Construct bridge name for management interface based on convention - if device[:3] == 'eth': - bridge = 'xenbr' + device[3:] - else: - bridge = 'br' + device - - # Ensure xapi is not running - print("Stopping xapi...") - os.system('service xapi stop >/dev/null 2>/dev/null') - - # Reconfigure new management interface - print("Reconfiguring " + device + "...") - os.system('systemctl stop xcp-networkd >/dev/null 2>/dev/null') - try: - os.remove('/var/lib/xcp/networkd.db') - except Exception as e: - print('Warning: Failed to delete networkd.db.\n%s' % e) - - # Update interfaces in inventory file - print('Updating inventory file...') - inventory = read_inventory() - if vlan != None: - inventory['MANAGEMENT_INTERFACE'] = 'xentemp' - else: - inventory['MANAGEMENT_INTERFACE'] = bridge - inventory['CURRENT_INTERFACES'] = '' - write_inventory(inventory) - - # Rewrite firstboot management.conf file, which will be picked it by xcp-networkd on restart (if used) - is_static = False - try: - f = open(management_conf, 'w') - f.write("LABEL='" + device + "'\n") - if options.mode != "none": - f.write("MODE='" + options.mode + "'\n") - if options.mode_v6 != "none": - f.write("MODEV6='" + options.mode_v6 + "'\n") - if vlan != None: - f.write("VLAN='" + vlan + "'\n") - if options.mode == 'static': - is_static = True - f.write("IP='" + options.ip + "'\n") - f.write("NETMASK='" + options.netmask + "'\n") - if options.gateway != '': - f.write("GATEWAY='" + options.gateway + "'\n") - if options.mode_v6 == "static": - is_static = True - f.write("IPv6='" + options.ipv6 + "'\n") - if options.gateway_v6 != '': - f.write("IPv6_GATEWAY='" + options.gateway_v6 + "'\n") - if is_static and options.dns != '': - f.write("DNS='" + options.dns + "'\n") - finally: - f.flush() - os.fsync(f.fileno()) - f.close() - - # Write trigger file for XAPI to continue the network reset on startup - try: - f = open(network_reset, 'w') - f.write('DEVICE=' + device + '\n') - if options.mode != "none": - f.write('MODE=' + options.mode + '\n') - if options.mode_v6 != "none": - f.write('MODE_V6=' + options.mode_v6 + '\n') - if vlan != None: - f.write('VLAN=' + vlan + '\n') - if options.mode == 'static': - f.write('IP=' + options.ip + '\n') - f.write('NETMASK=' + options.netmask + '\n') - if options.gateway != 
'': - f.write('GATEWAY=' + options.gateway + '\n') - if options.mode_v6 == "static": - f.write('IPV6=' + options.ipv6 + '\n') - if options.gateway_v6 != '': - f.write('GATEWAY_V6=' + options.gateway_v6 + '\n') - if is_static and options.dns != '': - f.write('DNS=' + options.dns + '\n') - finally: - f.flush() - os.fsync(f.fileno()) - f.close() - - # Reset the domain 0 network interface naming configuration - # back to a fresh-install state for the currently-installed - # hardware. - os.system("/etc/sysconfig/network-scripts/interface-rename.py --reset-to-install") - - # Reboot - os.system("mount -o remount,rw / && reboot -f") + res = input(warning + '\n'.join(configuration) + confirmation) + if res != 'yes': + sys.exit(1) + + # Update master's IP, if needed and given + if master == False and options.address != None: + print("Setting master's ip (" + address + ")...") + try: + f = open(pool_conf, 'w') + f.write('slave:' + address) + finally: + f.flush() + os.fsync(f.fileno()) + f.close() + + # Construct bridge name for management interface based on convention + if device[:3] == 'eth': + bridge = 'xenbr' + device[3:] + else: + bridge = 'br' + device + + # Ensure xapi is not running + print("Stopping xapi...") + os.system('service xapi stop >/dev/null 2>/dev/null') + + # Reconfigure new management interface + print("Reconfiguring " + device + "...") + os.system('systemctl stop xcp-networkd >/dev/null 2>/dev/null') + try: + os.remove('/var/lib/xcp/networkd.db') + except Exception as e: + print('Warning: Failed to delete networkd.db.\n%s' % e) + + # Update interfaces in inventory file + print('Updating inventory file...') + inventory = read_inventory() + if vlan != None: + inventory['MANAGEMENT_INTERFACE'] = 'xentemp' + else: + inventory['MANAGEMENT_INTERFACE'] = bridge + inventory['CURRENT_INTERFACES'] = '' + write_inventory(inventory) + + # Rewrite firstboot management.conf file, which will be picked it by xcp-networkd on restart (if used) + is_static = False + try: + f = open(management_conf, 'w') + f.write("LABEL='" + device + "'\n") + if options.mode != "none": + f.write("MODE='" + options.mode + "'\n") + if options.mode_v6 != "none": + f.write("MODEV6='" + options.mode_v6 + "'\n") + if vlan != None: + f.write("VLAN='" + vlan + "'\n") + if options.mode == 'static': + is_static = True + f.write("IP='" + options.ip + "'\n") + f.write("NETMASK='" + options.netmask + "'\n") + if options.gateway != '': + f.write("GATEWAY='" + options.gateway + "'\n") + if options.mode_v6 == "static": + is_static = True + f.write("IPv6='" + options.ipv6 + "'\n") + if options.gateway_v6 != '': + f.write("IPv6_GATEWAY='" + options.gateway_v6 + "'\n") + if is_static and options.dns != '': + f.write("DNS='" + options.dns + "'\n") + finally: + f.flush() + os.fsync(f.fileno()) + f.close() + + # Write trigger file for XAPI to continue the network reset on startup + try: + f = open(network_reset, 'w') + f.write('DEVICE=' + device + '\n') + if options.mode != "none": + f.write('MODE=' + options.mode + '\n') + if options.mode_v6 != "none": + f.write('MODE_V6=' + options.mode_v6 + '\n') + if vlan != None: + f.write('VLAN=' + vlan + '\n') + if options.mode == 'static': + f.write('IP=' + options.ip + '\n') + f.write('NETMASK=' + options.netmask + '\n') + if options.gateway != '': + f.write('GATEWAY=' + options.gateway + '\n') + if options.mode_v6 == "static": + f.write('IPV6=' + options.ipv6 + '\n') + if options.gateway_v6 != '': + f.write('GATEWAY_V6=' + options.gateway_v6 + '\n') + if is_static and options.dns != '': + 
f.write('DNS=' + options.dns + '\n') + finally: + f.flush() + os.fsync(f.fileno()) + f.close() + + # Reset the domain 0 network interface naming configuration + # back to a fresh-install state for the currently-installed + # hardware. + os.system("/etc/sysconfig/network-scripts/interface-rename.py --reset-to-install") + # Reboot + os.system("mount -o remount,rw / && reboot -f") From a39d65efaf0658b6c5af56cde81c064650bbfbaf Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 9 Jul 2024 12:00:00 +0200 Subject: [PATCH 167/222] CP-49906: Move scripts/plugins/extauth-hook-AD.py to python3/plugins Signed-off-by: Ashwinh Co-authored-by: Bernhard Kaindl --- python3/Makefile | 3 ++- {scripts => python3}/plugins/extauth-hook | 0 {scripts => python3}/plugins/extauth-hook-AD.py | 12 ++++++------ {scripts => python3}/plugins/extauth_hook_ad.py | 0 {scripts => python3}/plugins/test_extauth_hook_AD.py | 0 scripts/Makefile | 2 -- 6 files changed, 8 insertions(+), 9 deletions(-) rename {scripts => python3}/plugins/extauth-hook (100%) rename {scripts => python3}/plugins/extauth-hook-AD.py (99%) rename {scripts => python3}/plugins/extauth_hook_ad.py (100%) rename {scripts => python3}/plugins/test_extauth_hook_AD.py (100%) diff --git a/python3/Makefile b/python3/Makefile index 15e0a27b57a..75416a8d7f9 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -15,7 +15,6 @@ install: $(IPROG) -d $(DESTDIR)/usr/lib/systemd/system $(IPROG) -d $(DESTDIR)$(EXTENSIONDIR) - $(IDATA) packages/inventory.py $(DESTDIR)$(SITE3_DIR)/ $(IDATA) packages/observer.py $(DESTDIR)$(SITE3_DIR)/ @@ -39,6 +38,8 @@ install: $(IPROG) plugins/install-supp-pack $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/echo.py $(DESTDIR)$(PLUGINDIR)/echo $(IPROG) plugins/openvswitch-config-update $(DESTDIR)$(PLUGINDIR) + $(IPROG) plugins/extauth-hook $(DESTDIR)$(PLUGINDIR) + $(IPROG) plugins/extauth-hook-AD.py $(DESTDIR)$(PLUGINDIR) $(IPROG) perfmon/perfmon $(DESTDIR)$(PLUGINDIR) $(IDATA) perfmon/perfmon.service $(DESTDIR)/usr/lib/systemd/system/perfmon.service diff --git a/scripts/plugins/extauth-hook b/python3/plugins/extauth-hook similarity index 100% rename from scripts/plugins/extauth-hook rename to python3/plugins/extauth-hook diff --git a/scripts/plugins/extauth-hook-AD.py b/python3/plugins/extauth-hook-AD.py similarity index 99% rename from scripts/plugins/extauth-hook-AD.py rename to python3/plugins/extauth-hook-AD.py index bd2c349d4ba..0123461749c 100755 --- a/scripts/plugins/extauth-hook-AD.py +++ b/python3/plugins/extauth-hook-AD.py @@ -2,6 +2,12 @@ # # extauth-hook-AD.py # +# This plugin manages the following configuration files for external authentication +# - /etc/nsswitch.conf +# - /etc/pam.d/sshd +# - /etc/pam.d/hcp_users +# - /etc/ssh/ssh_config +# # This module can be called directly as a plugin. 
It handles # Active Directory being enabled or disabled as the hosts external_auth_type, # or subjects being added or removed while AD is the external_auth_type, @@ -21,12 +27,6 @@ import XenAPIPlugin -# this plugin manage following configuration files for external auth -# - /etc/nsswitch.conf -# - /etc/pam.d/sshd -# - /etc/pam.d/hcp_users -# - /etc/ssh/ssh_config - # pylint: disable=too-few-public-methods # pytype: disable=ignored-abstractmethod diff --git a/scripts/plugins/extauth_hook_ad.py b/python3/plugins/extauth_hook_ad.py similarity index 100% rename from scripts/plugins/extauth_hook_ad.py rename to python3/plugins/extauth_hook_ad.py diff --git a/scripts/plugins/test_extauth_hook_AD.py b/python3/plugins/test_extauth_hook_AD.py similarity index 100% rename from scripts/plugins/test_extauth_hook_AD.py rename to python3/plugins/test_extauth_hook_AD.py diff --git a/scripts/Makefile b/scripts/Makefile index 8d3196ceece..5d769d8778d 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -117,8 +117,6 @@ install: mkdir -p $(DESTDIR)$(EXTENSIONDIR) $(IPROG) extensions/pool_update.precheck $(DESTDIR)$(EXTENSIONDIR) mkdir -p $(DESTDIR)$(PLUGINDIR) - $(IPROG) plugins/extauth-hook $(DESTDIR)$(PLUGINDIR) - $(IPROG) plugins/extauth-hook-AD.py $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/firewall-port $(DESTDIR)$(PLUGINDIR) mkdir -p $(DESTDIR)$(HOOKSDIR)/host-post-declare-dead $(IPROG) 10resetvdis $(DESTDIR)$(HOOKSDIR)/host-post-declare-dead From c8a1b5e4435a099edcbbffe169f57c2f520c40a5 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 9 Jul 2024 12:00:00 +0200 Subject: [PATCH 168/222] scripts/restore-sr-metadata.py: isort, fix pyright and pylint Signed-off-by: Bernhard Kaindl --- scripts/restore-sr-metadata.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/scripts/restore-sr-metadata.py b/scripts/restore-sr-metadata.py index 21214fef3c5..4bbb9fe55af 100644 --- a/scripts/restore-sr-metadata.py +++ b/scripts/restore-sr-metadata.py @@ -4,19 +4,19 @@ import atexit import contextlib -import XenAPI -import os, sys, time import getopt +import io +import sys from xml.dom.minidom import parse # pytype: disable=pyi-error -import codecs -sys.stdout = codecs.getwriter("utf-8")(sys.stdout) -sys.stderr = codecs.getwriter("utf-8")(sys.stderr) +import XenAPI + +sys.stdout = io.open(sys.stdout.fileno(), "w", encoding="utf-8") +sys.stderr = io.open(sys.stderr.fileno(), "w", encoding="utf-8") def usage(): print("%s -f -u " % sys.argv[0], file=sys.stderr) - sys.exit(1) def main(argv): session = XenAPI.xapi_local() @@ -34,6 +34,7 @@ def logout(): except getopt.GetoptError as err: print(str(err)) usage() + sys.exit(1) infile = None sruuid = None @@ -45,6 +46,7 @@ def logout(): if infile == None: usage() + sys.exit(1) try: doc = parse(infile) @@ -93,10 +95,12 @@ def logout(): session.xenapi.VDI.set_name_description(vdiref, vdi_descr) print(" Description: %s" % vdi_descr) except: - print("Error setting VDI data for: %s (%s)" % (vdi_uuid, name_label), file=sys.stderr) + print( + "Error setting VDI data for: %s (%s)" % (vdi_uuid, name_label), + file=sys.stderr, + ) continue + if __name__ == "__main__": main(sys.argv[1:]) - - From deb173135a181a3a99ac61ffd55dd8fdfa1595e8 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 9 Jul 2024 12:00:00 +0200 Subject: [PATCH 169/222] CP-49906: extauth-hook-AD.py: Fix remaining pylint warnings Signed-off-by: Bernhard Kaindl --- python3/plugins/extauth-hook-AD.py | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git 
a/python3/plugins/extauth-hook-AD.py b/python3/plugins/extauth-hook-AD.py index 0123461749c..3a7b14f7959 100755 --- a/python3/plugins/extauth-hook-AD.py +++ b/python3/plugins/extauth-hook-AD.py @@ -44,7 +44,7 @@ def setup_logger(): log = logging.getLogger() if not os.path.exists(addr): - log.warning("{} not available, logs are not redirected".format(addr)) + log.warning("%s not available, logs are not redirected", addr) return # Send to syslog local5, which will be redirected to xapi log /var/log/xensource.log @@ -92,7 +92,7 @@ def __init__(self, path, session, args, ad_enabled=True, load_existing=True, fil self._ad_enabled = ad_enabled self._file_mode = file_mode if load_existing and os.path.exists(self._file_path): - with open(self._file_path, 'r') as file: + with open(self._file_path, "r", encoding="utf-8") as file: lines = file.readlines() self._lines = [l.strip() for l in lines] @@ -237,9 +237,8 @@ def _add_subject(self, subject_rec): def _install(self): if self._ad_enabled: super(DynamicPam, self)._install() - else: - if os.path.exists(self._file_path): - os.remove(self._file_path) + elif os.path.exists(self._file_path): + os.remove(self._file_path) class UsersList(DynamicPam): @@ -261,7 +260,7 @@ def _add_upn(self, subject_rec): if self._backend == ADBackend.BD_PBIS: # PBIS convert domain to UPPER case, we revert it back domain = domain.lower() - self._lines.append(u"{}{}{}".format(user, sep, domain)) + self._lines.append("{}{}{}".format(user, sep, domain)) except KeyError: logger.info("subject does not have upn %s", subject_rec) except ValueError: @@ -362,7 +361,7 @@ def _apply_value(self, key, value): if self._is_special_line(key): line = value else: # normal line, construct the key value pair - sep = self._sep if self._sep else " " + sep = self._sep or " " line = "{}{}{}".format(key, sep, value) self._lines.append(line) From fb98229e86250d6add9cf75e65ad74bdc4726877 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Tue, 9 Jul 2024 15:13:00 +0000 Subject: [PATCH 170/222] CP-49900: Removed templates folder from python3/ - Removed templates debian and debug from Makefile Signed-off-by: Ashwinh --- python3/Makefile | 3 - python3/templates/debian | 171 --------------------------------------- python3/templates/debug | 7 -- 3 files changed, 181 deletions(-) delete mode 100644 python3/templates/debian delete mode 100755 python3/templates/debug diff --git a/python3/Makefile b/python3/Makefile index 15e0a27b57a..034a472ec1c 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -44,9 +44,6 @@ install: $(IDATA) perfmon/perfmon.service $(DESTDIR)/usr/lib/systemd/system/perfmon.service $(IPROG) perfmon/sysconfig-perfmon $(DESTDIR)/etc/sysconfig/perfmon -# templates - $(IPROG) templates/debian $(DESTDIR)$(OPTDIR)/packages/post-install-scripts/debian-etch - $(IPROG) templates/debug $(DESTDIR)$(OPTDIR)/packages/post-install-scripts # poweron $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wlan.py diff --git a/python3/templates/debian b/python3/templates/debian deleted file mode 100644 index 4e9b12a8714..00000000000 --- a/python3/templates/debian +++ /dev/null @@ -1,171 +0,0 @@ -#!/usr/bin/env python3 -# Copyright (c) 2005-2007 XenSource, Inc - -# Code ripped out of 'xgt' script for now -from __future__ import print_function - -import os -import signal -import socket -import sys - -import commands -import httplib -import urllib2 -import xmlrpclib - -verbose = True - - -##### begin hack. 
Provide xmlrpc over UNIX domain socket (cut+pasted from eliloader): -class UDSHTTPConnection(httplib.HTTPConnection): - """Stupid hacked up HTTPConnection subclass to allow HTTP over Unix domain - sockets.""" - - def connect(self): - path = self.host.replace("_", "/") - self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - self.sock.connect(path) - - -class UDSHTTP(httplib.HTTP): - _connection_class = UDSHTTPConnection - - -class UDSTransport(xmlrpclib.Transport): - def make_connection(self, host): - return UDSHTTP(host) - - -def xapi_local(): - return xmlrpclib.Server("http://_var_xapi_xapi/", transport=UDSTransport()) - - -##### end hack. - - -class CommandException(Exception): - pass - - -def run(cmd, *args): - debug("+ " + cmd % args) - (ret, out) = commands.getstatusoutput(cmd % args) - if verbose: - try: - for line in out.split("\n"): - log("| " + line) - except TypeError as e: - pass - if ret != 0: - debug("run - command %s failed with %d", cmd, ret) - raise CommandException(out) - return out - - -def log(fmt, *args): - print(fmt % args) - - -def debug(msg, *args): - if verbose: - print(msg % args) - - -def create_partition(lvpath): - # 1. write a partition table: - pipe = os.popen("/sbin/fdisk %s" % lvpath, "w") - - pipe.write("n\n") # new partition - pipe.write("p\n") # primary - pipe.write("1\n") # 1st partition - pipe.write("\n") # default start cylinder - pipe.write("\n") # size: as big as image - pipe.write("w\n") # write partition table - - # XXX we must ignore certain errors here as fdisk will - # sometimes return non-zero signalling error conditions - # we don't care about. Should fix to detect these cases - # specifically. - rc = pipe.close() - if rc == None: - rc = 0 - log("fdisk exited with rc %d (some non-zero exits can be ignored safely)." 
% rc) - - -def map_partitions(lvpath): - run("/sbin/kpartx -a %s", lvpath) - ps = [] - for line in run("/sbin/kpartx -l %s" % lvpath).split("\n"): - ps.append("/dev/mapper/" + line.split()[0]) - return ps - - -def unmap_partitions(lvpath): - run("/sbin/kpartx -d %s", lvpath) - - -def umount(mountpoint): - run("umount -l %s", mountpoint) - - -if __name__ == "__main__": - # os.setpgrp() - xvda = os.getenv("xvda") - xvdb = os.getenv("xvdb") - debug("Guest's xvda is on %s" % xvda) - debug("Guest's xvdb is on %s" % xvdb) - if xvda == None or xvdb == None: - raise ValueError ("Need to pass in device names for xvda and xvdb through the environment") - - vm = os.getenv("vm") - - server = xapi_local() - try: - session_id = server.session.login_with_password( - "", "", "1.0", "xen-api-scripts-debian" - )["Value"] - uuid = server.VM.get_uuid(session_id, vm)["Value"] - mountpoint = "/tmp/installer/%s" % (uuid) - finally: - server.session.logout(session_id) - - def sighandler(signum, frame): - umount(mountpoint) - os.killpg(0, signal.SIGKILL) - exit(1) - - signal.signal(signal.SIGTERM, sighandler) - - create_partition(xvda) - create_partition(xvdb) - - try: - xvda_parts = map_partitions(xvda) - - run("/sbin/mkfs.ext3 %s", xvda_parts[0]) - - xgt = "@SHAREDIR@/packages/xgt/%s.xgt" % os.path.basename(sys.argv[0]) - - run("/bin/mkdir -p %s", mountpoint) - try: - run("/bin/mount %s %s", xvda_parts[0], mountpoint) - run("/usr/bin/unzip -p %s root.tar.bz2 | tar -C %s -jx", xgt, mountpoint) - finally: - run("/bin/umount %s", mountpoint) - run("/bin/rmdir %s", mountpoint) - run("/usr/bin/unzip -p %s swap.img | dd of=%s oflag=direct bs=1M", xgt, xvdb) - - try: - session_id = server.session.login_with_password( - "", "", "1.0", "xen-api-scripts-debian" - )["Value"] - vbds = server.VM.get_VBDs(session_id, vm)["Value"] - for i in vbds: - dev = server.VBD.get_userdevice(session_id, i)["Value"] - if dev == "0": - server.VBD.set_bootable(session_id, i, True) - finally: - server.session.logout(session_id) - finally: - unmap_partitions(xvda) diff --git a/python3/templates/debug b/python3/templates/debug deleted file mode 100755 index 85656ebf2d9..00000000000 --- a/python3/templates/debug +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/sh - - -# Script should be passed a session_id, VM reference and set of block -# devices via the environment - -set > /tmp/debug-install-script \ No newline at end of file From 4c023d354a34ca0cfc09f3a8b60e85d7f2236e88 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Wed, 10 Jul 2024 12:00:00 +0200 Subject: [PATCH 171/222] XenAPIPlugin.py: Remove the superflous catch & raise of SystemExit In `XenAPIPlugin.py`'s `dispatch()` function, SystemExit does not need to be caught and raised because both other exceptions are subclasses of Exception: By design, SystemExit is a subclass of BaseException and because we are not catching BaseException and also not use a bare `except:` here, we can cleanup catching and re-raising `SystemExit()` here. 
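For illustration only, a minimal standalone sketch (not part of this patch) of the
behaviour being relied on: an "except Exception" handler never swallows SystemExit,
so the explicit catch-and-re-raise is redundant.

    import sys

    def dispatch_like(fn):
        # Same shape as dispatch(): only Exception subclasses are handled.
        try:
            fn()
        except Exception as exc:
            print("handled:", exc)

    dispatch_like(lambda: 1 / 0)            # ZeroDivisionError is handled above
    try:
        dispatch_like(lambda: sys.exit(1))  # SystemExit is not an Exception ...
    except SystemExit:
        print("... so it propagates to the caller without an explicit re-raise")
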
Reference: https://docs.python.org/3/library/exceptions.html#SystemExit Signed-off-by: Bernhard Kaindl --- scripts/examples/python/XenAPIPlugin.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/scripts/examples/python/XenAPIPlugin.py b/scripts/examples/python/XenAPIPlugin.py index 1d657f065d1..82f1f2f8531 100644 --- a/scripts/examples/python/XenAPIPlugin.py +++ b/scripts/examples/python/XenAPIPlugin.py @@ -44,9 +44,6 @@ def dispatch(fn_table): try: result = fn_table[methodname](x, args) print(success_message(result)) - except SystemExit: - # SystemExit should not be caught, as it is handled elsewhere in the plugin system. - raise except Failure as e: print(failure_message(e.params)) except Exception as e: From 37e615e30f465a150827e9498814c3dfda80b6b8 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Thu, 11 Jul 2024 10:59:51 +0000 Subject: [PATCH 172/222] CP-50100: Moved backup-sr-metadata.py from scripts/ to python3/libexec directory - Modified python3/Makefile to include this change. - Removed backup-sr-metadata.py from scripts/Makefile Signed-off-by: Ashwinh --- python3/Makefile | 1 + {scripts => python3/libexec}/backup-sr-metadata.py | 0 scripts/Makefile | 1 - 3 files changed, 1 insertion(+), 1 deletion(-) rename {scripts => python3/libexec}/backup-sr-metadata.py (100%) diff --git a/python3/Makefile b/python3/Makefile index 75416a8d7f9..715257cdc8d 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -26,6 +26,7 @@ install: $(IPROG) libexec/probe-device-for-file $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/print-custom-templates $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/mail-alarm $(DESTDIR)$(LIBEXECDIR) + $(IPROG) libexec/backup-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin diff --git a/scripts/backup-sr-metadata.py b/python3/libexec/backup-sr-metadata.py similarity index 100% rename from scripts/backup-sr-metadata.py rename to python3/libexec/backup-sr-metadata.py diff --git a/scripts/Makefile b/scripts/Makefile index 5d769d8778d..cd32cd2f0df 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -110,7 +110,6 @@ install: $(IPROG) host-bugreport-upload $(DESTDIR)$(LIBEXECDIR)/host-bugreport-upload $(IPROG) xe-backup-metadata $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-restore-metadata $(DESTDIR)$(OPTDIR)/bin - $(IPROG) backup-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) restore-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) backup-metadata-cron $(DESTDIR)$(LIBEXECDIR) $(IPROG) pbis-force-domain-leave $(DESTDIR)$(LIBEXECDIR) From c1ea5b3ea3494ddd32d74f00537afec4a55186a7 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Thu, 11 Jul 2024 12:27:55 +0000 Subject: [PATCH 173/222] CP-50099: Moved restore-sr-metadata.py from scripts/ to python3/libexec directory - Modified python3 Makefile to include this change - Removed restore-sr-metadata.py from scripts/Makefile - Fixed bare-except exception pylint issue Signed-off-by: Ashwinh --- python3/Makefile | 2 +- {scripts => python3/libexec}/restore-sr-metadata.py | 0 scripts/Makefile | 1 - 3 files changed, 1 insertion(+), 2 deletions(-) rename {scripts => python3/libexec}/restore-sr-metadata.py (100%) diff --git a/python3/Makefile b/python3/Makefile index 715257cdc8d..5a427961371 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -27,7 +27,7 @@ install: $(IPROG) libexec/print-custom-templates $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/mail-alarm $(DESTDIR)$(LIBEXECDIR) $(IPROG) libexec/backup-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) - + $(IPROG) libexec/restore-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) 
bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin diff --git a/scripts/restore-sr-metadata.py b/python3/libexec/restore-sr-metadata.py similarity index 100% rename from scripts/restore-sr-metadata.py rename to python3/libexec/restore-sr-metadata.py diff --git a/scripts/Makefile b/scripts/Makefile index cd32cd2f0df..94dfc412718 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -110,7 +110,6 @@ install: $(IPROG) host-bugreport-upload $(DESTDIR)$(LIBEXECDIR)/host-bugreport-upload $(IPROG) xe-backup-metadata $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-restore-metadata $(DESTDIR)$(OPTDIR)/bin - $(IPROG) restore-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) backup-metadata-cron $(DESTDIR)$(LIBEXECDIR) $(IPROG) pbis-force-domain-leave $(DESTDIR)$(LIBEXECDIR) mkdir -p $(DESTDIR)$(EXTENSIONDIR) From 8f972d80ac528f78acdd2635cf61a674a47d7261 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Wed, 10 Jul 2024 12:52:29 +0000 Subject: [PATCH 174/222] CP-49919: mv scripts/extensions/pool_update.precheck to python3/extensions Original (code supplied) by Bernhard Kaindl: - Declare missing methods to python3/stubs/XenAPI.py for pyright - Initialize variables to fix pyright:reportPossiblyUnboundVariable - Applied isort Signed-off-by: Ashwinh Signed-off-by: Bernhard Kaindl --- python3/Makefile | 2 +- .../extensions/pool_update.precheck | 29 ++++++++++--------- python3/stubs/XenAPI.pyi | 2 ++ scripts/Makefile | 1 - 4 files changed, 19 insertions(+), 15 deletions(-) rename {scripts => python3}/extensions/pool_update.precheck (98%) diff --git a/python3/Makefile b/python3/Makefile index 75416a8d7f9..c9b6fe73f4c 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -32,7 +32,7 @@ install: $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin $(IPROG) extensions/pool_update.apply $(DESTDIR)$(EXTENSIONDIR) - + $(IPROG) extensions/pool_update.precheck $(DESTDIR)$(EXTENSIONDIR) $(IPROG) extensions/Test.test $(DESTDIR)$(EXTENSIONDIR) $(IPROG) plugins/disk-space $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/install-supp-pack $(DESTDIR)$(PLUGINDIR) diff --git a/scripts/extensions/pool_update.precheck b/python3/extensions/pool_update.precheck similarity index 98% rename from scripts/extensions/pool_update.precheck rename to python3/extensions/pool_update.precheck index 161fad13740..1004ae5c736 100755 --- a/scripts/extensions/pool_update.precheck +++ b/python3/extensions/pool_update.precheck @@ -1,23 +1,23 @@ #!/usr/bin/env python3 -import xmlrpc.client -import sys -import XenAPI -import json -import urllib.request, urllib.error, urllib.parse -import xml.dom.minidom -import traceback -import subprocess -import os +import configparser import errno -import re -import shutil import io -import configparser import logging -import xcp.logger +import os +import re +import shutil +import subprocess +import sys +import urllib.error +import urllib.parse +import urllib.request +import xml.dom.minidom +import xmlrpc.client +import xcp.logger +import XenAPI TMP_DIR = '/tmp/' UPDATE_DIR = '/var/update/' @@ -234,6 +234,9 @@ if __name__ == '__main__': update_vdi_valid = False session = None + update_package = None + update = None + yum_conf_file = "" try: session = XenAPI.xapi_local() session.xenapi.login_with_password('root', '', '', 'Pool_update') diff --git a/python3/stubs/XenAPI.pyi b/python3/stubs/XenAPI.pyi index bde962b0556..ede1e13d5f5 100644 --- a/python3/stubs/XenAPI.pyi +++ b/python3/stubs/XenAPI.pyi @@ -50,6 +50,8 @@ class _Dispatcher: VDI: Incomplete PBD: 
Incomplete pool: Incomplete + host: Incomplete + pool_update: Incomplete VM: Incomplete diff --git a/scripts/Makefile b/scripts/Makefile index 5d769d8778d..4aef6854d7d 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -115,7 +115,6 @@ install: $(IPROG) backup-metadata-cron $(DESTDIR)$(LIBEXECDIR) $(IPROG) pbis-force-domain-leave $(DESTDIR)$(LIBEXECDIR) mkdir -p $(DESTDIR)$(EXTENSIONDIR) - $(IPROG) extensions/pool_update.precheck $(DESTDIR)$(EXTENSIONDIR) mkdir -p $(DESTDIR)$(PLUGINDIR) $(IPROG) plugins/firewall-port $(DESTDIR)$(PLUGINDIR) mkdir -p $(DESTDIR)$(HOOKSDIR)/host-post-declare-dead From dd7d37a5656d9f5dddde0ab6f6b2fe3c902658db Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Thu, 11 Jul 2024 12:00:00 +0200 Subject: [PATCH 175/222] python3/plugins/test_extauth_hook_AD.py: Assert the current bug Signed-off-by: Bernhard Kaindl --- python3/plugins/test_extauth_hook_AD.py | 31 +++++++++++++++++-------- 1 file changed, 21 insertions(+), 10 deletions(-) diff --git a/python3/plugins/test_extauth_hook_AD.py b/python3/plugins/test_extauth_hook_AD.py index 1960072f3f1..d3bee7670ad 100644 --- a/python3/plugins/test_extauth_hook_AD.py +++ b/python3/plugins/test_extauth_hook_AD.py @@ -1,13 +1,12 @@ """ Test module for extauth_hook_ad """ -#pylint: disable=invalid-name -import sys + +import logging import os +import sys from unittest import TestCase -from mock import MagicMock, patch - -import pytest +from unittest.mock import MagicMock, patch # mock modules to avoid dependencies sys.modules["XenAPIPlugin"] = MagicMock() @@ -15,11 +14,23 @@ # pylint: disable=wrong-import-position # Import must after mock modules from extauth_hook_ad import StaticSSHPam, NssConfig, SshdConfig, UsersList, GroupsList - - -if sys.version_info < (3, ): # pragma: no cover - pytest.skip(allow_module_level=True) - +from extauth_hook_ad import run_cmd + +def test_run_cmd(caplog): + """Assert the current buggy behavior of the run_cmd function after py3 migration""" + cmd = ["echo", " Hello World! "] + + # Call the function under test, check the return value and capture the log message + with caplog.at_level(logging.DEBUG): + # Bug in the current code, the result is a byte string: + assert run_cmd(cmd) == cmd[1].strip().encode() + + # Bug in the current code after not fully tested py3 migration: + # The logged message contains a byte string that is not stripped: + assert caplog.records[0].message == "%s -> b' Hello World! 
\\n'" % (cmd) + # Test the case where the command fails: + assert run_cmd(["bad command"]) is None + assert caplog.records[1].message == "Failed to run command ['bad command']" def line_exists_in_config(lines, line): """ From 3317717508a11c8c1d6b4963561a5afe949212c5 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Thu, 11 Jul 2024 12:00:00 +0200 Subject: [PATCH 176/222] python3/plugins/extauth-hook-AD.py: Fix logging of run_cmd() Signed-off-by: Bernhard Kaindl --- python3/plugins/extauth-hook-AD.py | 17 +++++++---------- python3/plugins/test_extauth_hook_AD.py | 9 ++++----- 2 files changed, 11 insertions(+), 15 deletions(-) diff --git a/python3/plugins/extauth-hook-AD.py b/python3/plugins/extauth-hook-AD.py index 3a7b14f7959..a67d35b10b8 100755 --- a/python3/plugins/extauth-hook-AD.py +++ b/python3/plugins/extauth-hook-AD.py @@ -61,17 +61,14 @@ def setup_logger(): logger = logging.getLogger(__name__) -def run_cmd(cmd, log_cmd=True): - """Helper function to run command""" +def run_cmd(command: "list[str]"): + """Helper function to run a command and log the output""" try: - result = subprocess.check_output(cmd) - if log_cmd: - msg = "{} -> {}".format(cmd, result) - logger.debug(msg) - return result.strip() - except Exception: # pylint: disable=broad-except - logger.exception("Failed to run command %s", cmd) - return None + output = subprocess.check_output(command, universal_newlines=True) + logger.debug("%s -> %s", command, output.strip()) + + except OSError: + logger.exception("Failed to run command %s", command) class ADBackend(Enum): diff --git a/python3/plugins/test_extauth_hook_AD.py b/python3/plugins/test_extauth_hook_AD.py index d3bee7670ad..616884101c6 100644 --- a/python3/plugins/test_extauth_hook_AD.py +++ b/python3/plugins/test_extauth_hook_AD.py @@ -22,12 +22,11 @@ def test_run_cmd(caplog): # Call the function under test, check the return value and capture the log message with caplog.at_level(logging.DEBUG): - # Bug in the current code, the result is a byte string: - assert run_cmd(cmd) == cmd[1].strip().encode() + assert run_cmd(cmd) is None # The return value is None (not used in the code) + + # Assert the log message + assert caplog.records[0].message == "%s -> Hello World!" % (cmd) - # Bug in the current code after not fully tested py3 migration: - # The logged message contains a byte string that is not stripped: - assert caplog.records[0].message == "%s -> b' Hello World! 
\\n'" % (cmd) # Test the case where the command fails: assert run_cmd(["bad command"]) is None assert caplog.records[1].message == "Failed to run command ['bad command']" From 959721e36b4e4a3ac7e52e807cc6e739315efb44 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Fri, 12 Jul 2024 09:00:00 +0200 Subject: [PATCH 177/222] mypy: Fix and improve the config to make it more usable Signed-off-by: Bernhard Kaindl --- pyproject.toml | 24 +++++++++++++++++++++--- 1 file changed, 21 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 2749d69956f..588e8249222 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ # https://packaging.python.org/en/latest/specifications/pyproject-toml/ [project] name = "xen-api" -requires-python = ">=3.6.*" +requires-python = ">=3.6.0" license = {file = "LICENSE"} keywords = ["xen-project", "Xen", "hypervisor", "libraries"] maintainers = [ @@ -119,10 +119,19 @@ ensure_newline_before_comments = false # PYTHONPATH="scripts/examples/python:.:scripts:scripts/plugins:scripts/examples" files = [ "python3", - "scripts/usb_reset.py", + "scripts/examples/python", +] +exclude = [ + "python3/packages", + "python3/stubs", + "python3/tests", ] pretty = true +mypy_path = "python3/packages:python3/stubs:scripts/examples/python" error_summary = true +# default_return = false sets the default return type of functions to 'Any'. +# It makes mypy less noisy on untyped code makes it more usable now: +default_return = false strict_equality = true show_error_codes = true show_error_context = true @@ -138,7 +147,16 @@ disallow_any_explicit = false disallow_any_generics = true disallow_any_unimported = true disallow_subclassing_any = true -disable_error_code = ["import-untyped"] # XenAPI is not typed yet +disable_error_code = [ + "explicit-override", + "misc", + "no-any-decorated", + "no-any-expr", + "no-untyped-call", + "no-untyped-def", + "no-untyped-usage", + "import-untyped", # XenAPI is not typed yet +] [[tool.mypy.overrides]] From df8986839c57d4d920469f6f19f4109b2fb8649a Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Thu, 11 Jul 2024 13:22:39 +0000 Subject: [PATCH 178/222] CP-49928: Moved static-vdis from scripts/ to python3/bin directory - Modified python3/Makefile to include this change - Removed static-vdis from scripts/Makefile - Modified test_static_vdis.py to include new location of the static-vdis Signed-off-by: Ashwinh --- python3/Makefile | 1 + {scripts => python3/bin}/static-vdis | 0 python3/tests/test_static_vdis.py | 2 +- scripts/Makefile | 1 - 4 files changed, 2 insertions(+), 2 deletions(-) rename {scripts => python3/bin}/static-vdis (100%) diff --git a/python3/Makefile b/python3/Makefile index 8df2788f583..81783bcd6c0 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -32,6 +32,7 @@ install: $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin + $(IPROG) bin/static-vdis $(DESTDIR)$(OPTDIR)/bin $(IPROG) extensions/pool_update.apply $(DESTDIR)$(EXTENSIONDIR) $(IPROG) extensions/pool_update.precheck $(DESTDIR)$(EXTENSIONDIR) $(IPROG) extensions/Test.test $(DESTDIR)$(EXTENSIONDIR) diff --git a/scripts/static-vdis b/python3/bin/static-vdis similarity index 100% rename from scripts/static-vdis rename to python3/bin/static-vdis diff --git a/python3/tests/test_static_vdis.py b/python3/tests/test_static_vdis.py index 1b7efc0bcf0..ef4e24d7f31 100644 --- a/python3/tests/test_static_vdis.py +++ b/python3/tests/test_static_vdis.py @@ -16,7 +16,7 
@@ def static_vdis() -> ModuleType: """Test fixture to return the static-vdis module, mocked to avoid dependencies.""" with mocked_modules("XenAPI", "inventory"): - return import_file_as_module("scripts/static-vdis") + return import_file_as_module("python3/bin/static-vdis") # Hide pylint warnings for redefined-outer-name from using the static_vdis fixture: diff --git a/scripts/Makefile b/scripts/Makefile index 198141f594a..bbaa23db002 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -95,7 +95,6 @@ install: $(IPROG) xe-edit-bootloader $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-get-network-backend $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-enable-all-plugin-metrics $(DESTDIR)$(OPTDIR)/bin - $(IPROG) static-vdis $(DESTDIR)$(OPTDIR)/bin $(IPROG) with-vdi $(DESTDIR)$(OPTDIR)/debug $(IPROG) import-update-key $(DESTDIR)$(OPTDIR)/debug $(IPROG) pool.conf $(DESTDIR)$(ETCXENDIR) From 91b646d0afb7936057dc09009bb8b9f39f9bdcf0 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Fri, 12 Jul 2024 12:00:00 +0200 Subject: [PATCH 179/222] scripts/generate-iscsi-iqn: Fix inline Python to work in Py3 Signed-off-by: Bernhard Kaindl --- scripts/generate-iscsi-iqn | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/scripts/generate-iscsi-iqn b/scripts/generate-iscsi-iqn index 0d662b0441c..882a4c7f6fd 100755 --- a/scripts/generate-iscsi-iqn +++ b/scripts/generate-iscsi-iqn @@ -21,7 +21,8 @@ def f(x): tmp = x.rstrip().split(".") tmp.reverse() return ".".join(tmp) -if __name__ == "__main__": print f(sys.argv[1]) + +if __name__ == "__main__": print(f(sys.argv[1])) ' geniqn() { From 889b1bf9a5a1bfd76f0f8d12b4de8315e3953338 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Fri, 12 Jul 2024 12:00:00 +0200 Subject: [PATCH 180/222] rm ocaml/idl/ocaml_backend/python: remove obsolete example scripts These example scripts were imported in 2009 and are obsoleted by samples like: https://github.com/xapi-project/xen-api-sdk/blob/master/python/samples/powercycle.py - The sample xen-api-sdk/python/samples/powercycle.py is much better. - They are not installed and not otherwise mentioned in the repository. 
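For comparison, a rough sketch (not the SDK sample itself) of what the removed
list_vms.py looks like with the XenAPI bindings and local-login pattern already
used elsewhere in this series; the program name string is made up for the example:

    import XenAPI

    session = XenAPI.xapi_local()
    session.xenapi.login_with_password("root", "", "1.0", "example-list-vms")
    try:
        # Print name and power state of every real VM (skip templates and dom0).
        for vm in session.xenapi.VM.get_all_records().values():
            if not vm["is_a_template"] and not vm["is_control_domain"]:
                print(vm["name_label"], vm["power_state"])
    finally:
        session.xenapi.session.logout()
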
Signed-off-by: Bernhard Kaindl --- ocaml/idl/ocaml_backend/python/list_vms.py | 10 ----- ocaml/idl/ocaml_backend/python/pause_vm.py | 6 --- ocaml/idl/ocaml_backend/python/test_client.py | 44 ------------------- ocaml/idl/ocaml_backend/python/unpause_vm.py | 6 --- pyproject.toml | 2 - 5 files changed, 68 deletions(-) delete mode 100755 ocaml/idl/ocaml_backend/python/list_vms.py delete mode 100755 ocaml/idl/ocaml_backend/python/pause_vm.py delete mode 100755 ocaml/idl/ocaml_backend/python/test_client.py delete mode 100755 ocaml/idl/ocaml_backend/python/unpause_vm.py diff --git a/ocaml/idl/ocaml_backend/python/list_vms.py b/ocaml/idl/ocaml_backend/python/list_vms.py deleted file mode 100755 index 0d7a75313cb..00000000000 --- a/ocaml/idl/ocaml_backend/python/list_vms.py +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/python - -import xmlrpclib -server = xmlrpclib.Server("http://melton:8086"); -session = server.session.login_with_password("root", "xenroot", "1.0", "xen-api-list-vms.py")['Value'] -print session -vms = server.VM.get_all(session)['Value'] -print vms -#for vm in vms: -# print vm,server.VM.get_kernel__kernel(session, vm) diff --git a/ocaml/idl/ocaml_backend/python/pause_vm.py b/ocaml/idl/ocaml_backend/python/pause_vm.py deleted file mode 100755 index 2795496e1cd..00000000000 --- a/ocaml/idl/ocaml_backend/python/pause_vm.py +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/python - -import xmlrpclib -server = xmlrpclib.Server("http://localhost:8086"); -session = server.Session.do_login_with_password("user", "passwd", "1.0", "xen-api-pause-vm.py")['Value'] -server.VM.do_pause(session, '7366a41a-e50e-b891-fa0c-ca5b4d2e3f1c') diff --git a/ocaml/idl/ocaml_backend/python/test_client.py b/ocaml/idl/ocaml_backend/python/test_client.py deleted file mode 100755 index 05888c97db7..00000000000 --- a/ocaml/idl/ocaml_backend/python/test_client.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/python - -import getopt, sys, xmlrpclib - -url = "http://dhcp108:70000" #default -parsed = getopt.getopt(sys.argv[1:], "u:url") -if len(parsed[0]) == 1: - url = parsed[0][0][1] - -# Create an object to represent our server. -server = xmlrpclib.Server(url); - -# Call the server and get our result. -print "Logging in... ", -session = server.Session.do_login_with_password("user", "passwd", "1.0", "xen-api-test-client.py") -print "OK" -print "Session ID: \""+session+"\"" -vm_list = server.VM.do_list(session) - -print "VM list = " + repr(vm_list) - -for vm in vm_list: - print "VM ", vm, " in state: ", server.VM.get_power_state(session, vm) - -first_vm = vm_list[0] -other = server.VM.get_otherConfig(session, first_vm) -print repr(other) - - -#state = server.VM.get_power_state(session, first_vm) -#if state == "Halted": -# print "Starting first VM... ", -# server.VM.do_start(session, first_vm, 1==0) -#elif state == "Suspended": -# print "Restoring first VM..." -# server.VM.do_unhibernate(session, first_vm, 1==0) -#elif state == "Running": -# print "Suspending first VM... ", -# server.VM.do_hibernate(session, first_vm, 1==1) -#print "OK" - -print "Logging out... 
", -server.Session.do_logout(session) -print "OK" diff --git a/ocaml/idl/ocaml_backend/python/unpause_vm.py b/ocaml/idl/ocaml_backend/python/unpause_vm.py deleted file mode 100755 index 97d748e1dca..00000000000 --- a/ocaml/idl/ocaml_backend/python/unpause_vm.py +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/python - -import xmlrpclib -server = xmlrpclib.Server("http://localhost:8086"); -session = server.Session.do_login_with_password("user", "passwd", "1.0", "xen-api-unpause-vm.py")['Value'] -server.VM.do_unpause(session, '7366a41a-e50e-b891-fa0c-ca5b4d2e3f1c') diff --git a/pyproject.toml b/pyproject.toml index 2749d69956f..555f1940203 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -287,8 +287,6 @@ inputs = [ # To be added later, # when converted to Python3-compatible syntax: - # "ocaml/message-switch/python", - # "ocaml/idl/ocaml_backend/python", # "ocaml/xapi-storage/python", ] disable = [ From 418450d7167590cd89ac9225c9320c636be2d3a8 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Fri, 12 Jul 2024 12:00:00 +0200 Subject: [PATCH 181/222] Update scripts/test_mail-alarm.py to test with Python3 and move it Signed-off-by: Bernhard Kaindl --- {scripts => python3/tests}/test_mail-alarm.py | 64 ++++--------------- 1 file changed, 14 insertions(+), 50 deletions(-) rename {scripts => python3/tests}/test_mail-alarm.py (92%) diff --git a/scripts/test_mail-alarm.py b/python3/tests/test_mail-alarm.py similarity index 92% rename from scripts/test_mail-alarm.py rename to python3/tests/test_mail-alarm.py index acd5f5f20a5..c1d225eeac2 100644 --- a/scripts/test_mail-alarm.py +++ b/python3/tests/test_mail-alarm.py @@ -2,26 +2,13 @@ # test_mail-alarm.py: uses unittest to test script "mail-alarm" # -import tempfile -import os -import shutil import sys import unittest -import mock -import pytest +from unittest import mock -if sys.version_info > (2, ): - pytest.skip(allow_module_level=True) - -def nottest(obj): - obj.__test__ = False - return obj - -sys.path.append("./scripts/examples/python") -sys.modules["xcp"] = mock.Mock() - -log_file_global = None +from python3.tests.import_helper import import_file_as_module, mocked_modules +log_strs = "" XML_MESSAGE_TEMPLATE = """ 63102OpaqueRef:46be74f4-3a26-31a8-a629-d52584fe6ed3{alarm}3{cls}2e00443d-ac29-4940-8433-a15dda1e8f8e20170516T16:30:00Z0d985f5e-6d91-3410-f853-040d0906a4b9{body}""" @@ -56,28 +43,16 @@ def get_alarm_xml(xmlalarm_str, xmlcls_str, xmlname_str, xmlbody_str): def log_err(err): - global log_file_global - with open(log_file_global, "a+") as fileh: - fileh.write("%s: %s\n" % (sys.argv[0], err)) + global log_strs # pylint: disable=global-statement + log_strs = log_strs + "%s: %s\n" % (sys.argv[0], err) + + +with mocked_modules("xcp"): + mailalarm = import_file_as_module("python3/libexec/mail-alarm") + mock_setup(mailalarm) class TestXapiMessage(unittest.TestCase): - def setUp(self): - global log_file_global - try: - self.work_dir = tempfile.mkdtemp(prefix="test-mail-alarm-") - log_file_global = os.path.join(self.work_dir, "user.log") - src_file = "./scripts/mail-alarm" - dst_file = os.path.join(self.work_dir, "mailalarm.py") - shutil.copyfile(src_file, dst_file) - sys.path.append(self.work_dir) - except: - raise - - def tearDown(self): - shutil.rmtree(self.work_dir, ignore_errors=True) - - @nottest def common_test_good_input( self, xmlalarm_str, @@ -87,11 +62,6 @@ def common_test_good_input( body_str, xmlbody_str=XML_BODY_COMMON, ): - import mailalarm - - # Emulate functions with Mock - mock_setup(mailalarm) - session = mock.Mock() tst_xml = 
get_alarm_xml(xmlalarm_str, xmlcls_str, xmlname_str, xmlbody_str) @@ -104,7 +74,6 @@ def common_test_good_input( self.assertIn(subject_str, mail_subject) self.assertIn(body_str, mail_body) - @nottest def common_test_bad_input( self, xmlalarm_str, @@ -114,12 +83,6 @@ def common_test_bad_input( subtitle_str, xmlbody_str=XML_BODY_COMMON, ): - global log_file_global - import mailalarm - - # Emulate functions with Mock - mock_setup(mailalarm) - session = mock.Mock() tst_xml = get_alarm_xml(xmlalarm_str, xmlcls_str, xmlname_str, xmlbody_str) @@ -128,9 +91,11 @@ def common_test_bad_input( mail_subject = obj_XapiMessage.generate_email_subject() mail_body = obj_XapiMessage.generate_email_body() + assert mail_subject and mail_body # They're tested by test_good_mail_language() - with open(log_file_global, "r") as fileh: - log_strs = fileh.read() + # Assert the logged error messages for the bad language pack that are + # recorded in `log_str` by `log_err()` when the language pack is not found + # by `generate_email_subject()` and `generate_email_body()`: self.assertIn("Read mail language pack error", log_strs) self.assertIn( @@ -146,7 +111,6 @@ def common_test_bad_input( log_strs, ) - os.remove(log_file_global) def test_good_mail_language(self): ## Test cpu_usage alarm From e6f9ff580c351d9cab8fe824540a58fd6ab794d0 Mon Sep 17 00:00:00 2001 From: Ashwinh Date: Mon, 15 Jul 2024 12:00:50 +0000 Subject: [PATCH 182/222] CP-49931: Move scripts/xe-reset-networking to python3/bin, fix pyright Fix pyright: - Rework unclean exception handling using a contextmanager. - Declare address and master in case of an Exception as well. - Fix warning on unsupported escape sequence using a raw string. - Removed python2 script check from pyproject.toml Signed-off-by: Bernhard Kaindl Signed-off-by: Ashwinh --- pyproject.toml | 13 ----- python3/Makefile | 1 + {scripts => python3/bin}/xe-reset-networking | 59 ++++++++++---------- scripts/Makefile | 1 - 4 files changed, 29 insertions(+), 45 deletions(-) rename {scripts => python3/bin}/xe-reset-networking (92%) diff --git a/pyproject.toml b/pyproject.toml index 2749d69956f..352014a78e3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -271,19 +271,6 @@ inputs = [ # Python 3 "python3/", "ocaml/xcp-rrdd", - # Python2: These will generate warnings that need to be fixed: - "scripts/static-vdis", - "scripts/generate-iscsi-iqn", - "scripts/hatests", - "scripts/host-display", - "scripts/mail-alarm", - "scripts/print-custom-templates", - "scripts/probe-device-for-file", - "scripts/xe-reset-networking", - "scripts/xe-scsi-dev-map", - "scripts/examples/python", - "scripts/yum-plugins", - "scripts/*.py", # To be added later, # when converted to Python3-compatible syntax: diff --git a/python3/Makefile b/python3/Makefile index 81783bcd6c0..d33d89d81f3 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -30,6 +30,7 @@ install: $(IPROG) libexec/restore-sr-metadata.py $(DESTDIR)$(LIBEXECDIR) $(IPROG) bin/hfx_filename $(DESTDIR)$(OPTDIR)/bin + $(IPROG) bin/xe-reset-networking $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/perfmon $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/xe-scsi-dev-map $(DESTDIR)$(OPTDIR)/bin $(IPROG) bin/static-vdis $(DESTDIR)$(OPTDIR)/bin diff --git a/scripts/xe-reset-networking b/python3/bin/xe-reset-networking similarity index 92% rename from scripts/xe-reset-networking rename to python3/bin/xe-reset-networking index f26ce9fd7d4..c1e1d908261 100755 --- a/scripts/xe-reset-networking +++ b/python3/bin/xe-reset-networking @@ -14,18 +14,30 @@ GNU Lesser General Public License for more 
details. """ from __future__ import print_function -import sys import os -import time import re +import sys +from contextlib import contextmanager from optparse import OptionParser -#import XenAPI pool_conf = '@ETCXENDIR@/pool.conf' inventory_file = '@INVENTORY@' management_conf = '/etc/firstboot.d/data/management.conf' network_reset = '/tmp/network-reset' + +@contextmanager +def fsync_write(filename): + """Context manager that writes to a file and fsyncs it after writing.""" + + with open(filename, "w", encoding="utf-8") as file: + try: # Run the context, ignoring exceptions: + yield file + finally: + file.flush() # Flush the file buffer to the OS + os.fsync(file.fileno()) # Ask the OS to write the file to disk + + def read_dict_file(fname): f = open(fname, 'r') d = {} @@ -40,16 +52,15 @@ def read_inventory(): def read_management_conf(): return read_dict_file(management_conf) -def write_inventory(inventory): - f = open(inventory_file, 'w') - for k in inventory: - f.write(k + "='" + inventory[k] + "'\n") - f.flush() - os.fsync(f.fileno()) - f.close() + +def write_inventory(inventory_dict): + with fsync_write(inventory_file) as file: + for k in inventory_dict: + file.write(k + "='" + inventory_dict[k] + "'\n") + def valid_vlan(vlan): - if not re.match('^\d+$', vlan): + if not re.match(r"^\d+$", vlan): return False if int(vlan)<0 or int(vlan)>4094: return False @@ -88,8 +99,9 @@ if __name__ == "__main__": address = options.address finally: f.close() - except: - pass + except Exception: + master = None + address = "" # Get the management device from the firstboot data if not specified by the user if options.device == None: @@ -192,13 +204,8 @@ Type 'no' to cancel. # Update master's IP, if needed and given if master == False and options.address != None: print("Setting master's ip (" + address + ")...") - try: - f = open(pool_conf, 'w') + with fsync_write(pool_conf) as f: f.write('slave:' + address) - finally: - f.flush() - os.fsync(f.fileno()) - f.close() # Construct bridge name for management interface based on convention if device[:3] == 'eth': @@ -230,8 +237,7 @@ Type 'no' to cancel. # Rewrite firstboot management.conf file, which will be picked it by xcp-networkd on restart (if used) is_static = False - try: - f = open(management_conf, 'w') + with fsync_write(management_conf) as f: f.write("LABEL='" + device + "'\n") if options.mode != "none": f.write("MODE='" + options.mode + "'\n") @@ -252,14 +258,9 @@ Type 'no' to cancel. f.write("IPv6_GATEWAY='" + options.gateway_v6 + "'\n") if is_static and options.dns != '': f.write("DNS='" + options.dns + "'\n") - finally: - f.flush() - os.fsync(f.fileno()) - f.close() # Write trigger file for XAPI to continue the network reset on startup - try: - f = open(network_reset, 'w') + with fsync_write(network_reset) as f: f.write('DEVICE=' + device + '\n') if options.mode != "none": f.write('MODE=' + options.mode + '\n') @@ -278,10 +279,6 @@ Type 'no' to cancel. 
f.write('GATEWAY_V6=' + options.gateway_v6 + '\n') if is_static and options.dns != '': f.write('DNS=' + options.dns + '\n') - finally: - f.flush() - os.fsync(f.fileno()) - f.close() # Reset the domain 0 network interface naming configuration # back to a fresh-install state for the currently-installed diff --git a/scripts/Makefile b/scripts/Makefile index bbaa23db002..87302dca48f 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -89,7 +89,6 @@ install: mkdir -p $(DESTDIR)$(OPTDIR)/debug $(IPROG) debug_ha_query_liveset $(DESTDIR)$(OPTDIR)/debug $(IPROG) xe-mount-iso-sr $(DESTDIR)$(OPTDIR)/bin - $(IPROG) xe-reset-networking $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-toolstack-restart $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-xentrace $(DESTDIR)$(OPTDIR)/bin $(IPROG) xe-edit-bootloader $(DESTDIR)$(OPTDIR)/bin From 8056f9adf8ba494c4b9216fc1cafc2a687f70558 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 16 Jul 2024 12:00:00 +0200 Subject: [PATCH 183/222] plugins/extauth-hook-AD.py: Fix 'Stray abstractmethod' pytype warning Signed-off-by: Bernhard Kaindl --- python3/plugins/extauth-hook-AD.py | 12 ++++-------- 1 file changed, 4 insertions(+), 8 deletions(-) diff --git a/python3/plugins/extauth-hook-AD.py b/python3/plugins/extauth-hook-AD.py index a67d35b10b8..41f982fbc77 100755 --- a/python3/plugins/extauth-hook-AD.py +++ b/python3/plugins/extauth-hook-AD.py @@ -28,7 +28,6 @@ # pylint: disable=too-few-public-methods -# pytype: disable=ignored-abstractmethod HCP_USERS = "/etc/security/hcp_ad_users.conf" @@ -77,7 +76,7 @@ class ADBackend(Enum): BD_WINBIND = 1 -class ADConfig(): +class ADConfig(abc.ABC): """Base class for AD configuration""" def __init__(self, path, session, args, ad_enabled=True, load_existing=True, file_mode=0o644): @@ -103,8 +102,7 @@ def _get_ad_backend(self): return ADBackend.BD_WINBIND @abc.abstractmethod - def _apply_to_cache(self): - pass + def _apply_to_cache(self): ... def apply(self): """Apply configuration""" @@ -224,12 +222,10 @@ def _is_responsible_for(self, subject_rec): return False @abc.abstractmethod - def _match_subject(self, subject_rec): - pass + def _match_subject(self, subject_rec): ... @abc.abstractmethod - def _add_subject(self, subject_rec): - pass + def _add_subject(self, subject_rec): ... 
def _install(self): if self._ad_enabled: From ed29ab93542ff268644d4f85f2c2e50cf1d7f6f1 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 16 Jul 2024 12:00:00 +0200 Subject: [PATCH 184/222] plugins/extauth-hook-AD.py: Cleanup obsolete per-method pylint comments Signed-off-by: Bernhard Kaindl --- python3/plugins/extauth-hook-AD.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/python3/plugins/extauth-hook-AD.py b/python3/plugins/extauth-hook-AD.py index a67d35b10b8..7db600476fd 100755 --- a/python3/plugins/extauth-hook-AD.py +++ b/python3/plugins/extauth-hook-AD.py @@ -170,7 +170,6 @@ def _apply_to_cache(self): class DynamicPam(ADConfig): - #pylint: disable=too-few-public-methods """Base class to manage AD users and groups configure which permit pool admin ssh""" def __init__(self, path, session, args, ad_enabled=True): @@ -239,7 +238,6 @@ def _install(self): class UsersList(DynamicPam): - #pylint: disable=too-few-public-methods """Class manage users which permit pool admin ssh""" def __init__(self, session, arg, ad_enabled=True): @@ -282,7 +280,6 @@ def _add_subject(self, subject_rec): class GroupsList(DynamicPam): - #pylint: disable=too-few-public-methods """Class manage groups which permit pool admin ssh""" def __init__(self, session, arg, ad_enabled=True): @@ -313,7 +310,6 @@ class KeyValueConfig(ADConfig): _special_line_prefix = "__key_value_config_sp_line_prefix_" _empty_value = "" - #pylint: disable=too-many-arguments def __init__(self, path, session, args, ad_enabled=True, load_existing=True, file_mode=0o644, sep=": ", comment="#"): super(KeyValueConfig, self).__init__(path, session, From 8cab68c2be6d42325b50243666720f1591e1df79 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 16 Jul 2024 12:00:00 +0200 Subject: [PATCH 185/222] plugins/extauth-hook-AD.py: Modernise testee import to not need a symlink Signed-off-by: Bernhard Kaindl --- python3/plugins/extauth_hook_ad.py | 1 - python3/plugins/test_extauth_hook_AD.py | 22 +++++++++++++++------- 2 files changed, 15 insertions(+), 8 deletions(-) delete mode 120000 python3/plugins/extauth_hook_ad.py diff --git a/python3/plugins/extauth_hook_ad.py b/python3/plugins/extauth_hook_ad.py deleted file mode 120000 index 19afff4d393..00000000000 --- a/python3/plugins/extauth_hook_ad.py +++ /dev/null @@ -1 +0,0 @@ -extauth-hook-AD.py \ No newline at end of file diff --git a/python3/plugins/test_extauth_hook_AD.py b/python3/plugins/test_extauth_hook_AD.py index 616884101c6..8ae81bcbd19 100644 --- a/python3/plugins/test_extauth_hook_AD.py +++ b/python3/plugins/test_extauth_hook_AD.py @@ -8,13 +8,21 @@ from unittest import TestCase from unittest.mock import MagicMock, patch -# mock modules to avoid dependencies -sys.modules["XenAPIPlugin"] = MagicMock() -sys.modules["XenAPI"] = MagicMock() -# pylint: disable=wrong-import-position -# Import must after mock modules -from extauth_hook_ad import StaticSSHPam, NssConfig, SshdConfig, UsersList, GroupsList -from extauth_hook_ad import run_cmd +from python3.tests.import_helper import import_file_as_module, mocked_modules + + +with mocked_modules("XenAPIPlugin", "XenAPI"): + testee = import_file_as_module("python3/plugins/extauth-hook-AD.py") + # Will be replaced by updating the patch decorators + sys.modules["extauth_hook_ad"] = testee + # Will be replaced by updating the tests to call testee.function_name() + run_cmd = testee.run_cmd + NssConfig = testee.NssConfig + UsersList = testee.UsersList + GroupsList = testee.GroupsList + SshdConfig = testee.SshdConfig + StaticSSHPam = 
testee.StaticSSHPam + def test_run_cmd(caplog): """Assert the current buggy behavior of the run_cmd function after py3 migration""" From a0d3be8683f8e9c12561e936245dc8690b3d544d Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 16 Jul 2024 12:00:00 +0200 Subject: [PATCH 186/222] plugins/test_extauth_hook_ad.py: Update the patch decorators to 'extauth-hook-AD' Signed-off-by: Bernhard Kaindl --- python3/plugins/test_extauth_hook_AD.py | 17 +++++++---------- python3/tests/import_helper.py | 2 +- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/python3/plugins/test_extauth_hook_AD.py b/python3/plugins/test_extauth_hook_AD.py index 8ae81bcbd19..3f0f22e40dd 100644 --- a/python3/plugins/test_extauth_hook_AD.py +++ b/python3/plugins/test_extauth_hook_AD.py @@ -4,7 +4,6 @@ import logging import os -import sys from unittest import TestCase from unittest.mock import MagicMock, patch @@ -13,8 +12,6 @@ with mocked_modules("XenAPIPlugin", "XenAPI"): testee = import_file_as_module("python3/plugins/extauth-hook-AD.py") - # Will be replaced by updating the patch decorators - sys.modules["extauth_hook_ad"] = testee # Will be replaced by updating the tests to call testee.function_name() run_cmd = testee.run_cmd NssConfig = testee.NssConfig @@ -132,9 +129,9 @@ def test_ad_enabled_with_pbis(self, mock_rename, mock_chmod): self.assertTrue(line_exists_in_config(static._lines, enabled_keyward)) -@patch("extauth_hook_ad.ADConfig._install") +@patch("extauth_hook_AD.ADConfig._install") class TestUsersList(TestCase): - @patch("extauth_hook_ad.open") + @patch("extauth_hook_AD.open") @patch("os.path.exists") @patch("os.remove") def test_ad_not_enabled(self, mock_remove, mock_exists, mock_open, mock_install): @@ -219,7 +216,7 @@ def test_failed_to_add_one_admin_should_not_affact_others(self, mock_install): self.assertNotIn(bad_user, dynamic._lines) -@patch("extauth_hook_ad.ADConfig._install") +@patch("extauth_hook_AD.ADConfig._install") class TestGroups(TestCase): def test_permit_admin_group(self, mock_install): # Domain group with admin role should be included in config file @@ -249,7 +246,7 @@ def test_permit_admin_group_with_space(self, mock_install): self.assertIn(permit_group, dynamic._lines) -@patch("extauth_hook_ad.ADConfig._install") +@patch("extauth_hook_AD.ADConfig._install") class TestNssConfig(TestCase): def test_ad_not_enabled(self, mock_install): expected_config = "passwd: files sss" @@ -264,9 +261,9 @@ def test_ad_enabled(self, mock_install): self.assertTrue(line_exists_in_config(nss._lines, expected_config)) -@patch("extauth_hook_ad.run_cmd") -@patch("extauth_hook_ad.ADConfig._install") -@patch("extauth_hook_ad.open") +@patch("extauth_hook_AD.run_cmd") +@patch("extauth_hook_AD.ADConfig._install") +@patch("extauth_hook_AD.open") class TestSshdConfig(TestCase): def test_ad_not_enabled(self, mock_open, mock_install, mock_run_cmd): expected_config = "ChallengeResponseAuthentication no" diff --git a/python3/tests/import_helper.py b/python3/tests/import_helper.py index 076a24913c7..2fdbd922b95 100644 --- a/python3/tests/import_helper.py +++ b/python3/tests/import_helper.py @@ -50,7 +50,7 @@ def import_file_as_module(relative_script_path): # type:(str) -> ModuleType - import_script_as_module('scripts/mail-alarm') # Returns the imported module. 
""" script_path = os.path.dirname(__file__) + "/../../" + relative_script_path - module_name = os.path.basename(script_path.replace(".py", "")) + module_name = os.path.basename(script_path).replace(".py", "").replace("-", "_") # For Python 3.11+: Import Python script without the .py extension: # https://gist.github.com/bernhardkaindl/1aaa04ea925fdc36c40d031491957fd3: From 3fe07e39d1e5a234cd30a030477627eaa3f5e6d5 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 16 Jul 2024 12:00:00 +0200 Subject: [PATCH 187/222] storage-api: __init__.py: Use is_str() to check for string type Signed-off-by: Bernhard Kaindl --- ocaml/xapi-storage/python/xapi/__init__.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/ocaml/xapi-storage/python/xapi/__init__.py b/ocaml/xapi-storage/python/xapi/__init__.py index 57a7c0c9f2d..fbef43ecde2 100644 --- a/ocaml/xapi-storage/python/xapi/__init__.py +++ b/ocaml/xapi-storage/python/xapi/__init__.py @@ -33,9 +33,23 @@ # pylint: disable=invalid-name,redefined-builtin,undefined-variable # pyright: reportUndefinedVariable=false + +# is_str(): Shortcut to check if a value is an instance of a string type. +# +# Replace: +# if not isinstance(code, str) and not isinstance(code, unicode): +# with: +# if not is_str(code): +# +# This makes for much cleaner code and suits Python3 well too. if sys.version_info[0] > 2: long = int - unicode = str + def is_str(x): + return isinstance(x, str) # With Python3, all strings are unicode +else: + def is_str(x): # pragma: no cover + return isinstance(x, (str, unicode)) # pylint: disable=undefined-variable + def success(result): return {"Status": "Success", "Value": result} @@ -72,7 +86,7 @@ class XenAPIException(Exception): def __init__(self, code, params): Exception.__init__(self) - if not isinstance(code, str) and not isinstance(code, unicode): + if not is_str(code): raise TypeError("string", repr(code)) if not isinstance(params, list): raise TypeError("list", repr(params)) From d034638da9fc9f6087afec69df5d67dee7d283c0 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 16 Jul 2024 12:00:00 +0200 Subject: [PATCH 188/222] storage/api/{volume,plugin,datapath}.py: Apply isort, darker, import is_str Unused imports of is_long() are removed as well. 
Signed-off-by: Bernhard Kaindl --- .../python/xapi/storage/api/datapath.py | 20 +++++++++++++----- .../python/xapi/storage/api/plugin.py | 20 +++++++++++++----- .../python/xapi/storage/api/volume.py | 21 ++++++++++++++----- 3 files changed, 46 insertions(+), 15 deletions(-) diff --git a/ocaml/xapi-storage/python/xapi/storage/api/datapath.py b/ocaml/xapi-storage/python/xapi/storage/api/datapath.py index 69b37e5a9e7..1bf426d5b2f 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/datapath.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/datapath.py @@ -1,11 +1,21 @@ from __future__ import print_function -from xapi import success, Rpc_light_failure, InternalError, UnmarshalException, TypeError, is_long, UnknownMethod -import xapi -import sys -import json + import argparse -import traceback +import json import logging +import sys +import traceback + +import xapi +from xapi import ( + InternalError, + Rpc_light_failure, + TypeError, + UnknownMethod, + UnmarshalException, + is_str, + success, +) # pylint: disable=invalid-name,redefined-builtin,undefined-variable # pyright: reportUndefinedVariable=false diff --git a/ocaml/xapi-storage/python/xapi/storage/api/plugin.py b/ocaml/xapi-storage/python/xapi/storage/api/plugin.py index 1b6d37214ca..8e50736c72e 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/plugin.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/plugin.py @@ -1,11 +1,21 @@ from __future__ import print_function -from xapi import success, Rpc_light_failure, InternalError, UnmarshalException, TypeError, is_long, UnknownMethod -import xapi -import sys -import json + import argparse -import traceback +import json import logging +import sys +import traceback + +import xapi +from xapi import ( + InternalError, + Rpc_light_failure, + TypeError, + UnknownMethod, + UnmarshalException, + is_str, + success, +) # pylint: disable=invalid-name,redefined-builtin,undefined-variable # pyright: reportUndefinedVariable=false diff --git a/ocaml/xapi-storage/python/xapi/storage/api/volume.py b/ocaml/xapi-storage/python/xapi/storage/api/volume.py index b89574f9570..5beb31b57cb 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/volume.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/volume.py @@ -1,11 +1,22 @@ from __future__ import print_function -from xapi import success, Rpc_light_failure, InternalError, UnmarshalException, TypeError, is_long, UnknownMethod -import xapi -import sys -import json + import argparse -import traceback +import json import logging +import sys +import traceback + +import xapi +from xapi import ( + InternalError, + Rpc_light_failure, + TypeError, + UnknownMethod, + UnmarshalException, + is_long, + is_str, + success, +) # pylint: disable=invalid-name,redefined-builtin,undefined-variable # pyright: reportUndefinedVariable=false From 3e636027b270faf0b464542c1a569931097e769a Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 16 Jul 2024 12:00:00 +0200 Subject: [PATCH 189/222] storage/api/{volume,plugin,datapath}.py: Disable excessive pylint warnings Signed-off-by: Bernhard Kaindl --- ocaml/xapi-storage/python/xapi/__init__.py | 4 +--- ocaml/xapi-storage/python/xapi/storage/api/datapath.py | 2 ++ ocaml/xapi-storage/python/xapi/storage/api/plugin.py | 2 ++ ocaml/xapi-storage/python/xapi/storage/api/volume.py | 2 ++ pyproject.toml | 2 ++ 5 files changed, 9 insertions(+), 3 deletions(-) diff --git a/ocaml/xapi-storage/python/xapi/__init__.py b/ocaml/xapi-storage/python/xapi/__init__.py index fbef43ecde2..50eae33fe1a 100644 --- 
a/ocaml/xapi-storage/python/xapi/__init__.py +++ b/ocaml/xapi-storage/python/xapi/__init__.py @@ -31,8 +31,6 @@ import json import argparse -# pylint: disable=invalid-name,redefined-builtin,undefined-variable -# pyright: reportUndefinedVariable=false # is_str(): Shortcut to check if a value is an instance of a string type. # @@ -138,7 +136,7 @@ def __init__(self, thing, ty, desc): "UnmarshalException thing=%s ty=%s desc=%s" % (thing, ty, desc)) -class TypeError(InternalError): +class TypeError(InternalError): # pylint: disable=redefined-builtin def __init__(self, expected, actual): InternalError.__init__( diff --git a/ocaml/xapi-storage/python/xapi/storage/api/datapath.py b/ocaml/xapi-storage/python/xapi/storage/api/datapath.py index 1bf426d5b2f..1305f31cc9d 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/datapath.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/datapath.py @@ -7,6 +7,8 @@ import traceback import xapi +# pylint: disable=line-too-long,superfluous-parens,unused-argument +# pylint: disable-next=redefined-builtin # FIXME: TypeError is a custom class in xapi from xapi import ( InternalError, Rpc_light_failure, diff --git a/ocaml/xapi-storage/python/xapi/storage/api/plugin.py b/ocaml/xapi-storage/python/xapi/storage/api/plugin.py index 8e50736c72e..69dce60fc77 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/plugin.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/plugin.py @@ -7,6 +7,8 @@ import traceback import xapi +# pylint: disable=line-too-long,superfluous-parens,unused-argument +# pylint: disable-next=redefined-builtin # FIXME: TypeError is a custom class in xapi from xapi import ( InternalError, Rpc_light_failure, diff --git a/ocaml/xapi-storage/python/xapi/storage/api/volume.py b/ocaml/xapi-storage/python/xapi/storage/api/volume.py index 5beb31b57cb..d530a3a3c8c 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/volume.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/volume.py @@ -7,6 +7,8 @@ import traceback import xapi +# pylint: disable=line-too-long,superfluous-parens,unused-argument +# pylint: disable-next=redefined-builtin # FIXME: TypeError is a custom class in xapi from xapi import ( InternalError, Rpc_light_failure, diff --git a/pyproject.toml b/pyproject.toml index efdcd13494e..b8e4c984853 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -195,8 +195,10 @@ disable = [ "no-else-break", # else clause following a break statement "protected-access", # Best done during the code cleanup phase "super-with-arguments", # Consider using Python 3 style super(no args) calls + "too-few-public-methods", # Some classes only overload private methods, is fine "too-many-branches", # Existing code breaches this, not part of porting "too-many-arguments", # Likewise, not part of porting + "too-many-lines", # Likewise, not part of porting "too-many-locals", # Likewise, not part of porting "too-many-statements", # Likewise, not part of porting "unnecessary-pass", # Cosmetic, best done during the code cleanup phase From f74d7c1a3968ce5207dde3387e522001293aa1c7 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 16 Jul 2024 12:00:00 +0200 Subject: [PATCH 190/222] storage/api/{volume,plugin,datapath}.py: Apply automatic conversion to is_str() Signed-off-by: Bernhard Kaindl --- .../python/xapi/storage/api/datapath.py | 42 ++-- .../python/xapi/storage/api/plugin.py | 34 +-- .../python/xapi/storage/api/volume.py | 228 +++++++++--------- 3 files changed, 152 insertions(+), 152 deletions(-) diff --git 
a/ocaml/xapi-storage/python/xapi/storage/api/datapath.py b/ocaml/xapi-storage/python/xapi/storage/api/datapath.py index 1305f31cc9d..0a4e82438fb 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/datapath.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/datapath.py @@ -27,7 +27,7 @@ class Unimplemented(Rpc_light_failure): def __init__(self, arg_0): Rpc_light_failure.__init__(self, "Unimplemented", [ arg_0 ]) - if not isinstance(arg_0, str) and not isinstance(arg_0, unicode): + if not is_str(arg_0): raise TypeError("string", repr(arg_0)) self.arg_0 = arg_0 class Datapath_server_dispatcher: @@ -42,12 +42,12 @@ def open(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('uri' in args): raise UnmarshalException('argument missing', 'uri', '') uri = args["uri"] - if not isinstance(uri, str) and not isinstance(uri, unicode): + if not is_str(uri): raise TypeError("string", repr(uri)) if not('persistent' in args): raise UnmarshalException('argument missing', 'persistent', '') @@ -63,29 +63,29 @@ def attach(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('uri' in args): raise UnmarshalException('argument missing', 'uri', '') uri = args["uri"] - if not isinstance(uri, str) and not isinstance(uri, unicode): + if not is_str(uri): raise TypeError("string", repr(uri)) if not('domain' in args): raise UnmarshalException('argument missing', 'domain', '') domain = args["domain"] - if not isinstance(domain, str) and not isinstance(domain, unicode): + if not is_str(domain): raise TypeError("string", repr(domain)) results = self._impl.attach(dbg, uri, domain) - if not isinstance(results['domain_uuid'], str) and not isinstance(results['domain_uuid'], unicode): + if not is_str(results['domain_uuid']): raise TypeError("string", repr(results['domain_uuid'])) if results['implementation'][0] == 'Blkback': - if not isinstance(results['implementation'][1], str) and not isinstance(results['implementation'][1], unicode): + if not is_str(results['implementation'][1]): raise TypeError("string", repr(results['implementation'][1])) elif results['implementation'][0] == 'Tapdisk3': - if not isinstance(results['implementation'][1], str) and not isinstance(results['implementation'][1], unicode): + if not is_str(results['implementation'][1]): raise TypeError("string", repr(results['implementation'][1])) elif results['implementation'][0] == 'Qdisk': - if not isinstance(results['implementation'][1], str) and not isinstance(results['implementation'][1], unicode): + if not is_str(results['implementation'][1]): raise TypeError("string", repr(results['implementation'][1])) return results def activate(self, args): @@ -95,17 +95,17 @@ def activate(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('uri' in args): raise UnmarshalException('argument missing', 'uri', '') uri = args["uri"] - if not isinstance(uri, str) and not isinstance(uri, unicode): + if not is_str(uri): raise TypeError("string", repr(uri)) if not('domain' in args): raise UnmarshalException('argument 
missing', 'domain', '') domain = args["domain"] - if not isinstance(domain, str) and not isinstance(domain, unicode): + if not is_str(domain): raise TypeError("string", repr(domain)) results = self._impl.activate(dbg, uri, domain) return results @@ -116,17 +116,17 @@ def deactivate(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('uri' in args): raise UnmarshalException('argument missing', 'uri', '') uri = args["uri"] - if not isinstance(uri, str) and not isinstance(uri, unicode): + if not is_str(uri): raise TypeError("string", repr(uri)) if not('domain' in args): raise UnmarshalException('argument missing', 'domain', '') domain = args["domain"] - if not isinstance(domain, str) and not isinstance(domain, unicode): + if not is_str(domain): raise TypeError("string", repr(domain)) results = self._impl.deactivate(dbg, uri, domain) return results @@ -137,17 +137,17 @@ def detach(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('uri' in args): raise UnmarshalException('argument missing', 'uri', '') uri = args["uri"] - if not isinstance(uri, str) and not isinstance(uri, unicode): + if not is_str(uri): raise TypeError("string", repr(uri)) if not('domain' in args): raise UnmarshalException('argument missing', 'domain', '') domain = args["domain"] - if not isinstance(domain, str) and not isinstance(domain, unicode): + if not is_str(domain): raise TypeError("string", repr(domain)) results = self._impl.detach(dbg, uri, domain) return results @@ -158,12 +158,12 @@ def close(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('uri' in args): raise UnmarshalException('argument missing', 'uri', '') uri = args["uri"] - if not isinstance(uri, str) and not isinstance(uri, unicode): + if not is_str(uri): raise TypeError("string", repr(uri)) results = self._impl.close(dbg, uri) return results diff --git a/ocaml/xapi-storage/python/xapi/storage/api/plugin.py b/ocaml/xapi-storage/python/xapi/storage/api/plugin.py index 69dce60fc77..d9199a98771 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/plugin.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/plugin.py @@ -27,7 +27,7 @@ class Unimplemented(Rpc_light_failure): def __init__(self, arg_0): Rpc_light_failure.__init__(self, "Unimplemented", [ arg_0 ]) - if not isinstance(arg_0, str) and not isinstance(arg_0, unicode): + if not is_str(arg_0): raise TypeError("string", repr(arg_0)) self.arg_0 = arg_0 class Plugin_server_dispatcher: @@ -42,40 +42,40 @@ def query(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) results = self._impl.query(dbg) - if not isinstance(results['plugin'], str) and not isinstance(results['plugin'], unicode): + if not is_str(results['plugin']): raise TypeError("string", repr(results['plugin'])) - if not isinstance(results['name'], str) and not isinstance(results['name'], unicode): + if not 
is_str(results['name']): raise TypeError("string", repr(results['name'])) - if not isinstance(results['description'], str) and not isinstance(results['description'], unicode): + if not is_str(results['description']): raise TypeError("string", repr(results['description'])) - if not isinstance(results['vendor'], str) and not isinstance(results['vendor'], unicode): + if not is_str(results['vendor']): raise TypeError("string", repr(results['vendor'])) - if not isinstance(results['copyright'], str) and not isinstance(results['copyright'], unicode): + if not is_str(results['copyright']): raise TypeError("string", repr(results['copyright'])) - if not isinstance(results['version'], str) and not isinstance(results['version'], unicode): + if not is_str(results['version']): raise TypeError("string", repr(results['version'])) - if not isinstance(results['required_api_version'], str) and not isinstance(results['required_api_version'], unicode): + if not is_str(results['required_api_version']): raise TypeError("string", repr(results['required_api_version'])) if not isinstance(results['features'], list): raise TypeError("string list", repr(results['features'])) for tmp_1 in results['features']: - if not isinstance(tmp_1, str) and not isinstance(tmp_1, unicode): + if not is_str(tmp_1): raise TypeError("string", repr(tmp_1)) if not isinstance(results['configuration'], dict): raise TypeError("(string * string) list", repr(results['configuration'])) for tmp_2 in results['configuration'].keys(): - if not isinstance(tmp_2, str) and not isinstance(tmp_2, unicode): + if not is_str(tmp_2): raise TypeError("string", repr(tmp_2)) for tmp_2 in results['configuration'].values(): - if not isinstance(tmp_2, str) and not isinstance(tmp_2, unicode): + if not is_str(tmp_2): raise TypeError("string", repr(tmp_2)) if not isinstance(results['required_cluster_stack'], list): raise TypeError("string list", repr(results['required_cluster_stack'])) for tmp_3 in results['required_cluster_stack']: - if not isinstance(tmp_3, str) and not isinstance(tmp_3, unicode): + if not is_str(tmp_3): raise TypeError("string", repr(tmp_3)) return results def ls(self, args): @@ -85,13 +85,13 @@ def ls(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) results = self._impl.ls(dbg) if not isinstance(results, list): raise TypeError("string list", repr(results)) for tmp_4 in results: - if not isinstance(tmp_4, str) and not isinstance(tmp_4, unicode): + if not is_str(tmp_4): raise TypeError("string", repr(tmp_4)) return results def diagnostics(self, args): @@ -101,10 +101,10 @@ def diagnostics(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) results = self._impl.diagnostics(dbg) - if not isinstance(results, str) and not isinstance(results, unicode): + if not is_str(results): raise TypeError("string", repr(results)) return results def _dispatch(self, method, params): diff --git a/ocaml/xapi-storage/python/xapi/storage/api/volume.py b/ocaml/xapi-storage/python/xapi/storage/api/volume.py index d530a3a3c8c..0f01ed6fd97 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/volume.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/volume.py @@ -30,31 +30,31 @@ class 
Sr_not_attached(Rpc_light_failure): def __init__(self, arg_0): Rpc_light_failure.__init__(self, "Sr_not_attached", [ arg_0 ]) - if not isinstance(arg_0, str) and not isinstance(arg_0, unicode): + if not is_str(arg_0): raise TypeError("string", repr(arg_0)) self.arg_0 = arg_0 class SR_does_not_exist(Rpc_light_failure): def __init__(self, arg_0): Rpc_light_failure.__init__(self, "SR_does_not_exist", [ arg_0 ]) - if not isinstance(arg_0, str) and not isinstance(arg_0, unicode): + if not is_str(arg_0): raise TypeError("string", repr(arg_0)) self.arg_0 = arg_0 class Volume_does_not_exist(Rpc_light_failure): def __init__(self, arg_0): Rpc_light_failure.__init__(self, "Volume_does_not_exist", [ arg_0 ]) - if not isinstance(arg_0, str) and not isinstance(arg_0, unicode): + if not is_str(arg_0): raise TypeError("string", repr(arg_0)) self.arg_0 = arg_0 class Unimplemented(Rpc_light_failure): def __init__(self, arg_0): Rpc_light_failure.__init__(self, "Unimplemented", [ arg_0 ]) - if not isinstance(arg_0, str) and not isinstance(arg_0, unicode): + if not is_str(arg_0): raise TypeError("string", repr(arg_0)) self.arg_0 = arg_0 class Cancelled(Rpc_light_failure): def __init__(self, arg_0): Rpc_light_failure.__init__(self, "Cancelled", [ arg_0 ]) - if not isinstance(arg_0, str) and not isinstance(arg_0, unicode): + if not is_str(arg_0): raise TypeError("string", repr(arg_0)) self.arg_0 = arg_0 class Volume_server_dispatcher: @@ -69,22 +69,22 @@ def create(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) if not('name' in args): raise UnmarshalException('argument missing', 'name', '') name = args["name"] - if not isinstance(name, str) and not isinstance(name, unicode): + if not is_str(name): raise TypeError("string", repr(name)) if not('description' in args): raise UnmarshalException('argument missing', 'description', '') description = args["description"] - if not isinstance(description, str) and not isinstance(description, unicode): + if not is_str(description): raise TypeError("string", repr(description)) if not('size' in args): raise UnmarshalException('argument missing', 'size', '') @@ -92,14 +92,14 @@ def create(self, args): if not(is_long(size)): raise TypeError("int64", repr(size)) results = self._impl.create(dbg, sr, name, description, size) - if not isinstance(results['key'], str) and not isinstance(results['key'], unicode): + if not is_str(results['key']): raise TypeError("string", repr(results['key'])) if results['uuid'] is not None: - if not isinstance(results['uuid'], str) and not isinstance(results['uuid'], unicode): + if not is_str(results['uuid']): raise TypeError("string", repr(results['uuid'])) - if not isinstance(results['name'], str) and not isinstance(results['name'], unicode): + if not is_str(results['name']): raise TypeError("string", repr(results['name'])) - if not isinstance(results['description'], str) and not isinstance(results['description'], unicode): + if not is_str(results['description']): raise TypeError("string", repr(results['description'])) if not isinstance(results['read_write'], bool): raise TypeError("bool", repr(results['read_write'])) @@ -110,15 +110,15 @@ def create(self, 
args): if not isinstance(results['uri'], list): raise TypeError("string list", repr(results['uri'])) for tmp_5 in results['uri']: - if not isinstance(tmp_5, str) and not isinstance(tmp_5, unicode): + if not is_str(tmp_5): raise TypeError("string", repr(tmp_5)) if not isinstance(results['keys'], dict): raise TypeError("(string * string) list", repr(results['keys'])) for tmp_6 in results['keys'].keys(): - if not isinstance(tmp_6, str) and not isinstance(tmp_6, unicode): + if not is_str(tmp_6): raise TypeError("string", repr(tmp_6)) for tmp_6 in results['keys'].values(): - if not isinstance(tmp_6, str) and not isinstance(tmp_6, unicode): + if not is_str(tmp_6): raise TypeError("string", repr(tmp_6)) return results def snapshot(self, args): @@ -128,27 +128,27 @@ def snapshot(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) if not('key' in args): raise UnmarshalException('argument missing', 'key', '') key = args["key"] - if not isinstance(key, str) and not isinstance(key, unicode): + if not is_str(key): raise TypeError("string", repr(key)) results = self._impl.snapshot(dbg, sr, key) - if not isinstance(results['key'], str) and not isinstance(results['key'], unicode): + if not is_str(results['key']): raise TypeError("string", repr(results['key'])) if results['uuid'] is not None: - if not isinstance(results['uuid'], str) and not isinstance(results['uuid'], unicode): + if not is_str(results['uuid']): raise TypeError("string", repr(results['uuid'])) - if not isinstance(results['name'], str) and not isinstance(results['name'], unicode): + if not is_str(results['name']): raise TypeError("string", repr(results['name'])) - if not isinstance(results['description'], str) and not isinstance(results['description'], unicode): + if not is_str(results['description']): raise TypeError("string", repr(results['description'])) if not isinstance(results['read_write'], bool): raise TypeError("bool", repr(results['read_write'])) @@ -159,15 +159,15 @@ def snapshot(self, args): if not isinstance(results['uri'], list): raise TypeError("string list", repr(results['uri'])) for tmp_7 in results['uri']: - if not isinstance(tmp_7, str) and not isinstance(tmp_7, unicode): + if not is_str(tmp_7): raise TypeError("string", repr(tmp_7)) if not isinstance(results['keys'], dict): raise TypeError("(string * string) list", repr(results['keys'])) for tmp_8 in results['keys'].keys(): - if not isinstance(tmp_8, str) and not isinstance(tmp_8, unicode): + if not is_str(tmp_8): raise TypeError("string", repr(tmp_8)) for tmp_8 in results['keys'].values(): - if not isinstance(tmp_8, str) and not isinstance(tmp_8, unicode): + if not is_str(tmp_8): raise TypeError("string", repr(tmp_8)) return results def clone(self, args): @@ -177,27 +177,27 @@ def clone(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not 
is_str(sr): raise TypeError("string", repr(sr)) if not('key' in args): raise UnmarshalException('argument missing', 'key', '') key = args["key"] - if not isinstance(key, str) and not isinstance(key, unicode): + if not is_str(key): raise TypeError("string", repr(key)) results = self._impl.clone(dbg, sr, key) - if not isinstance(results['key'], str) and not isinstance(results['key'], unicode): + if not is_str(results['key']): raise TypeError("string", repr(results['key'])) if results['uuid'] is not None: - if not isinstance(results['uuid'], str) and not isinstance(results['uuid'], unicode): + if not is_str(results['uuid']): raise TypeError("string", repr(results['uuid'])) - if not isinstance(results['name'], str) and not isinstance(results['name'], unicode): + if not is_str(results['name']): raise TypeError("string", repr(results['name'])) - if not isinstance(results['description'], str) and not isinstance(results['description'], unicode): + if not is_str(results['description']): raise TypeError("string", repr(results['description'])) if not isinstance(results['read_write'], bool): raise TypeError("bool", repr(results['read_write'])) @@ -208,15 +208,15 @@ def clone(self, args): if not isinstance(results['uri'], list): raise TypeError("string list", repr(results['uri'])) for tmp_9 in results['uri']: - if not isinstance(tmp_9, str) and not isinstance(tmp_9, unicode): + if not is_str(tmp_9): raise TypeError("string", repr(tmp_9)) if not isinstance(results['keys'], dict): raise TypeError("(string * string) list", repr(results['keys'])) for tmp_10 in results['keys'].keys(): - if not isinstance(tmp_10, str) and not isinstance(tmp_10, unicode): + if not is_str(tmp_10): raise TypeError("string", repr(tmp_10)) for tmp_10 in results['keys'].values(): - if not isinstance(tmp_10, str) and not isinstance(tmp_10, unicode): + if not is_str(tmp_10): raise TypeError("string", repr(tmp_10)) return results def destroy(self, args): @@ -226,17 +226,17 @@ def destroy(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) if not('key' in args): raise UnmarshalException('argument missing', 'key', '') key = args["key"] - if not isinstance(key, str) and not isinstance(key, unicode): + if not is_str(key): raise TypeError("string", repr(key)) results = self._impl.destroy(dbg, sr, key) return results @@ -247,22 +247,22 @@ def set_name(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) if not('key' in args): raise UnmarshalException('argument missing', 'key', '') key = args["key"] - if not isinstance(key, str) and not isinstance(key, unicode): + if not is_str(key): raise TypeError("string", repr(key)) if not('new_name' in args): raise UnmarshalException('argument missing', 'new_name', '') new_name = args["new_name"] - if not isinstance(new_name, str) 
and not isinstance(new_name, unicode): + if not is_str(new_name): raise TypeError("string", repr(new_name)) results = self._impl.set_name(dbg, sr, key, new_name) return results @@ -273,22 +273,22 @@ def set_description(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) if not('key' in args): raise UnmarshalException('argument missing', 'key', '') key = args["key"] - if not isinstance(key, str) and not isinstance(key, unicode): + if not is_str(key): raise TypeError("string", repr(key)) if not('new_description' in args): raise UnmarshalException('argument missing', 'new_description', '') new_description = args["new_description"] - if not isinstance(new_description, str) and not isinstance(new_description, unicode): + if not is_str(new_description): raise TypeError("string", repr(new_description)) results = self._impl.set_description(dbg, sr, key, new_description) return results @@ -299,27 +299,27 @@ def set(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) if not('key' in args): raise UnmarshalException('argument missing', 'key', '') key = args["key"] - if not isinstance(key, str) and not isinstance(key, unicode): + if not is_str(key): raise TypeError("string", repr(key)) if not('k' in args): raise UnmarshalException('argument missing', 'k', '') k = args["k"] - if not isinstance(k, str) and not isinstance(k, unicode): + if not is_str(k): raise TypeError("string", repr(k)) if not('v' in args): raise UnmarshalException('argument missing', 'v', '') v = args["v"] - if not isinstance(v, str) and not isinstance(v, unicode): + if not is_str(v): raise TypeError("string", repr(v)) results = self._impl.set(dbg, sr, key, k, v) return results @@ -330,22 +330,22 @@ def unset(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) if not('key' in args): raise UnmarshalException('argument missing', 'key', '') key = args["key"] - if not isinstance(key, str) and not isinstance(key, unicode): + if not is_str(key): raise TypeError("string", repr(key)) if not('k' in args): raise UnmarshalException('argument missing', 'k', '') k = args["k"] - if not isinstance(k, str) and not isinstance(k, unicode): + if not is_str(k): raise TypeError("string", repr(k)) results = self._impl.unset(dbg, sr, key, k) return results @@ -356,17 +356,17 @@ def resize(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if 
not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) if not('key' in args): raise UnmarshalException('argument missing', 'key', '') key = args["key"] - if not isinstance(key, str) and not isinstance(key, unicode): + if not is_str(key): raise TypeError("string", repr(key)) if not('new_size' in args): raise UnmarshalException('argument missing', 'new_size', '') @@ -382,27 +382,27 @@ def stat(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) if not('key' in args): raise UnmarshalException('argument missing', 'key', '') key = args["key"] - if not isinstance(key, str) and not isinstance(key, unicode): + if not is_str(key): raise TypeError("string", repr(key)) results = self._impl.stat(dbg, sr, key) - if not isinstance(results['key'], str) and not isinstance(results['key'], unicode): + if not is_str(results['key']): raise TypeError("string", repr(results['key'])) if results['uuid'] is not None: - if not isinstance(results['uuid'], str) and not isinstance(results['uuid'], unicode): + if not is_str(results['uuid']): raise TypeError("string", repr(results['uuid'])) - if not isinstance(results['name'], str) and not isinstance(results['name'], unicode): + if not is_str(results['name']): raise TypeError("string", repr(results['name'])) - if not isinstance(results['description'], str) and not isinstance(results['description'], unicode): + if not is_str(results['description']): raise TypeError("string", repr(results['description'])) if not isinstance(results['read_write'], bool): raise TypeError("bool", repr(results['read_write'])) @@ -413,15 +413,15 @@ def stat(self, args): if not isinstance(results['uri'], list): raise TypeError("string list", repr(results['uri'])) for tmp_11 in results['uri']: - if not isinstance(tmp_11, str) and not isinstance(tmp_11, unicode): + if not is_str(tmp_11): raise TypeError("string", repr(tmp_11)) if not isinstance(results['keys'], dict): raise TypeError("(string * string) list", repr(results['keys'])) for tmp_12 in results['keys'].keys(): - if not isinstance(tmp_12, str) and not isinstance(tmp_12, unicode): + if not is_str(tmp_12): raise TypeError("string", repr(tmp_12)) for tmp_12 in results['keys'].values(): - if not isinstance(tmp_12, str) and not isinstance(tmp_12, unicode): + if not is_str(tmp_12): raise TypeError("string", repr(tmp_12)) return results def _dispatch(self, method, params): @@ -814,22 +814,22 @@ def probe(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('uri' in args): raise UnmarshalException('argument missing', 'uri', '') uri = args["uri"] - if not isinstance(uri, str) and not isinstance(uri, unicode): + if not is_str(uri): raise TypeError("string", repr(uri)) results = self._impl.probe(dbg, uri) if not 
isinstance(results['srs'], list): raise TypeError("7 list", repr(results['srs'])) for tmp_13 in results['srs']: - if not isinstance(tmp_13['sr'], str) and not isinstance(tmp_13['sr'], unicode): + if not is_str(tmp_13['sr']): raise TypeError("string", repr(tmp_13['sr'])) - if not isinstance(tmp_13['name'], str) and not isinstance(tmp_13['name'], unicode): + if not is_str(tmp_13['name']): raise TypeError("string", repr(tmp_13['name'])) - if not isinstance(tmp_13['description'], str) and not isinstance(tmp_13['description'], unicode): + if not is_str(tmp_13['description']): raise TypeError("string", repr(tmp_13['description'])) if not(is_long(tmp_13['free_space'])): raise TypeError("int64", repr(tmp_13['free_space'])) @@ -838,20 +838,20 @@ def probe(self, args): if not isinstance(tmp_13['datasources'], list): raise TypeError("string list", repr(tmp_13['datasources'])) for tmp_14 in tmp_13['datasources']: - if not isinstance(tmp_14, str) and not isinstance(tmp_14, unicode): + if not is_str(tmp_14): raise TypeError("string", repr(tmp_14)) if not isinstance(tmp_13['clustered'], bool): raise TypeError("bool", repr(tmp_13['clustered'])) if tmp_13['health'][0] == 'Healthy': - if not isinstance(tmp_13['health'][1], str) and not isinstance(tmp_13['health'][1], unicode): + if not is_str(tmp_13['health'][1]): raise TypeError("string", repr(tmp_13['health'][1])) elif tmp_13['health'][0] == 'Recovering': - if not isinstance(tmp_13['health'][1], str) and not isinstance(tmp_13['health'][1], unicode): + if not is_str(tmp_13['health'][1]): raise TypeError("string", repr(tmp_13['health'][1])) if not isinstance(results['uris'], list): raise TypeError("string list", repr(results['uris'])) for tmp_15 in results['uris']: - if not isinstance(tmp_15, str) and not isinstance(tmp_15, unicode): + if not is_str(tmp_15): raise TypeError("string", repr(tmp_15)) return results def create(self, args): @@ -861,22 +861,22 @@ def create(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('uri' in args): raise UnmarshalException('argument missing', 'uri', '') uri = args["uri"] - if not isinstance(uri, str) and not isinstance(uri, unicode): + if not is_str(uri): raise TypeError("string", repr(uri)) if not('name' in args): raise UnmarshalException('argument missing', 'name', '') name = args["name"] - if not isinstance(name, str) and not isinstance(name, unicode): + if not is_str(name): raise TypeError("string", repr(name)) if not('description' in args): raise UnmarshalException('argument missing', 'description', '') description = args["description"] - if not isinstance(description, str) and not isinstance(description, unicode): + if not is_str(description): raise TypeError("string", repr(description)) if not('configuration' in args): raise UnmarshalException('argument missing', 'configuration', '') @@ -884,10 +884,10 @@ def create(self, args): if not isinstance(configuration, dict): raise TypeError("(string * string) list", repr(configuration)) for tmp_16 in configuration.keys(): - if not isinstance(tmp_16, str) and not isinstance(tmp_16, unicode): + if not is_str(tmp_16): raise TypeError("string", repr(tmp_16)) for tmp_16 in configuration.values(): - if not isinstance(tmp_16, str) and not isinstance(tmp_16, unicode): + if not is_str(tmp_16): raise TypeError("string", repr(tmp_16)) results = self._impl.create(dbg, uri, name, description, 
configuration) return results @@ -898,15 +898,15 @@ def attach(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('uri' in args): raise UnmarshalException('argument missing', 'uri', '') uri = args["uri"] - if not isinstance(uri, str) and not isinstance(uri, unicode): + if not is_str(uri): raise TypeError("string", repr(uri)) results = self._impl.attach(dbg, uri) - if not isinstance(results, str) and not isinstance(results, unicode): + if not is_str(results): raise TypeError("string", repr(results)) return results def detach(self, args): @@ -916,12 +916,12 @@ def detach(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) results = self._impl.detach(dbg, sr) return results @@ -932,12 +932,12 @@ def destroy(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) results = self._impl.destroy(dbg, sr) return results @@ -948,19 +948,19 @@ def stat(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) results = self._impl.stat(dbg, sr) - if not isinstance(results['sr'], str) and not isinstance(results['sr'], unicode): + if not is_str(results['sr']): raise TypeError("string", repr(results['sr'])) - if not isinstance(results['name'], str) and not isinstance(results['name'], unicode): + if not is_str(results['name']): raise TypeError("string", repr(results['name'])) - if not isinstance(results['description'], str) and not isinstance(results['description'], unicode): + if not is_str(results['description']): raise TypeError("string", repr(results['description'])) if not(is_long(results['free_space'])): raise TypeError("int64", repr(results['free_space'])) @@ -969,15 +969,15 @@ def stat(self, args): if not isinstance(results['datasources'], list): raise TypeError("string list", repr(results['datasources'])) for tmp_17 in results['datasources']: - if not isinstance(tmp_17, str) and not isinstance(tmp_17, unicode): + if not is_str(tmp_17): raise TypeError("string", repr(tmp_17)) if not isinstance(results['clustered'], bool): raise TypeError("bool", repr(results['clustered'])) if results['health'][0] == 'Healthy': - if not isinstance(results['health'][1], str) and not isinstance(results['health'][1], unicode): + if not is_str(results['health'][1]): raise 
TypeError("string", repr(results['health'][1])) elif results['health'][0] == 'Recovering': - if not isinstance(results['health'][1], str) and not isinstance(results['health'][1], unicode): + if not is_str(results['health'][1]): raise TypeError("string", repr(results['health'][1])) return results def set_name(self, args): @@ -987,17 +987,17 @@ def set_name(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) if not('new_name' in args): raise UnmarshalException('argument missing', 'new_name', '') new_name = args["new_name"] - if not isinstance(new_name, str) and not isinstance(new_name, unicode): + if not is_str(new_name): raise TypeError("string", repr(new_name)) results = self._impl.set_name(dbg, sr, new_name) return results @@ -1008,17 +1008,17 @@ def set_description(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) if not('new_description' in args): raise UnmarshalException('argument missing', 'new_description', '') new_description = args["new_description"] - if not isinstance(new_description, str) and not isinstance(new_description, unicode): + if not is_str(new_description): raise TypeError("string", repr(new_description)) results = self._impl.set_description(dbg, sr, new_description) return results @@ -1029,25 +1029,25 @@ def ls(self, args): if not('dbg' in args): raise UnmarshalException('argument missing', 'dbg', '') dbg = args["dbg"] - if not isinstance(dbg, str) and not isinstance(dbg, unicode): + if not is_str(dbg): raise TypeError("string", repr(dbg)) if not('sr' in args): raise UnmarshalException('argument missing', 'sr', '') sr = args["sr"] - if not isinstance(sr, str) and not isinstance(sr, unicode): + if not is_str(sr): raise TypeError("string", repr(sr)) results = self._impl.ls(dbg, sr) if not isinstance(results, list): raise TypeError("8 list", repr(results)) for tmp_18 in results: - if not isinstance(tmp_18['key'], str) and not isinstance(tmp_18['key'], unicode): + if not is_str(tmp_18['key']): raise TypeError("string", repr(tmp_18['key'])) if tmp_18['uuid'] is not None: - if not isinstance(tmp_18['uuid'], str) and not isinstance(tmp_18['uuid'], unicode): + if not is_str(tmp_18['uuid']): raise TypeError("string", repr(tmp_18['uuid'])) - if not isinstance(tmp_18['name'], str) and not isinstance(tmp_18['name'], unicode): + if not is_str(tmp_18['name']): raise TypeError("string", repr(tmp_18['name'])) - if not isinstance(tmp_18['description'], str) and not isinstance(tmp_18['description'], unicode): + if not is_str(tmp_18['description']): raise TypeError("string", repr(tmp_18['description'])) if not isinstance(tmp_18['read_write'], bool): raise TypeError("bool", repr(tmp_18['read_write'])) @@ -1058,15 +1058,15 @@ def ls(self, args): if not isinstance(tmp_18['uri'], list): raise TypeError("string list", 
repr(tmp_18['uri'])) for tmp_19 in tmp_18['uri']: - if not isinstance(tmp_19, str) and not isinstance(tmp_19, unicode): + if not is_str(tmp_19): raise TypeError("string", repr(tmp_19)) if not isinstance(tmp_18['keys'], dict): raise TypeError("(string * string) list", repr(tmp_18['keys'])) for tmp_20 in tmp_18['keys'].keys(): - if not isinstance(tmp_20, str) and not isinstance(tmp_20, unicode): + if not is_str(tmp_20): raise TypeError("string", repr(tmp_20)) for tmp_20 in tmp_18['keys'].values(): - if not isinstance(tmp_20, str) and not isinstance(tmp_20, unicode): + if not is_str(tmp_20): raise TypeError("string", repr(tmp_20)) return results def _dispatch(self, method, params): From d83218f61810f5d9adc9ddb416de23dd51d59a6a Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 16 Jul 2024 12:00:00 +0200 Subject: [PATCH 191/222] storage-api: Add pytest for datapath.Datapath_server_dispatcher() Signed-off-by: Bernhard Kaindl --- .github/workflows/other.yml | 7 +- .../python/xapi/storage/api/test_datapath.py | 127 ++++++++++++++++++ pyproject.toml | 2 +- 3 files changed, 130 insertions(+), 6 deletions(-) create mode 100644 ocaml/xapi-storage/python/xapi/storage/api/test_datapath.py diff --git a/.github/workflows/other.yml b/.github/workflows/other.yml index 17f91991da5..7cf672afd14 100644 --- a/.github/workflows/other.yml +++ b/.github/workflows/other.yml @@ -49,13 +49,10 @@ jobs: if: ${{ matrix.python-version == '2.7' }} run: > pip install enum future mock pytest-coverage pytest-mock && - pytest - --cov=scripts --cov=ocaml/xcp-rrdd - scripts/ ocaml/xcp-rrdd -vv -rA - --junitxml=.git/pytest${{matrix.python-version}}.xml + pytest -vv -rA --cov=ocaml ocaml --cov-report term-missing --cov-report xml:.git/coverage${{matrix.python-version}}.xml - --cov-fail-under 0 + --cov-fail-under 60 env: PYTHONDEVMODE: yes diff --git a/ocaml/xapi-storage/python/xapi/storage/api/test_datapath.py b/ocaml/xapi-storage/python/xapi/storage/api/test_datapath.py new file mode 100644 index 00000000000..9bea7377391 --- /dev/null +++ b/ocaml/xapi-storage/python/xapi/storage/api/test_datapath.py @@ -0,0 +1,127 @@ +import logging + +import pytest + +import xapi +import xapi.storage.api.datapath + + +def internal_error(error): + """Return a dictionary with an internal error""" + return {"ErrorDescription": ["Internal_error", error], "Status": "Failure"} + + +def assert_error(testee, caplog, method_args, method, error): + """Assert that the result of the testee matches the expected error result""" + args = method_args.copy() + if method != "open": # the persistent arg is only checked for the open method + args["persistent"] = None # pass it, but with a wrong type(not used/checked) + assert testee._dispatch("Datapath." 
+ method, [args]) == internal_error(error) + assert caplog.messages[0] == "caught " + error + caplog.clear() + + +def assert_type_checks(testee, methods, template_args, bad_args, caplog): + """Assert that the result of the testee matches the expected result""" + for arg in bad_args: + # Sigh, if Python would be strongly typed, we wouldn't need this: + # Assert the type checks of the arguments + expected = "bool" if arg == "persistent" else "string" + other_type = False if expected == "string" else "str" + for actual in [None, [], (), {"dict": "val"}, 1, 1.0, str, caplog, other_type]: + bad_args = template_args.copy() + bad_args[arg] = actual + error_msg = "TypeError expected={} actual={}".format(expected, repr(actual)) + for method in methods: + assert_error(testee, caplog, bad_args, method, error_msg) + + # Remove the argument and assert the missing argument checks + bad_args.pop(arg) + error_msg = "UnmarshalException thing=argument missing ty={} desc=".format(arg) + for method in methods: + assert_error(testee, caplog, bad_args, method, error_msg) + + +def test_dispatcher(caplog, capsys): + """ + Test the dispatcher of the Xapi storage API datapath interface + + The dispatcher is a class that routes the calls to the corresponding methods + of a given Datapath implementation class. + """ + # Setup + caplog.set_level(logging.INFO) + + # The testee passes them to the Datapath_test class and its attach method + # is expected to return the values which we use to test the dispatcher: + args = {"dbg": "", "uri": "uri", "domain": "uuid", "persistent": True} + + # Call + + # datapath_server_test() returns an instance of the dispatcher class that + # routes the calls to the corresponding methods of the Datapath_test class: + testee = xapi.storage.api.datapath.datapath_server_test() + + # Test the argument checks of the dispatcher to identify missing arguments: + + # Assert type checks on the dbg and uri arguments + missing = ["dbg", "uri"] + methods = ["attach", "activate", "deactivate", "detach", "open", "close"] + assert_type_checks(testee, methods, args, missing, caplog) + + # Assert type checks on the missing domain argument + missing = ["domain"] + methods = ["attach", "activate", "deactivate", "detach"] + assert_type_checks(testee, methods, args, missing, caplog) + + # Assert type checks on the persistent flag for the open method + missing = ["persistent"] + methods = ["open"] + assert_type_checks(testee, methods, args, missing, caplog) + + # BUG: Datapath_test.attach() currently returns an mismatching dictionary: + # The dispatcher expects a dict with a "domain_uuid" key, but the implementation + # Datapath_test.attach() returns a dict with a "backend" key instead. + # Changing the implementation of Datapath_test.attach() will fix this issue. + + # This WOULD be an example expected result, BUT the implementation of + # Datapath_test.attach() returns an invalid dictionary to the dispatcher: + assert testee._dispatch("Datapath.attach", [args]) != { + "Status": "Success", + "Value": {"domain_uuid": "uuid", "implementation": ("uri", "dbg")}, + } + + # BUG: This is the internal error that Datapath_test.attach() currently triggers: + assert testee._dispatch("Datapath.attach", [args]) == { + "ErrorDescription": ["Internal_error", "'domain_uuid'"], + "Status": "Failure", + } + assert caplog.messages[0] == "caught 'domain_uuid'" + caplog.clear() + + # The other methods work as expected. 
Setup, Call, Assert: + success = {"Status": "Success", "Value": {}} + assert testee._dispatch("Datapath.open", [args]) == success + assert testee._dispatch("Datapath.activate", [args]) == success + assert testee._dispatch("Datapath.deactivate", [args]) == success + assert testee._dispatch("Datapath.detach", [args]) == success + assert testee._dispatch("Datapath.close", [args]) == success + + # Assert that no errors were logged and no output was printed: + assert caplog.messages == [] # No messages were logged + assert capsys.readouterr().out == "" # No output was printed + assert capsys.readouterr().err == "" # No errors were printed + + +def test_exceptions(): + """Cover the code changed by using the is_str() function""" + + with pytest.raises(xapi.TypeError) as exc_info: + _ = xapi.XenAPIException(1, "params") # pylint: disable=pointless-statement + assert str(exc_info.value) == "TypeError expected=string actual=1" + + with pytest.raises(xapi.TypeError) as exc_info: + _ = xapi.storage.api.datapath.Unimplemented( + False + ) # pylint: disable=pointless-statement + assert str(exc_info.value) == "TypeError expected=string actual=False" diff --git a/pyproject.toml b/pyproject.toml index b8e4c984853..8f1b5931255 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -260,7 +260,7 @@ addopts = "-v -ra" # xfail_strict: require to remove pytext.xfail marker when test is fixed # required_plugins: require that these plugins are installed before testing # ----------------------------------------------------------------------------- -testpaths = ["python3", "scripts", "ocaml/xcp-rrdd"] +testpaths = ["python3", "ocaml/xcp-rrdd", "ocaml/xapi-storage"] required_plugins = ["pytest-cov", "pytest-mock"] log_cli_level = "INFO" log_cli = true From c23f314c039404092774b727664d5937472355ae Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 16 Jul 2024 12:00:00 +0200 Subject: [PATCH 192/222] storage-api: Use Datapath_test.attach to test Datapath_server_dispatcher.attach Signed-off-by: Bernhard Kaindl --- .../python/xapi/storage/api/datapath.py | 29 ++++++++++-- .../python/xapi/storage/api/test_datapath.py | 47 ++++++++++++------- 2 files changed, 54 insertions(+), 22 deletions(-) diff --git a/ocaml/xapi-storage/python/xapi/storage/api/datapath.py b/ocaml/xapi-storage/python/xapi/storage/api/datapath.py index 0a4e82438fb..957b8c0362f 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/datapath.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/datapath.py @@ -205,7 +205,11 @@ def close(self, dbg, uri): """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" raise Unimplemented("Datapath.close") class Datapath_test: - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" + """ + Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. + Every function is idempotent. Every function takes a domain parameter which allows + the implementation to track how many domains are currently using the volume. 
+ """ def __init__(self): pass def open(self, dbg, uri, persistent): @@ -213,10 +217,27 @@ def open(self, dbg, uri, persistent): result = {} return result def attach(self, dbg, uri, domain): - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - result = {} - result["backend"] = { "domain_uuid": "string", "implementation": None } + # type:(str, str, str) -> dict[str, tuple[str, Any] | str] + """ + Return a valid results dictionary to Datapath_server_dispatcher.attach() + + The returned dict must contain the "domain_uuid" key with a string value. + The returned dict must contain the "implementation" key with two elements: + If the first element is one of "Blkback", "Tapdisk3" or "Qdisk", + the second element must be a string. Else, the dispatcher returns an error. + + See Datapath_server_dispatcher.attach() for the implementation details. + """ + # Fixed to not raise an internal error in Datapath_server_dispatcher.attach(): + result = { "domain_uuid": domain, "implementation": (uri, dbg) } + if not domain: # Provoke an internal error in the dispatcher to cover its code + result.pop("domain_uuid") # by removing the required "domain_uuid" key. + if domain == "5": + result["domain_uuid"] = 5 # Return an integer to provoke a type error. + if dbg == "inject_error" and uri in ["Blkback", "Tapdisk3", "Qdisk"]: + result["implementation"] = (uri, False) return result + def activate(self, dbg, uri, domain): """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" result = {} diff --git a/ocaml/xapi-storage/python/xapi/storage/api/test_datapath.py b/ocaml/xapi-storage/python/xapi/storage/api/test_datapath.py index 9bea7377391..8b6436657da 100644 --- a/ocaml/xapi-storage/python/xapi/storage/api/test_datapath.py +++ b/ocaml/xapi-storage/python/xapi/storage/api/test_datapath.py @@ -42,6 +42,26 @@ def assert_type_checks(testee, methods, template_args, bad_args, caplog): assert_error(testee, caplog, bad_args, method, error_msg) +def assert_attach_type_check(testee, caplog, args, uri): + """Assert that the result of the testee matches the expected result""" + a = args.copy() + a["uri"] = uri + assert testee._dispatch("Datapath.attach", [a]) == { + "Status": "Success", + "Value": {"domain_uuid": a["domain"], "implementation": (uri, a["dbg"])}, + } + if uri == "other": + return + a["dbg"] = "inject_error" + assert_error(testee, caplog, a, "attach", "TypeError expected=string actual=False") + + +def assert_attach_type_checks(testee, caplog, args): + """Assert type checks when attach() returns Blkback, Tapdisk3, Qdisk and others""" + for uri in ["Blkback", "Tapdisk3", "Qdisk", "other"]: + assert_attach_type_check(testee, caplog, args, uri) + + def test_dispatcher(caplog, capsys): """ Test the dispatcher of the Xapi storage API datapath interface @@ -79,25 +99,16 @@ def test_dispatcher(caplog, capsys): methods = ["open"] assert_type_checks(testee, methods, args, missing, caplog) - # BUG: Datapath_test.attach() currently returns an mismatching dictionary: - # The dispatcher expects a dict with a "domain_uuid" key, but the implementation - # Datapath_test.attach() returns a dict with a "backend" key instead. 
- # Changing the implementation of Datapath_test.attach() will fix this issue. - - # This WOULD be an example expected result, BUT the implementation of - # Datapath_test.attach() returns an invalid dictionary to the dispatcher: - assert testee._dispatch("Datapath.attach", [args]) != { - "Status": "Success", - "Value": {"domain_uuid": "uuid", "implementation": ("uri", "dbg")}, - } + # Assert the dispatcher returns the example results of Datapath_test.attach(): + assert_attach_type_checks(testee, caplog, args) - # BUG: This is the internal error that Datapath_test.attach() currently triggers: - assert testee._dispatch("Datapath.attach", [args]) == { - "ErrorDescription": ["Internal_error", "'domain_uuid'"], - "Status": "Failure", - } - assert caplog.messages[0] == "caught 'domain_uuid'" - caplog.clear() + # Assert the internal error to cover the check by removing the domain argument: + bad = args.copy() + bad["domain"] = "" + assert_error(testee, caplog, bad, "attach", "'domain_uuid'") + # Assert the type check on the domain_uuid return value: + bad["domain"] = "5" + assert_error(testee, caplog, bad, "attach", "TypeError expected=string actual=5") # The other methods work as expected. Setup, Call, Assert: success = {"Status": "Success", "Value": {}} From 8544efe0f624b9d860ee7f60e94e5c97a8ddd2d0 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Tue, 16 Jul 2024 12:00:00 +0200 Subject: [PATCH 193/222] extauth-hook-AD.py: Delete PBIS code (PBIS was removed from XS8) Signed-off-by: Bernhard Kaindl --- python3/plugins/extauth-hook-AD.py | 49 ++----------------- python3/plugins/test_extauth_hook_AD.py | 62 ++----------------------- 2 files changed, 9 insertions(+), 102 deletions(-) diff --git a/python3/plugins/extauth-hook-AD.py b/python3/plugins/extauth-hook-AD.py index 5ca0f705846..d3e89aae8c8 100755 --- a/python3/plugins/extauth-hook-AD.py +++ b/python3/plugins/extauth-hook-AD.py @@ -23,7 +23,7 @@ import logging import logging.handlers from collections import OrderedDict -from enum import Enum + import XenAPIPlugin @@ -70,12 +70,6 @@ def run_cmd(command: "list[str]"): logger.exception("Failed to run command %s", command) -class ADBackend(Enum): - """Enum for AD backend""" - BD_PBIS = 0 - BD_WINBIND = 1 - - class ADConfig(abc.ABC): """Base class for AD configuration""" @@ -84,7 +78,6 @@ def __init__(self, path, session, args, ad_enabled=True, load_existing=True, fil self._session = session self._args = args self._lines = [] - self._backend = self._get_ad_backend() self._ad_enabled = ad_enabled self._file_mode = file_mode if load_existing and os.path.exists(self._file_path): @@ -92,14 +85,6 @@ def __init__(self, path, session, args, ad_enabled=True, load_existing=True, fil lines = file.readlines() self._lines = [l.strip() for l in lines] - def _get_ad_backend(self): - """Get active AD backend""" - if self._args.get("ad_backend", "winbind") == "pbis": - logger.debug("pbis is used as AD backend") - return ADBackend.BD_PBIS - - logger.debug("winbind is used as AD backend") - return ADBackend.BD_WINBIND @abc.abstractmethod def _apply_to_cache(self): ... 
@@ -156,11 +141,7 @@ def __init__(self, session, args, ad_enabled=True): def _apply_to_cache(self): if self._ad_enabled: - if self._backend == ADBackend.BD_PBIS: - ad_pam_module = "/lib/security/pam_lsass.so" - else: - ad_pam_module = "pam_winbind.so" - content = self.ad_pam_format.format(ad_module=ad_pam_module, + content = self.ad_pam_format.format(ad_module="pam_winbind.so", user_list=HCP_USERS, group_list=HCP_GROUPS) else: content = self.no_ad_pam @@ -202,16 +183,6 @@ def _is_pool_admin(self, subject_rec): logger.warning("subject %s does not have role", subject_rec) return False - def _format_item(self, item): - space_replacement = "+" - if self._backend == ADBackend.BD_PBIS: - if space_replacement in item: - raise ValueError( - "{} is not permitted in subject name".format(space_replacement)) - # PBIS relace space with "+", eg "ab cd" -> "ab++cd" - # PBIS pam module will reverse it back - return item.replace(" ", space_replacement) - return item def _is_responsible_for(self, subject_rec): try: @@ -248,9 +219,6 @@ def _add_upn(self, subject_rec): try: upn = subject_rec["other_config"]["subject-upn"] user, domain = upn.split(sep) - if self._backend == ADBackend.BD_PBIS: - # PBIS convert domain to UPPER case, we revert it back - domain = domain.lower() self._lines.append("{}{}{}".format(user, sep, domain)) except KeyError: logger.info("subject does not have upn %s", subject_rec) @@ -260,15 +228,12 @@ def _add_upn(self, subject_rec): def _add_subject(self, subject_rec): try: sid = subject_rec['subject_identifier'] - name = subject_rec["other_config"]["subject-name"] - formatted_name = self._format_item(name) + formatted_name = subject_rec["other_config"]["subject-name"] logger.debug("Permit user %s, Current sid is %s", formatted_name, sid) self._lines.append(formatted_name) # If the ssh key is permitted in the authorized_keys file, # The original name is compared, add UPN and original name - if self._backend == ADBackend.BD_PBIS and name != formatted_name: - self._lines.append(name) self._add_upn(subject_rec) # pylint: disable=broad-except except Exception as exp: @@ -287,8 +252,7 @@ def _match_subject(self, subject_rec): def _add_subject(self, subject_rec): try: sid = subject_rec['subject_identifier'] - name = self._format_item( - subject_rec["other_config"]["subject-name"]) + name = subject_rec["other_config"]["subject-name"] logger.debug("Permit group %s, Current sid is %s", name, sid) self._lines.append(name) # pylint: disable=broad-except @@ -368,10 +332,7 @@ def __init__(self, session, args, ad_enabled=True): "/etc/nsswitch.conf", session, args, ad_enabled) modules = "files sss" if ad_enabled: - if self._backend == ADBackend.BD_PBIS: - modules = "files sss lsass" - else: - modules = "files hcp winbind" + modules = "files hcp winbind" self._update_key_value("passwd", modules) self._update_key_value("group", modules) self._update_key_value("shadow", modules) diff --git a/python3/plugins/test_extauth_hook_AD.py b/python3/plugins/test_extauth_hook_AD.py index 3f0f22e40dd..eb9d1107e87 100644 --- a/python3/plugins/test_extauth_hook_AD.py +++ b/python3/plugins/test_extauth_hook_AD.py @@ -38,7 +38,7 @@ def test_run_cmd(caplog): def line_exists_in_config(lines, line): """ - Helper function to detect whether configration match expectation + Helper function to check if the configuration matches the expectation """ return any(line.split() == l.split() for l in lines) @@ -46,8 +46,6 @@ def line_exists_in_config(lines, line): domain = "conappada.local" args_bd_winbind = {'auth_type': 'AD', 
'service_name': domain, 'ad_backend': 'winbind'} -args_bd_pbis = {'auth_type': 'AD', - 'service_name': domain, 'ad_backend': 'pbis'} mock_session = MagicMock() subjects = ['OpaqueRef:96ae4be5-8815-4de8-a40f-d5e5c531dda9'] @@ -56,8 +54,7 @@ def line_exists_in_config(lines, line): admin_roles = [admin_role] mock_session.xenapi.role.get_by_name_label.return_value = admin_roles -# pylint: disable=unused-argument, protected-access, redefined-outer-name, missing-function-docstring -# pylint: disable=too-many-arguments, missing-class-docstring, no-self-use +# pylint: disable=unused-argument, redefined-outer-name def build_user(domain_netbios, domain, name, is_admin=True): @@ -120,14 +117,6 @@ def test_ad_enabled_with_winbind(self, mock_rename, mock_chmod): enabled_keyward = "auth sufficient pam_winbind.so try_first_pass try_authtok" self.assertTrue(line_exists_in_config(static._lines, enabled_keyward)) - def test_ad_enabled_with_pbis(self, mock_rename, mock_chmod): - # pam_lsass should be used - mock_rename.side_effect = mock_rename_to_clean - static = StaticSSHPam(mock_session, args_bd_pbis) - static.apply() - enabled_keyward = "auth sufficient /lib/security/pam_lsass.so try_first_pass try_authtok" - self.assertTrue(line_exists_in_config(static._lines, enabled_keyward)) - @patch("extauth_hook_AD.ADConfig._install") class TestUsersList(TestCase): @@ -146,21 +135,12 @@ def test_permit_admin_user(self, mock_install): # Domain user with admin role should be included in config file user = build_user("CONNAPP", "CONAPPADA.LOCAL", "radmin", True) mock_session.xenapi.subject.get_record.return_value = user - dynamic = UsersList(mock_session, args_bd_pbis) + dynamic = UsersList(mock_session, args_bd_winbind) dynamic.apply() self.assertIn(r"CONNAPP\radmin", dynamic._lines) - self.assertIn(r"radmin@conappada.local", dynamic._lines) + self.assertIn(r"radmin@CONAPPADA.LOCAL", dynamic._lines) mock_install.assert_called() - def test_pbis_permit_admin_user_with_space(self, mock_install): - # Domain user name with space should be repalced by "+" with PBIS - user = build_user("CONNAPP", "conappada.local", "radmin l1", True) - mock_session.xenapi.subject.get_record.return_value = user - permit_user = r"CONNAPP\radmin++l1" - dynamic = UsersList(mock_session, args_bd_pbis) - dynamic.apply() - self.assertIn(permit_user, dynamic._lines) - mock_install.assert_called() def test_winbind_permit_admin_user_with_space(self, mock_install): # Domain user name with space should be surrounded by [] with winbind @@ -181,40 +161,6 @@ def test_not_permit_non_admin_user(self, mock_install): dynamic.apply() self.assertNotIn(permit_user, dynamic._lines) - def test_pbis_not_permit_pool_admin_with_plus_in_name(self, mock_install): - """ - Domain user name should not contain "+" - """ - user = build_user("CONNAPP", "conappada.local", "radm+in", True) - mock_session.xenapi.subject.get_record.return_value = user - permit_user = r"CONNAPP\radm+in" - dynamic = UsersList(mock_session, args_bd_pbis) - dynamic.apply() - self.assertNotIn(permit_user, dynamic._lines) - - def test_failed_to_add_one_admin_should_not_affact_others(self, mock_install): - """ - Failed to add one bad domain users should not affact others - """ - bad_user = build_user("CONNAPP", "conappada.local", "bad+in", True) - good_user = build_user("CONNAPP", "conappada.local", "good", True) - - mock_session_with_multi_users = MagicMock() - - subjects = ['OpaqueRef:96ae4be5-8815-4de8-a40f-d5e5c531dda9', - 'OpaqueRef:96ae4be5-8815-4de8-a40f-d5e5c531dda1'] - 
mock_session_with_multi_users.xenapi.subject.get_all.return_value = subjects - mock_session_with_multi_users.xenapi.subject.get_record.side_effect = [ - bad_user, good_user] - mock_session_with_multi_users.xenapi.role.get_by_name_label.return_value = admin_roles - - bad_user = r"CONNAPP\bad+in" - good_user = r"CONNAPP\good" - dynamic = UsersList(mock_session_with_multi_users, args_bd_pbis) - dynamic.apply() - self.assertIn(good_user, dynamic._lines) - self.assertNotIn(bad_user, dynamic._lines) - @patch("extauth_hook_AD.ADConfig._install") class TestGroups(TestCase): From ae5f7b029d4baf018173e04194041511bf76b2d8 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Thu, 18 Jul 2024 12:00:00 +0200 Subject: [PATCH 194/222] pytest: Run xenopsd tests, rm pytest-cov (obsolete), update pyright Also prepare removal of the old baseline SMAPv3 code: ocaml/xapi-storage/python/xapi/storage/api/{volume,plugin,*datapath}.py - Reduce pytest --cov-fail-under to 50% (may be completely removed) - Remove it from the directories to test. Update the minimum supported Python version for XenAPI.py to 3.6 (for XS8) for https://pypi.org/project/XenAPI/ to `python_requires = >=3.6.*, <4` Signed-off-by: Bernhard Kaindl --- .github/workflows/other.yml | 2 +- .pre-commit-config.yaml | 3 +-- pyproject.toml | 4 ++-- scripts/examples/python/setup.cfg | 2 +- 4 files changed, 5 insertions(+), 6 deletions(-) diff --git a/.github/workflows/other.yml b/.github/workflows/other.yml index 107ba9ba573..58cc7c8cdfe 100644 --- a/.github/workflows/other.yml +++ b/.github/workflows/other.yml @@ -52,7 +52,7 @@ jobs: pytest -vv -rA --cov=ocaml ocaml --cov-report term-missing --cov-report xml:.git/coverage${{matrix.python-version}}.xml - --cov-fail-under 60 + --cov-fail-under 50 env: PYTHONDEVMODE: yes PYTHONPATH: "python3:python3/stubs" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d714b01cd6e..124645bd875 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -81,7 +81,6 @@ repos: - opentelemetry-api - opentelemetry-exporter-zipkin-json - opentelemetry-sdk - - pytest-coverage - pytest-mock - mock - wrapt @@ -89,7 +88,7 @@ repos: - repo: https://github.com/RobertCraigie/pyright-python - rev: v1.1.361 + rev: v1.1.372 hooks: - id: pyright name: check that python3 tree passes pyright/VSCode check diff --git a/pyproject.toml b/pyproject.toml index b821b222804..630f6c51e25 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -260,8 +260,8 @@ addopts = "-v -ra" # xfail_strict: require to remove pytext.xfail marker when test is fixed # required_plugins: require that these plugins are installed before testing # ----------------------------------------------------------------------------- -testpaths = ["python3", "ocaml/xcp-rrdd", "ocaml/xapi-storage"] -required_plugins = ["pytest-cov", "pytest-mock"] +testpaths = ["python3", "ocaml/xcp-rrdd", "ocaml/xenopsd"] +required_plugins = ["pytest-mock"] log_cli_level = "INFO" log_cli = true minversion = "7.0" diff --git a/scripts/examples/python/setup.cfg b/scripts/examples/python/setup.cfg index 059e6631bd1..47601de9c05 100644 --- a/scripts/examples/python/setup.cfg +++ b/scripts/examples/python/setup.cfg @@ -19,7 +19,7 @@ classifiers = [options] packages = find: -python_requires = >=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4 +python_requires = >=3.6.*, <4 [bdist_wheel] # This flag says that the code is written to work on both Python 2 and Python From 97e117db2af6f158cff4f0c2a2feb47d71c8aae5 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Thu, 18 Jul 
2024 07:36:38 +0100 Subject: [PATCH 195/222] Python3 CI: fix import error Signed-off-by: Stephen Cheng --- .github/workflows/other.yml | 2 +- pyproject.toml | 2 +- python3/dnf_plugins/ptoken.py | 2 +- python3/stubs/__init__.py | 0 python3/{tests => }/stubs/dnf.py | 0 python3/tests/test_dnf_plugins.py | 10 ++++++---- 6 files changed, 9 insertions(+), 7 deletions(-) create mode 100644 python3/stubs/__init__.py rename python3/{tests => }/stubs/dnf.py (100%) diff --git a/.github/workflows/other.yml b/.github/workflows/other.yml index bc73c18338c..97548160b43 100644 --- a/.github/workflows/other.yml +++ b/.github/workflows/other.yml @@ -58,7 +58,7 @@ jobs: --cov-fail-under 0 env: PYTHONDEVMODE: yes - PYTHONPATH: "python3:python3/tests/stubs" + PYTHONPATH: "python3:python3/stubs" - name: Upload coverage report to Coveralls uses: coverallsapp/github-action@v2 diff --git a/pyproject.toml b/pyproject.toml index efdcd13494e..58743495b93 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -263,7 +263,7 @@ required_plugins = ["pytest-cov", "pytest-mock"] log_cli_level = "INFO" log_cli = true minversion = "7.0" -pythonpath = "python3/stubs:scripts/examples/python" # Allow to import the XenAPI module +pythonpath = "python3/stubs" # Allow to import the XenAPI module python_files = ["test_*.py", "it_*.py"] python_functions = ["test_", "it_", "when_"] xfail_strict = true # is used to fail tests that are marked as xfail but pass(for TDD) diff --git a/python3/dnf_plugins/ptoken.py b/python3/dnf_plugins/ptoken.py index 75c926e13b4..c2ea73fccc8 100644 --- a/python3/dnf_plugins/ptoken.py +++ b/python3/dnf_plugins/ptoken.py @@ -15,7 +15,7 @@ def config(self): """ DNF plugin config hook, refer to https://dnf.readthedocs.io/en/latest/api_plugins.html""" try: - with open('/etc/xensource/ptoken', encoding="utf-8") as file: + with open(PTOKEN_PATH, encoding="utf-8") as file: ptoken = file.read().strip() except Exception: #pylint: disable=broad-exception-caught logging.error("Failed to open %s", PTOKEN_PATH) diff --git a/python3/stubs/__init__.py b/python3/stubs/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/python3/tests/stubs/dnf.py b/python3/stubs/dnf.py similarity index 100% rename from python3/tests/stubs/dnf.py rename to python3/stubs/dnf.py diff --git a/python3/tests/test_dnf_plugins.py b/python3/tests/test_dnf_plugins.py index c7d5f587532..2f82b1eb5cb 100644 --- a/python3/tests/test_dnf_plugins.py +++ b/python3/tests/test_dnf_plugins.py @@ -3,6 +3,7 @@ import sys import json from unittest.mock import MagicMock, patch +from python3.tests.import_helper import import_file_as_module sys.modules["urlgrabber"] = MagicMock() @@ -14,8 +15,8 @@ # Some test case does not use self -from dnf_plugins import accesstoken -from dnf_plugins import ptoken +accesstoken = import_file_as_module("python3/dnf_plugins/accesstoken.py") +ptoken = import_file_as_module("python3/dnf_plugins/ptoken.py") REPO_NAME = "testrepo" @@ -31,7 +32,7 @@ def _mock_repo(a_token=None, p_token=None, baseurl=None): return mock_repo -@patch("dnf_plugins.accesstoken.urlgrabber") +@patch("accesstoken.urlgrabber") class TestAccesstoken(unittest.TestCase): """Test class for dnf access plugin""" @@ -74,7 +75,8 @@ class TestPtoken(unittest.TestCase): """Test class for ptoken dnf plugin""" def test_failed_to_open_ptoken_file(self): """Exception should raised if the system does not have PTOKEN_PATH""" - ptoken.PTOKEN_PATH = "/some/not/exist/path" + # Disable pyright warning as we need to set the PTOKEN_PATH to test the exception + 
ptoken.PTOKEN_PATH = "/some/not/exist/path" # pyright: ignore[reportAttributeAccessIssue] with self.assertRaises(Exception): ptoken.Ptoken(MagicMock(), MagicMock()).config() From 7656999d6cb3a062835c2f203592b75eec5948ff Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 22 Jul 2024 12:00:00 +0200 Subject: [PATCH 196/222] Update test storage.dummyv5/plugin.py: Base the test on the new v5 API Signed-off-by: Bernhard Kaindl --- .../volume/org.xen.xapi.storage.dummyv5/plugin.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/plugin.py b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/plugin.py index e9ef122ca07..bf54820cdc4 100755 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/plugin.py +++ b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummyv5/plugin.py @@ -6,15 +6,15 @@ import os import sys -import xapi.storage.api.plugin +import xapi.storage.api.v5.plugin # pylint: disable=no-name-in-module -class Implementation(xapi.storage.api.plugin.Plugin_skeleton): +class Implementation(xapi.storage.api.v5.plugin.Plugin_skeleton): - def diagnostics(self, dbg): + def diagnostics(self, dbg): # pylint: disable=unused-argument return "Dummy diagnostics" - def query(self, dbg): + def query(self, dbg): # pylint: disable=unused-argument return { "plugin": "dummy", "name": "dummy SR plugin", @@ -35,11 +35,11 @@ def query(self, dbg): if __name__ == "__main__": - cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation()) + cmd = xapi.storage.api.v5.plugin.Plugin_commandline(Implementation()) base = os.path.basename(sys.argv[0]) if base == 'Plugin.diagnostics': cmd.diagnostics() elif base == 'Plugin.Query': cmd.query() else: - raise xapi.storage.api.plugin.Unimplemented(base) + raise xapi.storage.api.v5.plugin.Unimplemented(base) From a26ce264925c6310f948b9a675deda0c7b789b86 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 22 Jul 2024 12:00:00 +0200 Subject: [PATCH 197/222] Remove ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy Signed-off-by: Bernhard Kaindl --- ocaml/xapi-storage-script/main.ml | 5 +- .../org.xen.xapi.storage.dummy/Plugin.Query | 1 - .../Plugin.diagnostics | 1 - .../org.xen.xapi.storage.dummy/SR.attach | 1 - .../org.xen.xapi.storage.dummy/SR.create | 1 - .../org.xen.xapi.storage.dummy/SR.detach | 1 - .../volume/org.xen.xapi.storage.dummy/SR.ls | 1 - .../volume/org.xen.xapi.storage.dummy/SR.stat | 1 - .../org.xen.xapi.storage.dummy/Volume.create | 1 - .../org.xen.xapi.storage.dummy/Volume.destroy | 1 - .../org.xen.xapi.storage.dummy/Volume.stat | 1 - .../org.xen.xapi.storage.dummy/plugin.py | 44 ------------ .../volume/org.xen.xapi.storage.dummy/sr.py | 71 ------------------- .../org.xen.xapi.storage.dummy/volume.py | 64 ----------------- 14 files changed, 1 insertion(+), 193 deletions(-) delete mode 120000 ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Plugin.Query delete mode 120000 ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Plugin.diagnostics delete mode 120000 ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.attach delete mode 120000 ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.create delete mode 120000 ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.detach delete mode 120000 ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.ls delete mode 120000 
ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.stat delete mode 120000 ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Volume.create delete mode 120000 ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Volume.destroy delete mode 120000 ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Volume.stat delete mode 100755 ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/plugin.py delete mode 100755 ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/sr.py delete mode 100755 ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/volume.py diff --git a/ocaml/xapi-storage-script/main.ml b/ocaml/xapi-storage-script/main.ml index 2c904af7a43..87956ee47fb 100644 --- a/ocaml/xapi-storage-script/main.ml +++ b/ocaml/xapi-storage-script/main.ml @@ -1857,10 +1857,7 @@ let self_test_plugin ~root_dir plugin = failwith "self test failed" let self_test ~root_dir = - ( self_test_plugin ~root_dir "org.xen.xapi.storage.dummy" >>>= fun () -> - self_test_plugin ~root_dir "org.xen.xapi.storage.dummyv5" - ) - >>= function + self_test_plugin ~root_dir "org.xen.xapi.storage.dummyv5" >>= function | Ok () -> info "test thread shutdown cleanly" ; Async_unix.exit 0 diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Plugin.Query b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Plugin.Query deleted file mode 120000 index 96bd1391c0e..00000000000 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Plugin.Query +++ /dev/null @@ -1 +0,0 @@ -plugin.py \ No newline at end of file diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Plugin.diagnostics b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Plugin.diagnostics deleted file mode 120000 index 96bd1391c0e..00000000000 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Plugin.diagnostics +++ /dev/null @@ -1 +0,0 @@ -plugin.py \ No newline at end of file diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.attach b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.attach deleted file mode 120000 index 482eaaf76a5..00000000000 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.attach +++ /dev/null @@ -1 +0,0 @@ -sr.py \ No newline at end of file diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.create b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.create deleted file mode 120000 index 482eaaf76a5..00000000000 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.create +++ /dev/null @@ -1 +0,0 @@ -sr.py \ No newline at end of file diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.detach b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.detach deleted file mode 120000 index 482eaaf76a5..00000000000 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.detach +++ /dev/null @@ -1 +0,0 @@ -sr.py \ No newline at end of file diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.ls b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.ls deleted file mode 120000 index 482eaaf76a5..00000000000 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.ls +++ /dev/null @@ -1 +0,0 @@ -sr.py \ No newline at end of file diff --git 
a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.stat b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.stat deleted file mode 120000 index 482eaaf76a5..00000000000 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/SR.stat +++ /dev/null @@ -1 +0,0 @@ -sr.py \ No newline at end of file diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Volume.create b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Volume.create deleted file mode 120000 index 1d6acb7b332..00000000000 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Volume.create +++ /dev/null @@ -1 +0,0 @@ -volume.py \ No newline at end of file diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Volume.destroy b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Volume.destroy deleted file mode 120000 index 1d6acb7b332..00000000000 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Volume.destroy +++ /dev/null @@ -1 +0,0 @@ -volume.py \ No newline at end of file diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Volume.stat b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Volume.stat deleted file mode 120000 index 1d6acb7b332..00000000000 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/Volume.stat +++ /dev/null @@ -1 +0,0 @@ -volume.py \ No newline at end of file diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/plugin.py b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/plugin.py deleted file mode 100755 index 40e3a00911c..00000000000 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/plugin.py +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env python3 - -""" - Copyright (C) Citrix Systems, Inc. -""" - -import os -import sys -import xapi.storage.api.plugin - - -class Implementation(xapi.storage.api.plugin.Plugin_skeleton): - - def diagnostics(self, dbg): - return "Dummy diagnostics" - - def query(self, dbg): - return { - "plugin": "dummy", - "name": "dummy SR plugin", - "description": ("Dummy SR for unit tests."), - "vendor": "Citrix Systems Inc", - "copyright": "(C) 2018 Citrix Inc", - "version": "1.0", - "required_api_version": "3.0", - "features": [ - "SR_ATTACH", - "SR_DETACH", - "SR_CREATE", - "VDI_CREATE", - "VDI_DESTROY"], - "configuration": {}, - "required_cluster_stack": []} - - -if __name__ == "__main__": - cmd = xapi.storage.api.plugin.Plugin_commandline(Implementation()) - base = os.path.basename(sys.argv[0]) - if base == 'Plugin.diagnostics': - cmd.diagnostics() - elif base == 'Plugin.Query': - cmd.query() - else: - raise xapi.storage.api.plugin.Unimplemented(base) diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/sr.py b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/sr.py deleted file mode 100755 index 82c77d891db..00000000000 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/sr.py +++ /dev/null @@ -1,71 +0,0 @@ -#!/usr/bin/env python3 - -""" - Copyright (C) Citrix Systems, Inc. 
-""" - -import os -import sys -import urllib.parse -import xapi.storage.api.volume - -import plugin - - -class Implementation(xapi.storage.api.volume.SR_skeleton): - - def attach(self, dbg, uri): - return "file:///tmp/dummy" - - def create(self, dbg, uri, name, description, configuration): - return - - def detach(self, dbg, sr): - urllib.parse.urlparse(sr) - return - - def ls(self, dbg, sr): - urllib.parse.urlparse(sr) - qr = plugin.Implementation().query(dbg) - return [{ - "name": qr['name'], - "description": qr['description'], - "key": "file1", - "uuid": "file1", - "read_write": True, - "virtual_size": 0, - "physical_utilisation": 0, - "uri": ["raw+file:///tmp/disk.raw"], - "keys": {}, - }] - - def stat(self, dbg, sr): - urllib.parse.urlparse(sr) - qr = plugin.Implementation().query(dbg) - return { - "sr": sr, - "name": qr['name'], - "description": qr['description'], - "total_space": 0, - "free_space": 0, - "datasources": [], - "clustered": False, - "health": ["Healthy", ""] - } - - -if __name__ == "__main__": - cmd = xapi.storage.api.volume.SR_commandline(Implementation()) - base = os.path.basename(sys.argv[0]) - if base == 'SR.attach': - cmd.attach() - elif base == 'SR.create': - cmd.create() - elif base == 'SR.detach': - cmd.detach() - elif base == 'SR.ls': - cmd.ls() - elif base == 'SR.stat': - cmd.stat() - else: - raise xapi.storage.api.volume.Unimplemented(base) diff --git a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/volume.py b/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/volume.py deleted file mode 100755 index 848c13bfd39..00000000000 --- a/ocaml/xapi-storage-script/test/volume/org.xen.xapi.storage.dummy/volume.py +++ /dev/null @@ -1,64 +0,0 @@ -#!/usr/bin/env python3 - -""" - Copyright (C) Citrix Systems, Inc. 
-""" - -import uuid -import urllib.parse -import os -import sys -import xapi.storage.api.volume -import xapi - -import plugin - - -class Implementation(xapi.storage.api.volume.Volume_skeleton): - - def create(self, dbg, sr, name, description, size): - urllib.parse.urlparse(sr) - voluuid = str(uuid.uuid4()) - return { - "name": name, - "description": description, - "key": voluuid, - "uuid": voluuid, - "read_write": True, - "virtual_size": 0, - "physical_utilisation": 0, - "uri": ["raw+file:///tmp/disk.raw"], - "keys": {}, - } - - def destroy(self, dbg, sr, key): - urllib.parse.urlparse(sr) - return - - def stat(self, dbg, sr, key): - urllib.parse.urlparse(sr) - qr = plugin.Implementation().query(dbg) - return { - "name": qr['name'], - "description": qr['description'], - "key": key, - "uuid": key, - "read_write": True, - "virtual_size": 0, - "physical_utilisation": 0, - "uri": ["raw+file:///tmp/disk.raw"], - "keys": {}, - } - - -if __name__ == "__main__": - cmd = xapi.storage.api.volume.Volume_commandline(Implementation()) - base = os.path.basename(sys.argv[0]) - if base == "Volume.create": - cmd.create() - elif base == "Volume.destroy": - cmd.destroy() - elif base == "Volume.stat": - cmd.stat() - else: - raise xapi.storage.api.volume.Unimplemented(base) From 910289f209c125299bc4014b90c116189cb1f719 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Mon, 22 Jul 2024 12:00:00 +0200 Subject: [PATCH 198/222] Remove ocaml/xapi-storage/python/xapi/storage/api/{volume,plugin,*datapath}.py Signed-off-by: Bernhard Kaindl --- .../python/xapi/storage/api/datapath.py | 440 ----- .../python/xapi/storage/api/plugin.py | 251 --- .../python/xapi/storage/api/test_datapath.py | 138 -- .../python/xapi/storage/api/volume.py | 1429 ----------------- 4 files changed, 2258 deletions(-) delete mode 100644 ocaml/xapi-storage/python/xapi/storage/api/datapath.py delete mode 100644 ocaml/xapi-storage/python/xapi/storage/api/plugin.py delete mode 100644 ocaml/xapi-storage/python/xapi/storage/api/test_datapath.py delete mode 100644 ocaml/xapi-storage/python/xapi/storage/api/volume.py diff --git a/ocaml/xapi-storage/python/xapi/storage/api/datapath.py b/ocaml/xapi-storage/python/xapi/storage/api/datapath.py deleted file mode 100644 index 957b8c0362f..00000000000 --- a/ocaml/xapi-storage/python/xapi/storage/api/datapath.py +++ /dev/null @@ -1,440 +0,0 @@ -from __future__ import print_function - -import argparse -import json -import logging -import sys -import traceback - -import xapi -# pylint: disable=line-too-long,superfluous-parens,unused-argument -# pylint: disable-next=redefined-builtin # FIXME: TypeError is a custom class in xapi -from xapi import ( - InternalError, - Rpc_light_failure, - TypeError, - UnknownMethod, - UnmarshalException, - is_str, - success, -) - -# pylint: disable=invalid-name,redefined-builtin,undefined-variable -# pyright: reportUndefinedVariable=false -if sys.version_info[0] > 2: - unicode = str - -class Unimplemented(Rpc_light_failure): - def __init__(self, arg_0): - Rpc_light_failure.__init__(self, "Unimplemented", [ arg_0 ]) - if not is_str(arg_0): - raise TypeError("string", repr(arg_0)) - self.arg_0 = arg_0 -class Datapath_server_dispatcher: - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. 
Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - def __init__(self, impl): - """impl is a proxy object whose methods contain the implementation""" - self._impl = impl - def open(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('uri' in args): - raise UnmarshalException('argument missing', 'uri', '') - uri = args["uri"] - if not is_str(uri): - raise TypeError("string", repr(uri)) - if not('persistent' in args): - raise UnmarshalException('argument missing', 'persistent', '') - persistent = args["persistent"] - if not isinstance(persistent, bool): - raise TypeError("bool", repr(persistent)) - results = self._impl.open(dbg, uri, persistent) - return results - def attach(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('uri' in args): - raise UnmarshalException('argument missing', 'uri', '') - uri = args["uri"] - if not is_str(uri): - raise TypeError("string", repr(uri)) - if not('domain' in args): - raise UnmarshalException('argument missing', 'domain', '') - domain = args["domain"] - if not is_str(domain): - raise TypeError("string", repr(domain)) - results = self._impl.attach(dbg, uri, domain) - if not is_str(results['domain_uuid']): - raise TypeError("string", repr(results['domain_uuid'])) - if results['implementation'][0] == 'Blkback': - if not is_str(results['implementation'][1]): - raise TypeError("string", repr(results['implementation'][1])) - elif results['implementation'][0] == 'Tapdisk3': - if not is_str(results['implementation'][1]): - raise TypeError("string", repr(results['implementation'][1])) - elif results['implementation'][0] == 'Qdisk': - if not is_str(results['implementation'][1]): - raise TypeError("string", repr(results['implementation'][1])) - return results - def activate(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('uri' in args): - raise UnmarshalException('argument missing', 'uri', '') - uri = args["uri"] - if not is_str(uri): - raise TypeError("string", repr(uri)) - if not('domain' in args): - raise UnmarshalException('argument missing', 'domain', '') - domain = args["domain"] - if not is_str(domain): - raise TypeError("string", repr(domain)) - results = self._impl.activate(dbg, uri, domain) - return results - def deactivate(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if 
not('uri' in args): - raise UnmarshalException('argument missing', 'uri', '') - uri = args["uri"] - if not is_str(uri): - raise TypeError("string", repr(uri)) - if not('domain' in args): - raise UnmarshalException('argument missing', 'domain', '') - domain = args["domain"] - if not is_str(domain): - raise TypeError("string", repr(domain)) - results = self._impl.deactivate(dbg, uri, domain) - return results - def detach(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('uri' in args): - raise UnmarshalException('argument missing', 'uri', '') - uri = args["uri"] - if not is_str(uri): - raise TypeError("string", repr(uri)) - if not('domain' in args): - raise UnmarshalException('argument missing', 'domain', '') - domain = args["domain"] - if not is_str(domain): - raise TypeError("string", repr(domain)) - results = self._impl.detach(dbg, uri, domain) - return results - def close(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('uri' in args): - raise UnmarshalException('argument missing', 'uri', '') - uri = args["uri"] - if not is_str(uri): - raise TypeError("string", repr(uri)) - results = self._impl.close(dbg, uri) - return results - def _dispatch(self, method, params): - """type check inputs, call implementation, type check outputs and return""" - args = params[0] - if method == "Datapath.open": - return success(self.open(args)) - elif method == "Datapath.attach": - return success(self.attach(args)) - elif method == "Datapath.activate": - return success(self.activate(args)) - elif method == "Datapath.deactivate": - return success(self.deactivate(args)) - elif method == "Datapath.detach": - return success(self.detach(args)) - elif method == "Datapath.close": - return success(self.close(args)) -class Datapath_skeleton: - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - def __init__(self): - pass - def open(self, dbg, uri, persistent): - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - raise Unimplemented("Datapath.open") - def attach(self, dbg, uri, domain): - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - raise Unimplemented("Datapath.attach") - def activate(self, dbg, uri, domain): - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. 
Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - raise Unimplemented("Datapath.activate") - def deactivate(self, dbg, uri, domain): - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - raise Unimplemented("Datapath.deactivate") - def detach(self, dbg, uri, domain): - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - raise Unimplemented("Datapath.detach") - def close(self, dbg, uri): - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - raise Unimplemented("Datapath.close") -class Datapath_test: - """ - Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. - Every function is idempotent. Every function takes a domain parameter which allows - the implementation to track how many domains are currently using the volume. - """ - def __init__(self): - pass - def open(self, dbg, uri, persistent): - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - result = {} - return result - def attach(self, dbg, uri, domain): - # type:(str, str, str) -> dict[str, tuple[str, Any] | str] - """ - Return a valid results dictionary to Datapath_server_dispatcher.attach() - - The returned dict must contain the "domain_uuid" key with a string value. - The returned dict must contain the "implementation" key with two elements: - If the first element is one of "Blkback", "Tapdisk3" or "Qdisk", - the second element must be a string. Else, the dispatcher returns an error. - - See Datapath_server_dispatcher.attach() for the implementation details. - """ - # Fixed to not raise an internal error in Datapath_server_dispatcher.attach(): - result = { "domain_uuid": domain, "implementation": (uri, dbg) } - if not domain: # Provoke an internal error in the dispatcher to cover its code - result.pop("domain_uuid") # by removing the required "domain_uuid" key. - if domain == "5": - result["domain_uuid"] = 5 # Return an integer to provoke a type error. - if dbg == "inject_error" and uri in ["Blkback", "Tapdisk3", "Qdisk"]: - result["implementation"] = (uri, False) - return result - - def activate(self, dbg, uri, domain): - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - result = {} - return result - def deactivate(self, dbg, uri, domain): - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. 
Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - result = {} - return result - def detach(self, dbg, uri, domain): - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - result = {} - return result - def close(self, dbg, uri): - """Xapi will call the functions here on VM start/shutdown/suspend/resume/migrate. Every function is idempotent. Every function takes a domain parameter which allows the implementation to track how many domains are currently using the volume.""" - result = {} - return result -class Datapath_commandline(): - """Parse command-line arguments and call an implementation.""" - def __init__(self, impl): - self.impl = impl - self.dispatcher = Datapath_server_dispatcher(self.impl) - def _parse_open(self): - """[open uri persistent] is called before a disk is attached to a VM. If persistent is true then care should be taken to persist all writes to the disk. If persistent is false then the implementation should configure a temporary location for writes so they can be thrown away on [close].""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[open uri persistent] is called before a disk is attached to a VM. If persistent is true then care should be taken to persist all writes to the disk. If persistent is false then the implementation should configure a temporary location for writes so they can be thrown away on [close].') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('uri', action='store', help='A URI which represents how to access the volume disk data.') - parser.add_argument('--persistent', action='store_true', help='True means the disk data is persistent and should be preserved when the datapath is closed i.e. when a VM is shutdown or rebooted. False means the data should be thrown away when the VM is shutdown or rebooted.') - return vars(parser.parse_args()) - def _parse_attach(self): - """[attach uri domain] prepares a connection between the storage named by [uri] and the Xen domain with id [domain]. The return value is the information needed by the Xen toolstack to setup the shared-memory blkfront protocol. Note that the same volume may be simultaneously attached to multiple hosts for example over a migrate. If an implementation needs to perform an explicit handover, then it should implement [activate] and [deactivate]. This function is idempotent.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[attach uri domain] prepares a connection between the storage named by [uri] and the Xen domain with id [domain]. The return value is the information needed by the Xen toolstack to setup the shared-memory blkfront protocol. 
Note that the same volume may be simultaneously attached to multiple hosts for example over a migrate. If an implementation needs to perform an explicit handover, then it should implement [activate] and [deactivate]. This function is idempotent.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('uri', action='store', help='A URI which represents how to access the volume disk data.') - parser.add_argument('domain', action='store', help='An opaque string which represents the Xen domain.') - return vars(parser.parse_args()) - def _parse_activate(self): - """[activate uri domain] is called just before a VM needs to read or write its disk. This is an opportunity for an implementation which needs to perform an explicit volume handover to do it. This function is called in the migration downtime window so delays here will be noticeable to users and should be minimised. This function is idempotent.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[activate uri domain] is called just before a VM needs to read or write its disk. This is an opportunity for an implementation which needs to perform an explicit volume handover to do it. This function is called in the migration downtime window so delays here will be noticeable to users and should be minimised. This function is idempotent.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('uri', action='store', help='A URI which represents how to access the volume disk data.') - parser.add_argument('domain', action='store', help='An opaque string which represents the Xen domain.') - return vars(parser.parse_args()) - def _parse_deactivate(self): - """[deactivate uri domain] is called as soon as a VM has finished reading or writing its disk. This is an opportunity for an implementation which needs to perform an explicit volume handover to do it. This function is called in the migration downtime window so delays here will be noticeable to users and should be minimised. This function is idempotent.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[deactivate uri domain] is called as soon as a VM has finished reading or writing its disk. This is an opportunity for an implementation which needs to perform an explicit volume handover to do it. This function is called in the migration downtime window so delays here will be noticeable to users and should be minimised. 
This function is idempotent.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('uri', action='store', help='A URI which represents how to access the volume disk data.') - parser.add_argument('domain', action='store', help='An opaque string which represents the Xen domain.') - return vars(parser.parse_args()) - def _parse_detach(self): - """[detach uri domain] is called sometime after a VM has finished reading or writing its disk. This is an opportunity to clean up any resources associated with the disk. This function is called outside the migration downtime window so can be slow without affecting users. This function is idempotent. This function should never fail. If an implementation is unable to perform some cleanup right away then it should queue the action internally. Any error result represents a bug in the implementation.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[detach uri domain] is called sometime after a VM has finished reading or writing its disk. This is an opportunity to clean up any resources associated with the disk. This function is called outside the migration downtime window so can be slow without affecting users. This function is idempotent. This function should never fail. If an implementation is unable to perform some cleanup right away then it should queue the action internally. Any error result represents a bug in the implementation.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('uri', action='store', help='A URI which represents how to access the volume disk data.') - parser.add_argument('domain', action='store', help='An opaque string which represents the Xen domain.') - return vars(parser.parse_args()) - def _parse_close(self): - """[close uri] is called after a disk is detached and a VM shutdown. This is an opportunity to throw away writes if the disk is not persistent.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[close uri] is called after a disk is detached and a VM shutdown. 
This is an opportunity to throw away writes if the disk is not persistent.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('uri', action='store', help='A URI which represents how to access the volume disk data.') - return vars(parser.parse_args()) - def open(self): - use_json = False - try: - request = self._parse_open() - use_json = 'json' in request and request['json'] - results = self.dispatcher.open(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def attach(self): - use_json = False - try: - request = self._parse_attach() - use_json = 'json' in request and request['json'] - results = self.dispatcher.attach(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def activate(self): - use_json = False - try: - request = self._parse_activate() - use_json = 'json' in request and request['json'] - results = self.dispatcher.activate(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def deactivate(self): - use_json = False - try: - request = self._parse_deactivate() - use_json = 'json' in request and request['json'] - results = self.dispatcher.deactivate(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def detach(self): - use_json = False - try: - request = self._parse_detach() - use_json = 'json' in request and request['json'] - results = self.dispatcher.detach(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def close(self): - use_json = False - try: - request = self._parse_close() - use_json = 'json' in request and request['json'] - results = self.dispatcher.close(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e -class datapath_server_dispatcher: - """Demux calls to individual interface server_dispatchers""" - def __init__(self, Datapath=None): - self.Datapath = Datapath - def _dispatch(self, method, params): - try: - logging.debug("method = %s params = %s" % (method, repr(params))) - if method.startswith("Datapath") and self.Datapath: - return self.Datapath._dispatch(method, params) - raise UnknownMethod(method) - except Exception as e: - logging.info("caught %s" % e) - traceback.print_exc() - try: - # A declared (expected) failure will have a .failure() method - logging.debug("returning %s" % (repr(e.failure()))) - return e.failure() - except AttributeError: - # An undeclared (unexpected) failure is wrapped as InternalError - return (InternalError(str(e)).failure()) -class datapath_server_test(datapath_server_dispatcher): - """Create a server which will respond to all calls, returning arbitrary values. 
This is intended as a marshal/unmarshal test.""" - def __init__(self): - datapath_server_dispatcher.__init__(self, Datapath_server_dispatcher(Datapath_test())) diff --git a/ocaml/xapi-storage/python/xapi/storage/api/plugin.py b/ocaml/xapi-storage/python/xapi/storage/api/plugin.py deleted file mode 100644 index d9199a98771..00000000000 --- a/ocaml/xapi-storage/python/xapi/storage/api/plugin.py +++ /dev/null @@ -1,251 +0,0 @@ -from __future__ import print_function - -import argparse -import json -import logging -import sys -import traceback - -import xapi -# pylint: disable=line-too-long,superfluous-parens,unused-argument -# pylint: disable-next=redefined-builtin # FIXME: TypeError is a custom class in xapi -from xapi import ( - InternalError, - Rpc_light_failure, - TypeError, - UnknownMethod, - UnmarshalException, - is_str, - success, -) - -# pylint: disable=invalid-name,redefined-builtin,undefined-variable -# pyright: reportUndefinedVariable=false -if sys.version_info[0] > 2: - unicode = str - -class Unimplemented(Rpc_light_failure): - def __init__(self, arg_0): - Rpc_light_failure.__init__(self, "Unimplemented", [ arg_0 ]) - if not is_str(arg_0): - raise TypeError("string", repr(arg_0)) - self.arg_0 = arg_0 -class Plugin_server_dispatcher: - """Discover properties of this implementation. Every implementation must support the query interface or it will not be recognised as a storage plugin by xapi.""" - def __init__(self, impl): - """impl is a proxy object whose methods contain the implementation""" - self._impl = impl - def query(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - results = self._impl.query(dbg) - if not is_str(results['plugin']): - raise TypeError("string", repr(results['plugin'])) - if not is_str(results['name']): - raise TypeError("string", repr(results['name'])) - if not is_str(results['description']): - raise TypeError("string", repr(results['description'])) - if not is_str(results['vendor']): - raise TypeError("string", repr(results['vendor'])) - if not is_str(results['copyright']): - raise TypeError("string", repr(results['copyright'])) - if not is_str(results['version']): - raise TypeError("string", repr(results['version'])) - if not is_str(results['required_api_version']): - raise TypeError("string", repr(results['required_api_version'])) - if not isinstance(results['features'], list): - raise TypeError("string list", repr(results['features'])) - for tmp_1 in results['features']: - if not is_str(tmp_1): - raise TypeError("string", repr(tmp_1)) - if not isinstance(results['configuration'], dict): - raise TypeError("(string * string) list", repr(results['configuration'])) - for tmp_2 in results['configuration'].keys(): - if not is_str(tmp_2): - raise TypeError("string", repr(tmp_2)) - for tmp_2 in results['configuration'].values(): - if not is_str(tmp_2): - raise TypeError("string", repr(tmp_2)) - if not isinstance(results['required_cluster_stack'], list): - raise TypeError("string list", repr(results['required_cluster_stack'])) - for tmp_3 in results['required_cluster_stack']: - if not is_str(tmp_3): - raise TypeError("string", repr(tmp_3)) - return results - def ls(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not 
isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - results = self._impl.ls(dbg) - if not isinstance(results, list): - raise TypeError("string list", repr(results)) - for tmp_4 in results: - if not is_str(tmp_4): - raise TypeError("string", repr(tmp_4)) - return results - def diagnostics(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - results = self._impl.diagnostics(dbg) - if not is_str(results): - raise TypeError("string", repr(results)) - return results - def _dispatch(self, method, params): - """type check inputs, call implementation, type check outputs and return""" - args = params[0] - if method == "Plugin.query": - return success(self.query(args)) - elif method == "Plugin.ls": - return success(self.ls(args)) - elif method == "Plugin.diagnostics": - return success(self.diagnostics(args)) -class Plugin_skeleton: - """Discover properties of this implementation. Every implementation must support the query interface or it will not be recognised as a storage plugin by xapi.""" - def __init__(self): - pass - def query(self, dbg): - """Discover properties of this implementation. Every implementation must support the query interface or it will not be recognised as a storage plugin by xapi.""" - raise Unimplemented("Plugin.query") - def ls(self, dbg): - """Discover properties of this implementation. Every implementation must support the query interface or it will not be recognised as a storage plugin by xapi.""" - raise Unimplemented("Plugin.ls") - def diagnostics(self, dbg): - """Discover properties of this implementation. Every implementation must support the query interface or it will not be recognised as a storage plugin by xapi.""" - raise Unimplemented("Plugin.diagnostics") -class Plugin_test: - """Discover properties of this implementation. Every implementation must support the query interface or it will not be recognised as a storage plugin by xapi.""" - def __init__(self): - pass - def query(self, dbg): - """Discover properties of this implementation. Every implementation must support the query interface or it will not be recognised as a storage plugin by xapi.""" - result = {} - result["query_result"] = { "plugin": "string", "name": "string", "description": "string", "vendor": "string", "copyright": "string", "version": "string", "required_api_version": "string", "features": [ "string", "string" ], "configuration": { "string": "string" }, "required_cluster_stack": [ "string", "string" ] } - return result - def ls(self, dbg): - """Discover properties of this implementation. Every implementation must support the query interface or it will not be recognised as a storage plugin by xapi.""" - result = {} - result["srs"] = [ "string", "string" ] - return result - def diagnostics(self, dbg): - """Discover properties of this implementation. 
Every implementation must support the query interface or it will not be recognised as a storage plugin by xapi.""" - result = {} - result["diagnostics"] = "string" - return result -class Plugin_commandline(): - """Parse command-line arguments and call an implementation.""" - def __init__(self, impl): - self.impl = impl - self.dispatcher = Plugin_server_dispatcher(self.impl) - def _parse_query(self): - """Query this implementation and return its properties. This is called by xapi to determine whether it is compatible with xapi and to discover the supported features.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='Query this implementation and return its properties. This is called by xapi to determine whether it is compatible with xapi and to discover the supported features.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - return vars(parser.parse_args()) - def _parse_ls(self): - """[ls dbg]: returns a list of attached SRs""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[ls dbg]: returns a list of attached SRs') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - return vars(parser.parse_args()) - def _parse_diagnostics(self): - """Returns a printable set of backend diagnostic information. Implementations are encouraged to include any data which will be useful to diagnose problems. Note this data should not include personally-identifiable data as it is intended to be automatically included in bug reports.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='Returns a printable set of backend diagnostic information. Implementations are encouraged to include any data which will be useful to diagnose problems. 
Note this data should not include personally-identifiable data as it is intended to be automatically included in bug reports.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - return vars(parser.parse_args()) - def query(self): - use_json = False - try: - request = self._parse_query() - use_json = 'json' in request and request['json'] - results = self.dispatcher.query(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def ls(self): - use_json = False - try: - request = self._parse_ls() - use_json = 'json' in request and request['json'] - results = self.dispatcher.ls(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def diagnostics(self): - use_json = False - try: - request = self._parse_diagnostics() - use_json = 'json' in request and request['json'] - results = self.dispatcher.diagnostics(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e -class plugin_server_dispatcher: - """Demux calls to individual interface server_dispatchers""" - def __init__(self, Plugin=None): - self.Plugin = Plugin - def _dispatch(self, method, params): - try: - logging.debug("method = %s params = %s" % (method, repr(params))) - if method.startswith("Plugin") and self.Plugin: - return self.Plugin._dispatch(method, params) - raise UnknownMethod(method) - except Exception as e: - logging.info("caught %s" % e) - traceback.print_exc() - try: - # A declared (expected) failure will have a .failure() method - logging.debug("returning %s" % (repr(e.failure()))) - return e.failure() - except AttributeError: - # An undeclared (unexpected) failure is wrapped as InternalError - return (InternalError(str(e)).failure()) -class plugin_server_test(plugin_server_dispatcher): - """Create a server which will respond to all calls, returning arbitrary values. This is intended as a marshal/unmarshal test.""" - def __init__(self): - plugin_server_dispatcher.__init__(self, Plugin_server_dispatcher(Plugin_test())) diff --git a/ocaml/xapi-storage/python/xapi/storage/api/test_datapath.py b/ocaml/xapi-storage/python/xapi/storage/api/test_datapath.py deleted file mode 100644 index 8b6436657da..00000000000 --- a/ocaml/xapi-storage/python/xapi/storage/api/test_datapath.py +++ /dev/null @@ -1,138 +0,0 @@ -import logging - -import pytest - -import xapi -import xapi.storage.api.datapath - - -def internal_error(error): - """Return a dictionary with an internal error""" - return {"ErrorDescription": ["Internal_error", error], "Status": "Failure"} - - -def assert_error(testee, caplog, method_args, method, error): - """Assert that the result of the testee matches the expected error result""" - args = method_args.copy() - if method != "open": # the persistent arg is only checked for the open method - args["persistent"] = None # pass it, but with a wrong type(not used/checked) - assert testee._dispatch("Datapath." 
+ method, [args]) == internal_error(error) - assert caplog.messages[0] == "caught " + error - caplog.clear() - - -def assert_type_checks(testee, methods, template_args, bad_args, caplog): - """Assert that the result of the testee matches the expected result""" - for arg in bad_args: - # Sigh, if Python would be strongly typed, we wouldn't need this: - # Assert the type checks of the arguments - expected = "bool" if arg == "persistent" else "string" - other_type = False if expected == "string" else "str" - for actual in [None, [], (), {"dict": "val"}, 1, 1.0, str, caplog, other_type]: - bad_args = template_args.copy() - bad_args[arg] = actual - error_msg = "TypeError expected={} actual={}".format(expected, repr(actual)) - for method in methods: - assert_error(testee, caplog, bad_args, method, error_msg) - - # Remove the argument and assert the missing argument checks - bad_args.pop(arg) - error_msg = "UnmarshalException thing=argument missing ty={} desc=".format(arg) - for method in methods: - assert_error(testee, caplog, bad_args, method, error_msg) - - -def assert_attach_type_check(testee, caplog, args, uri): - """Assert that the result of the testee matches the expected result""" - a = args.copy() - a["uri"] = uri - assert testee._dispatch("Datapath.attach", [a]) == { - "Status": "Success", - "Value": {"domain_uuid": a["domain"], "implementation": (uri, a["dbg"])}, - } - if uri == "other": - return - a["dbg"] = "inject_error" - assert_error(testee, caplog, a, "attach", "TypeError expected=string actual=False") - - -def assert_attach_type_checks(testee, caplog, args): - """Assert type checks when attach() returns Blkback, Tapdisk3, Qdisk and others""" - for uri in ["Blkback", "Tapdisk3", "Qdisk", "other"]: - assert_attach_type_check(testee, caplog, args, uri) - - -def test_dispatcher(caplog, capsys): - """ - Test the dispatcher of the Xapi storage API datapath interface - - The dispatcher is a class that routes the calls to the corresponding methods - of a given Datapath implementation class. 
- """ - # Setup - caplog.set_level(logging.INFO) - - # The testee passes them to the Datapath_test class and its attach method - # is expected to return the values which we use to test the dispatcher: - args = {"dbg": "", "uri": "uri", "domain": "uuid", "persistent": True} - - # Call - - # datapath_server_test() returns an instance of the dispatcher class that - # routes the calls to the corresponding methods of the Datapath_test class: - testee = xapi.storage.api.datapath.datapath_server_test() - - # Test the argument checks of the dispatcher to identify missing arguments: - - # Assert type checks on the dbg and uri arguments - missing = ["dbg", "uri"] - methods = ["attach", "activate", "deactivate", "detach", "open", "close"] - assert_type_checks(testee, methods, args, missing, caplog) - - # Assert type checks on the missing domain argument - missing = ["domain"] - methods = ["attach", "activate", "deactivate", "detach"] - assert_type_checks(testee, methods, args, missing, caplog) - - # Assert type checks on the persistent flag for the open method - missing = ["persistent"] - methods = ["open"] - assert_type_checks(testee, methods, args, missing, caplog) - - # Assert the dispatcher returns the example results of Datapath_test.attach(): - assert_attach_type_checks(testee, caplog, args) - - # Assert the internal error to cover the check by removing the domain argument: - bad = args.copy() - bad["domain"] = "" - assert_error(testee, caplog, bad, "attach", "'domain_uuid'") - # Assert the type check on the domain_uuid return value: - bad["domain"] = "5" - assert_error(testee, caplog, bad, "attach", "TypeError expected=string actual=5") - - # The other methods work as expected. Setup, Call, Assert: - success = {"Status": "Success", "Value": {}} - assert testee._dispatch("Datapath.open", [args]) == success - assert testee._dispatch("Datapath.activate", [args]) == success - assert testee._dispatch("Datapath.deactivate", [args]) == success - assert testee._dispatch("Datapath.detach", [args]) == success - assert testee._dispatch("Datapath.close", [args]) == success - - # Assert that no errors were logged and no output was printed: - assert caplog.messages == [] # No messages were logged - assert capsys.readouterr().out == "" # No output was printed - assert capsys.readouterr().err == "" # No errors were printed - - -def test_exceptions(): - """Cover the code changed by using the is_str() function""" - - with pytest.raises(xapi.TypeError) as exc_info: - _ = xapi.XenAPIException(1, "params") # pylint: disable=pointless-statement - assert str(exc_info.value) == "TypeError expected=string actual=1" - - with pytest.raises(xapi.TypeError) as exc_info: - _ = xapi.storage.api.datapath.Unimplemented( - False - ) # pylint: disable=pointless-statement - assert str(exc_info.value) == "TypeError expected=string actual=False" diff --git a/ocaml/xapi-storage/python/xapi/storage/api/volume.py b/ocaml/xapi-storage/python/xapi/storage/api/volume.py deleted file mode 100644 index 0f01ed6fd97..00000000000 --- a/ocaml/xapi-storage/python/xapi/storage/api/volume.py +++ /dev/null @@ -1,1429 +0,0 @@ -from __future__ import print_function - -import argparse -import json -import logging -import sys -import traceback - -import xapi -# pylint: disable=line-too-long,superfluous-parens,unused-argument -# pylint: disable-next=redefined-builtin # FIXME: TypeError is a custom class in xapi -from xapi import ( - InternalError, - Rpc_light_failure, - TypeError, - UnknownMethod, - UnmarshalException, - is_long, - is_str, - success, 
-) - -# pylint: disable=invalid-name,redefined-builtin,undefined-variable -# pyright: reportUndefinedVariable=false -if sys.version_info[0] > 2: - long = int - unicode = str - str = bytes - -class Sr_not_attached(Rpc_light_failure): - def __init__(self, arg_0): - Rpc_light_failure.__init__(self, "Sr_not_attached", [ arg_0 ]) - if not is_str(arg_0): - raise TypeError("string", repr(arg_0)) - self.arg_0 = arg_0 -class SR_does_not_exist(Rpc_light_failure): - def __init__(self, arg_0): - Rpc_light_failure.__init__(self, "SR_does_not_exist", [ arg_0 ]) - if not is_str(arg_0): - raise TypeError("string", repr(arg_0)) - self.arg_0 = arg_0 -class Volume_does_not_exist(Rpc_light_failure): - def __init__(self, arg_0): - Rpc_light_failure.__init__(self, "Volume_does_not_exist", [ arg_0 ]) - if not is_str(arg_0): - raise TypeError("string", repr(arg_0)) - self.arg_0 = arg_0 -class Unimplemented(Rpc_light_failure): - def __init__(self, arg_0): - Rpc_light_failure.__init__(self, "Unimplemented", [ arg_0 ]) - if not is_str(arg_0): - raise TypeError("string", repr(arg_0)) - self.arg_0 = arg_0 -class Cancelled(Rpc_light_failure): - def __init__(self, arg_0): - Rpc_light_failure.__init__(self, "Cancelled", [ arg_0 ]) - if not is_str(arg_0): - raise TypeError("string", repr(arg_0)) - self.arg_0 = arg_0 -class Volume_server_dispatcher: - """Operations which operate on volumes (also known as Virtual Disk Images)""" - def __init__(self, impl): - """impl is a proxy object whose methods contain the implementation""" - self._impl = impl - def create(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - if not('name' in args): - raise UnmarshalException('argument missing', 'name', '') - name = args["name"] - if not is_str(name): - raise TypeError("string", repr(name)) - if not('description' in args): - raise UnmarshalException('argument missing', 'description', '') - description = args["description"] - if not is_str(description): - raise TypeError("string", repr(description)) - if not('size' in args): - raise UnmarshalException('argument missing', 'size', '') - size = args["size"] - if not(is_long(size)): - raise TypeError("int64", repr(size)) - results = self._impl.create(dbg, sr, name, description, size) - if not is_str(results['key']): - raise TypeError("string", repr(results['key'])) - if results['uuid'] is not None: - if not is_str(results['uuid']): - raise TypeError("string", repr(results['uuid'])) - if not is_str(results['name']): - raise TypeError("string", repr(results['name'])) - if not is_str(results['description']): - raise TypeError("string", repr(results['description'])) - if not isinstance(results['read_write'], bool): - raise TypeError("bool", repr(results['read_write'])) - if not(is_long(results['virtual_size'])): - raise TypeError("int64", repr(results['virtual_size'])) - if not(is_long(results['physical_utilisation'])): - raise TypeError("int64", repr(results['physical_utilisation'])) - if not isinstance(results['uri'], list): - raise TypeError("string list", repr(results['uri'])) - for tmp_5 in results['uri']: - if not is_str(tmp_5): - raise 
TypeError("string", repr(tmp_5)) - if not isinstance(results['keys'], dict): - raise TypeError("(string * string) list", repr(results['keys'])) - for tmp_6 in results['keys'].keys(): - if not is_str(tmp_6): - raise TypeError("string", repr(tmp_6)) - for tmp_6 in results['keys'].values(): - if not is_str(tmp_6): - raise TypeError("string", repr(tmp_6)) - return results - def snapshot(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - if not('key' in args): - raise UnmarshalException('argument missing', 'key', '') - key = args["key"] - if not is_str(key): - raise TypeError("string", repr(key)) - results = self._impl.snapshot(dbg, sr, key) - if not is_str(results['key']): - raise TypeError("string", repr(results['key'])) - if results['uuid'] is not None: - if not is_str(results['uuid']): - raise TypeError("string", repr(results['uuid'])) - if not is_str(results['name']): - raise TypeError("string", repr(results['name'])) - if not is_str(results['description']): - raise TypeError("string", repr(results['description'])) - if not isinstance(results['read_write'], bool): - raise TypeError("bool", repr(results['read_write'])) - if not(is_long(results['virtual_size'])): - raise TypeError("int64", repr(results['virtual_size'])) - if not(is_long(results['physical_utilisation'])): - raise TypeError("int64", repr(results['physical_utilisation'])) - if not isinstance(results['uri'], list): - raise TypeError("string list", repr(results['uri'])) - for tmp_7 in results['uri']: - if not is_str(tmp_7): - raise TypeError("string", repr(tmp_7)) - if not isinstance(results['keys'], dict): - raise TypeError("(string * string) list", repr(results['keys'])) - for tmp_8 in results['keys'].keys(): - if not is_str(tmp_8): - raise TypeError("string", repr(tmp_8)) - for tmp_8 in results['keys'].values(): - if not is_str(tmp_8): - raise TypeError("string", repr(tmp_8)) - return results - def clone(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - if not('key' in args): - raise UnmarshalException('argument missing', 'key', '') - key = args["key"] - if not is_str(key): - raise TypeError("string", repr(key)) - results = self._impl.clone(dbg, sr, key) - if not is_str(results['key']): - raise TypeError("string", repr(results['key'])) - if results['uuid'] is not None: - if not is_str(results['uuid']): - raise TypeError("string", repr(results['uuid'])) - if not is_str(results['name']): - raise TypeError("string", repr(results['name'])) - if not is_str(results['description']): - raise TypeError("string", repr(results['description'])) - if not isinstance(results['read_write'], bool): - raise TypeError("bool", 
repr(results['read_write'])) - if not(is_long(results['virtual_size'])): - raise TypeError("int64", repr(results['virtual_size'])) - if not(is_long(results['physical_utilisation'])): - raise TypeError("int64", repr(results['physical_utilisation'])) - if not isinstance(results['uri'], list): - raise TypeError("string list", repr(results['uri'])) - for tmp_9 in results['uri']: - if not is_str(tmp_9): - raise TypeError("string", repr(tmp_9)) - if not isinstance(results['keys'], dict): - raise TypeError("(string * string) list", repr(results['keys'])) - for tmp_10 in results['keys'].keys(): - if not is_str(tmp_10): - raise TypeError("string", repr(tmp_10)) - for tmp_10 in results['keys'].values(): - if not is_str(tmp_10): - raise TypeError("string", repr(tmp_10)) - return results - def destroy(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - if not('key' in args): - raise UnmarshalException('argument missing', 'key', '') - key = args["key"] - if not is_str(key): - raise TypeError("string", repr(key)) - results = self._impl.destroy(dbg, sr, key) - return results - def set_name(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - if not('key' in args): - raise UnmarshalException('argument missing', 'key', '') - key = args["key"] - if not is_str(key): - raise TypeError("string", repr(key)) - if not('new_name' in args): - raise UnmarshalException('argument missing', 'new_name', '') - new_name = args["new_name"] - if not is_str(new_name): - raise TypeError("string", repr(new_name)) - results = self._impl.set_name(dbg, sr, key, new_name) - return results - def set_description(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - if not('key' in args): - raise UnmarshalException('argument missing', 'key', '') - key = args["key"] - if not is_str(key): - raise TypeError("string", repr(key)) - if not('new_description' in args): - raise UnmarshalException('argument missing', 'new_description', '') - new_description = args["new_description"] - if not is_str(new_description): - raise TypeError("string", repr(new_description)) - results = self._impl.set_description(dbg, sr, key, new_description) - return results - def set(self, args): - """type-check inputs, call 
implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - if not('key' in args): - raise UnmarshalException('argument missing', 'key', '') - key = args["key"] - if not is_str(key): - raise TypeError("string", repr(key)) - if not('k' in args): - raise UnmarshalException('argument missing', 'k', '') - k = args["k"] - if not is_str(k): - raise TypeError("string", repr(k)) - if not('v' in args): - raise UnmarshalException('argument missing', 'v', '') - v = args["v"] - if not is_str(v): - raise TypeError("string", repr(v)) - results = self._impl.set(dbg, sr, key, k, v) - return results - def unset(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - if not('key' in args): - raise UnmarshalException('argument missing', 'key', '') - key = args["key"] - if not is_str(key): - raise TypeError("string", repr(key)) - if not('k' in args): - raise UnmarshalException('argument missing', 'k', '') - k = args["k"] - if not is_str(k): - raise TypeError("string", repr(k)) - results = self._impl.unset(dbg, sr, key, k) - return results - def resize(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - if not('key' in args): - raise UnmarshalException('argument missing', 'key', '') - key = args["key"] - if not is_str(key): - raise TypeError("string", repr(key)) - if not('new_size' in args): - raise UnmarshalException('argument missing', 'new_size', '') - new_size = args["new_size"] - if not(is_long(new_size)): - raise TypeError("int64", repr(new_size)) - results = self._impl.resize(dbg, sr, key, new_size) - return results - def stat(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - if not('key' in args): - raise UnmarshalException('argument missing', 'key', '') - key = args["key"] - if not is_str(key): - raise TypeError("string", repr(key)) - results = 
self._impl.stat(dbg, sr, key) - if not is_str(results['key']): - raise TypeError("string", repr(results['key'])) - if results['uuid'] is not None: - if not is_str(results['uuid']): - raise TypeError("string", repr(results['uuid'])) - if not is_str(results['name']): - raise TypeError("string", repr(results['name'])) - if not is_str(results['description']): - raise TypeError("string", repr(results['description'])) - if not isinstance(results['read_write'], bool): - raise TypeError("bool", repr(results['read_write'])) - if not(is_long(results['virtual_size'])): - raise TypeError("int64", repr(results['virtual_size'])) - if not(is_long(results['physical_utilisation'])): - raise TypeError("int64", repr(results['physical_utilisation'])) - if not isinstance(results['uri'], list): - raise TypeError("string list", repr(results['uri'])) - for tmp_11 in results['uri']: - if not is_str(tmp_11): - raise TypeError("string", repr(tmp_11)) - if not isinstance(results['keys'], dict): - raise TypeError("(string * string) list", repr(results['keys'])) - for tmp_12 in results['keys'].keys(): - if not is_str(tmp_12): - raise TypeError("string", repr(tmp_12)) - for tmp_12 in results['keys'].values(): - if not is_str(tmp_12): - raise TypeError("string", repr(tmp_12)) - return results - def _dispatch(self, method, params): - """type check inputs, call implementation, type check outputs and return""" - args = params[0] - if method == "Volume.create": - return success(self.create(args)) - elif method == "Volume.snapshot": - return success(self.snapshot(args)) - elif method == "Volume.clone": - return success(self.clone(args)) - elif method == "Volume.destroy": - return success(self.destroy(args)) - elif method == "Volume.set_name": - return success(self.set_name(args)) - elif method == "Volume.set_description": - return success(self.set_description(args)) - elif method == "Volume.set": - return success(self.set(args)) - elif method == "Volume.unset": - return success(self.unset(args)) - elif method == "Volume.resize": - return success(self.resize(args)) - elif method == "Volume.stat": - return success(self.stat(args)) -class Volume_skeleton: - """Operations which operate on volumes (also known as Virtual Disk Images)""" - def __init__(self): - pass - def create(self, dbg, sr, name, description, size): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - raise Unimplemented("Volume.create") - def snapshot(self, dbg, sr, key): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - raise Unimplemented("Volume.snapshot") - def clone(self, dbg, sr, key): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - raise Unimplemented("Volume.clone") - def destroy(self, dbg, sr, key): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - raise Unimplemented("Volume.destroy") - def set_name(self, dbg, sr, key, new_name): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - raise Unimplemented("Volume.set_name") - def set_description(self, dbg, sr, key, new_description): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - raise Unimplemented("Volume.set_description") - def set(self, dbg, sr, key, k, v): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - raise Unimplemented("Volume.set") - def unset(self, dbg, sr, key, k): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - raise 
Unimplemented("Volume.unset") - def resize(self, dbg, sr, key, new_size): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - raise Unimplemented("Volume.resize") - def stat(self, dbg, sr, key): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - raise Unimplemented("Volume.stat") -class Volume_test: - """Operations which operate on volumes (also known as Virtual Disk Images)""" - def __init__(self): - pass - def create(self, dbg, sr, name, description, size): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - result = {} - result["volume"] = { "key": "string", "uuid": None, "name": "string", "description": "string", "read_write": True, "virtual_size": long(0), "physical_utilisation": long(0), "uri": [ "string", "string" ], "keys": { "string": "string" } } - return result - def snapshot(self, dbg, sr, key): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - result = {} - result["volume"] = { "key": "string", "uuid": None, "name": "string", "description": "string", "read_write": True, "virtual_size": long(0), "physical_utilisation": long(0), "uri": [ "string", "string" ], "keys": { "string": "string" } } - return result - def clone(self, dbg, sr, key): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - result = {} - result["volume"] = { "key": "string", "uuid": None, "name": "string", "description": "string", "read_write": True, "virtual_size": long(0), "physical_utilisation": long(0), "uri": [ "string", "string" ], "keys": { "string": "string" } } - return result - def destroy(self, dbg, sr, key): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - result = {} - return result - def set_name(self, dbg, sr, key, new_name): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - result = {} - return result - def set_description(self, dbg, sr, key, new_description): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - result = {} - return result - def set(self, dbg, sr, key, k, v): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - result = {} - return result - def unset(self, dbg, sr, key, k): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - result = {} - return result - def resize(self, dbg, sr, key, new_size): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - result = {} - return result - def stat(self, dbg, sr, key): - """Operations which operate on volumes (also known as Virtual Disk Images)""" - result = {} - result["volume"] = { "key": "string", "uuid": None, "name": "string", "description": "string", "read_write": True, "virtual_size": long(0), "physical_utilisation": long(0), "uri": [ "string", "string" ], "keys": { "string": "string" } } - return result -class Volume_commandline(): - """Parse command-line arguments and call an implementation.""" - def __init__(self, impl): - self.impl = impl - self.dispatcher = Volume_server_dispatcher(self.impl) - def _parse_create(self): - """[create sr name description size] creates a new volume in [sr] with [name] and [description]. The volume will have size >= [size] i.e. 
it is always permissable for an implementation to round-up the volume to the nearest convenient block size""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[create sr name description size] creates a new volume in [sr] with [name] and [description]. The volume will have size >= [size] i.e. it is always permissable for an implementation to round-up the volume to the nearest convenient block size') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - parser.add_argument('name', action='store', help='A human-readable name to associate with the new disk. This name is intended to be short, to be a good summary of the disk.') - parser.add_argument('description', action='store', help='A human-readable description to associate with the new disk. This can be arbitrarily long, up to the general string size limit.') - parser.add_argument('size', action='store', help='A minimum size (in bytes) for the disk. Depending on the characteristics of the implementation this may be rounded up to (for example) the nearest convenient block size. The created disk will not be smaller than this size.') - return vars(parser.parse_args()) - def _parse_snapshot(self): - """[snapshot sr volume] creates a new volue which is a snapshot of [volume] in [sr]. Snapshots should never be written to; they are intended for backup/restore only. Note the name and description are copied but any extra metadata associated by [set] is not copied.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[snapshot sr volume] creates a new volue which is a snapshot of [volume] in [sr]. Snapshots should never be written to; they are intended for backup/restore only. Note the name and description are copied but any extra metadata associated by [set] is not copied.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - parser.add_argument('key', action='store', help='The volume key') - return vars(parser.parse_args()) - def _parse_clone(self): - """[clone sr volume] creates a new volume which is a writable clone of [volume] in [sr]. Note the name and description are copied but any extra metadata associated by [set] is not copied.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[clone sr volume] creates a new volume which is a writable clone of [volume] in [sr]. 
Note the name and description are copied but any extra metadata associated by [set] is not copied.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - parser.add_argument('key', action='store', help='The volume key') - return vars(parser.parse_args()) - def _parse_destroy(self): - """[destroy sr volume] removes [volume] from [sr]""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[destroy sr volume] removes [volume] from [sr]') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - parser.add_argument('key', action='store', help='The volume key') - return vars(parser.parse_args()) - def _parse_set_name(self): - """[set_name sr volume new_name] changes the name of [volume]""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[set_name sr volume new_name] changes the name of [volume]') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - parser.add_argument('key', action='store', help='The volume key') - parser.add_argument('new_name', action='store', help='New name') - return vars(parser.parse_args()) - def _parse_set_description(self): - """[set_description sr volume new_description] changes the description of [volume]""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[set_description sr volume new_description] changes the description of [volume]') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - parser.add_argument('key', action='store', help='The volume key') - parser.add_argument('new_description', action='store', help='New description') - return vars(parser.parse_args()) - def _parse_set(self): - """[set sr volume key value] associates [key] with [value] in the metadata of [volume] Note these keys and values are not interpreted by the plugin; they are intended for the higher-level software only.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - 
parser = argparse.ArgumentParser(description='[set sr volume key value] associates [key] with [value] in the metadata of [volume] Note these keys and values are not interpreted by the plugin; they are intended for the higher-level software only.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - parser.add_argument('key', action='store', help='The volume key') - parser.add_argument('k', action='store', help='Key') - parser.add_argument('v', action='store', help='Value') - return vars(parser.parse_args()) - def _parse_unset(self): - """[unset sr volume key] removes [key] and any value associated with it from the metadata of [volume] Note these keys and values are not interpreted by the plugin; they are intended for the higher-level software only.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[unset sr volume key] removes [key] and any value associated with it from the metadata of [volume] Note these keys and values are not interpreted by the plugin; they are intended for the higher-level software only.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - parser.add_argument('key', action='store', help='The volume key') - parser.add_argument('k', action='store', help='Key') - return vars(parser.parse_args()) - def _parse_resize(self): - """[resize sr volume new_size] enlarges [volume] to be at least [new_size].""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[resize sr volume new_size] enlarges [volume] to be at least [new_size].') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - parser.add_argument('key', action='store', help='The volume key') - parser.add_argument('new_size', action='store', help='New disk size') - return vars(parser.parse_args()) - def _parse_stat(self): - """[stat sr volume] returns metadata associated with [volume].""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[stat sr volume] returns metadata associated with [volume].') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The 
Storage Repository') - parser.add_argument('key', action='store', help='The volume key') - return vars(parser.parse_args()) - def create(self): - use_json = False - try: - request = self._parse_create() - use_json = 'json' in request and request['json'] - results = self.dispatcher.create(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def snapshot(self): - use_json = False - try: - request = self._parse_snapshot() - use_json = 'json' in request and request['json'] - results = self.dispatcher.snapshot(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def clone(self): - use_json = False - try: - request = self._parse_clone() - use_json = 'json' in request and request['json'] - results = self.dispatcher.clone(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def destroy(self): - use_json = False - try: - request = self._parse_destroy() - use_json = 'json' in request and request['json'] - results = self.dispatcher.destroy(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def set_name(self): - use_json = False - try: - request = self._parse_set_name() - use_json = 'json' in request and request['json'] - results = self.dispatcher.set_name(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def set_description(self): - use_json = False - try: - request = self._parse_set_description() - use_json = 'json' in request and request['json'] - results = self.dispatcher.set_description(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def set(self): - use_json = False - try: - request = self._parse_set() - use_json = 'json' in request and request['json'] - results = self.dispatcher.set(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def unset(self): - use_json = False - try: - request = self._parse_unset() - use_json = 'json' in request and request['json'] - results = self.dispatcher.unset(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def resize(self): - use_json = False - try: - request = self._parse_resize() - use_json = 'json' in request and request['json'] - results = self.dispatcher.resize(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def stat(self): - use_json = False - try: - request = self._parse_stat() - use_json = 'json' in request and request['json'] - results = self.dispatcher.stat(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e -class SR_server_dispatcher: - """Operations which act on Storage Repositories""" - def __init__(self, impl): - """impl is a proxy object whose methods contain the implementation""" - self._impl = impl - def probe(self, args): - """type-check inputs, call implementation, type-check 
outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('uri' in args): - raise UnmarshalException('argument missing', 'uri', '') - uri = args["uri"] - if not is_str(uri): - raise TypeError("string", repr(uri)) - results = self._impl.probe(dbg, uri) - if not isinstance(results['srs'], list): - raise TypeError("7 list", repr(results['srs'])) - for tmp_13 in results['srs']: - if not is_str(tmp_13['sr']): - raise TypeError("string", repr(tmp_13['sr'])) - if not is_str(tmp_13['name']): - raise TypeError("string", repr(tmp_13['name'])) - if not is_str(tmp_13['description']): - raise TypeError("string", repr(tmp_13['description'])) - if not(is_long(tmp_13['free_space'])): - raise TypeError("int64", repr(tmp_13['free_space'])) - if not(is_long(tmp_13['total_space'])): - raise TypeError("int64", repr(tmp_13['total_space'])) - if not isinstance(tmp_13['datasources'], list): - raise TypeError("string list", repr(tmp_13['datasources'])) - for tmp_14 in tmp_13['datasources']: - if not is_str(tmp_14): - raise TypeError("string", repr(tmp_14)) - if not isinstance(tmp_13['clustered'], bool): - raise TypeError("bool", repr(tmp_13['clustered'])) - if tmp_13['health'][0] == 'Healthy': - if not is_str(tmp_13['health'][1]): - raise TypeError("string", repr(tmp_13['health'][1])) - elif tmp_13['health'][0] == 'Recovering': - if not is_str(tmp_13['health'][1]): - raise TypeError("string", repr(tmp_13['health'][1])) - if not isinstance(results['uris'], list): - raise TypeError("string list", repr(results['uris'])) - for tmp_15 in results['uris']: - if not is_str(tmp_15): - raise TypeError("string", repr(tmp_15)) - return results - def create(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('uri' in args): - raise UnmarshalException('argument missing', 'uri', '') - uri = args["uri"] - if not is_str(uri): - raise TypeError("string", repr(uri)) - if not('name' in args): - raise UnmarshalException('argument missing', 'name', '') - name = args["name"] - if not is_str(name): - raise TypeError("string", repr(name)) - if not('description' in args): - raise UnmarshalException('argument missing', 'description', '') - description = args["description"] - if not is_str(description): - raise TypeError("string", repr(description)) - if not('configuration' in args): - raise UnmarshalException('argument missing', 'configuration', '') - configuration = args["configuration"] - if not isinstance(configuration, dict): - raise TypeError("(string * string) list", repr(configuration)) - for tmp_16 in configuration.keys(): - if not is_str(tmp_16): - raise TypeError("string", repr(tmp_16)) - for tmp_16 in configuration.values(): - if not is_str(tmp_16): - raise TypeError("string", repr(tmp_16)) - results = self._impl.create(dbg, uri, name, description, configuration) - return results - def attach(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise 
UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('uri' in args): - raise UnmarshalException('argument missing', 'uri', '') - uri = args["uri"] - if not is_str(uri): - raise TypeError("string", repr(uri)) - results = self._impl.attach(dbg, uri) - if not is_str(results): - raise TypeError("string", repr(results)) - return results - def detach(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - results = self._impl.detach(dbg, sr) - return results - def destroy(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - results = self._impl.destroy(dbg, sr) - return results - def stat(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - results = self._impl.stat(dbg, sr) - if not is_str(results['sr']): - raise TypeError("string", repr(results['sr'])) - if not is_str(results['name']): - raise TypeError("string", repr(results['name'])) - if not is_str(results['description']): - raise TypeError("string", repr(results['description'])) - if not(is_long(results['free_space'])): - raise TypeError("int64", repr(results['free_space'])) - if not(is_long(results['total_space'])): - raise TypeError("int64", repr(results['total_space'])) - if not isinstance(results['datasources'], list): - raise TypeError("string list", repr(results['datasources'])) - for tmp_17 in results['datasources']: - if not is_str(tmp_17): - raise TypeError("string", repr(tmp_17)) - if not isinstance(results['clustered'], bool): - raise TypeError("bool", repr(results['clustered'])) - if results['health'][0] == 'Healthy': - if not is_str(results['health'][1]): - raise TypeError("string", repr(results['health'][1])) - elif results['health'][0] == 'Recovering': - if not is_str(results['health'][1]): - raise TypeError("string", repr(results['health'][1])) - return results - def set_name(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise 
TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - if not('new_name' in args): - raise UnmarshalException('argument missing', 'new_name', '') - new_name = args["new_name"] - if not is_str(new_name): - raise TypeError("string", repr(new_name)) - results = self._impl.set_name(dbg, sr, new_name) - return results - def set_description(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - if not('new_description' in args): - raise UnmarshalException('argument missing', 'new_description', '') - new_description = args["new_description"] - if not is_str(new_description): - raise TypeError("string", repr(new_description)) - results = self._impl.set_description(dbg, sr, new_description) - return results - def ls(self, args): - """type-check inputs, call implementation, type-check outputs and return""" - if not isinstance(args, dict): - raise UnmarshalException('arguments', 'dict', repr(args)) - if not('dbg' in args): - raise UnmarshalException('argument missing', 'dbg', '') - dbg = args["dbg"] - if not is_str(dbg): - raise TypeError("string", repr(dbg)) - if not('sr' in args): - raise UnmarshalException('argument missing', 'sr', '') - sr = args["sr"] - if not is_str(sr): - raise TypeError("string", repr(sr)) - results = self._impl.ls(dbg, sr) - if not isinstance(results, list): - raise TypeError("8 list", repr(results)) - for tmp_18 in results: - if not is_str(tmp_18['key']): - raise TypeError("string", repr(tmp_18['key'])) - if tmp_18['uuid'] is not None: - if not is_str(tmp_18['uuid']): - raise TypeError("string", repr(tmp_18['uuid'])) - if not is_str(tmp_18['name']): - raise TypeError("string", repr(tmp_18['name'])) - if not is_str(tmp_18['description']): - raise TypeError("string", repr(tmp_18['description'])) - if not isinstance(tmp_18['read_write'], bool): - raise TypeError("bool", repr(tmp_18['read_write'])) - if not(is_long(tmp_18['virtual_size'])): - raise TypeError("int64", repr(tmp_18['virtual_size'])) - if not(is_long(tmp_18['physical_utilisation'])): - raise TypeError("int64", repr(tmp_18['physical_utilisation'])) - if not isinstance(tmp_18['uri'], list): - raise TypeError("string list", repr(tmp_18['uri'])) - for tmp_19 in tmp_18['uri']: - if not is_str(tmp_19): - raise TypeError("string", repr(tmp_19)) - if not isinstance(tmp_18['keys'], dict): - raise TypeError("(string * string) list", repr(tmp_18['keys'])) - for tmp_20 in tmp_18['keys'].keys(): - if not is_str(tmp_20): - raise TypeError("string", repr(tmp_20)) - for tmp_20 in tmp_18['keys'].values(): - if not is_str(tmp_20): - raise TypeError("string", repr(tmp_20)) - return results - def _dispatch(self, method, params): - """type check inputs, call implementation, type check outputs and return""" - args = params[0] - if method == "SR.probe": - return success(self.probe(args)) - elif method == "SR.create": - return success(self.create(args)) - elif method == "SR.attach": - return success(self.attach(args)) - elif method == "SR.detach": - return 
success(self.detach(args)) - elif method == "SR.destroy": - return success(self.destroy(args)) - elif method == "SR.stat": - return success(self.stat(args)) - elif method == "SR.set_name": - return success(self.set_name(args)) - elif method == "SR.set_description": - return success(self.set_description(args)) - elif method == "SR.ls": - return success(self.ls(args)) -class SR_skeleton: - """Operations which act on Storage Repositories""" - def __init__(self): - pass - def probe(self, dbg, uri): - """Operations which act on Storage Repositories""" - raise Unimplemented("SR.probe") - def create(self, dbg, uri, name, description, configuration): - """Operations which act on Storage Repositories""" - raise Unimplemented("SR.create") - def attach(self, dbg, uri): - """Operations which act on Storage Repositories""" - raise Unimplemented("SR.attach") - def detach(self, dbg, sr): - """Operations which act on Storage Repositories""" - raise Unimplemented("SR.detach") - def destroy(self, dbg, sr): - """Operations which act on Storage Repositories""" - raise Unimplemented("SR.destroy") - def stat(self, dbg, sr): - """Operations which act on Storage Repositories""" - raise Unimplemented("SR.stat") - def set_name(self, dbg, sr, new_name): - """Operations which act on Storage Repositories""" - raise Unimplemented("SR.set_name") - def set_description(self, dbg, sr, new_description): - """Operations which act on Storage Repositories""" - raise Unimplemented("SR.set_description") - def ls(self, dbg, sr): - """Operations which act on Storage Repositories""" - raise Unimplemented("SR.ls") -class SR_test: - """Operations which act on Storage Repositories""" - def __init__(self): - pass - def probe(self, dbg, uri): - """Operations which act on Storage Repositories""" - result = {} - result["result"] = { "srs": [ { "sr": "string", "name": "string", "description": "string", "free_space": long(0), "total_space": long(0), "datasources": [ "string", "string" ], "clustered": True, "health": None }, { "sr": "string", "name": "string", "description": "string", "free_space": long(0), "total_space": long(0), "datasources": [ "string", "string" ], "clustered": True, "health": None } ], "uris": [ "string", "string" ] } - return result - def create(self, dbg, uri, name, description, configuration): - """Operations which act on Storage Repositories""" - result = {} - return result - def attach(self, dbg, uri): - """Operations which act on Storage Repositories""" - result = {} - result["sr"] = "string" - return result - def detach(self, dbg, sr): - """Operations which act on Storage Repositories""" - result = {} - return result - def destroy(self, dbg, sr): - """Operations which act on Storage Repositories""" - result = {} - return result - def stat(self, dbg, sr): - """Operations which act on Storage Repositories""" - result = {} - result["sr"] = { "sr": "string", "name": "string", "description": "string", "free_space": long(0), "total_space": long(0), "datasources": [ "string", "string" ], "clustered": True, "health": None } - return result - def set_name(self, dbg, sr, new_name): - """Operations which act on Storage Repositories""" - result = {} - return result - def set_description(self, dbg, sr, new_description): - """Operations which act on Storage Repositories""" - result = {} - return result - def ls(self, dbg, sr): - """Operations which act on Storage Repositories""" - result = {} - result["volumes"] = [ { "key": "string", "uuid": None, "name": "string", "description": "string", "read_write": True, "virtual_size": 
long(0), "physical_utilisation": long(0), "uri": [ "string", "string" ], "keys": { "string": "string" } }, { "key": "string", "uuid": None, "name": "string", "description": "string", "read_write": True, "virtual_size": long(0), "physical_utilisation": long(0), "uri": [ "string", "string" ], "keys": { "string": "string" } } ] - return result -class SR_commandline(): - """Parse command-line arguments and call an implementation.""" - def __init__(self, impl): - self.impl = impl - self.dispatcher = SR_server_dispatcher(self.impl) - def _parse_probe(self): - """[probe uri]: looks for existing SRs on the storage device""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[probe uri]: looks for existing SRs on the storage device') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('uri', action='store', help='The Storage Repository URI') - return vars(parser.parse_args()) - def _parse_create(self): - """[create uri name description configuration]: creates a fresh SR""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[create uri name description configuration]: creates a fresh SR') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('uri', action='store', help='The Storage Repository URI') - parser.add_argument('name', action='store', help='Human-readable name for the SR') - parser.add_argument('description', action='store', help='Human-readable description for the SR') - parser.add_argument('--configuration', default={}, nargs=2, action=xapi.ListAction, help='Plugin-specific configuration which describes where and how to create the storage repository. This may include the physical block device name, a remote NFS server and path or an RBD storage pool.') - return vars(parser.parse_args()) - def _parse_attach(self): - """[attach uri]: attaches the SR to the local host. Once an SR is attached then volumes may be manipulated.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[attach uri]: attaches the SR to the local host. Once an SR is attached then volumes may be manipulated.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('uri', action='store', help='The Storage Repository URI') - return vars(parser.parse_args()) - def _parse_detach(self): - """[detach sr]: detaches the SR, clearing up any associated resources. 
Once the SR is detached then volumes may not be manipulated.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[detach sr]: detaches the SR, clearing up any associated resources. Once the SR is detached then volumes may not be manipulated.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - return vars(parser.parse_args()) - def _parse_destroy(self): - """[destroy sr]: destroys the [sr] and deletes any volumes associated with it. Note that an SR must be attached to be destroyed; otherwise Sr_not_attached is thrown.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[destroy sr]: destroys the [sr] and deletes any volumes associated with it. Note that an SR must be attached to be destroyed; otherwise Sr_not_attached is thrown.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - return vars(parser.parse_args()) - def _parse_stat(self): - """[stat sr] returns summary metadata associated with [sr]. Note this call does not return details of sub-volumes, see SR.ls.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[stat sr] returns summary metadata associated with [sr]. 
Note this call does not return details of sub-volumes, see SR.ls.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - return vars(parser.parse_args()) - def _parse_set_name(self): - """[set_name sr new_name] changes the name of [sr]""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[set_name sr new_name] changes the name of [sr]') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - parser.add_argument('new_name', action='store', help='The new name of the SR') - return vars(parser.parse_args()) - def _parse_set_description(self): - """[set_description sr new_description] changes the description of [sr]""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[set_description sr new_description] changes the description of [sr]') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - parser.add_argument('new_description', action='store', help='The new description for the SR') - return vars(parser.parse_args()) - def _parse_ls(self): - """[ls sr] returns a list of volumes contained within an attached SR.""" - # in --json mode we don't have any other arguments - if ('--json' in sys.argv or '-j' in sys.argv): - jsondict = json.loads(sys.stdin.readline(),) - jsondict['json'] = True - return jsondict - parser = argparse.ArgumentParser(description='[ls sr] returns a list of volumes contained within an attached SR.') - parser.add_argument('-j', '--json', action='store_const', const=True, default=False, help='Read json from stdin, print json to stdout', required=False) - parser.add_argument('dbg', action='store', help='Debug context from the caller') - parser.add_argument('sr', action='store', help='The Storage Repository') - return vars(parser.parse_args()) - def probe(self): - use_json = False - try: - request = self._parse_probe() - use_json = 'json' in request and request['json'] - results = self.dispatcher.probe(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def create(self): - use_json = False - try: - request = self._parse_create() - use_json = 'json' in request and request['json'] - results = self.dispatcher.create(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def attach(self): - use_json = False - try: - request = self._parse_attach() - use_json = 
'json' in request and request['json'] - results = self.dispatcher.attach(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def detach(self): - use_json = False - try: - request = self._parse_detach() - use_json = 'json' in request and request['json'] - results = self.dispatcher.detach(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def destroy(self): - use_json = False - try: - request = self._parse_destroy() - use_json = 'json' in request and request['json'] - results = self.dispatcher.destroy(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def stat(self): - use_json = False - try: - request = self._parse_stat() - use_json = 'json' in request and request['json'] - results = self.dispatcher.stat(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def set_name(self): - use_json = False - try: - request = self._parse_set_name() - use_json = 'json' in request and request['json'] - results = self.dispatcher.set_name(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def set_description(self): - use_json = False - try: - request = self._parse_set_description() - use_json = 'json' in request and request['json'] - results = self.dispatcher.set_description(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e - def ls(self): - use_json = False - try: - request = self._parse_ls() - use_json = 'json' in request and request['json'] - results = self.dispatcher.ls(request) - print(json.dumps(results)) - except Exception as e: - if use_json: - xapi.handle_exception(e) - else: - traceback.print_exc() - raise e -class volume_server_dispatcher: - """Demux calls to individual interface server_dispatchers""" - def __init__(self, Volume=None, SR=None): - self.Volume = Volume - self.SR = SR - def _dispatch(self, method, params): - try: - logging.debug("method = %s params = %s" % (method, repr(params))) - if method.startswith("Volume") and self.Volume: - return self.Volume._dispatch(method, params) - elif method.startswith("SR") and self.SR: - return self.SR._dispatch(method, params) - raise UnknownMethod(method) - except Exception as e: - logging.info("caught %s" % e) - traceback.print_exc() - try: - # A declared (expected) failure will have a .failure() method - logging.debug("returning %s" % (repr(e.failure()))) - return e.failure() - except AttributeError: - # An undeclared (unexpected) failure is wrapped as InternalError - return (InternalError(str(e)).failure()) -class volume_server_test(volume_server_dispatcher): - """Create a server which will respond to all calls, returning arbitrary values. 
This is intended as a marshal/unmarshal test.""" - def __init__(self): - volume_server_dispatcher.__init__(self, Volume_server_dispatcher(Volume_test()), SR_server_dispatcher(SR_test())) \ No newline at end of file From 724a1c094fe2729882d15f3e6c0148df9ea6e15e Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Wed, 24 Jul 2024 06:02:13 +0100 Subject: [PATCH 199/222] CP-49148: perfmon.service is not loaded When testing the feature/py3 branch, xapi failed to restart due to perfmon.service not loaded. The root cause is that: In this PR: https://github.com/xapi-project/xen-api/pull/5767, we moved perfmon.service from `scripts/Makefile` to `python3/Makefile` In `scripts/Makefile`, `IPROG` is defined as: IPROG=./install.sh 755 IDATA=./install.sh 644 While in `python3/Makefile`, `IPROG` is defined as: IPROG=install -m 755 IDATA=install -m 644 And the purpose of `install.sh` is to replace strings in *.service like: @OPTDIR@ -> ${OPTDIR} So in python3/Makefile, we didn't replace these strings, and then led to the error. Signed-off-by: Stephen Cheng --- python3/Makefile | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/python3/Makefile b/python3/Makefile index 0600c90646b..514d21e5cbd 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -1,21 +1,22 @@ include ../config.mk -IPROG=install -m 755 -IDATA=install -m 644 +# To replace strings in *.service like: @OPTDIR@ -> ${OPTDIR} +IPROG=../scripts/install.sh 755 +IDATA=../scripts/install.sh 644 SITE3_DIR=$(shell python3 -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())") DNF_PLUGIN_DIR=dnf-plugins install: # Create destination directories using install -m 755 -d: - $(IPROG) -d $(DESTDIR)$(OPTDIR)/bin - $(IPROG) -d $(DESTDIR)$(SITE3_DIR) - $(IPROG) -d $(DESTDIR)$(LIBEXECDIR) - $(IPROG) -d $(DESTDIR)$(PLUGINDIR) - $(IPROG) -d $(DESTDIR)/etc/sysconfig - $(IPROG) -d $(DESTDIR)/usr/lib/systemd/system - $(IPROG) -d $(DESTDIR)$(EXTENSIONDIR) - $(IPROG) -d $(DESTDIR)$(SITE3_DIR)/$(DNF_PLUGIN_DIR) + install -m 755 -d $(DESTDIR)$(OPTDIR)/bin + install -m 755 -d $(DESTDIR)$(SITE3_DIR) + install -m 755 -d $(DESTDIR)$(LIBEXECDIR) + install -m 755 -d $(DESTDIR)$(PLUGINDIR) + install -m 755 -d $(DESTDIR)/etc/sysconfig + install -m 755 -d $(DESTDIR)/usr/lib/systemd/system + install -m 755 -d $(DESTDIR)$(EXTENSIONDIR) + install -m 755 -d $(DESTDIR)$(SITE3_DIR)/$(DNF_PLUGIN_DIR) $(IDATA) packages/inventory.py $(DESTDIR)$(SITE3_DIR)/ $(IDATA) packages/observer.py $(DESTDIR)$(SITE3_DIR)/ From 04377fe1b9654d502ba5f60010f852eb92511345 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Tue, 30 Jul 2024 07:32:00 +0100 Subject: [PATCH 200/222] CP-49148: Fix ambiguous python shebang for XS9 When building xapi for XS9, ran into errors: *** ERROR: ambiguous python shebang in /opt/xensource/libexec/restore-sr-metadata.py: #!/usr/bin/python. Change it to python3 (or python2) explicitly. Explicitly use python3. 
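A note on the `install.sh` behaviour that PATCH 199 above depends on: its message explains that the script rewrites `@OPTDIR@`-style placeholders in the `.service` files, something a plain `install -m` copy does not do, so the installed unit file keeps the literal placeholder and systemd cannot load `perfmon.service`. Purely as an illustration of that kind of substitution (this is not the project's `install.sh`; the placeholder pattern and the environment-variable lookup are assumptions):

    #!/usr/bin/env python3
    """Illustrative sketch only: expand @NAME@ placeholders in a unit file."""
    import os
    import re
    import sys

    def expand_placeholders(text):
        # Replace each @NAME@ token with the value of the environment variable
        # NAME (e.g. @OPTDIR@ -> the value of $OPTDIR); unknown names are kept.
        return re.sub(
            r"@([A-Za-z_][A-Za-z0-9_]*)@",
            lambda match: os.environ.get(match.group(1), match.group(0)),
            text,
        )

    if __name__ == "__main__":
        with open(sys.argv[1], encoding="utf-8") as src:
            sys.stdout.write(expand_placeholders(src.read()))

Run against a copy of a unit file with the relevant variables set, a substitution of this shape would emit the expanded file on stdout, which matches the failure mode described in the commit message when the step is skipped.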
Signed-off-by: Stephen Cheng --- python3/libexec/restore-sr-metadata.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/python3/libexec/restore-sr-metadata.py b/python3/libexec/restore-sr-metadata.py index 4bbb9fe55af..7fa4e92aa18 100644 --- a/python3/libexec/restore-sr-metadata.py +++ b/python3/libexec/restore-sr-metadata.py @@ -1,4 +1,4 @@ -#!/usr/bin/python +#!/usr/bin/python3 # Restore SR metadata and VDI names from an XML file # (c) Anil Madhavapeddy, Citrix Systems Inc, 2008 From 2facf218941867b4bd0496ddc1383785a619a79d Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Wed, 31 Jul 2024 09:52:44 +0100 Subject: [PATCH 201/222] CP-49148: fix pylint warning This `import errno` was added by feature/py3 branch to fix pytype issues. But in the master branch, there was also a change to fix pytype issues. Use master code and remove the import. Signed-off-by: Stephen Cheng --- scripts/examples/python/XenAPI/XenAPI.py | 1 - 1 file changed, 1 deletion(-) diff --git a/scripts/examples/python/XenAPI/XenAPI.py b/scripts/examples/python/XenAPI/XenAPI.py index 9670502ffa9..e8ed75d0cb4 100644 --- a/scripts/examples/python/XenAPI/XenAPI.py +++ b/scripts/examples/python/XenAPI/XenAPI.py @@ -54,7 +54,6 @@ # OF THIS SOFTWARE. # -------------------------------------------------------------------- -import errno import gettext import os import socket From 81f12401daec568e76c87afb95e3639cc48244c4 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Wed, 31 Jul 2024 11:16:33 +0100 Subject: [PATCH 202/222] CP-49148: Remove unused xc.py - Use python3 shebang for pytype_reporter.py and test_usb_scan.py - Remove unused xc.py Signed-off-by: Stephen Cheng --- python3/tests/test_usb_scan.py | 2 +- pytype_reporter.py | 2 +- scripts/xc.py | 12 ------------ 3 files changed, 2 insertions(+), 14 deletions(-) delete mode 100644 scripts/xc.py diff --git a/python3/tests/test_usb_scan.py b/python3/tests/test_usb_scan.py index ad72c0cd928..8b886194c74 100644 --- a/python3/tests/test_usb_scan.py +++ b/python3/tests/test_usb_scan.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # # unittest for usb_scan.py diff --git a/pytype_reporter.py b/pytype_reporter.py index 4e7d91f172b..b94ed948786 100755 --- a/pytype_reporter.py +++ b/pytype_reporter.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 """GitHub action workflow Runner for pytype which works also locally without GitHub""" import json import re diff --git a/scripts/xc.py b/scripts/xc.py deleted file mode 100644 index 25723e2e7e0..00000000000 --- a/scripts/xc.py +++ /dev/null @@ -1,12 +0,0 @@ - -class xc : - def __init__(self): - self.d = {"XenServer" : "SDK"} - self.s = "SDK" - def readconsolering(self): - return self.s - def physinfo(self): - return self.d - def xeninfo(self): - return self.d - From 946ca371546a76349005d0f7d552c4853bd0e099 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Wed, 31 Jul 2024 12:21:32 +0100 Subject: [PATCH 203/222] CP-49148: Convert rrdd-example.py to python3 Signed-off-by: Stephen Cheng --- ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py | 31 +++++++++++---------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py b/ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py index e25e0ddf016..17f7c2398f4 100755 --- a/ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py +++ b/ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py @@ -1,18 +1,19 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 -import rrdd, os +import os +import rrdd if __name__ == "__main__": - # Create a proxy 
for communicating with xcp-rrdd. - api = rrdd.API(plugin_id="host_mem") - while True: - # Wait until 0.5 seconds before xcp-rrdd is going to read the output file. - api.wait_until_next_reading(neg_shift=.5) - # Collect measurements. - cmd = "free -k | grep Mem | awk '{print $2, $3, $4}'" - vs = os.popen(cmd).read().strip().split() - # Tell the proxy which datasources should be exposed in this iteration. - api.set_datasource("used_mem", vs[1], min_val=0, max_val=vs[0], units="KB") - api.set_datasource("free_mem", vs[2], min_val=0, max_val=vs[0], units="KB") - # Write all required information into a file about to be read by xcp-rrdd. - api.update() + # Create a proxy for communicating with xcp-rrdd. + api = rrdd.API(plugin_id="host_mem") + while True: + # Wait until 0.5 seconds before xcp-rrdd is going to read the output file. + api.wait_until_next_reading(neg_shift=.5) + # Collect measurements. + cmd = "free -k | grep Mem | awk '{print $2, $3, $4}'" + vs = os.popen(cmd).read().strip().split() + # Tell the proxy which datasources should be exposed in this iteration. + api.set_datasource("used_mem", vs[1], min_val=0, max_val=vs[0], units="KB") + api.set_datasource("free_mem", vs[2], min_val=0, max_val=vs[0], units="KB") + # Write all required information into a file about to be read by xcp-rrdd. + api.update() From 8899586a4956491ac389a0491f4f47f22b0ed87e Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Thu, 1 Aug 2024 11:29:16 +0100 Subject: [PATCH 204/222] CP-46112: Remove python2 compatible code from XenAPI Signed-off-by: Stephen Cheng --- scripts/Makefile | 4 ---- scripts/examples/python/XenAPI/XenAPI.py | 21 ++++++--------------- scripts/examples/python/XenAPIPlugin.py | 7 ++----- 3 files changed, 8 insertions(+), 24 deletions(-) diff --git a/scripts/Makefile b/scripts/Makefile index 87302dca48f..98d00f7c0b6 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -134,10 +134,6 @@ install: $(IPROG) host-backup-restore/host-backup $(DESTDIR)$(LIBEXECDIR) $(IPROG) host-backup-restore/host-restore $(DESTDIR)$(LIBEXECDIR) # example/python -ifneq ($(BUILD_PY2), NO) - $(IDATA) examples/python/XenAPIPlugin.py $(DESTDIR)$(SITE_DIR)/ - $(IDATA) examples/python/XenAPI/XenAPI.py $(DESTDIR)$(SITE_DIR)/ -endif $(IDATA) examples/python/XenAPIPlugin.py $(DESTDIR)$(SITE3_DIR)/ sed -i 's/#!\/usr\/bin\/python/#!\/usr\/bin\/python3/' $(DESTDIR)$(SITE3_DIR)/XenAPIPlugin.py $(IDATA) examples/python/XenAPI/XenAPI.py $(DESTDIR)$(SITE3_DIR)/ diff --git a/scripts/examples/python/XenAPI/XenAPI.py b/scripts/examples/python/XenAPI/XenAPI.py index e8ed75d0cb4..722a9e6e965 100644 --- a/scripts/examples/python/XenAPI/XenAPI.py +++ b/scripts/examples/python/XenAPI/XenAPI.py @@ -1,3 +1,4 @@ +#!/usr/bin/python3 # Copyright (c) Citrix Systems, Inc. # All rights reserved. 
# @@ -58,13 +59,8 @@ import os import socket import sys - -if sys.version_info[0] == 2: - import httplib as httplib - import xmlrpclib as xmlrpclib -else: - import http.client as httplib - import xmlrpc.client as xmlrpclib +import http.client as httplib +import xmlrpc.client as xmlrpclib otel = False try: @@ -150,15 +146,10 @@ class Session(xmlrpclib.ServerProxy): def __init__(self, uri, transport=None, encoding=None, verbose=False, allow_none=True, ignore_ssl=False): - if sys.version_info[0] > 2: - # this changed to be a 'bool' in Python3 - verbose = bool(verbose) - allow_none = bool(allow_none) + verbose = bool(verbose) + allow_none = bool(allow_none) - # Fix for CA-172901 (+ Python 2.4 compatibility) - # Fix for context=ctx ( < Python 2.7.9 compatibility) - if not (sys.version_info[0] <= 2 and sys.version_info[1] <= 7 and sys.version_info[2] <= 9 ) \ - and ignore_ssl: + if ignore_ssl: import ssl ctx = ssl._create_unverified_context() xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding, diff --git a/scripts/examples/python/XenAPIPlugin.py b/scripts/examples/python/XenAPIPlugin.py index 82f1f2f8531..43744432843 100644 --- a/scripts/examples/python/XenAPIPlugin.py +++ b/scripts/examples/python/XenAPIPlugin.py @@ -1,3 +1,4 @@ +#!/usr/bin/env python3 """XenAPI python plugin boilerplate code""" # pylint: disable=invalid-name # Module name "XenAPIPlugin" doesn't conform to snake_case naming style @@ -9,11 +10,7 @@ import sys import XenAPI - -if sys.version_info[0] == 2: - import xmlrpclib -else: - import xmlrpc.client as xmlrpclib +import xmlrpc.client as xmlrpclib class Failure(Exception): """Provide compatibility with plugins written against the XenServer 5.5 API""" From 0e5ff89ce84b6ace81e257a2da69f1b38d458b3c Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Thu, 1 Aug 2024 13:11:01 +0100 Subject: [PATCH 205/222] CP-46112: Relocate XenAPI to python3 directory Signed-off-by: Stephen Cheng --- .github/workflows/release.yml | 2 +- .gitignore | 8 ++++---- Makefile | 4 ++-- ocaml/sdk-gen/README.md | 2 +- pyproject.toml | 3 +++ python3/Makefile | 5 +++++ {scripts/examples/python => python3/examples}/Makefile | 0 {scripts/examples/python => python3/examples}/README.md | 2 +- .../examples/python => python3/examples}/XenAPI/XenAPI.py | 0 .../python => python3/examples}/XenAPI/__init__.py | 0 .../examples/python => python3/examples}/XenAPIPlugin.py | 0 .../examples/python => python3/examples}/pyproject.toml | 2 +- {scripts/examples/python => python3/examples}/setup.cfg | 0 scripts/Makefile | 4 ---- 14 files changed, 18 insertions(+), 14 deletions(-) rename {scripts/examples/python => python3/examples}/Makefile (100%) rename {scripts/examples/python => python3/examples}/README.md (77%) rename {scripts/examples/python => python3/examples}/XenAPI/XenAPI.py (100%) rename {scripts/examples/python => python3/examples}/XenAPI/__init__.py (100%) rename {scripts/examples/python => python3/examples}/XenAPIPlugin.py (100%) rename {scripts/examples/python => python3/examples}/pyproject.toml (89%) rename {scripts/examples/python => python3/examples}/setup.cfg (100%) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 9a051ef15f9..830d94e969e 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -33,7 +33,7 @@ jobs: uses: actions/upload-artifact@v4 with: name: XenAPI - path: scripts/examples/python/dist/ + path: python3/examples/dist/ build-sdks: name: Build and upload SDK artifacts diff --git a/.gitignore b/.gitignore index 
967e463c15f..b519eb9cb39 100644 --- a/.gitignore +++ b/.gitignore @@ -16,10 +16,10 @@ config.mk # python packaging **/__pycache__/ **/*.pyc -scripts/examples/python/setup.py -scripts/examples/python/XenAPI.egg-info/ -scripts/examples/python/build/ -scripts/examples/python/dist/ +python3/examples/setup.py +python3/examples/XenAPI.egg-info/ +python3/examples/build/ +python3/examples/dist/ # ignore file needed for building the SDK ocaml/sdk-gen/csharp/XE_SR_ERRORCODES.xml diff --git a/Makefile b/Makefile index 7bff3e3aca6..a73d939a8b2 100644 --- a/Makefile +++ b/Makefile @@ -121,7 +121,7 @@ sdk: cp -r _build/default/ocaml/sdk-gen/java/autogen/* $(XAPISDK)/java cp -r _build/default/ocaml/sdk-gen/powershell/autogen/* $(XAPISDK)/powershell cp -r _build/default/ocaml/sdk-gen/go/autogen/* $(XAPISDK)/go - cp scripts/examples/python/XenAPI/XenAPI.py $(XAPISDK)/python + cp python3/examples/XenAPI/XenAPI.py $(XAPISDK)/python sh ocaml/sdk-gen/windows-line-endings.sh $(XAPISDK)/csharp sh ocaml/sdk-gen/windows-line-endings.sh $(XAPISDK)/powershell @@ -136,7 +136,7 @@ sdk-build-java: sdk cd _build/install/default/xapi/sdk/java && mvn -f xen-api/pom.xml -B clean package install -Drevision=0.0 python: - $(MAKE) -C scripts/examples/python build + $(MAKE) -C python3/examples build doc-json: dune exec --profile=$(PROFILE) -- ocaml/idl/json_backend/gen_json.exe -destdir $(XAPIDOC)/jekyll diff --git a/ocaml/sdk-gen/README.md b/ocaml/sdk-gen/README.md index fa45a1c3803..1cb1f2a7238 100644 --- a/ocaml/sdk-gen/README.md +++ b/ocaml/sdk-gen/README.md @@ -9,7 +9,7 @@ XenAPI's datamodel. The generation code is written in OCaml and is contained in this directory. The Python module is not auto-generated, it can be found at -[XenAPI.py](../../scripts/examples/python/XenAPI/XenAPI.py). +[XenAPI.py](../../python3/examples/XenAPI/XenAPI.py). To compile the generated source code, follow the instructions in the corresponding `README` files. 
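Since the hand-written XenAPI.py referenced just above is the Python binding that users end up installing, a minimal session sketch may help orient readers of the relocated examples; the host URL and credentials below are placeholders, and handling of XenAPI.Failure is omitted for brevity:

    #!/usr/bin/env python3
    """Minimal XenAPI session sketch; host and credentials are placeholders."""
    import XenAPI

    # ignore_ssl=True skips certificate verification; only suitable for test hosts.
    session = XenAPI.Session("https://xenserver.example.com", ignore_ssl=True)
    session.xenapi.login_with_password("root", "password", "1.0", "example-client")
    try:
        # Print the name_label of every VM record visible to this session.
        for record in session.xenapi.VM.get_all_records().values():
            print(record["name_label"])
    finally:
        session.xenapi.session.logout()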
diff --git a/pyproject.toml b/pyproject.toml index 630f6c51e25..8a7ca2dc9fc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -227,6 +227,9 @@ exclude = [ "ocaml/xcp-rrdd/scripts/rrdd/rrdd.py", "ocaml/xcp-rrdd/scripts/rrdd/rrdd-example.py", "python3/packages/observer.py", + "python3/examples/XenAPI/XenAPI.py", + "python3/examples/XenAPIPlugin.py", + ] diff --git a/python3/Makefile b/python3/Makefile index 13bc58546c0..81735c73e16 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -52,6 +52,11 @@ install: $(IDATA) perfmon/perfmon.service $(DESTDIR)/usr/lib/systemd/system/perfmon.service $(IPROG) perfmon/sysconfig-perfmon $(DESTDIR)/etc/sysconfig/perfmon +# example/python + $(IDATA) examples/XenAPIPlugin.py $(DESTDIR)$(SITE3_DIR)/ + sed -i 's/#!\/usr\/bin\/python/#!\/usr\/bin\/python3/' $(DESTDIR)$(SITE3_DIR)/XenAPIPlugin.py + $(IDATA) examples/XenAPI/XenAPI.py $(DESTDIR)$(SITE3_DIR)/ + # poweron $(IPROG) poweron/wlan.py $(DESTDIR)$(PLUGINDIR)/wlan.py diff --git a/scripts/examples/python/Makefile b/python3/examples/Makefile similarity index 100% rename from scripts/examples/python/Makefile rename to python3/examples/Makefile diff --git a/scripts/examples/python/README.md b/python3/examples/README.md similarity index 77% rename from scripts/examples/python/README.md rename to python3/examples/README.md index 7761002ac70..f896978fee3 100644 --- a/scripts/examples/python/README.md +++ b/python3/examples/README.md @@ -7,7 +7,7 @@ To install the package, enable the virtual environment where it's going to be us Examples -------- -The [examples](https://github.com/xapi-project/xen-api/tree/master/scripts/examples/python) will not work unless they have been placed in the same directory as `XenAPI.py` or `XenAPI` package from PyPI has been installed (`pip install XenAPI`) +The [examples](https://github.com/xapi-project/xen-api/tree/master/python3/examples) will not work unless they have been placed in the same directory as `XenAPI.py` or `XenAPI` package from PyPI has been installed (`pip install XenAPI`) Packaging ========= diff --git a/scripts/examples/python/XenAPI/XenAPI.py b/python3/examples/XenAPI/XenAPI.py similarity index 100% rename from scripts/examples/python/XenAPI/XenAPI.py rename to python3/examples/XenAPI/XenAPI.py diff --git a/scripts/examples/python/XenAPI/__init__.py b/python3/examples/XenAPI/__init__.py similarity index 100% rename from scripts/examples/python/XenAPI/__init__.py rename to python3/examples/XenAPI/__init__.py diff --git a/scripts/examples/python/XenAPIPlugin.py b/python3/examples/XenAPIPlugin.py similarity index 100% rename from scripts/examples/python/XenAPIPlugin.py rename to python3/examples/XenAPIPlugin.py diff --git a/scripts/examples/python/pyproject.toml b/python3/examples/pyproject.toml similarity index 89% rename from scripts/examples/python/pyproject.toml rename to python3/examples/pyproject.toml index f556f2539ab..5a429e1a0c7 100644 --- a/scripts/examples/python/pyproject.toml +++ b/python3/examples/pyproject.toml @@ -3,4 +3,4 @@ requires = ["setuptools >= 38.6.0", "setuptools_scm[toml]", "wheel"] build-backend = "setuptools.build_meta" [tool.setuptools_scm] -root = "../../.." +root = "../.." 
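The root = "../.." change above re-anchors setuptools_scm at the repository top now that the packaging metadata lives in python3/examples rather than scripts/examples/python, one directory level higher than before. As a rough local check of the derived version, the standard setuptools_scm helper can be invoked from a file inside python3/examples (the script below and its location are assumptions for illustration, not part of the patch):

    #!/usr/bin/env python3
    """Print the version setuptools_scm would derive for this checkout."""
    from setuptools_scm import get_version

    # relative_to anchors the relative root at this file's directory, so
    # root="../.." resolves to the repository top, mirroring pyproject.toml.
    print(get_version(root="../..", relative_to=__file__))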
diff --git a/scripts/examples/python/setup.cfg b/python3/examples/setup.cfg similarity index 100% rename from scripts/examples/python/setup.cfg rename to python3/examples/setup.cfg diff --git a/scripts/Makefile b/scripts/Makefile index 98d00f7c0b6..15ad2c62d51 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -133,10 +133,6 @@ install: # host-backup-restore $(IPROG) host-backup-restore/host-backup $(DESTDIR)$(LIBEXECDIR) $(IPROG) host-backup-restore/host-restore $(DESTDIR)$(LIBEXECDIR) -# example/python - $(IDATA) examples/python/XenAPIPlugin.py $(DESTDIR)$(SITE3_DIR)/ - sed -i 's/#!\/usr\/bin\/python/#!\/usr\/bin\/python3/' $(DESTDIR)$(SITE3_DIR)/XenAPIPlugin.py - $(IDATA) examples/python/XenAPI/XenAPI.py $(DESTDIR)$(SITE3_DIR)/ # YUM plugins $(IPROG) yum-plugins/accesstoken.py $(DESTDIR)$(YUMPLUGINDIR) $(IDATA) yum-plugins/accesstoken.conf $(DESTDIR)$(YUMPLUGINCONFDIR) From 17da7407513ef64606fb111c342857d721e76c40 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Fri, 2 Aug 2024 05:28:59 +0100 Subject: [PATCH 206/222] Fix pylint warnings Signed-off-by: Stephen Cheng --- python3/examples/XenAPIPlugin.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/python3/examples/XenAPIPlugin.py b/python3/examples/XenAPIPlugin.py index 43744432843..3d8d4871e4d 100644 --- a/python3/examples/XenAPIPlugin.py +++ b/python3/examples/XenAPIPlugin.py @@ -8,9 +8,8 @@ from __future__ import print_function import sys - -import XenAPI import xmlrpc.client as xmlrpclib +import XenAPI class Failure(Exception): """Provide compatibility with plugins written against the XenServer 5.5 API""" From eddf404f93bea75bacc9afbb26b758b9b6afeed3 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Fri, 2 Aug 2024 06:46:01 +0100 Subject: [PATCH 207/222] Delete unused sed command in python3 Makefile Signed-off-by: Stephen Cheng --- python3/Makefile | 1 - 1 file changed, 1 deletion(-) diff --git a/python3/Makefile b/python3/Makefile index 81735c73e16..fed125c01bb 100644 --- a/python3/Makefile +++ b/python3/Makefile @@ -54,7 +54,6 @@ install: # example/python $(IDATA) examples/XenAPIPlugin.py $(DESTDIR)$(SITE3_DIR)/ - sed -i 's/#!\/usr\/bin\/python/#!\/usr\/bin\/python3/' $(DESTDIR)$(SITE3_DIR)/XenAPIPlugin.py $(IDATA) examples/XenAPI/XenAPI.py $(DESTDIR)$(SITE3_DIR)/ From fcd2edf42fb2ce5c2b75be1b72afd943d36510a4 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Fri, 2 Aug 2024 10:30:37 +0100 Subject: [PATCH 208/222] Remove `universal=1` from setup.cfg to only support python3 Signed-off-by: Stephen Cheng --- python3/examples/setup.cfg | 6 ------ 1 file changed, 6 deletions(-) diff --git a/python3/examples/setup.cfg b/python3/examples/setup.cfg index 47601de9c05..9a07df89928 100644 --- a/python3/examples/setup.cfg +++ b/python3/examples/setup.cfg @@ -20,9 +20,3 @@ classifiers = [options] packages = find: python_requires = >=3.6.*, <4 - -[bdist_wheel] -# This flag says that the code is written to work on both Python 2 and Python -# 3. If at all possible, it is good practice to do this. If you cannot, you -# will need to generate wheels for each Python version that you support. 
-universal=1 From 02a7154a67aa6feeeefce551007428fa0f32493b Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Wed, 7 Aug 2024 12:00:00 +0200 Subject: [PATCH 209/222] xenopsd: Add pytest for common.VIF.get_locking_mode() Signed-off-by: Bernhard Kaindl --- .../xenopsd/scripts/test_common_class_vif.py | 78 +++++++++++++++++++ 1 file changed, 78 insertions(+) create mode 100644 ocaml/xenopsd/scripts/test_common_class_vif.py diff --git a/ocaml/xenopsd/scripts/test_common_class_vif.py b/ocaml/xenopsd/scripts/test_common_class_vif.py new file mode 100644 index 00000000000..006d1966f6d --- /dev/null +++ b/ocaml/xenopsd/scripts/test_common_class_vif.py @@ -0,0 +1,78 @@ +"""Test ocaml/xenopsd/scripts/common.VIF.get_locking_mode()""" + +from unittest.mock import patch # to check the arguments passed to send_to_syslog() + +import pytest # for pytest.parametrize to run the same test with different parameters + +import common # Tested module + + +# Mock class to simulate the object containing the get_locking_mode method +class VifMockSubclass(common.VIF): + """Mock class to simulate a VIF object containing the get_locking_mode method""" + + def __init__(self, json): # pylint: disable=super-init-not-called + """Do not call the parent constructor, it would open a file""" + self.json = json + + def get_mac(self): + return "00:11:22:33:44:55" # Expected MAC address + + +@pytest.mark.parametrize( + # Call the test case 3 times with two args: + # inp: input for VIF.get_locking_mode() + # expected_output: expected output of the get_locking_mode method + # Asserted with: + # assert expected_output == get_locking_mode(input) + "input_params, expected_output", + [ + # Happy path tests + ( + # locked + { # input + "locking_mode": [ + "locked", + {"ipv4": ["1.1.1.1"], "ipv6": ["fe80::1"]}, + ] + }, # expected output + { + "mac": "00:11:22:33:44:55", + "locking_mode": "locked", + "ipv4_allowed": ["1.1.1.1"], + "ipv6_allowed": ["fe80::1"], + }, + ), + ( + # unlocked + {"locking_mode": "unlocked"}, + { + "mac": "00:11:22:33:44:55", + "locking_mode": "unlocked", + "ipv4_allowed": [], + "ipv6_allowed": [], + }, + ), + ( + {}, # no locking_mode + { + "mac": "00:11:22:33:44:55", + "locking_mode": "", + "ipv4_allowed": [], + "ipv6_allowed": [], + }, + ), + ], +) +def test_get_locking_mode(input_params, expected_output): + """Test VIF.get_locking_mode() using the VIF class test parameters defined above.""" + + # Act: Get the locking mode configuration for the input params from the VIF object: + with patch("common.send_to_syslog") as send_to_syslog: + test_result = VifMockSubclass(input_params).get_locking_mode() + + # Assert the expected output and the expected call to send_to_syslog(): + assert test_result == expected_output + send_to_syslog.assert_called_once_with( + "Got locking config: " + repr(expected_output) + ) From 2e883b7f35b910387da59e905c27cc41d48af5a9 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Sat, 3 Aug 2024 09:54:29 +0800 Subject: [PATCH 210/222] Removed shebang from XenAPI.py and XenAPIPlugin.py as they are library code. 
- Removed shebang from XenAPI.py and XenAPIPlugin.py - Modified setup.cfg of python_requires version - Removed unused `dune clean` command from python Makefile Co-authored-by: Pau Ruiz Safont Signed-off-by: Stephen Cheng --- python3/examples/Makefile | 1 - python3/examples/XenAPI/XenAPI.py | 1 - python3/examples/XenAPIPlugin.py | 1 - python3/examples/setup.cfg | 2 +- 4 files changed, 1 insertion(+), 4 deletions(-) diff --git a/python3/examples/Makefile b/python3/examples/Makefile index 251f747250d..ac84bf6ba77 100644 --- a/python3/examples/Makefile +++ b/python3/examples/Makefile @@ -8,5 +8,4 @@ build: SETUPTOOLS_SCM_PRETEND_VERSION=$(XAPI_VERSION) python -m build --sdist . clean: - dune clean rm -rf dist/ build/ XenAPI.egg-info/ diff --git a/python3/examples/XenAPI/XenAPI.py b/python3/examples/XenAPI/XenAPI.py index 722a9e6e965..e37f8813b6e 100644 --- a/python3/examples/XenAPI/XenAPI.py +++ b/python3/examples/XenAPI/XenAPI.py @@ -1,4 +1,3 @@ -#!/usr/bin/python3 # Copyright (c) Citrix Systems, Inc. # All rights reserved. # diff --git a/python3/examples/XenAPIPlugin.py b/python3/examples/XenAPIPlugin.py index 3d8d4871e4d..49998457783 100644 --- a/python3/examples/XenAPIPlugin.py +++ b/python3/examples/XenAPIPlugin.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python3 """XenAPI python plugin boilerplate code""" # pylint: disable=invalid-name # Module name "XenAPIPlugin" doesn't conform to snake_case naming style diff --git a/python3/examples/setup.cfg b/python3/examples/setup.cfg index 9a07df89928..b2c23c40369 100644 --- a/python3/examples/setup.cfg +++ b/python3/examples/setup.cfg @@ -19,4 +19,4 @@ classifiers = [options] packages = find: -python_requires = >=3.6.*, <4 +python_requires = >=3.6, <4 From c8d9d1322e43f49b2383ce170dd5379f35f90a60 Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Sat, 3 Aug 2024 03:54:10 +0100 Subject: [PATCH 211/222] Remove python2 related CI Signed-off-by: Stephen Cheng --- .github/workflows/main.yml | 2 +- .github/workflows/other.yml | 17 +---------------- 2 files changed, 2 insertions(+), 17 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index d4bf28aaab2..5ee2ee8da05 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -45,7 +45,7 @@ jobs: - name: Make install smoketest run: | opam exec -- make install DESTDIR=$(mktemp -d) - opam exec -- make install DESTDIR=$(mktemp -d) BUILD_PY2=NO + opam exec -- make install DESTDIR=$(mktemp -d) - name: Check disk space run: df -h || true diff --git a/.github/workflows/other.yml b/.github/workflows/other.yml index 58cc7c8cdfe..c4042638922 100644 --- a/.github/workflows/other.yml +++ b/.github/workflows/other.yml @@ -18,7 +18,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["2.7", "3.11"] + python-version: ["3.11"] steps: - name: Checkout code uses: actions/checkout@v4 @@ -39,24 +39,11 @@ jobs: - uses: pre-commit/action@v3.0.1 name: Run pre-commit checks (no spaces at end of lines, etc) - if: ${{ matrix.python-version != '2.7' }} with: extra_args: --all-files --verbose --hook-stage commit env: SKIP: no-commit-to-branch - - name: Run Pytest for python 2 and get code coverage - if: ${{ matrix.python-version == '2.7' }} - run: > - pip install enum future mock pytest-coverage pytest-mock && - pytest -vv -rA --cov=ocaml ocaml - --cov-report term-missing - --cov-report xml:.git/coverage${{matrix.python-version}}.xml - --cov-fail-under 50 - env: - PYTHONDEVMODE: yes - PYTHONPATH: "python3:python3/stubs" - - name: Upload coverage report to Coveralls uses: 
coverallsapp/github-action@v2 with: @@ -66,7 +53,6 @@ jobs: parallel: true - uses: dciborow/action-pylint@0.1.0 - if: ${{ matrix.python-version != '2.7' }} with: reporter: github-pr-review level: warning @@ -75,7 +61,6 @@ jobs: continue-on-error: true - name: Run pytype checks - if: ${{ matrix.python-version != '2.7' }} run: pip install pandas pytype toml && ./pytype_reporter.py env: PR_NUMBER: ${{ github.event.number }} From e7c9da840a6800a646c0414fa07dcaa455602a70 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Wed, 7 Aug 2024 12:00:00 +0200 Subject: [PATCH 212/222] xenopsd: Fix warnings: remove inner function, use isinstance Signed-off-by: Bernhard Kaindl --- ocaml/xenopsd/scripts/common.py | 25 +++++++++++++++---------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/ocaml/xenopsd/scripts/common.py b/ocaml/xenopsd/scripts/common.py index af8666ce62c..323b5ba06eb 100755 --- a/ocaml/xenopsd/scripts/common.py +++ b/ocaml/xenopsd/scripts/common.py @@ -192,28 +192,33 @@ def get_external_ids(self): results["xs-network-uuid"] = self.json["extra_private_keys"]["network-uuid"] results["attached-mac"] = self.get_mac() return results + def get_locking_mode(self): - def get_words(value, separator): - if string.strip(value) == "": - return [] - else: - return string.split(value, separator) + """ + Get the locking mode configuration for the VIF. + + :returns dict: A dictionary containing the locking mode configuration with keys: + - mac: The MAC address + - locking_mode: The locking mode + - ipv4_allowed: List of IPv4 addresses allowed + - ipv6_allowed: List of IPv6 addresses allowed + """ results = { "mac": self.get_mac(), "locking_mode": "", "ipv4_allowed": [], - "ipv6_allowed": [] + "ipv6_allowed": [], } if "locking_mode" in self.json: - if type(self.json["locking_mode"]) is list: - # Must be type=locked here + if isinstance(self.json["locking_mode"], list): + # Must be type=locked and have keys for allowed ipv4 and ipv6 addresses results["locking_mode"] = self.json["locking_mode"][0].lower() - locked_params=self.json["locking_mode"][1] + locked_params = self.json["locking_mode"][1] results["ipv4_allowed"] = locked_params["ipv4"] results["ipv6_allowed"] = locked_params["ipv6"] else: results["locking_mode"] = self.json["locking_mode"].lower() - send_to_syslog("Got locking config: %s" % (repr(results))) + send_to_syslog("Got locking config: " + repr(results)) return results class Interface: From f4c808eea5d092d5a649467c12bf8e7ba1b4d2ec Mon Sep 17 00:00:00 2001 From: Stephen Cheng Date: Sun, 4 Aug 2024 09:59:15 +0800 Subject: [PATCH 213/222] Remove duplicated line. Co-authored-by: Pau Ruiz Safont Signed-off-by: Stephen Cheng --- .github/workflows/main.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 5ee2ee8da05..79ce257d7f2 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -45,7 +45,6 @@ jobs: - name: Make install smoketest run: | opam exec -- make install DESTDIR=$(mktemp -d) - opam exec -- make install DESTDIR=$(mktemp -d) - name: Check disk space run: df -h || true From fd913f6bf43a545ae0a1d6160528f31eaeea0e1b Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Thu, 8 Aug 2024 12:00:00 +0200 Subject: [PATCH 214/222] xenopsd: remove the orphaned qemu-vif-script (qemu backend was removed) In 2017, the qemu and libvirt backends were removed: https://github.com/xapi-project/xen-api/commit/90815e5fcad8a523d9f448eb6f885c54a9c2a955 With it, the use of qemu-vif-script was removed. 
Remove it as well. Signed-off-by: Bernhard Kaindl --- Makefile | 1 - ocaml/xenopsd/scripts/make-custom-xenopsd.conf | 1 - ocaml/xenopsd/scripts/qemu-vif-script | 15 --------------- ocaml/xenopsd/xenopsd.conf | 3 --- 4 files changed, 20 deletions(-) delete mode 100755 ocaml/xenopsd/scripts/qemu-vif-script diff --git a/Makefile b/Makefile index a73d939a8b2..337e4dad88c 100644 --- a/Makefile +++ b/Makefile @@ -237,7 +237,6 @@ install: build doc sdk doc-json install -D ./ocaml/xenopsd/scripts/block $(DESTDIR)/$(XENOPSD_LIBEXECDIR)/block install -D ./ocaml/xenopsd/scripts/xen-backend.rules $(DESTDIR)/$(ETCDIR)/udev/rules.d/xen-backend.rules install -D ./ocaml/xenopsd/scripts/tap $(DESTDIR)/$(XENOPSD_LIBEXECDIR)/tap - install -D ./ocaml/xenopsd/scripts/qemu-vif-script $(DESTDIR)/$(XENOPSD_LIBEXECDIR)/qemu-vif-script install -D ./ocaml/xenopsd/scripts/setup-vif-rules $(DESTDIR)/$(XENOPSD_LIBEXECDIR)/setup-vif-rules install -D ./_build/install/default/bin/pvs-proxy-ovs-setup $(DESTDIR)/$(XENOPSD_LIBEXECDIR)/pvs-proxy-ovs-setup (cd $(DESTDIR)/$(XENOPSD_LIBEXECDIR) && ln -s pvs-proxy-ovs-setup setup-pvs-proxy-rules) diff --git a/ocaml/xenopsd/scripts/make-custom-xenopsd.conf b/ocaml/xenopsd/scripts/make-custom-xenopsd.conf index b49610f0e9a..59f52269157 100755 --- a/ocaml/xenopsd/scripts/make-custom-xenopsd.conf +++ b/ocaml/xenopsd/scripts/make-custom-xenopsd.conf @@ -41,7 +41,6 @@ vif-script=${XENOPSD_LIBEXECDIR}/vif vif-xl-script=${XENOPSD_LIBEXECDIR}/vif vbd-script=${XENOPSD_LIBEXECDIR}/block vbd-xl-script=${XENOPSD_LIBEXECDIR}/block -qemu-vif-script=${XENOPSD_LIBEXECDIR}/qemu-vif-script setup-vif-rules=${XENOPSD_LIBEXECDIR}/setup-vif-rules sockets-group=$group qemu-wrapper=${QEMU_WRAPPER_DIR}/qemu-wrapper diff --git a/ocaml/xenopsd/scripts/qemu-vif-script b/ocaml/xenopsd/scripts/qemu-vif-script deleted file mode 100755 index a8fe976e3a1..00000000000 --- a/ocaml/xenopsd/scripts/qemu-vif-script +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python3 - - -from common import * -import sys - -if __name__ == "__main__": - if len(sys.argv) != 2: - print("Usage:", file=sys.stderr) - print(" %s " % sys.argv[0], file=sys.stderr) - sys.exit(1) - name = sys.argv[1] - send_to_syslog("setting up interface %s" % name) - i = Interface(name) - i.online() diff --git a/ocaml/xenopsd/xenopsd.conf b/ocaml/xenopsd/xenopsd.conf index 94fcafefbd0..e80194c1f55 100644 --- a/ocaml/xenopsd/xenopsd.conf +++ b/ocaml/xenopsd/xenopsd.conf @@ -61,9 +61,6 @@ disable-logging-for=http tracing tracing_export # Path to the vbd backend script # vbd-xl-script=/usr/lib/xcp/scripts/block -# Path to the qemu vif script -# qemu-vif-script=/etc/xcp/scripts/qemu-vif-script - # Path to the PCI FLR script # pci-flr-script=/opt/xensource/libexec/pci-flr From 8e6d4bf265b957607549e7c3a1add0afc9e2a139 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Wed, 7 Aug 2024 12:00:00 +0200 Subject: [PATCH 215/222] xenopsd: remove the orphaned common.Interface.online() method With qemu-vif-script removed, common.Interface.online() is orphaned, remove it. 
Signed-off-by: Bernhard Kaindl --- ocaml/xenopsd/scripts/common.py | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/ocaml/xenopsd/scripts/common.py b/ocaml/xenopsd/scripts/common.py index 323b5ba06eb..52157c2dd07 100755 --- a/ocaml/xenopsd/scripts/common.py +++ b/ocaml/xenopsd/scripts/common.py @@ -228,17 +228,3 @@ def __init__(self, vif_name, uuid, devid): self.vif = VIF(vif_name, uuid, int(devid)) def get_vif(self): return self.vif - def online(self): - v = self.get_vif() - mode = v.get_mode() - for (key, value) in v.get_ethtool(): - set_ethtool(mode, self.name, key, value) - set_mtu(mode, self.name, v.get_mtu()) - add_to_bridge(mode, self.name, v.get_bridge(), v.get_address(), v.get_external_ids()) - add_vif_rules(self.name) - set_promiscuous(mode, self.name, v.get_promiscuous()) - -#def add(mode, dev, bridge, address, external_ids): -# add_to_bridge(mode, dev, bridge, address, external_ids) - - From 7e356779d630889836436b3108b832ba5d9acf06 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Wed, 7 Aug 2024 12:00:00 +0200 Subject: [PATCH 216/222] xenopsd: as Interface.online() is removed, remove get_ethtool() too Signed-off-by: Bernhard Kaindl --- ocaml/xenopsd/scripts/common.py | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/ocaml/xenopsd/scripts/common.py b/ocaml/xenopsd/scripts/common.py index 52157c2dd07..26adb00c0d2 100755 --- a/ocaml/xenopsd/scripts/common.py +++ b/ocaml/xenopsd/scripts/common.py @@ -154,18 +154,7 @@ def get_bridge(self): return network[1] def get_address(self): return "fe:ff:ff:ff:ff:ff" - def get_ethtool(self): - results = [] - for (k, v) in self.json["other_config"]: - if k.startswith("ethtool-"): - k = k[len("ethtool-"):] - if v == "true" or v == "on": - results.append(k, True) - elif v == "false" or v == "off": - results.append(k, False) - else: - send_to_syslog("VIF %s/%d: ignoring ethtool argument %s=%s (use true/false)" % (self.vm_uuid, self.devid, k, v)) - return results + def get_mac(self): return self.json["mac"] def get_mtu(self): From 806a49ccb10f0ff82ed60d1088d679c486b8eb30 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Wed, 7 Aug 2024 12:00:00 +0200 Subject: [PATCH 217/222] loop+blkback example: Fix pytype warning: Handle None(not found) Signed-off-by: Bernhard Kaindl --- .../examples/datapath/loop+blkback/datapath.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/ocaml/xapi-storage/python/examples/datapath/loop+blkback/datapath.py b/ocaml/xapi-storage/python/examples/datapath/loop+blkback/datapath.py index f076b700a6f..10b1959e05c 100755 --- a/ocaml/xapi-storage/python/examples/datapath/loop+blkback/datapath.py +++ b/ocaml/xapi-storage/python/examples/datapath/loop+blkback/datapath.py @@ -75,20 +75,22 @@ def attach(self, dbg, uri, domain): call(dbg, cmd) loop = Loop.from_path(dbg, file_path) + if not loop: + return {} return {"implementations": [ [ - 'XenDisk', + "XenDisk", { - 'backend_type': 'vbd', - 'params': loop.block_device(), - 'extra': {} + "backend_type": "vbd", + "params": loop.block_device(), + "extra": {} } ], [ - 'BlockDevice', + "BlockDevice", { - 'path': loop.block_device() + "path": loop.block_device() } ] ]} From 50ae66c9257e0c7bfbbf2a554237eeed2c5b6842 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Thu, 8 Aug 2024 12:00:00 +0200 Subject: [PATCH 218/222] pytype: Enable checking on ocaml dirs, fix pythonpath Signed-off-by: Bernhard Kaindl --- pyproject.toml | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git 
a/pyproject.toml b/pyproject.toml index 8a7ca2dc9fc..1881ebbd350 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -291,13 +291,12 @@ expected_to_fail = [ [tool.pytype] inputs = [ - # Python 3 "python3/", "ocaml/xcp-rrdd", - - # To be added later, - # when converted to Python3-compatible syntax: - # "ocaml/xapi-storage/python", + "ocaml/xenopsd", + "ocaml/xapi-storage/python", + "ocaml/xapi-storage-script", + "ocaml/vhd-tool", ] disable = [ # Reduce noise from python2 scripts(import yum, xenfsimage, xcp, urlgrabber) @@ -305,4 +304,4 @@ disable = [ ] platform = "linux" # Allow pytype to find the XenAPI module, the rrdd module and python3 modules: -pythonpath = "scripts/examples/python:.:scripts:scripts/plugins:scripts/examples" +pythonpath = "python3/examples:." From d3b3c7469255121488bd06b465190c9f1f9bfb14 Mon Sep 17 00:00:00 2001 From: Lin Liu Date: Fri, 9 Aug 2024 03:13:42 +0000 Subject: [PATCH 219/222] CP-49148: Clean py2 compatible code Signed-off-by: Lin Liu --- ocaml/vhd-tool/test/dummy_extent_reader.py | 3 +-- ocaml/xapi-storage/python/xapi/__init__.py | 23 +++------------------- pyproject.toml | 2 +- python3/libexec/mail-alarm | 5 ----- scripts/Makefile | 5 ----- 5 files changed, 5 insertions(+), 33 deletions(-) diff --git a/ocaml/vhd-tool/test/dummy_extent_reader.py b/ocaml/vhd-tool/test/dummy_extent_reader.py index 1c344af40ef..b692674dded 100755 --- a/ocaml/vhd-tool/test/dummy_extent_reader.py +++ b/ocaml/vhd-tool/test/dummy_extent_reader.py @@ -1,10 +1,9 @@ -#!/usr/bin/python +#!/usr/bin/python3 """ Dummy extent reader that returns a huge extent list """ -from __future__ import print_function import json import sys diff --git a/ocaml/xapi-storage/python/xapi/__init__.py b/ocaml/xapi-storage/python/xapi/__init__.py index 50eae33fe1a..0f7c2a13de3 100644 --- a/ocaml/xapi-storage/python/xapi/__init__.py +++ b/ocaml/xapi-storage/python/xapi/__init__.py @@ -25,30 +25,12 @@ SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """ -from __future__ import print_function import sys import traceback import json import argparse -# is_str(): Shortcut to check if a value is an instance of a string type. -# -# Replace: -# if not isinstance(code, str) and not isinstance(code, unicode): -# with: -# if not is_str(code): -# -# This makes for much cleaner code and suits Python3 well too. 
-if sys.version_info[0] > 2: - long = int - def is_str(x): - return isinstance(x, str) # With Python3, all strings are unicode -else: - def is_str(x): # pragma: no cover - return isinstance(x, (str, unicode)) # pylint: disable=undefined-variable - - def success(result): return {"Status": "Success", "Value": result} @@ -84,7 +66,7 @@ class XenAPIException(Exception): def __init__(self, code, params): Exception.__init__(self) - if not is_str(code): + if not isinstance(code, str): raise TypeError("string", repr(code)) if not isinstance(params, list): raise TypeError("list", repr(params)) @@ -151,7 +133,8 @@ def __init__(self, name): def is_long(x): try: - long(x) + # Python3 int is long, keep the name for interface compatibility + int(x) return True except ValueError: return False diff --git a/pyproject.toml b/pyproject.toml index 1881ebbd350..55467081438 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -299,7 +299,7 @@ inputs = [ "ocaml/vhd-tool", ] disable = [ - # Reduce noise from python2 scripts(import yum, xenfsimage, xcp, urlgrabber) + # Reduce noise from python scripts(import yum, xenfsimage, xcp, urlgrabber) "import-error", ] platform = "linux" diff --git a/python3/libexec/mail-alarm b/python3/libexec/mail-alarm index 0b41dd5e0e9..aab40edc46a 100755 --- a/python3/libexec/mail-alarm +++ b/python3/libexec/mail-alarm @@ -121,11 +121,6 @@ def load_mail_language(mail_language): mail_language_pack_path, mail_language + ".json" ) - # this conditional branch won't be executed, it's solely for the purpose of ensuring pass in python2 ut. - if sys.version_info.major == 2: - with open(mail_language_file, "r") as fileh: - return json.load(fileh, encoding="utf-8") - with open(mail_language_file, encoding="utf-8") as fileh: return json.load(fileh) diff --git a/scripts/Makefile b/scripts/Makefile index 15ad2c62d51..4c04da3943c 100644 --- a/scripts/Makefile +++ b/scripts/Makefile @@ -1,8 +1,5 @@ include ../config.mk -SITE_DIR=$(shell python -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())") -SITE3_DIR=$(shell python3 -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())") - IPROG=./install.sh 755 IDATA=./install.sh 644 @@ -22,8 +19,6 @@ install: mkdir -p $(DESTDIR)/usr/lib/systemd/system mkdir -p $(DESTDIR)/usr/lib/yum-plugins mkdir -p $(DESTDIR)$(OPTDIR)/packages/post-install-scripts - mkdir -p $(DESTDIR)$(SITE_DIR) - mkdir -p $(DESTDIR)$(SITE3_DIR) mkdir -p $(DESTDIR)/etc/systemd/system/stunnel@xapi.service.d/ $(IPROG) base-path $(DESTDIR)/etc/xapi.d $(IPROG) sm_diagnostics $(DESTDIR)$(LIBEXECDIR) From c44a02626e79dd90e33cf4ef55a2f66812aa86a8 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Wed, 14 Aug 2024 15:07:16 +0200 Subject: [PATCH 220/222] Add missing typing stubs for xcp.cmd and xcp.compat Signed-off-by: Bernhard Kaindl --- python3/stubs/xcp/cmd.pyi | 13 +++++++++++++ python3/stubs/xcp/compat.pyi | 9 +++++++++ 2 files changed, 22 insertions(+) create mode 100644 python3/stubs/xcp/cmd.pyi create mode 100644 python3/stubs/xcp/compat.pyi diff --git a/python3/stubs/xcp/cmd.pyi b/python3/stubs/xcp/cmd.pyi new file mode 100644 index 00000000000..950a6d28200 --- /dev/null +++ b/python3/stubs/xcp/cmd.pyi @@ -0,0 +1,13 @@ +from basedtyping import Untyped +from typing import Any +from xcp import logger as logger +from xcp.compat import open_defaults_for_utf8_text as open_defaults_for_utf8_text + +def runCmd(command: bytes | str | list[str], with_stdout: bool = False, with_stderr: bool = False, inputtext: bytes | str | None = None, **kwargs: Any) 
-> Any: ... + +class OutputCache: + cache: Untyped + def __init__(self): ... + def fileContents(self, fn, *args, **kwargs) -> Untyped: ... + def runCmd(self, command, with_stdout: bool = False, with_stderr: bool = False, inputtext: Untyped | None = None, **kwargs) -> Untyped: ... + def clearCache(self): ... diff --git a/python3/stubs/xcp/compat.pyi b/python3/stubs/xcp/compat.pyi new file mode 100644 index 00000000000..bd2c7cfa4a6 --- /dev/null +++ b/python3/stubs/xcp/compat.pyi @@ -0,0 +1,9 @@ +from basedtyping import Untyped +from typing import Any, IO + +def open_textfile(filename: str, mode: str, encoding: str = 'utf-8', **kwargs: Any) -> IO[str]: ... + +open_utf8: Untyped + +def open_with_codec_handling(filename: str, mode: str = 'r', encoding: str = 'utf-8', **kwargs: Any) -> IO[Any]: ... +def open_defaults_for_utf8_text(args: tuple[Any, ...] | None, kwargs: Any) -> tuple[str, Any]: ... From 5e40c09a3fef7ffb3b7239b0db5ab4e2fdde2457 Mon Sep 17 00:00:00 2001 From: Bernhard Kaindl Date: Wed, 14 Aug 2024 15:30:10 +0200 Subject: [PATCH 221/222] rrdd: Test the changed rrdd.API.update() method Signed-off-by: Bernhard Kaindl --- .../rrdd/test_api_wait_until_next_reading.py | 115 ++++++++++++++++++ 1 file changed, 115 insertions(+) diff --git a/ocaml/xcp-rrdd/scripts/rrdd/test_api_wait_until_next_reading.py b/ocaml/xcp-rrdd/scripts/rrdd/test_api_wait_until_next_reading.py index 5ca9b897fad..a038513e230 100644 --- a/ocaml/xcp-rrdd/scripts/rrdd/test_api_wait_until_next_reading.py +++ b/ocaml/xcp-rrdd/scripts/rrdd/test_api_wait_until_next_reading.py @@ -1,7 +1,11 @@ # Test: pytest -v -s ocaml/xcp-rrdd/scripts/rrdd/test_api_wait_until_next_reading.py """Parametrized test exercising all conditions in rrdd.API.wait_until_next_reading()""" +import json import socket +from io import BytesIO +from struct import pack, unpack from warnings import catch_warnings as import_without_warnings, simplefilter +from zlib import crc32 # Dependencies: # pip install pytest-mock @@ -77,3 +81,114 @@ def test_api_getter_functions(api): api.path = "path" assert api.get_header() == "header" assert api.get_path() == "path" + + +class MockDataSource: + """Mock class for testing the rrdd.API.update() method""" + def __init__(self, name, metadata, packed_data): + self.name = name + self.metadata = metadata + self.packed_data = packed_data + + def pack_data(self): + """Simple substitute for the pack_data() method of the rrdd.DataSource class""" + return self.packed_data + + +@pytest.mark.parametrize( + "data_sources, expected_metadata", + [ + pytest.param( + [ + MockDataSource("ds1", {"key1": "value1"}, b"\x00\x01"), + MockDataSource("ds2", {"key2": "value2"}, b"\x00\x02"), + ], + {"key1": "value1", "key2": "value2"}, + ), + pytest.param( + [MockDataSource("ds1", {"key1": "value1"}, b"\x00\x01")], + {"key1": "value1"}, + ), + pytest.param( + [], + {}, + ), + ], +) +def test_update( + mocker, + data_sources, + expected_metadata, +): + """Test the update() method of the rrdd.API class""" + # Arrange + def checksum(*args): + """Calculate the CRC32 checksum of the given arguments""" + return crc32(*args) & 0xFFFFFFFF + + class MockAPI(rrdd.API): + """Mock API class to test the update() method""" + def __init__(self): # pylint: disable=super-init-not-called + self.dest = BytesIO() + self.datasources = data_sources + + def pack_data(self, ds: MockDataSource): + return ds.pack_data() + + testee = MockAPI() + testee.deregister = mocker.Mock() + fixed_time = 1234567890 + mocker.patch("time.time", return_value=fixed_time) + + # Act + 
+    testee.update()
+
+    # Assert
+
+    # Read and unpack the header
+    testee.dest.seek(0)
+    # The header is 20 bytes long and has the following format:
+    # 0-11: "DATASOURCES" (12 bytes)
+    # 12-15: data_checksum (4 bytes)
+    # 16-19: metadata_checksum (4 bytes)
+    # 20-23: num_datasources (4 bytes)
+    # 24-31: timestamp (8 bytes)
+    header_len = len("DATASOURCES") + 4 + 4 + 4 + 8
+    header = testee.dest.read(header_len)
+    (
+        unpacked_data_checksum,
+        unpacked_metadata_checksum,
+        unpacked_num_datasources,
+        unpacked_timestamp,
+    ) = unpack(">LLLQ", header[11:])
+
+    # Assert the expected unpacked header value
+    assert header.startswith(b"DATASOURCES")
+    assert unpacked_num_datasources == len(data_sources)
+    assert unpacked_timestamp == fixed_time
+
+    #
+    # Assert datasources and the expected data checksum
+    #
+
+    # Initialize the expected checksum with the fixed time
+    expected_checksum = checksum(pack(">Q", fixed_time))
+    # Loop over the datasources and assert the packed data
+    testee.dest.seek(header_len)
+    # sourcery skip: no-loop-in-tests
+    for ds in data_sources:
+        packed_data = testee.dest.read(len(ds.pack_data()))
+        assert packed_data == ds.pack_data()
+        # Update the checksum with the packed data
+        expected_checksum = checksum(packed_data, expected_checksum)
+
+    assert unpacked_data_checksum == expected_checksum
+
+    #
+    # Assert metadata and the expected metadata checksum
+    #
+    metadata_length = unpack(">L", testee.dest.read(4))[0]
+    metadata_json = testee.dest.read(metadata_length)
+
+    assert json.loads(metadata_json) == {"datasources": expected_metadata}
+    assert unpacked_metadata_checksum == checksum(metadata_json)

From bdf5268523f996d3fee5aa2930b208adc653049b Mon Sep 17 00:00:00 2001
From: Lin Liu
Date: Tue, 20 Aug 2024 02:29:26 +0000
Subject: [PATCH 222/222] CP-49148: More python2 code cleanup

- Update the following embedded shebangs to python3
  * generate-iscsi-iqn
  * xe-backup-metadata
  * xe-restore-metadata

- Remove interop-test.sh as it is not used

Signed-off-by: Lin Liu
---
 ocaml/message-switch/core_test/interop-test.sh | 9 ---------
 scripts/generate-iscsi-iqn                     | 2 +-
 scripts/xe-backup-metadata                     | 2 +-
 scripts/xe-restore-metadata                    | 2 +-
 4 files changed, 3 insertions(+), 12 deletions(-)
 delete mode 100755 ocaml/message-switch/core_test/interop-test.sh

diff --git a/ocaml/message-switch/core_test/interop-test.sh b/ocaml/message-switch/core_test/interop-test.sh
deleted file mode 100755
index 912d47f2349..00000000000
--- a/ocaml/message-switch/core_test/interop-test.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/sh
-set -ex
-
-LINKPATH="${TMPDIR:-/tmp}/link_test"
-
-rm -rf ${LINKPATH} && mkdir -p ${LINKPATH}
-
-lwt/link_test_main.exe
-PYTHONPATH=core python message_switch_test.py
diff --git a/scripts/generate-iscsi-iqn b/scripts/generate-iscsi-iqn
index 882a4c7f6fd..9550435716d 100755
--- a/scripts/generate-iscsi-iqn
+++ b/scripts/generate-iscsi-iqn
@@ -36,7 +36,7 @@ geniqn() {
     domain=${defaultdomain}
   fi
 
-  revdomain=$(python -c "${REVERSE_PY}" $domain)
+  revdomain=$(python3 -c "${REVERSE_PY}" $domain)
   uuid=$(uuidgen | cut -d- -f1)
   date=$(date +"%Y-%m")
 
diff --git a/scripts/xe-backup-metadata b/scripts/xe-backup-metadata
index 43c4617ec3b..19f0cf0e4a9 100755
--- a/scripts/xe-backup-metadata
+++ b/scripts/xe-backup-metadata
@@ -51,7 +51,7 @@ function usage {
 function uuid5 {
   # could use a modern uuidgen but it's not on XS 8
   # should work with Python 2 and 3
-  python -c "import uuid; print (uuid.uuid5(uuid.UUID('$1'), '$2'))"
+  python3 -c "import uuid; print (uuid.uuid5(uuid.UUID('$1'), '$2'))"
 }
 
 function test_sr {
diff --git a/scripts/xe-restore-metadata b/scripts/xe-restore-metadata
index 5968dc102e8..ca7029d7c07 100755
--- a/scripts/xe-restore-metadata
+++ b/scripts/xe-restore-metadata
@@ -65,7 +65,7 @@ function test_sr {
 NS="e93e0639-2bdb-4a59-8b46-352b3f408c19"
 function uuid5 {
   # could use a modern uuidgen but it's not on XS 8
-  python -c "import uuid; print (uuid.uuid5(uuid.UUID('$1'), '$2'))"
+  python3 -c "import uuid; print (uuid.uuid5(uuid.UUID('$1'), '$2'))"
 }
 
 dry_run=0