author      Aravinda VK <avishwan@redhat.com>      2016-11-17 15:42:24 +0530
committer   Aravinda VK <avishwan@redhat.com>      2016-12-02 00:20:39 -0800
commit      da71bdcf82a8dc71a1170ad3f190702fa49d2838 (patch)
tree        e5e1c43307333475dd0bf5d255c44948e5815d3b
parent      47e69455d3aede77960fd81a7cf3d6b4a869dbfa (diff)
eventsapi: JSON output and different error codes
JSON output is added to all commands; use `--json` to get JSON output.
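
For illustration, a minimal sketch of consuming this output from another script. The `{"output": ..., "error": ""}` envelope is the one introduced by this patch; the command name `gluster-eventsapi` and the fields read from the status output are assumptions for the example.

    # Hypothetical consumer of the new --json output.
    # Assumes the CLI is installed as "gluster-eventsapi"; adjust if different.
    import json
    import subprocess

    proc = subprocess.Popen(["gluster-eventsapi", "status", "--json"],
                            stdout=subprocess.PIPE)
    raw, _ = proc.communicate()

    # JSON is printed on both success and failure paths in this patch
    resp = json.loads(raw.decode("utf-8"))
    if proc.returncode != 0 or resp["error"]:
        print("command failed (rc=%d): %s" % (proc.returncode, resp["error"]))
    else:
        # For "status", "output" carries the webhook list and per-node rows
        print("webhooks: %s" % resp["output"].get("webhooks", []))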
The following error codes are added to differentiate between errors; any other
unknown error will have return code 1 (see the sketch after the list):
ERROR_SAME_CONFIG = 2
ERROR_ALL_NODES_STATUS_NOT_OK = 3
ERROR_PARTIAL_SUCCESS = 4
ERROR_WEBHOOK_ALREADY_EXISTS = 5
ERROR_WEBHOOK_NOT_EXISTS = 6
ERROR_INVALID_CONFIG = 7
ERROR_WEBHOOK_SYNC_FAILED = 8
ERROR_CONFIG_SYNC_FAILED = 9
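
A hedged sketch of acting on these exit codes from a wrapper script. The code-to-name mapping is taken from the list above; the command and subcommand names (`gluster-eventsapi`, `webhook-add`) are assumptions for the example.

    # Hypothetical wrapper that maps the new exit codes to readable names.
    import subprocess

    ERROR_CODES = {
        2: "ERROR_SAME_CONFIG",
        3: "ERROR_ALL_NODES_STATUS_NOT_OK",
        4: "ERROR_PARTIAL_SUCCESS",
        5: "ERROR_WEBHOOK_ALREADY_EXISTS",
        6: "ERROR_WEBHOOK_NOT_EXISTS",
        7: "ERROR_INVALID_CONFIG",
        8: "ERROR_WEBHOOK_SYNC_FAILED",
        9: "ERROR_CONFIG_SYNC_FAILED",
    }

    # "webhook-add" is assumed to be the subcommand name for WebhookAddCmd
    rc = subprocess.call(["gluster-eventsapi", "webhook-add",
                          "http://example.com/listener"])
    if rc == 0:
        print("webhook added and synced to all peers")
    else:
        # Any other unknown failure still exits with 1
        print("failed: %s" % ERROR_CODES.get(rc, "UNKNOWN_ERROR"))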
The `node-` commands are also hidden in the help message.
BUG: 1357753
Change-Id: I962b5435c8a448b4573059da0eae42f3f93cc97e
Signed-off-by: Aravinda VK <avishwan@redhat.com>
Reviewed-on: http://review.gluster.org/15867
Smoke: Gluster Build System <jenkins@build.gluster.org>
Reviewed-by: Prashanth Pai <ppai@redhat.com>
NetBSD-regression: NetBSD Build System <jenkins@build.gluster.org>
CentOS-regression: Gluster Build System <jenkins@build.gluster.org>
-rw-r--r--   events/src/eventsapiconf.py.in   |  10
-rw-r--r--   events/src/peer_eventsapi.py     | 309
-rw-r--r--   extras/cliutils/__init__.py      |   6
-rw-r--r--   extras/cliutils/cliutils.py      |  21
4 files changed, 259 insertions(+), 87 deletions(-)
diff --git a/events/src/eventsapiconf.py.in b/events/src/eventsapiconf.py.in
index e0028761bcc..85fefebbbbf 100644
--- a/events/src/eventsapiconf.py.in
+++ b/events/src/eventsapiconf.py.in
@@ -26,3 +26,13 @@ UUID_FILE = "@GLUSTERD_WORKDIR@/glusterd.info"
 PID_FILE = "@localstatedir@/run/glustereventsd.pid"
 AUTO_BOOL_ATTRIBUTES = ["force", "push-pem", "no-verify"]
 AUTO_INT_ATTRIBUTES = ["ssh-port"]
+
+# Errors
+ERROR_SAME_CONFIG = 2
+ERROR_ALL_NODES_STATUS_NOT_OK = 3
+ERROR_PARTIAL_SUCCESS = 4
+ERROR_WEBHOOK_ALREADY_EXISTS = 5
+ERROR_WEBHOOK_NOT_EXISTS = 6
+ERROR_INVALID_CONFIG = 7
+ERROR_WEBHOOK_SYNC_FAILED = 8
+ERROR_CONFIG_SYNC_FAILED = 9
diff --git a/events/src/peer_eventsapi.py b/events/src/peer_eventsapi.py
index 7f80f791c4d..6cba277bf70 100644
--- a/events/src/peer_eventsapi.py
+++ b/events/src/peer_eventsapi.py
@@ -17,13 +17,15 @@ from errno import EEXIST
 import fcntl
 from errno import EACCES, EAGAIN
 import signal
+import sys
 
 import requests
 from prettytable import PrettyTable
 
-from gluster.cliutils import (Cmd, execute, node_output_ok, node_output_notok,
+from gluster.cliutils import (Cmd, node_output_ok, node_output_notok,
                               sync_file_to_peers, GlusterCmdException,
-                              output_error, execute_in_peers, runcli)
+                              output_error, execute_in_peers, runcli,
+                              set_common_args_func)
 from events.utils import LockedOpen
 
 from events.eventsapiconf import (WEBHOOKS_FILE_TO_SYNC,
@@ -36,7 +38,26 @@ from events.eventsapiconf import (WEBHOOKS_FILE_TO_SYNC,
                                   BOOL_CONFIGS,
                                   INT_CONFIGS,
                                   PID_FILE,
-                                  RESTART_CONFIGS)
+                                  RESTART_CONFIGS,
+                                  ERROR_INVALID_CONFIG,
+                                  ERROR_WEBHOOK_NOT_EXISTS,
+                                  ERROR_CONFIG_SYNC_FAILED,
+                                  ERROR_WEBHOOK_ALREADY_EXISTS,
+                                  ERROR_PARTIAL_SUCCESS,
+                                  ERROR_ALL_NODES_STATUS_NOT_OK,
+                                  ERROR_SAME_CONFIG,
+                                  ERROR_WEBHOOK_SYNC_FAILED)
+
+
+def handle_output_error(err, errcode=1, json_output=False):
+    if json_output:
+        print (json.dumps({
+            "output": "",
+            "error": err
+        }))
+        sys.exit(errcode)
+    else:
+        output_error(err, errcode)
 
 
 def file_content_overwrite(fname, data):
@@ -46,15 +67,27 @@ def file_content_overwrite(fname, data):
     os.rename(fname + ".tmp", fname)
 
 
-def create_custom_config_file_if_not_exists():
-    mkdirp(os.path.dirname(CUSTOM_CONFIG_FILE))
+def create_custom_config_file_if_not_exists(args):
+    try:
+        config_dir = os.path.dirname(CUSTOM_CONFIG_FILE)
+        mkdirp(config_dir)
+    except OSError as e:
+        handle_output_error("Failed to create dir %s: %s" % (config_dir, e),
+                            json_output=args.json)
+
     if not os.path.exists(CUSTOM_CONFIG_FILE):
         with open(CUSTOM_CONFIG_FILE, "w") as f:
             f.write("{}")
 
 
-def create_webhooks_file_if_not_exists():
-    mkdirp(os.path.dirname(WEBHOOKS_FILE))
+def create_webhooks_file_if_not_exists(args):
+    try:
+        webhooks_dir = os.path.dirname(WEBHOOKS_FILE)
+        mkdirp(webhooks_dir)
+    except OSError as e:
+        handle_output_error("Failed to create dir %s: %s" % (webhooks_dir, e),
+                            json_output=args.json)
+
     if not os.path.exists(WEBHOOKS_FILE):
         with open(WEBHOOKS_FILE, "w") as f:
             f.write("{}")
@@ -75,11 +108,9 @@ def mkdirp(path, exit_on_err=False, logger=None):
     """
     try:
         os.makedirs(path)
-    except (OSError, IOError) as e:
-        if e.errno == EEXIST and os.path.isdir(path):
-            pass
-        else:
-            output_error("Fail to create dir %s: %s" % (path, e))
+    except OSError as e:
+        if e.errno != EEXIST or not os.path.isdir(path):
+            raise
 
 
 def is_active():
@@ -111,33 +142,77 @@ def reload_service():
     return (0, "", "")
 
 
-def sync_to_peers():
+def rows_to_json(json_out, column_name, rows):
+    num_ok_rows = 0
+    for row in rows:
+        num_ok_rows += 1 if row.ok else 0
+        json_out.append({
+            "node": row.hostname,
+            "node_status": "UP" if row.node_up else "DOWN",
+            column_name: "OK" if row.ok else "NOT OK",
+            "error": row.error
+        })
+    return num_ok_rows
+
+
+def rows_to_table(table, rows):
+    num_ok_rows = 0
+    for row in rows:
+        num_ok_rows += 1 if row.ok else 0
+        table.add_row([row.hostname,
                       "UP" if row.node_up else "DOWN",
+                       "OK" if row.ok else "NOT OK: {1}".format(
+                           row.error)])
+    return num_ok_rows
+
+
+def sync_to_peers(args):
     if os.path.exists(WEBHOOKS_FILE):
         try:
             sync_file_to_peers(WEBHOOKS_FILE_TO_SYNC)
         except GlusterCmdException as e:
-            output_error("Failed to sync Webhooks file: [Error: {0}]"
-                         "{1}".format(e[0], e[2]))
+            handle_output_error("Failed to sync Webhooks file: [Error: {0}]"
+                                "{1}".format(e[0], e[2]),
+                                errcode=ERROR_WEBHOOK_SYNC_FAILED,
+                                json_output=args.json)
 
     if os.path.exists(CUSTOM_CONFIG_FILE):
         try:
             sync_file_to_peers(CUSTOM_CONFIG_FILE_TO_SYNC)
         except GlusterCmdException as e:
-            output_error("Failed to sync Config file: [Error: {0}]"
-                         "{1}".format(e[0], e[2]))
+            handle_output_error("Failed to sync Config file: [Error: {0}]"
                                "{1}".format(e[0], e[2]),
+                                errcode=ERROR_CONFIG_SYNC_FAILED,
+                                json_output=args.json)
 
     out = execute_in_peers("node-reload")
-    table = PrettyTable(["NODE", "NODE STATUS", "SYNC STATUS"])
-    table.align["NODE STATUS"] = "r"
-    table.align["SYNC STATUS"] = "r"
+    if not args.json:
+        table = PrettyTable(["NODE", "NODE STATUS", "SYNC STATUS"])
+        table.align["NODE STATUS"] = "r"
+        table.align["SYNC STATUS"] = "r"
 
-    for p in out:
-        table.add_row([p.hostname,
-                       "UP" if p.node_up else "DOWN",
-                       "OK" if p.ok else "NOT OK: {0}".format(
-                           p.error)])
+    json_out = []
+    if args.json:
+        num_ok_rows = rows_to_json(json_out, "sync_status", out)
+    else:
+        num_ok_rows = rows_to_table(table, out)
+
+    ret = 0
+    if num_ok_rows == 0:
+        ret = ERROR_ALL_NODES_STATUS_NOT_OK
+    elif num_ok_rows != len(out):
+        ret = ERROR_PARTIAL_SUCCESS
+
+    if args.json:
+        print (json.dumps({
+            "output": json_out,
+            "error": ""
+        }))
+    else:
+        print (table)
 
-    print (table)
+    # If sync status is not ok for any node set error code as partial success
+    sys.exit(ret)
 
 
 def node_output_handle(resp):
@@ -148,29 +223,24 @@ def node_output_handle(resp):
         node_output_notok(err)
 
 
-def action_handle(action):
+def action_handle(action, json_output=False):
     out = execute_in_peers("node-" + action)
     column_name = action.upper()
     if action == "status":
         column_name = EVENTSD.upper()
 
-    table = PrettyTable(["NODE", "NODE STATUS", column_name + " STATUS"])
-    table.align["NODE STATUS"] = "r"
-    table.align[column_name + " STATUS"] = "r"
-
-    for p in out:
-        status_col_val = "OK" if p.ok else "NOT OK: {0}".format(
-            p.error)
-        if action == "status":
-            status_col_val = "DOWN"
-            if p.ok:
-                status_col_val = p.output
+    if not json_output:
+        table = PrettyTable(["NODE", "NODE STATUS", column_name + " STATUS"])
+        table.align["NODE STATUS"] = "r"
+        table.align[column_name + " STATUS"] = "r"
 
-        table.add_row([p.hostname,
-                       "UP" if p.node_up else "DOWN",
-                       status_col_val])
+    json_out = []
+    if json_output:
+        rows_to_json(json_out, column_name.lower() + "_status", out)
+    else:
+        rows_to_table(table, out)
 
-    print (table)
+    return json_out if json_output else table
 
 
 class NodeReload(Cmd):
@@ -184,7 +254,14 @@ class ReloadCmd(Cmd):
     name = "reload"
 
     def run(self, args):
-        action_handle("reload")
+        out = action_handle("reload", args.json)
+        if args.json:
+            print (json.dumps({
+                "output": out,
+                "error": ""
+            }))
+        else:
+            print (out)
 
 
 class NodeStatus(Cmd):
@@ -202,12 +279,25 @@ class StatusCmd(Cmd):
         if os.path.exists(WEBHOOKS_FILE):
             webhooks = json.load(open(WEBHOOKS_FILE))
 
-        print ("Webhooks: " + ("" if webhooks else "None"))
-        for w in webhooks:
-            print (w)
-
-        print ()
-        action_handle("status")
+        json_out = {"webhooks": [], "data": []}
+        if args.json:
+            json_out["webhooks"] = webhooks.keys()
+        else:
+            print ("Webhooks: " + ("" if webhooks else "None"))
+            for w in webhooks:
+                print (w)
+
+            print ()
+
+        out = action_handle("status", args.json)
+        if args.json:
+            json_out["data"] = out
+            print (json.dumps({
+                "output": json_out,
+                "error": ""
+            }))
+        else:
+            print (out)
 
 
 class WebhookAddCmd(Cmd):
@@ -219,17 +309,19 @@ class WebhookAddCmd(Cmd):
                             default="")
 
     def run(self, args):
-        create_webhooks_file_if_not_exists()
+        create_webhooks_file_if_not_exists(args)
 
         with LockedOpen(WEBHOOKS_FILE, 'r+'):
             data = json.load(open(WEBHOOKS_FILE))
             if data.get(args.url, None) is not None:
-                output_error("Webhook already exists")
+                handle_output_error("Webhook already exists",
+                                    errcode=ERROR_WEBHOOK_ALREADY_EXISTS,
+                                    json_output=args.json)
 
             data[args.url] = args.bearer_token
             file_content_overwrite(WEBHOOKS_FILE, data)
 
-        sync_to_peers()
+        sync_to_peers(args)
 
 
 class WebhookModCmd(Cmd):
@@ -241,17 +333,19 @@ class WebhookModCmd(Cmd):
                             default="")
 
     def run(self, args):
-        create_webhooks_file_if_not_exists()
+        create_webhooks_file_if_not_exists(args)
 
         with LockedOpen(WEBHOOKS_FILE, 'r+'):
            data = json.load(open(WEBHOOKS_FILE))
            if data.get(args.url, None) is None:
-                output_error("Webhook does not exists")
+                handle_output_error("Webhook does not exists",
+                                    errcode=ERROR_WEBHOOK_NOT_EXISTS,
+                                    json_output=args.json)
 
            data[args.url] = args.bearer_token
            file_content_overwrite(WEBHOOKS_FILE, data)
 
-        sync_to_peers()
+        sync_to_peers(args)
 
 
 class WebhookDelCmd(Cmd):
@@ -261,17 +355,19 @@ class WebhookDelCmd(Cmd):
         parser.add_argument("url", help="URL of Webhook")
 
     def run(self, args):
-        create_webhooks_file_if_not_exists()
+        create_webhooks_file_if_not_exists(args)
 
        with LockedOpen(WEBHOOKS_FILE, 'r+'):
            data = json.load(open(WEBHOOKS_FILE))
            if data.get(args.url, None) is None:
-                output_error("Webhook does not exists")
+                handle_output_error("Webhook does not exists",
+                                    errcode=ERROR_WEBHOOK_NOT_EXISTS,
+                                    json_output=args.json)
 
            del data[args.url]
            file_content_overwrite(WEBHOOKS_FILE, data)
 
-        sync_to_peers()
+        sync_to_peers(args)
 
 
 class NodeWebhookTestCmd(Cmd):
@@ -314,17 +410,33 @@ class WebhookTestCmd(Cmd):
 
         out = execute_in_peers("node-webhook-test", [url, bearer_token])
 
-        table = PrettyTable(["NODE", "NODE STATUS", "WEBHOOK STATUS"])
-        table.align["NODE STATUS"] = "r"
-        table.align["WEBHOOK STATUS"] = "r"
+        if not args.json:
+            table = PrettyTable(["NODE", "NODE STATUS", "WEBHOOK STATUS"])
+            table.align["NODE STATUS"] = "r"
+            table.align["WEBHOOK STATUS"] = "r"
 
-        for p in out:
-            table.add_row([p.hostname,
-                           "UP" if p.node_up else "DOWN",
-                           "OK" if p.ok else "NOT OK: {0}".format(
-                               p.error)])
+        num_ok_rows = 0
+        json_out = []
+        if args.json:
+            num_ok_rows = rows_to_json(json_out, "webhook_status", out)
+        else:
+            num_ok_rows = rows_to_table(table, out)
+
+        ret = 0
+        if num_ok_rows == 0:
+            ret = ERROR_ALL_NODES_STATUS_NOT_OK
+        elif num_ok_rows != len(out):
+            ret = ERROR_PARTIAL_SUCCESS
+
+        if args.json:
+            print (json.dumps({
+                "output": json_out,
+                "error": ""
+            }))
+        else:
+            print (table)
 
-        print (table)
+        sys.exit(ret)
 
 
 class ConfigGetCmd(Cmd):
@@ -339,16 +451,30 @@ class ConfigGetCmd(Cmd):
             data.update(json.load(open(CUSTOM_CONFIG_FILE)))
 
         if args.name is not None and args.name not in CONFIG_KEYS:
-            output_error("Invalid Config item")
+            handle_output_error("Invalid Config item",
+                                errcode=ERROR_INVALID_CONFIG,
+                                json_output=args.json)
+
+        if args.json:
+            json_out = {}
+            if args.name is None:
+                json_out = data
+            else:
+                json_out[args.name] = data[args.name]
 
-        table = PrettyTable(["NAME", "VALUE"])
-        if args.name is None:
-            for k, v in data.items():
-                table.add_row([k, v])
+            print (json.dumps({
+                "output": json_out,
+                "error": ""
+            }))
         else:
-            table.add_row([args.name, data[args.name]])
+            table = PrettyTable(["NAME", "VALUE"])
+            if args.name is None:
+                for k, v in data.items():
+                    table.add_row([k, v])
+            else:
+                table.add_row([args.name, data[args.name]])
 
-        print (table)
+            print (table)
 
 
 def read_file_content_json(fname):
@@ -370,9 +496,11 @@ class ConfigSetCmd(Cmd):
 
     def run(self, args):
         if args.name not in CONFIG_KEYS:
-            output_error("Invalid Config item")
+            handle_output_error("Invalid Config item",
+                                errcode=ERROR_INVALID_CONFIG,
+                                json_output=args.json)
 
-        create_custom_config_file_if_not_exists()
+        create_custom_config_file_if_not_exists(args)
 
         with LockedOpen(CUSTOM_CONFIG_FILE, 'r+'):
             data = json.load(open(DEFAULT_CONFIG_FILE))
@@ -382,7 +510,9 @@ class ConfigSetCmd(Cmd):
 
             # Do Nothing if same as previous value
            if data[args.name] == args.value:
-                return
+                handle_output_error("Config value not changed. Same config",
+                                    errcode=ERROR_SAME_CONFIG,
+                                    json_output=args.json)
 
             # TODO: Validate Value
            new_data = read_file_content_json(CUSTOM_CONFIG_FILE)
@@ -402,10 +532,11 @@ class ConfigSetCmd(Cmd):
 
             if args.name in RESTART_CONFIGS:
                 restart = True
 
-            sync_to_peers()
 
             if restart:
                 print ("\nRestart glustereventsd in all nodes")
 
+        sync_to_peers(args)
+
 
 class ConfigResetCmd(Cmd):
@@ -414,7 +545,7 @@ class ConfigResetCmd(Cmd):
         parser.add_argument("name", help="Config Name or all")
 
     def run(self, args):
-        create_custom_config_file_if_not_exists()
+        create_custom_config_file_if_not_exists(args)
 
         with LockedOpen(CUSTOM_CONFIG_FILE, 'r+'):
             changed_keys = []
@@ -422,8 +553,14 @@ class ConfigResetCmd(Cmd):
             if os.path.exists(CUSTOM_CONFIG_FILE):
                 data = read_file_content_json(CUSTOM_CONFIG_FILE)
 
-            if not data:
-                return
+            # If No data available in custom config or, the specific config
+            # item is not available in custom config
+            if not data or \
+               (args.name != "all" and data.get(args.name, None) is None):
+                handle_output_error("Config value not reset. Already "
+                                    "set to default value",
+                                    errcode=ERROR_SAME_CONFIG,
+                                    json_output=args.json)
 
             if args.name.lower() == "all":
                 for k, v in data.items():
@@ -443,17 +580,23 @@ class ConfigResetCmd(Cmd):
                     restart = True
                     break
 
-            sync_to_peers()
             if restart:
                 print ("\nRestart glustereventsd in all nodes")
 
+        sync_to_peers(args)
+
 
 class SyncCmd(Cmd):
     name = "sync"
 
     def run(self, args):
-        sync_to_peers()
+        sync_to_peers(args)
+
+
+def common_args(parser):
+    parser.add_argument("--json", help="JSON Output", action="store_true")
 
 
 if __name__ == "__main__":
+    set_common_args_func(common_args)
     runcli()
diff --git a/extras/cliutils/__init__.py b/extras/cliutils/__init__.py
index 4bb8395bb46..9c930982be0 100644
--- a/extras/cliutils/__init__.py
+++ b/extras/cliutils/__init__.py
@@ -11,7 +11,8 @@ from cliutils import (runcli,
                       yesno,
                       get_node_uuid,
                       Cmd,
-                      GlusterCmdException)
+                      GlusterCmdException,
+                      set_common_args_func)
 
 
 # This will be useful when `from cliutils import *`
@@ -26,4 +27,5 @@ __all__ = ["runcli",
            "yesno",
            "get_node_uuid",
            "Cmd",
-           "GlusterCmdException"]
+           "GlusterCmdException",
+           "set_common_args_func"]
diff --git a/extras/cliutils/cliutils.py b/extras/cliutils/cliutils.py
index 4e035d7ff5c..d805ac6d100 100644
--- a/extras/cliutils/cliutils.py
+++ b/extras/cliutils/cliutils.py
@@ -16,6 +16,7 @@ subparsers = parser.add_subparsers(dest="mode")
 subcommands = {}
 cache_data = {}
 ParseError = etree.ParseError if hasattr(etree, 'ParseError') else SyntaxError
+_common_args_func = lambda p: True
 
 
 class GlusterCmdException(Exception):
@@ -50,9 +51,9 @@ def oknotok(flag):
     return "OK" if flag else "NOT OK"
 
 
-def output_error(message):
+def output_error(message, errcode=1):
     print (message, file=sys.stderr)
-    sys.exit(1)
+    sys.exit(errcode)
 
 
 def node_output_ok(message=""):
@@ -186,6 +187,7 @@ def runcli():
     # a subcommand as specified in the Class name. Call the args
     # method by passing subcommand parser, Derived class can add
     # arguments to the subcommand parser.
+    metavar_data = []
     for c in Cmd.__subclasses__():
         cls = c()
         if getattr(cls, "name", "") == "":
@@ -193,14 +195,24 @@ def runcli():
                                       "to \"{0}\"".format(
                                           cls.__class__.__name__))
 
+        # Do not show in help message if subcommand starts with node-
+        if not cls.name.startswith("node-"):
+            metavar_data.append(cls.name)
+
         p = subparsers.add_parser(cls.name)
         args_func = getattr(cls, "args", None)
         if args_func is not None:
             args_func(p)
 
+        # Apply common args if any
+        _common_args_func(p)
+
         # A dict to save subcommands, key is name of the subcommand
         subcommands[cls.name] = cls
 
+    # Hide node commands in Help message
+    subparsers.metavar = "{" + ",".join(metavar_data) + "}"
+
     # Get all parsed arguments
     args = parser.parse_args()
 
@@ -210,3 +222,8 @@ def runcli():
     # Run
     if cls is not None:
         cls.run(args)
+
+
+def set_common_args_func(func):
+    global _common_args_func
+    _common_args_func = func