From edfdbbcf6a6f94a9eb93d1997a29c4aa2406bcda Mon Sep 17 00:00:00 2001 From: lthievenaz-keeper Date: Fri, 24 Apr 2026 19:35:33 +0100 Subject: [PATCH 1/4] PAM Project Extend - Add support for stringified JSON (#1970) * Support passing stringified JSON as filename PAM Project Extend command requires physical file with sensitive credential data. This commit adds support for passing stringified JSON data in the filename directly, rather than a filepath. * Remove debug statement --- keepercommander/commands/pam_import/extend.py | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/keepercommander/commands/pam_import/extend.py b/keepercommander/commands/pam_import/extend.py index c8a0127c0..cd1756c5f 100644 --- a/keepercommander/commands/pam_import/extend.py +++ b/keepercommander/commands/pam_import/extend.py @@ -424,15 +424,19 @@ def execute(self, params, **kwargs): if not (configuration and isinstance(configuration, vault.TypedRecord) and configuration.version == 6): raise CommandError("pam project extend", f"""PAM configuration not found: "{config_name}" """) - if not (file_name != "" and os.path.isfile(file_name)): - raise CommandError("pam project extend", f"""PAM Import JSON file not found: "{file_name}" """) - data = {} - try: - with open(file_name, encoding="utf-8") as f: - data = json.load(f) - except Exception: - data = {} + if not (file_name != "" and os.path.isfile(file_name)): + try: + data = json.loads(file_name) + except ValueError as e: + raise CommandError("pam project extend", f"""PAM Import JSON file not found: "{file_name}" """) + + if not data: + try: + with open(file_name, encoding="utf-8") as f: + data = json.load(f) + except Exception as e: + raise CommandError("pam project extend", f"""Unable to read file "{file_name}": {e}""") pam_data = data.get("pam_data") if isinstance(data, dict) else {} pam_data = pam_data if isinstance(pam_data, dict) else {} From a5e0beff41280bfcd5b862aea7789d6ce73d47f2 Mon Sep 17 00:00:00 
2001 From: lthievenaz-keeper Date: Fri, 24 Apr 2026 19:37:08 +0100 Subject: [PATCH 2/4] Add support for importing stringified JSON (#1968) * Add native JSON validation for BaseFileImporter BaseFileImporter is a process of the import command which only supports a file. Added support for stringified JSON. * Added support for stringified JSON import The JSON import supports: - JSON filename - ZIP filename Added support for stringified JSON * Remove redundant path validation in ApplyMembershipCommand Removed path validation from ApplyMembershipCommand. This is required for the ApplyMembershipCommand to support stringified JSON data instead of a physical JSON file. This validation is redundant as it is done inside do_import. It is not performed in other similar commands in this file. --- keepercommander/importer/commands.py | 3 --- keepercommander/importer/importer.py | 19 ++++++++------- keepercommander/importer/json/json.py | 33 ++++++++++++++++----------- 3 files changed, 31 insertions(+), 24 deletions(-) diff --git a/keepercommander/importer/commands.py b/keepercommander/importer/commands.py index 8e9869cfa..312717d99 100644 --- a/keepercommander/importer/commands.py +++ b/keepercommander/importer/commands.py @@ -463,9 +463,6 @@ def get_parser(self): def execute(self, params, **kwargs): file_name = kwargs.get('name') or 'shared_folder_membership.json' - if not os.path.exists(file_name): - logging.warning('Shared folder membership file "%s" not found', file_name) - return shared_folders = [] # type: List[SharedFolder] teams = [] # type: List[Team] diff --git a/keepercommander/importer/importer.py b/keepercommander/importer/importer.py index 06da07518..3241c0371 100644 --- a/keepercommander/importer/importer.py +++ b/keepercommander/importer/importer.py @@ -400,15 +400,18 @@ def __init__(self): def execute(self, name, **kwargs): # type: (str, ...) 
-> Iterable[Union[Record, SharedFolder, File]] + try: + json.loads(name) + path = name + except ValueError as e: + path = os.path.expanduser(name) + if not os.path.isfile(path): + ext = self.extension() + if ext: + path = path + '.' + ext - path = os.path.expanduser(name) - if not os.path.isfile(path): - ext = self.extension() - if ext: - path = path + '.' + ext - - if not os.path.isfile(path): - raise CommandError('import', f'File \'{name}\' does not exist') + if not os.path.isfile(path): + raise CommandError('import', f'File \'{name}\' does not exist') yield from self.do_import(path, **kwargs) diff --git a/keepercommander/importer/json/json.py b/keepercommander/importer/json/json.py index b9cbfcf18..b4c7a2484 100644 --- a/keepercommander/importer/json/json.py +++ b/keepercommander/importer/json/json.py @@ -162,19 +162,26 @@ def prepare(self): class KeeperJsonImporter(BaseFileImporter, KeeperJsonMixin): def do_import(self, filename, **kwargs): users_only = kwargs.get('users_only') or False - if not os.path.isfile(filename): - zip_name = pathlib.Path(filename).with_suffix('.zip').name - if os.path.isfile(zip_name): - if zipfile.is_zipfile(zip_name): - filename = zip_name - file_path = pathlib.Path(filename) - zip_archive = file_path.suffix == '.zip' - if zip_archive: - with zipfile.ZipFile(filename, 'r') as zf: - export = json.loads(zf.read('export.json')) - else: - with open(filename, "r", encoding='utf-8') as jf: - export = json.load(jf) + try: + export = json.loads(filename) + zip_archive = False + logging.info("Extracted JSON from object") + except ValueError as e: + if not os.path.isfile(filename): + zip_name = pathlib.Path(filename).with_suffix('.zip').name + if os.path.isfile(zip_name): + if zipfile.is_zipfile(zip_name): + filename = zip_name + file_path = pathlib.Path(filename) + zip_archive = file_path.suffix == '.zip' + if zip_archive: + with zipfile.ZipFile(filename, 'r') as zf: + export = json.loads(zf.read('export.json')) + logging.info("Extracted JSON 
from archive") + else: + with open(filename, "r", encoding='utf-8') as jf: + export = json.load(jf) + logging.info("Extracted JSON from file") records = None folders = None From a760459b12c2949321b0d38ac0d0d9dd64ead418 Mon Sep 17 00:00:00 2001 From: lthievenaz-keeper Date: Mon, 13 Apr 2026 17:45:37 +0100 Subject: [PATCH 3/4] Improve node wipe-out - Added selected nodes in deletion alert - Added logging for all deletion steps - Added automator deletion - If user can list gateways, and there are gateways on nodes, list gateways that need to be moved and how - If node_delete fails, clarify that provisioning methods and gateways are not cleared --- keepercommander/commands/enterprise.py | 46 ++++++++++++++++++++++---- 1 file changed, 39 insertions(+), 7 deletions(-) diff --git a/keepercommander/commands/enterprise.py b/keepercommander/commands/enterprise.py index e25fa327b..7b681fea7 100644 --- a/keepercommander/commands/enterprise.py +++ b/keepercommander/commands/enterprise.py @@ -40,13 +40,14 @@ from .base import user_choice, suppress_exit, raise_parse_exception, dump_report_data, Command, field_to_title, \ report_output_parser from .enterprise_common import EnterpriseCommand +from .automator import AutomatorListCommand from .enterprise_push import EnterprisePushCommand, enterprise_push_parser from .transfer_account import EnterpriseTransferUserCommand, transfer_user_parser from .. 
import api, crypto, utils, constants from ..display import bcolors from ..error import CommandError, KeeperApiError from ..params import KeeperParams -from ..proto import record_pb2, APIRequest_pb2, enterprise_pb2 +from ..proto import record_pb2, APIRequest_pb2, enterprise_pb2, automator_pb2, pam_pb2 def register_commands(commands): @@ -1359,6 +1360,7 @@ def traverse_to_root(node_id, depth): 'node_id': node['node_id'] } request_batch.append(rq) + elif kwargs.get('wipe_out'): if len(matched_nodes) != 1: raise CommandError('enterprise-node', 'Cannot wipe-out more than one node') @@ -1366,19 +1368,23 @@ def traverse_to_root(node_id, depth): if not node.get('parent_id'): raise CommandError('enterprise-node', 'Cannot wipe out root node') + sub_nodes = [node['node_id']] + EnterpriseNodeCommand.get_subnodes(params, sub_nodes, 0) + nodes = set(sub_nodes) + verbose_nodes = {x["node_id"]:x["data"]["displayname"] for x in params.enterprise['nodes'] if x["node_id"] in nodes} + answer = 'y' if kwargs.get('force') else user_choice( bcolors.FAIL + bcolors.BOLD + '\nALERT!\n' + bcolors.ENDC + - 'This action cannot be undone.\n\n' + + 'Selected nodes:\n' + + "\n".join([f"- {verbose_nodes[node]} ({node})" for node in sub_nodes]) + + '\n\nThis action cannot be undone.\n\n' + 'Do you want to proceed with deletion?', 'yn', 'n') if answer.lower() != 'y': return - - sub_nodes = [node['node_id']] - EnterpriseNodeCommand.get_subnodes(params, sub_nodes, 0) - nodes = set(sub_nodes) - + if 'queued_teams' in params.enterprise: queued_teams = [x for x in params.enterprise['queued_teams'] if x['node_id'] in nodes] + if queued_teams: logging.info('Deleting queued teams') for qt in queued_teams: rq = { 'command': 'team_delete', @@ -1390,6 +1396,7 @@ def traverse_to_root(node_id, depth): roles = [x for x in params.enterprise['roles'] if x['node_id'] in nodes] role_set = set([x['role_id'] for x in managed_nodes]) role_set = role_set.union([x['role_id'] for x in roles]) + if role_set: 
logging.info('Deleting roles') if 'role_users' in params.enterprise: for ru in params.enterprise['role_users']: if ru['role_id'] in role_set: @@ -1414,6 +1421,7 @@ def traverse_to_root(node_id, depth): request_batch.append(rq) users = [x for x in params.enterprise['users'] if x['node_id'] in nodes] + if users: logging.info('Deleting users') for u in users: rq = { 'command': 'enterprise_user_delete', @@ -1423,12 +1431,33 @@ def traverse_to_root(node_id, depth): if 'teams' in params.enterprise: teams = [x for x in params.enterprise['teams'] if x['node_id'] in nodes] + if teams: logging.info('Deleting teams') for t in teams: rq = { 'command': 'team_delete', 'team_uid': t['team_uid'] } request_batch.append(rq) + + automators = json.loads(AutomatorListCommand().execute(params,format='json')) + found_automators = [x for x in automators if x['node_id'] in nodes] + if found_automators: + logging.info('Deleting automators') + for a in found_automators: + rq = automator_pb2.AdminDeleteAutomatorRequest() + rq.automatorId = a['id'] + api.communicate_rest(params, rq, 'automator/automator_delete', rs_type=automator_pb2.AdminResponse) + + can_list_gateways = [x for x in params.enforcements['booleans'] if x['key']=='allow_secrets_manager' and x['value']==True] + if can_list_gateways: + rs = api.communicate_rest(params, None, 'pam/get_controllers', rs_type=pam_pb2.PAMControllersResponse) + found_gateways = [f'{x.controllerName} exists in node {x.nodeId}' for x in rs.controllers if x.nodeId in nodes] + if found_gateways: + logging.info( + 'Detected gateway objects under selected nodes:\n- ' + + '\n- '.join(found_gateways) + '\n' + 'You must move all gateways outside of selected nodes (pam gateway edit -g -i )\n' + ) sub_nodes.pop(0) sub_nodes.reverse() @@ -1499,6 +1528,9 @@ def is_in_chain(node_id, parent_id): logging.info('\'%s\' node is %s', node_name, verb) else: logging.warning('\'%s\' node is not %s. 
Error: %s', node_name, verb, rs['message']) + if rs['message'] == "You must first delete or move the objects on this node": + logging.warning('Note: Provisioning Methods and Gateways are not cleared by this command') + else: if rs['result'] != 'success': raise CommandError('enterprise-node', '\'{0}\' command error: {1}'.format(command, rs['message'])) From 40071e7a7230b273a8dcad0c4f592f926c28643a Mon Sep 17 00:00:00 2001 From: Max Ustinov Date: Wed, 8 Apr 2026 15:47:57 -0700 Subject: [PATCH 4/4] KC-1210: Cache get_all_gateways() to prevent pam/get_controllers throttle storm Add thread-safe caching with 60s TTL to get_all_gateways() which calls keeperapp's pam/get_controllers endpoint. Under concurrent load, multiple threads share the same cached result instead of each making independent API calls. Prevents exceeding the server-side rate limit (10 req/10s) and the resulting cascading throttle loop. Cache is invalidated on gateway removal to prevent stale data. --- .../commands/pam/gateway_helper.py | 31 +++++++++++++++++-- 1 file changed, 29 insertions(+), 2 deletions(-) diff --git a/keepercommander/commands/pam/gateway_helper.py b/keepercommander/commands/pam/gateway_helper.py index 5a44b7abe..ec87111e9 100644 --- a/keepercommander/commands/pam/gateway_helper.py +++ b/keepercommander/commands/pam/gateway_helper.py @@ -1,3 +1,5 @@ +import threading +import time from typing import Sequence, Optional, List from keeper_secrets_manager_core.utils import url_safe_str_to_bytes @@ -9,6 +11,12 @@ from ...proto import pam_pb2, enterprise_pb2 +_gateway_cache_lock = threading.Lock() +_gateway_cache_result = None # type: Optional[Sequence[pam_pb2.PAMController]] +_gateway_cache_time = 0.0 +_GATEWAY_CACHE_TTL = 60 # seconds + + def find_one_gateway_by_uid_or_name(params, gateway_name_or_uid): all_gateways = get_all_gateways(params) gateway_uid_bytes = url_safe_str_to_bytes(gateway_name_or_uid) @@ -26,8 +34,26 @@
def get_all_gateways(params): # type: (KeeperParams) -> Sequence[pam_pb2.PAMController] - rs = api.communicate_rest(params, None, 'pam/get_controllers', rs_type=pam_pb2.PAMControllersResponse) - return rs.controllers + global _gateway_cache_result, _gateway_cache_time + now = time.time() + if _gateway_cache_result is not None and (now - _gateway_cache_time) < _GATEWAY_CACHE_TTL: + return _gateway_cache_result + with _gateway_cache_lock: + # Re-check after acquiring lock (another thread may have refreshed) + now = time.time() + if _gateway_cache_result is not None and (now - _gateway_cache_time) < _GATEWAY_CACHE_TTL: + return _gateway_cache_result + rs = api.communicate_rest(params, None, 'pam/get_controllers', rs_type=pam_pb2.PAMControllersResponse) + _gateway_cache_result = rs.controllers + _gateway_cache_time = time.time() + return _gateway_cache_result + + +def invalidate_gateway_cache(): + global _gateway_cache_result, _gateway_cache_time + with _gateway_cache_lock: + _gateway_cache_result = None + _gateway_cache_time = 0.0 def find_connected_gateways(all_controllers, identifier): # type: (List[bytes], str) -> Optional[bytes] @@ -74,6 +100,7 @@ def remove_gateway(params, gateway_uid): # type: (KeeperParams, bytes) -> None rq = pam_pb2.PAMGenericUidRequest() rq.uid = gateway_uid rs = api.communicate_rest(params, rq, 'pam/remove_controller', rs_type=pam_pb2.PAMRemoveControllerResponse) + invalidate_gateway_cache() controller = next((x for x in rs.controllers if x.controllerUid == gateway_uid), None) if controller: raise Exception(controller.message)