Skip to content
Merged

Release #1990

Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
46 changes: 39 additions & 7 deletions keepercommander/commands/enterprise.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,13 +40,14 @@
from .base import user_choice, suppress_exit, raise_parse_exception, dump_report_data, Command, field_to_title, \
report_output_parser
from .enterprise_common import EnterpriseCommand
from .automator import AutomatorListCommand
from .enterprise_push import EnterprisePushCommand, enterprise_push_parser
from .transfer_account import EnterpriseTransferUserCommand, transfer_user_parser
from .. import api, crypto, utils, constants
from ..display import bcolors
from ..error import CommandError, KeeperApiError
from ..params import KeeperParams
from ..proto import record_pb2, APIRequest_pb2, enterprise_pb2
from ..proto import record_pb2, APIRequest_pb2, enterprise_pb2, automator_pb2, pam_pb2


def register_commands(commands):
Expand Down Expand Up @@ -1359,26 +1360,31 @@ def traverse_to_root(node_id, depth):
'node_id': node['node_id']
}
request_batch.append(rq)

elif kwargs.get('wipe_out'):
if len(matched_nodes) != 1:
raise CommandError('enterprise-node', 'Cannot wipe-out more than one node')
node = matched_nodes[0]
if not node.get('parent_id'):
raise CommandError('enterprise-node', 'Cannot wipe out root node')

sub_nodes = [node['node_id']]
EnterpriseNodeCommand.get_subnodes(params, sub_nodes, 0)
nodes = set(sub_nodes)
verbose_nodes = {x["node_id"]:x["data"]["displayname"] for x in params.enterprise['nodes'] if x["node_id"] in nodes}

answer = 'y' if kwargs.get('force') else user_choice(
bcolors.FAIL + bcolors.BOLD + '\nALERT!\n' + bcolors.ENDC +
'This action cannot be undone.\n\n' +
'Selected nodes:\n' +
"\n".join([f"- {verbose_nodes[node]} ({node})" for node in sub_nodes]) +
'\n\nThis action cannot be undone.\n\n' +
'Do you want to proceed with deletion?', 'yn', 'n')
if answer.lower() != 'y':
return

sub_nodes = [node['node_id']]
EnterpriseNodeCommand.get_subnodes(params, sub_nodes, 0)
nodes = set(sub_nodes)


if 'queued_teams' in params.enterprise:
queued_teams = [x for x in params.enterprise['queued_teams'] if x['node_id'] in nodes]
if queued_teams: logging.info('Deleting queued teams')
for qt in queued_teams:
rq = {
'command': 'team_delete',
Expand All @@ -1390,6 +1396,7 @@ def traverse_to_root(node_id, depth):
roles = [x for x in params.enterprise['roles'] if x['node_id'] in nodes]
role_set = set([x['role_id'] for x in managed_nodes])
role_set = role_set.union([x['role_id'] for x in roles])
if role_set: logging.info('Deleting roles')
if 'role_users' in params.enterprise:
for ru in params.enterprise['role_users']:
if ru['role_id'] in role_set:
Expand All @@ -1414,6 +1421,7 @@ def traverse_to_root(node_id, depth):
request_batch.append(rq)

users = [x for x in params.enterprise['users'] if x['node_id'] in nodes]
if users: logging.info('Deleting users')
for u in users:
rq = {
'command': 'enterprise_user_delete',
Expand All @@ -1423,12 +1431,33 @@ def traverse_to_root(node_id, depth):

if 'teams' in params.enterprise:
teams = [x for x in params.enterprise['teams'] if x['node_id'] in nodes]
if teams: logging.info('Deleting teams')
for t in teams:
rq = {
'command': 'team_delete',
'team_uid': t['team_uid']
}
request_batch.append(rq)

automators = json.loads(AutomatorListCommand().execute(params,format='json'))
found_automators = [x for x in automators if x['node_id'] in nodes]
if found_automators:
logging.info('Deleting automators')
for a in found_automators:
rq = automator_pb2.AdminDeleteAutomatorRequest()
rq.automatorId = a['id']
api.communicate_rest(params, rq, 'automator/automator_delete', rs_type=automator_pb2.AdminResponse)

can_list_gateways = [x for x in params.enforcements['booleans'] if x['key']=='allow_secrets_manager' and x['value']==True]
if can_list_gateways:
rs = api.communicate_rest(params, None, 'pam/get_controllers', rs_type=pam_pb2.PAMControllersResponse)
found_gateways = [f'{x.controllerName} exists in node {x.nodeId}' for x in rs.controllers if x.nodeId in nodes]
if found_gateways:
logging.info(
'Detected gateway objects under selected nodes:\n- ' +
'\n- '.join(found_gateways) + '\n'
'You must move all gateways outside of selected nodes (pam gateway edit -g <gateway_uid> -i <target_node>)\n'
)

sub_nodes.pop(0)
sub_nodes.reverse()
Expand Down Expand Up @@ -1499,6 +1528,9 @@ def is_in_chain(node_id, parent_id):
logging.info('\'%s\' node is %s', node_name, verb)
else:
logging.warning('\'%s\' node is not %s. Error: %s', node_name, verb, rs['message'])
if rs['message'] == "You must first delete or move the objects on this node":
logging.warning('Note: Provisioning Methods and Gateways are not cleared by this command')

else:
if rs['result'] != 'success':
raise CommandError('enterprise-node', '\'{0}\' command error: {1}'.format(command, rs['message']))
Expand Down
31 changes: 29 additions & 2 deletions keepercommander/commands/pam/gateway_helper.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import threading
import time
from typing import Sequence, Optional, List

from keeper_secrets_manager_core.utils import url_safe_str_to_bytes
Expand All @@ -9,6 +11,12 @@
from ...proto import pam_pb2, enterprise_pb2


_gateway_cache_lock = threading.Lock()
_gateway_cache_result = None # type: Optional[Sequence[pam_pb2.PAMController]]
_gateway_cache_time = 0.0
_GATEWAY_CACHE_TTL = 60 # seconds


def find_one_gateway_by_uid_or_name(params, gateway_name_or_uid):
all_gateways = get_all_gateways(params)
gateway_uid_bytes = url_safe_str_to_bytes(gateway_name_or_uid)
Expand All @@ -26,8 +34,26 @@ def find_one_gateway_by_uid_or_name(params, gateway_name_or_uid):


def get_all_gateways(params):  # type: (KeeperParams) -> Sequence[pam_pb2.PAMController]
    """Return all PAM gateway controllers for the enterprise.

    Results are cached at module level for ``_GATEWAY_CACHE_TTL`` seconds to
    avoid repeated 'pam/get_controllers' round-trips. Callers that mutate the
    controller list must call ``invalidate_gateway_cache()`` afterwards.

    NOTE(review): the previous text of this block still contained the
    pre-cache body (an unconditional early ``return rs.controllers``) above
    the caching logic, which made the cache unreachable; that dead code is
    removed here.
    """
    global _gateway_cache_result, _gateway_cache_time
    # Fast path: serve a still-fresh cached result without taking the lock.
    now = time.time()
    if _gateway_cache_result is not None and (now - _gateway_cache_time) < _GATEWAY_CACHE_TTL:
        return _gateway_cache_result
    with _gateway_cache_lock:
        # Re-check after acquiring the lock: another thread may have refreshed
        # the cache while we were waiting.
        now = time.time()
        if _gateway_cache_result is not None and (now - _gateway_cache_time) < _GATEWAY_CACHE_TTL:
            return _gateway_cache_result
        rs = api.communicate_rest(params, None, 'pam/get_controllers', rs_type=pam_pb2.PAMControllersResponse)
        _gateway_cache_result = rs.controllers
        _gateway_cache_time = time.time()
        return _gateway_cache_result


def invalidate_gateway_cache():
    """Drop the cached controller list so the next lookup re-queries the server.

    Safe to call from any thread; the reset happens under the cache lock.
    """
    global _gateway_cache_result, _gateway_cache_time
    with _gateway_cache_lock:
        _gateway_cache_time = 0.0
        _gateway_cache_result = None


def find_connected_gateways(all_controllers, identifier): # type: (List[bytes], str) -> Optional[bytes]
Expand Down Expand Up @@ -74,6 +100,7 @@ def remove_gateway(params, gateway_uid): # type: (KeeperParams, bytes) -> None
rq = pam_pb2.PAMGenericUidRequest()
rq.uid = gateway_uid
rs = api.communicate_rest(params, rq, 'pam/remove_controller', rs_type=pam_pb2.PAMRemoveControllerResponse)
invalidate_gateway_cache()
controller = next((x for x in rs.controllers if x.controllerUid == gateway_uid), None)
if controller:
raise Exception(controller.message)
Expand Down
20 changes: 12 additions & 8 deletions keepercommander/commands/pam_import/extend.py
Original file line number Diff line number Diff line change
Expand Up @@ -424,15 +424,19 @@ def execute(self, params, **kwargs):
if not (configuration and isinstance(configuration, vault.TypedRecord) and configuration.version == 6):
raise CommandError("pam project extend", f"""PAM configuration not found: "{config_name}" """)

if not (file_name != "" and os.path.isfile(file_name)):
raise CommandError("pam project extend", f"""PAM Import JSON file not found: "{file_name}" """)

data = {}
try:
with open(file_name, encoding="utf-8") as f:
data = json.load(f)
except Exception:
data = {}
if not (file_name != "" and os.path.isfile(file_name)):
try:
data = json.loads(file_name)
except ValueError as e:
raise CommandError("pam project extend", f"""PAM Import JSON file not found: "{file_name}" """)

if not data:
try:
with open(file_name, encoding="utf-8") as f:
data = json.load(f)
except Exception as e:
raise CommandError("pam project extend", f"""Unable to read file "{file_name}": {e}""")

pam_data = data.get("pam_data") if isinstance(data, dict) else {}
pam_data = pam_data if isinstance(pam_data, dict) else {}
Expand Down
3 changes: 0 additions & 3 deletions keepercommander/importer/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -463,9 +463,6 @@ def get_parser(self):

def execute(self, params, **kwargs):
file_name = kwargs.get('name') or 'shared_folder_membership.json'
if not os.path.exists(file_name):
logging.warning('Shared folder membership file "%s" not found', file_name)
return

shared_folders = [] # type: List[SharedFolder]
teams = [] # type: List[Team]
Expand Down
19 changes: 11 additions & 8 deletions keepercommander/importer/importer.py
Original file line number Diff line number Diff line change
Expand Up @@ -400,15 +400,18 @@ def __init__(self):

def execute(self, name, **kwargs):
    # type: (str, ...) -> Iterable[Union[Record, SharedFolder, File]]
    """Yield imported items from *name*.

    *name* may be either inline JSON text or a path to an import file.
    If the bare path does not exist, the importer's default extension is
    appended before giving up.

    NOTE(review): the previous text of this block interleaved the old and
    new hunk lines, so ``path`` computed by the try/except was immediately
    clobbered by the stale pre-change path resolution, defeating the
    inline-JSON branch; the duplicated residue is removed here.

    :raises CommandError: when *name* is not valid JSON and no matching
        file can be found.
    """
    try:
        # If the argument parses as JSON, treat it as inline import data
        # and pass it straight through.
        json.loads(name)
        path = name
    except ValueError:
        # Otherwise resolve it as a file path, trying the importer's
        # default extension when the bare path does not exist.
        path = os.path.expanduser(name)
        if not os.path.isfile(path):
            ext = self.extension()
            if ext:
                path = path + '.' + ext
        if not os.path.isfile(path):
            raise CommandError('import', f'File \'{name}\' does not exist')

    yield from self.do_import(path, **kwargs)

Expand Down
33 changes: 20 additions & 13 deletions keepercommander/importer/json/json.py
Original file line number Diff line number Diff line change
Expand Up @@ -162,19 +162,26 @@ def prepare(self):
class KeeperJsonImporter(BaseFileImporter, KeeperJsonMixin):
def do_import(self, filename, **kwargs):
users_only = kwargs.get('users_only') or False
if not os.path.isfile(filename):
zip_name = pathlib.Path(filename).with_suffix('.zip').name
if os.path.isfile(zip_name):
if zipfile.is_zipfile(zip_name):
filename = zip_name
file_path = pathlib.Path(filename)
zip_archive = file_path.suffix == '.zip'
if zip_archive:
with zipfile.ZipFile(filename, 'r') as zf:
export = json.loads(zf.read('export.json'))
else:
with open(filename, "r", encoding='utf-8') as jf:
export = json.load(jf)
try:
export = json.loads(filename)
zip_archive = False
logging.info("Extracted JSON from object")
except ValueError as e:
if not os.path.isfile(filename):
zip_name = pathlib.Path(filename).with_suffix('.zip').name
if os.path.isfile(zip_name):
if zipfile.is_zipfile(zip_name):
filename = zip_name
file_path = pathlib.Path(filename)
zip_archive = file_path.suffix == '.zip'
if zip_archive:
with zipfile.ZipFile(filename, 'r') as zf:
export = json.loads(zf.read('export.json'))
logging.info("Extracted JSON from archive")
else:
with open(filename, "r", encoding='utf-8') as jf:
export = json.load(jf)
logging.info("Extracted JSON from file")

records = None
folders = None
Expand Down
Loading