From 8a9a8d7fa6f7b5ff34dff1f1951414105c333d0f Mon Sep 17 00:00:00 2001
From: Albin George
Date: Fri, 1 Nov 2024 11:13:35 -0400
Subject: [PATCH] Add collaborator migration support to cmlutil

Project collaborators are now migrated automatically, with no additional
user input required. Validation of the migrated collaborators is included
as well.
---
 cmlutils/constants.py | 2 +
 cmlutils/directory_utils.py | 7 +
 cmlutils/project_entrypoint.py | 198 +++++++++++++++++++++++----
 cmlutils/projects.py | 240 ++++++++++++++++++++++++++++++---
 cmlutils/utils.py | 43 +++++-
 5 files changed, 441 insertions(+), 49 deletions(-)

diff --git a/cmlutils/constants.py b/cmlutils/constants.py index b05ad9c..ffdb0a4 100644 --- a/cmlutils/constants.py +++ b/cmlutils/constants.py @@ -41,6 +41,8 @@ class ApiV2Endpoints(Enum): SEARCH_APP = "/api/v2/projects/$project_id/applications?search_filter=$search_option&page_size=100000" RUNTIME_ADDONS = "/api/v2/runtimeaddons?search_filter=$search_option" RUNTIMES = "/api/v2/runtimes?page_size=$page_size&page_token=$page_token" + COLLABORATORS = "/api/v2/projects/$project_id/collaborators?page_size=$page_size&page_token=$page_token" + ADD_COLLABORATOR = "/api/v2/projects/$project_id/collaborators/$user_name" class ApiV1Endpoints(Enum): diff --git a/cmlutils/directory_utils.py b/cmlutils/directory_utils.py index 8ad2f51..4a918f8 100644 --- a/cmlutils/directory_utils.py +++ b/cmlutils/directory_utils.py @@ -17,6 +17,13 @@ def get_project_metadata_file_path(top_level_dir: str, project_name: str) -> str ) +def get_project_collaborators_file_path(top_level_dir: str, project_name: str) -> str: + return os.path.join( + get_project_metadata_dir_path(top_level_dir, project_name), + "project-collaborators.json", + ) + + def get_models_metadata_file_path(top_level_dir: str, project_name: str) -> str: return os.path.join( get_project_metadata_dir_path(top_level_dir, project_name), diff --git a/cmlutils/project_entrypoint.py b/cmlutils/project_entrypoint.py index a5e0ce6..5d1648e 100644 --- a/cmlutils/project_entrypoint.py +++ b/cmlutils/project_entrypoint.py @@ -22,11 +22,12 @@ from cmlutils.script_models import ValidationResponseStatus from cmlutils.utils import ( compare_metadata, + compare_collaborator_metadata, get_absolute_path, parse_runtimes_v2, read_json_file, update_verification_status, - write_json_file + write_json_file, ) from cmlutils.validator import ( initialize_export_validators, @@ -122,7 +123,9 @@ def project_export_cmd(project_name): project_slug=project_name, owner_type="", ) - creator_username, project_slug, owner_type = pobj.get_creator_username() + creator_username, project_slug, owner_type, public_id = ( + pobj.get_creator_username() + ) if creator_username is None: logging.error( "Validation error: Cannot find project - %s under username %s", @@ -167,6 +170,7 @@ ) start_time = time.time() pexport.transfer_project_files(log_filedir=log_filedir) + pexport.set_project_public_id(public_id=public_id) exported_data = pexport.dump_project_and_related_metadata() print("\033[32m✔ Export of Project {} Successful \033[0m".format(project_name)) print( @@ -185,6 +189,12 @@ exported_data.get("application_name_list"), ) ) + print( + "\033[34m\tExported {} Collaborators {}\033[0m".format( + exported_data.get("total_collaborators"), + exported_data.get("collaborator_list"), + ) + ) end_time = time.time() export_file = log_filedir + constants.EXPORT_METRIC_FILE
write_json_file(file_path=export_file, json_data=exported_data) @@ -299,11 +309,12 @@ def project_import_cmd(project_name, verify): ) start_time = time.time() if verify: - import_diff_file_list=pimport.transfer_project(log_filedir=log_filedir, verify=True) + import_diff_file_list = pimport.transfer_project( + log_filedir=log_filedir, verify=True + ) else: pimport.transfer_project(log_filedir=log_filedir) - if uses_engine: proj_patch_metadata = {"default_project_engine_type": "legacy_engine"} pimport.convert_project_to_engine_based( @@ -329,6 +340,12 @@ def project_import_cmd(project_name, verify): import_data.get("application_name_list"), ) ) + print( + "\033[34m\tImported {} Collaborators {}\033[0m".format( + import_data.get("total_collaborators"), + import_data.get("collaborator_list"), + ) + ) end_time = time.time() import_file = log_filedir + constants.IMPORT_METRIC_FILE write_json_file(file_path=import_file, json_data=import_data) @@ -340,7 +357,11 @@ def project_import_cmd(project_name, verify): pimport.terminate_ssh_session() # If verification is also needed after import if verify: - print("***************************************************** Started verifying migration for project: {} ***************************************************** ".format(project_name)) + print( + "***************************************************** Started verifying migration for project: {} ***************************************************** ".format( + project_name + ) + ) ( imported_project_data, imported_project_list, @@ -350,6 +371,8 @@ def project_import_cmd(project_name, verify): imported_app_list, imported_job_data, imported_job_list, + imported_collaborator_data, + imported_collaborator_list, ) = pimport.collect_imported_project_data(project_id=project_id) # import_diff_file_list = pimport.verify_project(log_filedir=log_filedir) @@ -372,7 +395,7 @@ def project_import_cmd(project_name, verify): _configure_project_command_logging(log_filedir, project_name) import_file = log_filedir + constants.IMPORT_METRIC_FILE - with open(import_file, 'r') as file: + with open(import_file, "r") as file: validation_data = json.load(file) try: # Get username of the creator of project - This is required so that admins can also migrate the project @@ -386,11 +409,9 @@ def project_import_cmd(project_name, verify): project_slug=project_name, owner_type="", ) - ( - export_creator_username, - export_project_slug, - export_owner_type, - ) = pobj.get_creator_username() + (export_creator_username, export_project_slug, export_owner_type, _) = ( + pobj.get_creator_username() + ) if export_creator_username is None: logging.error( "Validation error: Cannot find project - %s under username %s", @@ -410,7 +431,10 @@ def project_import_cmd(project_name, verify): ) for v in validators: validation_response = v.validate() - if validation_response.validation_status == ValidationResponseStatus.FAILED: + if ( + validation_response.validation_status + == ValidationResponseStatus.FAILED + ): logging.error( "Validation error: %s", project_name, @@ -442,6 +466,8 @@ def project_import_cmd(project_name, verify): exported_app_list, exported_job_data, exported_job_list, + exported_collaborator_data, + exported_collaborator_list, ) = pexport.collect_export_project_data() # File verification @@ -506,7 +532,9 @@ def project_import_cmd(project_name, verify): skip_field=["environment"], ) logging.info("Source Application list {}".format(exported_app_list)) - logging.info("Destination Application list {}".format(imported_app_list)) + 
logging.info( + "Destination Application list {}".format(imported_app_list) + ) logging.info( "All Application in source project is present at destination project ".format( app_diff @@ -540,7 +568,9 @@ def project_import_cmd(project_name, verify): model_diff ) if not model_diff - else "Model {} Not Found in source or destination".format(model_diff) + else "Model {} Not Found in source or destination".format( + model_diff + ) ) logging.info( "No Model Config Difference Found" @@ -578,13 +608,66 @@ def project_import_cmd(project_name, verify): True if (job_diff or job_config_diff) else False, message="Job Verification", ) - result = [export_diff_file_list,import_diff_file_list,proj_diff, - proj_config_diff,app_diff,app_config_diff,model_diff,model_config_diff,job_diff, job_config_diff] + + # Collaborators verification + collaborator_diff, collaborator_config_diff = ( + compare_collaborator_metadata( + imported_collaborator_data, + exported_collaborator_data, + imported_collaborator_list, + exported_collaborator_list, + skip_field=None, + ) + ) + logging.info( + "Source collaborator list {}".format(exported_collaborator_list) + ) + logging.info( + "Destination collaborator list {}".format( + imported_collaborator_list + ) + ) + logging.info( + "All collaborators in source project is present at destination project ".format( + collaborator_diff + ) + if not collaborator_diff + else "collaborator {} Not Found in source or destination".format( + collaborator_diff + ) + ) + logging.info( + "No collaborator Config Difference Found" + if not collaborator_config_diff + else "Difference in collaborator Config {}".format( + collaborator_config_diff + ) + ) + update_verification_status( + True if (collaborator_diff or collaborator_config_diff) else False, + message="Collaborator Verification", + ) + result = [ + export_diff_file_list, + import_diff_file_list, + proj_diff, + proj_config_diff, + app_diff, + app_config_diff, + model_diff, + model_config_diff, + job_diff, + job_config_diff, + collaborator_diff, + collaborator_config_diff, + ] migration_status = all(not sublist for sublist in result) validation_data["isMigrationSuccessful"] = migration_status update_verification_status( not migration_status, - message="Migration Validation status for project : {} is".format(project_name), + message="Migration Validation status for project : {} is".format( + project_name + ), ) write_json_file(file_path=import_file, json_data=validation_data) @@ -633,7 +716,7 @@ def project_verify_cmd(project_name): logging.info("Started Verifying project: %s", project_name) import_file = log_filedir + constants.IMPORT_METRIC_FILE try: - with open(import_file, 'r') as file: + with open(import_file, "r") as file: validation_data = json.load(file) except: logging.error("File not found Exception: ", exc_info=1) @@ -649,11 +732,9 @@ def project_verify_cmd(project_name): project_slug=project_name, owner_type="", ) - ( - export_creator_username, - export_project_slug, - export_owner_type, - ) = pobj.get_creator_username() + (export_creator_username, export_project_slug, export_owner_type, _) = ( + pobj.get_creator_username() + ) if export_creator_username is None: logging.error( "Validation error: Cannot find project - %s under username %s", @@ -705,6 +786,8 @@ def project_verify_cmd(project_name): exported_app_list, exported_job_data, exported_job_list, + exported_collaborator_data, + exported_collaborator_list, ) = pexport.collect_export_project_data() pexport.terminate_ssh_session() pimport = None @@ -786,6 +869,8 @@ def 
project_verify_cmd(project_name): imported_app_list, imported_job_data, imported_job_list, + imported_collaborator_data, + imported_collaborator_list, ) = pimport.collect_imported_project_data(project_id=project_id) # File verification @@ -924,12 +1009,73 @@ def project_verify_cmd(project_name): True if (job_diff or job_config_diff) else False, message="Job Verification", ) - result = [export_diff_file_list,import_diff_file_list,proj_diff, - proj_config_diff,app_diff,app_config_diff,model_diff,model_config_diff,job_diff, job_config_diff] + + # Collaborators verification + collaborator_diff, collaborator_config_diff = compare_collaborator_metadata( + imported_collaborator_data, + exported_collaborator_data, + imported_collaborator_list, + exported_collaborator_list, + skip_field=None, + ) + logging.info( + "Source collaborator list {}".format(exported_collaborator_list) + ) + logging.info( + "Destination collaborator list {}".format(imported_collaborator_list) + ) + logging.info( + "All collaborators in source project is present at destination project ".format( + collaborator_diff + ) + if not collaborator_diff + else "collaborator {} Not Found in source or destination".format( + collaborator_diff + ) + ) + logging.info( + "No collaborator Config Difference Found" + if not collaborator_config_diff + else "Difference in collaborator Config {}".format( + collaborator_config_diff + ) + ) + update_verification_status( + True if (collaborator_diff or collaborator_config_diff) else False, + message="Collaborator Verification", + ) + result = [ + export_diff_file_list, + import_diff_file_list, + proj_diff, + proj_config_diff, + app_diff, + app_config_diff, + model_diff, + model_config_diff, + job_diff, + job_config_diff, + collaborator_diff, + collaborator_config_diff, + ] migration_status = all(not sublist for sublist in result) update_verification_status( not migration_status, - message="Migration Validation status for project : {} is".format(project_name), + message="Migration Validation status for project : {} is".format( + project_name + ), ) validation_data["isMigrationSuccessful"] = migration_status write_json_file(file_path=import_file, json_data=validation_data) diff --git a/cmlutils/projects.py b/cmlutils/projects.py index 11d83c1..d9d4858 100644 --- a/cmlutils/projects.py +++ b/cmlutils/projects.py @@ -22,6 +22,7 @@ get_models_metadata_file_path, get_project_data_dir_path, get_project_metadata_file_path, + get_project_collaborators_file_path, ) from cmlutils.ssh import open_ssh_endpoint from cmlutils.utils import ( @@ -36,7 +37,6 @@ ) - def is_project_configured_with_runtimes( host: str, username: str, @@ -238,11 +238,8 @@ def verify_files( ) # Use list comprehension to remove empty strings and .local and ,cache files filtered_list = [ - file - for file in file_list - if (file != "" and - not file.startswith('.')) - ] + file for file in file_list if (file != "" and not file.startswith(".")) + ] return filtered_list logging.warning("Got non zero return code. 
Retrying...") if result.returncode != 0: @@ -287,6 +284,7 @@ def __init__( self.owner_type = owner_type super().__init__(host, username, project_name, api_key, ca_path, project_slug) self.metrics_data = dict() + self.project_public_id = None # Get CDSW project info using API v1 def get_project_infov1(self): @@ -364,14 +362,16 @@ def get_creator_username(self): project["owner"]["username"], project["slug_raw"], constants.ORGANIZATION_TYPE, + project["public_identifier"], ) else: return ( project["creator"]["username"], project["slug_raw"], constants.USER_TYPE, + project["public_identifier"], ) - return None, None, None + return None, None, None, None # Get all models list info using API v1 def get_models_listv1(self, project_id: int): @@ -437,6 +437,9 @@ def get_model_infov1(self, model_id: str): ) return response.json() + def set_project_public_id(self, public_id: str): + self.project_public_id = public_id + # Get Job info using API v1 def get_job_infov1(self, job_id: int): endpoint = Template(ApiV1Endpoints.JOB_INFO.value).substitute( @@ -605,6 +608,25 @@ def verify_project_files(self, log_filedir: str): self.terminate_ssh_session() return result + def get_project_collaborators_v2(self, page_token: str, project_id: str): + endpoint = Template(ApiV2Endpoints.COLLABORATORS.value).substitute( + page_size=constants.MAX_API_PAGE_LENGTH, + page_token=page_token, + project_id=project_id, + ) + + response = call_api_v2( + host=self.host, + endpoint=endpoint, + method="GET", + user_token=self.apiv2_key, + ca_path=self.ca_path, + ) + result_list = response.json() + if result_list: + return result_list + return None + def _export_project_metadata(self): filepath = get_project_metadata_file_path( top_level_dir=self.top_level_dir, project_name=self.project_name @@ -638,6 +660,32 @@ def _export_project_metadata(self): self.project_id = project_info_resp["id"] write_json_file(file_path=filepath, json_data=project_metadata) + def _export_project_collaborators(self): + filepath = get_project_collaborators_file_path( + top_level_dir=self.top_level_dir, project_name=self.project_name + ) + logging.info("Exporting project collaborators to path %s", filepath) + project_collaborators_resp = self.get_project_collaborators_v2( + project_id=self.project_public_id, page_token="" + ) + project_collaborators_new = {"collaborators": []} + + usernames = [] + + for collaborator in project_collaborators_resp["collaborators"]: + collaborator_entry = { + "permission": collaborator["permission"], + "username": collaborator["user"]["username"], + } + project_collaborators_new["collaborators"].append(collaborator_entry) + usernames.append(collaborator["user"]["username"]) + + self.metrics_data["total_collaborators"] = len( + project_collaborators_new["collaborators"] + ) + self.metrics_data["collaborator_list"] = sorted(usernames) + write_json_file(file_path=filepath, json_data=project_collaborators_new) + def _export_models_metadata(self): filepath = get_models_metadata_file_path( top_level_dir=self.top_level_dir, project_name=self.project_name @@ -750,7 +798,9 @@ def collect_export_job_list(self): if len(job_list) == 0: logging.info("Jobs are not present in the project %s.", self.project_name) else: - logging.info("Project {} has {} Jobs".format(self.project_name, len(job_list))) + logging.info( + "Project {} has {} Jobs".format(self.project_name, len(job_list)) + ) job_metadata_list = [] for job in job_list: job_info_flatten = flatten_json_data(job) @@ -765,7 +815,9 @@ def collect_export_model_list(self, proj_id): if 
len(model_list) == 0: logging.info("Models are not present in the project %s.", self.project_name) else: - logging.info("Project {} has {} Models".format(self.project_name, len(model_list))) + logging.info( + "Project {} has {} Models".format(self.project_name, len(model_list)) + ) model_metadata_list = [] for model in model_list: model_info_flatten = flatten_json_data(model) @@ -782,7 +834,11 @@ def collect_export_application_list(self): "Applications are not present in the project %s.", self.project_name ) else: - logging.info("Project {} has {} Applications".format(self.project_name, len(app_list))) + logging.info( + "Project {} has {} Applications".format( + self.project_name, len(app_list) + ) + ) app_metadata_list = [] for app in app_list: app_info_flatten = flatten_json_data(app) @@ -794,6 +850,29 @@ def collect_export_application_list(self): app_metadata_list.append(app_metadata) return app_metadata_list, sorted(app_name_list) + def collect_export_collaborator_list(self, project_id): + project_collaborators_resp = self.get_project_collaborators_v2( + project_id=project_id, page_token="" + ) + project_collaborators_new = {"collaborators": []} + + usernames = [] + + for collaborator in project_collaborators_resp["collaborators"]: + collaborator_entry = { + "permission": collaborator["permission"], + "username": collaborator["user"]["username"], + } + project_collaborators_new["collaborators"].append(collaborator_entry) + usernames.append(collaborator["user"]["username"]) + + self.metrics_data["total_collaborators"] = len( + project_collaborators_new["collaborators"] + ) + self.metrics_data["collaborator_list"] = sorted(usernames) + + return project_collaborators_new["collaborators"], sorted(usernames) + def _export_job_metadata(self): filepath = get_jobs_metadata_file_path( top_level_dir=self.top_level_dir, project_name=self.project_name @@ -876,6 +955,7 @@ def dump_project_and_related_metadata(self): self._export_models_metadata() self._export_application_metadata() self._export_job_metadata() + self._export_project_collaborators() return self.metrics_data def collect_export_project_data(self): @@ -891,6 +971,9 @@ def collect_export_project_data(self): ) app_data, app_list = self.collect_export_application_list() job_data, job_list = self.collect_export_job_list() + collaborators_data, collaborators_list = self.collect_export_collaborator_list( + proj_data_raw["public_identifier"] + ) return ( proj_data, proj_list, @@ -900,6 +983,8 @@ def collect_export_project_data(self): app_list, job_data, job_list, + collaborators_data, + collaborators_list, ) @@ -1130,6 +1215,22 @@ def create_model_build_v2( ) return + def add_proj_collaborator_v2(self, proj_id: str, user_name: str, metadata): + try: + endpoint = Template(ApiV2Endpoints.ADD_COLLABORATOR.value).substitute( + project_id=proj_id, user_name=user_name + ) + call_api_v2( + host=self.host, + endpoint=endpoint, + method="PUT", + user_token=self.apiv2_key, + json_data=metadata, + ca_path=self.ca_path, + ) + except KeyError as e: + raise + def create_application_v2(self, proj_id: str, app_metadata) -> str: try: endpoint = Template(ApiV2Endpoints.CREATE_APP.value).substitute( @@ -1420,10 +1521,18 @@ def import_metadata(self, project_id: str): self.create_paused_jobs( project_id=project_id, job_metadata_filepath=job_metadata_filepath ) + proj_collaborator_filepath = get_project_collaborators_file_path( + top_level_dir=self.top_level_dir, project_name=self.project_name + ) + self.add_project_collaborators( + project_id=project_id, + 
collaborator_metadata_filepath=proj_collaborator_filepath, + ) self.get_project_infov2(proj_id=project_id) self.collect_import_model_list(project_id=project_id) self.collect_import_application_list(project_id=project_id) self.collect_import_job_list(project_id=project_id) + self.collect_import_collaborator_list(project_id=project_id) return self.metrics_data def collect_imported_project_data(self, project_id: str): @@ -1431,13 +1540,18 @@ def collect_imported_project_data(self, project_id: str): proj_info_flatten = flatten_json_data(proj_data_raw) proj_data = [extract_fields(proj_info_flatten, constants.PROJECT_MAPV2)] proj_list = [ - self.project_name.lower() - if self.check_project_exist(self.project_name) - else None + ( + self.project_name.lower() + if self.check_project_exist(self.project_name) + else None + ) ] model_data, model_list = self.collect_import_model_list(project_id=project_id) app_data, app_list = self.collect_import_application_list(project_id=project_id) job_data, job_list = self.collect_import_job_list(project_id=project_id) + collaborator_data, collaborators = self.collect_import_collaborator_list( + project_id=project_id + ) return ( proj_data, proj_list, @@ -1447,6 +1561,8 @@ def collect_imported_project_data(self, project_id: str): app_list, job_data, job_list, + collaborator_data, + collaborators, ) def create_models(self, project_id: str, models_metadata_filepath: str): @@ -1480,9 +1596,9 @@ def create_models(self, project_id: str, models_metadata_filepath: str): model_metadata["runtime_fullversion"], ) if runtime_identifier != None: - model_metadata[ - "runtime_identifier" - ] = runtime_identifier + model_metadata["runtime_identifier"] = ( + runtime_identifier + ) else: logging.warning( "Couldn't locate runtime identifier for model %s", @@ -1588,6 +1704,38 @@ def create_stoppped_applications(self, project_id: str, app_metadata_filepath: s logging.error(f"Error: {e}") raise + def add_project_collaborators( + self, project_id: str, collaborator_metadata_filepath: str + ): + try: + collaborator_metadata = read_json_file(collaborator_metadata_filepath) + collaborator_metadata_list = collaborator_metadata.get("collaborators") + + if collaborator_metadata_list != None: + for collaborator_metadata in collaborator_metadata_list: + try: + self.add_proj_collaborator_v2( + proj_id=project_id, + user_name=collaborator_metadata["username"], + metadata=collaborator_metadata, + ) + except Exception as e: + logging.error( + f"Failed to add collaborator {collaborator_metadata['username']}. Error: {e}" + ) + else: + logging.info( + f"{collaborator_metadata['username']} has been added successfully as a collaborator." 
+ ) + return + except FileNotFoundError as e: + logging.info("No collaborator-metadata file found for migration") + return + except Exception as e: + logging.error("Collaborator migration failed") + logging.error(f"Error: {e}") + raise + def create_paused_jobs(self, project_id: str, job_metadata_filepath: str): try: runtime_list = self.get_all_runtimes() @@ -1689,13 +1837,34 @@ def get_project_infov2(self, proj_id: str): ) return response.json() + def get_project_collaborators_v2(self, page_token: str, project_id: str): + endpoint = Template(ApiV2Endpoints.COLLABORATORS.value).substitute( + page_size=constants.MAX_API_PAGE_LENGTH, + page_token=page_token, + project_id=project_id, + ) + + response = call_api_v2( + host=self.host, + endpoint=endpoint, + method="GET", + user_token=self.apiv2_key, + ca_path=self.ca_path, + ) + result_list = response.json() + if result_list: + return result_list + return None + def collect_import_job_list(self, project_id): job_list = self.get_jobs_listv2(proj_id=project_id)["jobs"] job_name_list = [] if len(job_list) == 0: logging.info("Jobs are not present in the project %s.", self.project_name) else: - logging.info("Project {} has {} Jobs".format(self.project_name, len(job_list))) + logging.info( + "Project {} has {} Jobs".format(self.project_name, len(job_list)) + ) job_metadata_list = [] for job in job_list: job_info_flatten = flatten_json_data(job) @@ -1712,14 +1881,20 @@ def collect_import_model_list(self, project_id): if len(model_list) == 0: logging.info("Models are not present in the project %s.", self.project_name) else: - logging.info("Project {} has {} Models".format(self.project_name, len(model_list))) + logging.info( + "Project {} has {} Models".format(self.project_name, len(model_list)) + ) model_metadata_list = [] model_detail_data = {} for model in model_list: model_info_flatten = flatten_json_data(model) model_detail_data["name"] = model_info_flatten["name"] model_detail_data["description"] = model_info_flatten["description"] - model_detail_data["disable_authentication"] = model_info_flatten["auth_enabled"] if isinstance(model_info_flatten["auth_enabled"], bool) else model_info_flatten["auth_enabled"] + model_detail_data["disable_authentication"] = ( + model_info_flatten["auth_enabled"] + if isinstance(model_info_flatten["auth_enabled"], bool) + else model_info_flatten["auth_enabled"] + ) model_details = self.get_models_detailv2( proj_id=project_id, model_id=model_info_flatten["id"] ) @@ -1744,7 +1919,9 @@ def collect_import_application_list(self, project_id): "Applications are not present in the project %s.", self.project_name ) else: - logging.info("Project {} has {} Application".format(self.project_name, len(app_list))) + logging.info( + "Project {} has {} Application".format(self.project_name, len(app_list)) + ) app_metadata_list = [] for app in app_list: app_info_flatten = flatten_json_data(app) @@ -1754,3 +1931,26 @@ def collect_import_application_list(self, project_id): self.metrics_data["total_application"] = len(app_name_list) self.metrics_data["application_name_list"] = sorted(app_name_list) return app_metadata_list, sorted(app_name_list) + + def collect_import_collaborator_list(self, project_id): + project_collaborators_resp = self.get_project_collaborators_v2( + project_id=project_id, page_token="" + ) + project_collaborators_new = {"collaborators": []} + + usernames = [] + + for collaborator in project_collaborators_resp["collaborators"]: + collaborator_entry = { + "permission": collaborator["permission"], + "username": 
collaborator["user"]["username"], + } + project_collaborators_new["collaborators"].append(collaborator_entry) + usernames.append(collaborator["user"]["username"]) + + self.metrics_data["total_collaborators"] = len( + project_collaborators_new["collaborators"] + ) + self.metrics_data["collaborator_list"] = sorted(usernames) + + return project_collaborators_new["collaborators"], sorted(usernames) diff --git a/cmlutils/utils.py b/cmlutils/utils.py index 2bc4c67..393caf4 100644 --- a/cmlutils/utils.py +++ b/cmlutils/utils.py @@ -312,12 +312,49 @@ def compare_metadata( if key not in skip_field: ex_value = ex_data.get(key) if ex_value is not None and str(ex_value) != str(value): - difference = ["{} value in destination is {}, and source is {}".format( - key, str(value), str(ex_value))] + difference = [ + "{} value in destination is {}, and source is {}".format( + key, str(value), str(ex_value) + ) + ] + if config_differences.get(name): + config_differences[name].extend(difference) + else: + config_differences[name] = difference + return data_list_diff, config_differences + + +def compare_collaborator_metadata( + import_data, export_data, import_data_list, export_data_list, skip_field=None +): + if skip_field is None: + skip_field = [] + + data_list_diff = list(set(sorted(export_data_list)) - set(sorted(import_data_list))) + config_differences = {} + + import_data_dict = {data["username"]: data for data in import_data} + export_data_dict = {data["username"]: data for data in export_data} + + for name, im_data in import_data_dict.items(): + ex_data = export_data_dict.get(name) + + if ex_data is None: + continue + + for key, value in im_data.items(): + if key not in skip_field: + ex_value = ex_data.get(key) + if ex_value is not None and str(ex_value) != str(value): + difference = [ + "{} value in destination is {}, and source is {}".format( + key, str(value), str(ex_value) + ) + ] if config_differences.get(name): config_differences[name].extend(difference) else: - config_differences[name]= difference + config_differences[name] = difference return data_list_diff, config_differences