#!/opt/continuum/python/bin/python -O
import sys
import os


if "CONTINUUM_HOME" not in os.environ:
    raise Exception("CONTINUUM_HOME environment variable not set.")
else:
    sys.path.insert(0, os.environ["CONTINUUM_HOME"])
    sys.path.insert(0, os.path.join(os.environ["CONTINUUM_HOME"], "common", "lib"))
    sys.path.insert(0, os.path.join(os.environ["CONTINUUM_HOME"], "integrations", "transformer", "migration"))

import argparse
import subprocess
import time
import json
import yaml
from bson import ObjectId
import decimal
import datetime as dt
import copy
from common.lib.catoconfig.catoconfig import CONFIG
from common.lib.catocommon import catocommon
from integrations.transformer.migration.plugin_instance_add_webhook_handler_name import (
    add_webhook_handler_name_to_plugin_instances
)

frmt_date = dt.datetime.utcfromtimestamp(int(time.time())).strftime("%Y-%m-%d-%H-%M-%S")

BASE_DUMP_DIR_NAME = "agilitysync-datadump-{}".format(frmt_date)
ENCRYPTION_KEY_FILE_NAME = "encryption_key.json"
DEFAULT_LOGS_FOLDER_PATH = "/var/continuum/log/agility_connect/"

AS_SUPPORTED_PLUGINS = ("teamforgeplugin", "v1plugin", "jiraplugin", "tfsplugin", "servicenow", "hpalmqc", "sfdcplugin",
                        "rallyplugin")
PLUGINS_TO_REMOVE_END_SLASHES = ("jiraplugin", "tfsplugin", "servicenow", "sfdcplugin")

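# For each supported plugin, maps the grouped export keys ("id", "project"/"asset",
# "display_name") to the flat system_mapping keys they are read from.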
plugin_config = {
    "jiraplugin": {
        "project_info": {
            "id": "project_id",
            "project": "project_id",
            "display_name": "project_name",
        },
        "asset_info": {
            "id": "issuetype_id",
            "asset": "issuetype_id",
            "display_name": "issuetype_name",
        }
    },
    "v1plugin": {
        "project_info": {
            "id": "project_id",
            "project": "project_id",
            "display_name": "project_name",
        },
        "asset_info": {
            "id": "asset_type_id",
            "asset": "asset_type",
            "display_name": "asset_type",
        }
    },
    "tfsplugin": {
        "project_info": {
            "id": "project_id",
            "project": "project_id",
            "display_name": "project_name",
        },
        "asset_info": {
            "id": "workitemtype_id",
            "asset": "workitemtype_name",
            "display_name": "workitemtype_name",
        }
    },
    "hpalmqc": {
        "project_info": {
            "id": "project_id",
            "project": "project_name",
            "display_name": "project_name",
        },
        "asset_info": {
            "id": "asset_type_id",
            "asset": "asset_type_id",
            "display_name": "asset_type",
        }
    },
    "servicenow": {
        "project_info": {
            "id": "project_id",
            "project": "project_id",
            "display_name": "project_name",
        },
        "asset_info": {
            "id": "asset_type_id",
            "asset": "asset_type_id",
            "display_name": "asset_type",
        }
    },
    "teamforgeplugin": {
        "project_info": {
            "id": "project_id",
            "project": "project_id",
            "display_name": "project_name",
        },
        "asset_info": {
            "id": "tracker_id",
            "asset": "tracker_id",
            "display_name": "tracker_name",
        }
    },
}

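# Mongo collections copied into the dump; "skip_fields" names top-level fields
# dropped from every exported document of that collection.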
collections_to_export = (
    {
        "to_collection": "data_map",
        "from_collection": "transformer.data_map",
        "skip_fields": set()
    },
    {
        "to_collection": "outbound",
        "from_collection": "transformer.outbound",
        "skip_fields": set()
    },
    {
        "to_collection": "sync_reference",
        "from_collection": "transformer.sync_reference",
        "skip_fields": set()
    },
    {
        "to_collection": "sync_queue",
        "from_collection": "transformer.sync_queue",
        "skip_fields": set()
    },
    {
        "to_collection": "migration",
        "from_collection": "transformer.migration",
        "skip_fields": set()
    },
    {
        "to_collection": "outbound_sync_log",
        "from_collection": "transformer.outbound_sync_log",
        "skip_fields": set()
    },
    {
        "to_collection": "users",
        "from_collection": "users",
        "skip_fields": set((
            "is_shared_asset_manager",
            "contributors",
            "is_system_administrator",
            "teams"
        ))
    },
    {
        "to_collection": "user_settings",
        "from_collection": "user_settings",
        "skip_fields": set()
    },
    {
        "to_collection": "poll_config",
        "from_collection": "transformer.poll_config",
        "skip_fields": set()
    },
    {
        "to_collection": "action_plans",
        "from_collection": "action_plans",
        "skip_fields": set()
    },
    {
        "to_collection": "action_plan_histories",
        "from_collection": "action_plan_histories",
        "skip_fields": set()
    },
    {
        "to_collection": "action_schedules",
        "from_collection": "action_schedules",
        "skip_fields": set()
    },
    {
        "to_collection": "apitokens",
        "from_collection": "apitokens",
        "skip_fields": set()
    },
    {
        "to_collection": "config",
        "from_collection": "config",
        "skip_fields": set()
    },
    {
        "to_collection": "global_registry",
        "from_collection": "global_registry",
        "skip_fields": set()
    },
    {
        "to_collection": "tags",
        "from_collection": "tags",
        "skip_fields": set()
    },
    {
        "to_collection": "tasks",
        "from_collection": "tasks",
        "skip_fields": set(("team_id",))
    },
    {
        "to_collection": "tasksteps",
        "from_collection": "tasksteps",
        "skip_fields": set(("team_id",))
    },
    {
        "to_collection": "task_clipboard",
        "from_collection": "task_clipboard",
        "skip_fields": set()
    },
    {
        "to_collection": "taskinstance",
        "from_collection": "taskinstance",
        "skip_fields": set()
    },
    {
        "to_collection": "taskinstance_log",
        "from_collection": "taskinstance_log",
        "skip_fields": set()
    },
    {
        "to_collection": "taskinstance_data",
        "from_collection": "taskinstance_data",
        "skip_fields": set()
    },
    {
        "to_collection": "taskinstance_parameter",
        "from_collection": "taskinstance_parameter",
        "skip_fields": set()
    },
    {
        "to_collection": "webhookhandlers",
        "from_collection": "webhookhandlers",
        "skip_fields": set()
    }
)

def execute_command(cmd):
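    """Run a shell command; print the captured stderr and exit on failure."""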
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
    stdout, stderr = proc.communicate()
    # Check the exit code rather than stderr alone: tools such as cp and tar
    # can write warnings to stderr even when they succeed.
    if proc.returncode != 0:
        print("\n\n***** Failed to execute command *****\n [{}] "
              "\n*************************************\n".format(stderr.decode(errors="replace")))
        sys.exit(1)


def get_ctm_config_data():
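    """Load the Continuum YAML config (path overridable via CONTINUUM_CONFIG)."""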
    ctm_yaml_file = os.environ.get("CONTINUUM_CONFIG", "/etc/continuum/continuum.yaml")
    with open(ctm_yaml_file) as yaml_fp:
        config_data = yaml.load(yaml_fp, Loader=yaml.SafeLoader)
    return config_data


def create_dump_folder():
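    """Create the dump directory and its collections/logs subdirectories,
    exiting if a dump with the same name already exists."""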
    if os.path.exists(BASE_DUMP_DIR):
        print("A data dump folder named [{}] already exists; aborting.".format(BASE_DUMP_DIR_NAME))
        sys.exit(0)

    os.mkdir(BASE_DUMP_DIR)
    for child_dir in (COLLECTION_DIR, BASE_LOG_DIR):
        os.mkdir(child_dir)
    print("Successfully created AgilitySync data dump directory [{}].".format(BASE_DUMP_DIR_NAME))


def jsonSerializeHandler(obj):
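    """json.dumps fallback for Decimal, ObjectId, datetime and other non-JSON types."""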
    # decimals
    if isinstance(obj, decimal.Decimal):
        return float(obj)

    # Mongo results will often have the ObjectId type
    if isinstance(obj, ObjectId):
        return str(obj)

    # date time
    if hasattr(obj, "isoformat"):
        return obj.isoformat()
    else:
        return str(obj)

def write_json_to_file(dir_path, file_name, data):
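    """Write documents to <file_name>.json in dir_path, one JSON object per line;
    events documents get their system_mapping schema rewritten first."""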
    collection_name = file_name
    file_name = "{}.json".format(file_name)
    collection_file = os.path.join(dir_path, file_name)

    with open(collection_file, "w") as write_obj:
        for line in data:
            if collection_name == "events":
                try:
                    line = modify_events_doc_system_mapping(line)
                except Exception as e:
                    print("Event schmea change faced a unknow error. Event ID - [{}]"
                    " - Error - [{}]".format(line.get("id"), e))
            write_obj.write(json.dumps(line, default=jsonSerializeHandler, sort_keys=True))
            write_obj.write("\n")

    print("Data Dump written to file [{}]".format(file_name))


def get_encryption_key():
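    """Write the Continuum encryption/decryption keys to a JSON file in the dump directory."""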
    config_data = get_ctm_config_data()

    data = {
        "encryption_key": config_data.get('system', {}).get('key'),
        "unencryption_key": CONFIG['key']
    }

    encryption_key_file = os.path.join(BASE_DUMP_DIR, ENCRYPTION_KEY_FILE_NAME)
    with open(encryption_key_file, "w") as fp:
        json.dump(data, fp, ensure_ascii=False, indent=4)
    print("Encryption key process completed successfully.")

def get_fields(collection_info):
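    """Return the top-level field names of one sample document, excluding the
    collection's skip_fields; empty list if the collection has no documents."""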
    doc = db_obj[collection_info["from_collection"]].find_one({})
    if doc:
        fields = list(set(doc.keys()) - collection_info["skip_fields"])
        return fields

    return []

def modify_events_doc_system_mapping(event_doc):
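    """Regroup the flat plugin keys in an event's system_mapping into
    project_info/asset_info sub-documents, per the plugin_config renames."""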
    if "system_mapping" not in event_doc:
        return event_doc

    plugin_info = []

    system_mapping = event_doc["system_mapping"]
    inbound_plugin_name = event_doc["plugin_name"]
    outbound_plugin_name = event_doc["out_plugin_name"]

    if inbound_plugin_name in plugin_config:
        plugin_info.append(("inbound", inbound_plugin_name))

    if outbound_plugin_name in plugin_config:
        plugin_info.append(("outbound", outbound_plugin_name))

    # system_mapping is the same object stored on event_doc, so in-place edits
    # are sufficient; no reassignment is needed afterwards.
    for bound, plugin_name in plugin_info:
        for config in plugin_config[plugin_name]:
            bound_config = system_mapping["{}_plugin_config".format(bound)]
            temp = {}
            for key, value in plugin_config[plugin_name][config].items():
                if value in bound_config:
                    temp[key] = bound_config[value]
            for del_key in set(plugin_config[plugin_name][config].values()):
                if del_key in bound_config:
                    del bound_config[del_key]
            bound_config[config] = temp
    return event_doc

def modify_data_map_system_mapping_schema(collection_doc):
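    """Apply the system_mapping regrouping to both directions of a data_map document."""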
    for direction in ("forward-direction", "backward-direction"):
        system_mappings = []
        for system_mapping in collection_doc[direction]["system_mapping"]:
            for bound in ("inbound", "outbound"):
                plugin_name = collection_doc[direction][bound]["plugin_name"]
                if plugin_name in plugin_config:
                    bound_config = system_mapping["{}_plugin_config".format(bound)]
                    for config in plugin_config[plugin_name]:
                        temp = {}
                        for key, value in plugin_config[plugin_name][config].items():
                            temp[key] = bound_config[value]
                        for del_key in set(plugin_config[plugin_name][config].values()):
                            del bound_config[del_key]
                        bound_config[config] = temp
            system_mappings.append(system_mapping)
        collection_doc[direction]["system_mapping"] = system_mappings
    return collection_doc

def export_collections(db_obj):
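    """Export each collection in collections_to_export as a JSON-lines file,
    applying per-collection schema fix-ups, then export the dependent
    plugin_instances, fields and events data."""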
    field_ids = []
    data_map_ids = []
    for collection_info in collections_to_export:
        file_name = collection_info["to_collection"] + ".json"
        collection_file = os.path.join(COLLECTION_DIR, file_name)
        file_obj = open(collection_file, "w")
        print("Exporting collection {}.".format(collection_info["from_collection"]))
        for collection_doc in db_obj[collection_info["from_collection"]].find({}):
            for skip_field in collection_info["skip_fields"]:
                if skip_field in collection_doc:
                    del collection_doc[skip_field]

            if collection_info["from_collection"] == "transformer.data_map":
                data_map_ids.append(str(collection_doc['_id']))
                field_ids.append(ObjectId(collection_doc["forward-direction"]["inbound"]["field_id"]))
                field_ids.append(ObjectId(collection_doc["forward-direction"]["outbound"]["field_id"]))
                collection_doc = modify_data_map_system_mapping_schema(collection_doc)
            elif collection_info["from_collection"] == "transformer.sync_reference":
                for direction in ("forward-direction", "backward-direction"):
                    for w_ref in collection_doc[direction]["workitem_ref"]:
                        if "url" in w_ref:
                            w_ref["relative_url"] = w_ref.pop("url")
            elif collection_info["from_collection"] == "webhookhandlers":
                if "directives" in collection_doc:
                    if len(collection_doc["directives"]) > 0:
                        for idx in range(len(collection_doc["directives"])):
                            if collection_doc["directives"][idx]["type"] == "initiate_pipeline":
                                collection_doc["directives"].pop(idx)
                else:
                    print("Skipping webhookhandler document. \"directives\" key "
                    "not found for - {}".format(str(collection_doc["_id"])))
                    continue

            file_obj.write(json.dumps(collection_doc, default=jsonSerializeHandler, sort_keys=True))
            file_obj.write("\n")
        file_obj.close()
        print("Done exporting collection {}.".format(collection_info["from_collection"]))

    export_plugin_collection_data(db_obj)

    if field_ids:
        export_fields_collection(db_obj, field_ids)
    if data_map_ids:
        export_events_collection(db_obj, data_map_ids)

def export_fields_collection(db_obj, field_ids):
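    """Export only the transformer.fields documents referenced by the exported data maps."""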
    # find() returns a cursor, which is always truthy; materialize it so the
    # emptiness check actually works.
    fields_records = list(db_obj.transformer.fields.find({"_id": {"$in": field_ids}}))
    if fields_records:
        print("Exporting collection fields.")
        write_json_to_file(COLLECTION_DIR, "fields", fields_records)
        print("Done exporting collection fields.")

def export_events_collection(db_obj, data_map_ids):
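    """Export only the transformer.events documents tied to the exported data maps."""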
    events_records = list(db_obj.transformer.events.find({"data_map_id": {"$in": data_map_ids}}))
    if events_records:
        print("Exporting collection events...")
        write_json_to_file(COLLECTION_DIR, "events", events_records)
        print("Done exporting collection events.")

def export_plugin_collection_data(db_obj):
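    """Flatten the instances of supported plugins out of flow.plugin_config and
    export them as the plugin_instances collection."""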
    plugin_collection_name = "plugin_instances"
    instances_doc_list = []
    add_webhook_handler_name_to_plugin_instances(db_obj)

    for plugin_doc in db_obj.flow.plugin_config.find({"_plugin": {"$in": AS_SUPPORTED_PLUGINS}}):
        for instance_doc in plugin_doc.get("instances", []):
            if not instance_doc.get("webhook_handler_name"):
                print("Skipping the instance detail export for plugin name "
                "[{}] with id [{}] (Found empty webhook handler name).".format(plugin_doc["_plugin"],
                str(instance_doc["_id"])))
                continue
            instance_doc["_id"] = str(instance_doc["_id"])
            instance_doc["plugin_name"] = plugin_doc["_plugin"]
            instance_doc["created_by"] = "administrator"
            if plugin_doc['_plugin'] in PLUGINS_TO_REMOVE_END_SLASHES:
                instance_doc['url'] = instance_doc['url'].rstrip('/')
            instances_doc_list.append(instance_doc)

    if instances_doc_list:
        print("Exporting collection plugin_instances.")
        write_json_to_file(COLLECTION_DIR, plugin_collection_name, instances_doc_list)
        print("Done exporting collection plugin_instances.")
    else:
        print("Collection [{}] has no data. ".format("flow.plugin_config"))


def copy_log_files():
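    """Copy the Agility Connect log files into the dump's logs directory."""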
    print("Copying log files started.")
    config_data = get_ctm_config_data()
    config_logs_folder_path = config_data.get("system", {}).get("logfiles")
    logs_folder_path = (os.path.join(config_logs_folder_path, "agility_connect", "")
                        if config_logs_folder_path else DEFAULT_LOGS_FOLDER_PATH)
    cmd = "cp -r {}* {}".format(logs_folder_path, BASE_LOG_DIR)
    execute_command(cmd)
    print("Copying log files completed.")


def bundle_data_dumps():
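    """Pack the dump directory into a gzipped tar archive in the base path."""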
    print("Bundle agility connect log files process started.")
    cmd = "tar -Pcf {} -C {} {}".format(TAR_FILE, DEFAULT_BASE_PATH, BASE_DUMP_DIR_NAME)
    execute_command(cmd)
    print("Bundling log files of agility connect completed and file is in {}".format(TAR_NAME))

def service_config_changes():
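    """Comment out the ctm-echo_datasync entry in the Continuum service config file."""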
    service_config_file = os.environ.get("CONTINUUM_SERVICES", "/etc/continuum/service.conf")
    print("Starting to change service conf file.")
    with open(service_config_file) as service_file:
        service_file_data = service_file.readlines()
    if "service ctm-echo_datasync\n" in service_file_data:
        idx = service_file_data.index("service ctm-echo_datasync\n")
        service_file_data.pop(idx)
        service_file_data.insert(idx, "# service ctm-echo_datasync\n")
        with open(service_config_file, "w") as service_file:
            for line in service_file_data:
                service_file.write(line)
    print("Completed changes for service conf file.")

def disable_continuum_modules():
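    """Strip the Echo features from continuum.yaml and disable the task management UI pages."""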
    print("Starting to disable the unused continuum modules.")
    ctm_yaml_file = os.environ.get("CONTINUUM_CONFIG", "/etc/continuum/continuum.yaml")
    with open(ctm_yaml_file, "r") as f:
        yaml_file_data = yaml.load(f, Loader=yaml.SafeLoader)
    yaml_file_data_copy = copy.deepcopy(yaml_file_data)
    if "features" in yaml_file_data["system"]:
        for feature in yaml_file_data["system"]["features"]:
            if feature in ['ECHO', 'Echo Data Migration', 'Echo Import Mapping']:
                yaml_file_data_copy["system"]["features"].remove(feature)
        if len(yaml_file_data_copy["system"]["features"]) == 0:
            del yaml_file_data_copy["system"]["features"]

    if "disabled" not in yaml_file_data_copy["system"]:
        yaml_file_data_copy["system"]["disabled"] = {}

    if "pages" not in yaml_file_data_copy["system"]["disabled"]:
        yaml_file_data_copy["system"]["disabled"]["pages"] = {}
    yaml_file_data_copy["system"]["disabled"]["pages"]["taskManage"] = True
    yaml_file_data_copy["system"]["disabled"]["pages"]["taskActivityLog"] = True

    with open(ctm_yaml_file, "w") as f:
        yaml.dump(yaml_file_data_copy, f, sort_keys=False, default_flow_style=False)

    print("Process to disable the unused continuum modules got completed.")


if __name__ == "__main__":

    parser = argparse.ArgumentParser(description="Export a Continuum data dump for AgilitySync import.")
    parser.add_argument("-p", "--path", dest="export_path", type=str, action="store", default="/tmp",
                        help="Directory in which to create the data dump (default: /tmp).")
    args = parser.parse_args()

    DEFAULT_BASE_PATH = args.export_path

    BASE_DUMP_DIR = os.path.join(DEFAULT_BASE_PATH, BASE_DUMP_DIR_NAME)
    COLLECTION_DIR = os.path.join(BASE_DUMP_DIR, "collections")
    BASE_LOG_DIR = os.path.join(BASE_DUMP_DIR, "logs")
    TAR_NAME = "{}.tar.gz".format(BASE_DUMP_DIR_NAME)
    TAR_FILE = os.path.join(DEFAULT_BASE_PATH, TAR_NAME)

    print("Creating folder to dump data...")
    create_dump_folder()
    print("Done creating folder to dump data.")

    db_obj = catocommon.new_mongo_conn()

    print("Dumping required collections from Mongo DB...")
    export_collections(db_obj)
    print("Done dumping required collections from Mongo DB.")

    get_encryption_key()
    print("Copying required log files...")
    copy_log_files()
    print("Done copying required log files.")

    print("Bundling all dumped data...")
    bundle_data_dumps()
    print("Done bundling all dumped data.")
    disable_continuum_modules()
    service_config_changes()
