diff --git a/dotdrop/action.py b/dotdrop/action.py index 617bf72..daf1545 100644 --- a/dotdrop/action.py +++ b/dotdrop/action.py @@ -10,10 +10,10 @@ import subprocess import os # local imports -from dotdrop.logger import Logger +from dotdrop.dictparser import DictParser -class Cmd: +class Cmd(DictParser): eq_ignore = ('log',) def __init__(self, key, action): @@ -23,7 +23,10 @@ class Cmd: """ self.key = key self.action = action - self.log = Logger() + + @classmethod + def _adjust_yaml_keys(cls, value): + return {'action': value} def __str__(self): return 'key:{} -> \"{}\"'.format(self.key, self.action) @@ -50,20 +53,35 @@ class Cmd: class Action(Cmd): - def __init__(self, key, kind, action, *args): + pre = 'pre' + post = 'post' + + def __init__(self, key, kind, action): """constructor @key: action key @kind: type of action (pre or post) @action: action string - @args: action arguments """ super(Action, self).__init__(key, action) self.kind = kind - self.args = args + self.args = [] + + @classmethod + def parse(cls, key, value): + """parse key value into object""" + v = {} + v['kind'], v['action'] = value + return cls(key=key, **v) + + def copy(self, args): + """return a copy of this object with arguments""" + action = Action(self.key, self.kind, self.action) + action.args = args + return action def __str__(self): - out = '{}: \"{}\" with args: {}' - return out.format(self.key, self.action, self.args) + out = '{}: \"{}\" ({})' + return out.format(self.key, self.action, self.kind) def __repr__(self): return 'action({})'.format(self.__str__()) @@ -74,6 +92,7 @@ class Action(Cmd): action = self.action if templater: action = templater.generate_string(self.action) + cmd = action try: cmd = action.format(*self.args) except IndexError: @@ -94,9 +113,11 @@ class Action(Cmd): class Transform(Cmd): def transform(self, arg0, arg1): - """execute transformation with {0} and {1} - where {0} is the file to transform and - {1} is the result file""" + """ + execute transformation with {0} and {1} + where {0} is the file to transform + and {1} is the result file + """ ret = 1 cmd = self.action.format(arg0, arg1) if os.path.exists(arg1): diff --git a/dotdrop/cfg_aggregator.py b/dotdrop/cfg_aggregator.py new file mode 100644 index 0000000..5e35bf5 --- /dev/null +++ b/dotdrop/cfg_aggregator.py @@ -0,0 +1,306 @@ +""" +author: deadc0de6 (https://github.com/deadc0de6) +Copyright (c) 2019, deadc0de6 + +handle higher level of the config file +""" + +import os +import shlex + + +# local imports +from dotdrop.cfg_yaml import CfgYaml +from dotdrop.dotfile import Dotfile +from dotdrop.settings import Settings +from dotdrop.profile import Profile +from dotdrop.action import Action, Transform +from dotdrop.logger import Logger +from dotdrop.utils import strip_home + + +class CfgAggregator: + + file_prefix = 'f' + dir_prefix = 'd' + key_sep = '_' + + def __init__(self, path, profile=None, debug=False): + """ + high level config parser + @path: path to the config file + @profile: selected profile + @debug: debug flag + """ + self.path = path + self.profile = profile + self.debug = debug + self.log = Logger() + self._load() + + def _load(self): + """load lower level config""" + self.cfgyaml = CfgYaml(self.path, + self.profile, + debug=self.debug) + + # settings + self.settings = Settings.parse(None, self.cfgyaml.settings) + self.settings.resolve_paths(self.cfgyaml.resolve_path) + if self.debug: + self.log.dbg('settings: {}'.format(self.settings)) + + # dotfiles + self.dotfiles = Dotfile.parse_dict(self.cfgyaml.dotfiles) + if 
self.debug: + self.log.dbg('dotfiles: {}'.format(self.dotfiles)) + + # profiles + self.profiles = Profile.parse_dict(self.cfgyaml.profiles) + if self.debug: + self.log.dbg('profiles: {}'.format(self.profiles)) + + # actions + self.actions = Action.parse_dict(self.cfgyaml.actions) + if self.debug: + self.log.dbg('actions: {}'.format(self.actions)) + + # trans_r + self.trans_r = Transform.parse_dict(self.cfgyaml.trans_r) + if self.debug: + self.log.dbg('trans_r: {}'.format(self.trans_r)) + + # trans_w + self.trans_w = Transform.parse_dict(self.cfgyaml.trans_w) + if self.debug: + self.log.dbg('trans_w: {}'.format(self.trans_w)) + + # variables + self.variables = self.cfgyaml.variables + if self.debug: + self.log.dbg('variables: {}'.format(self.variables)) + + # patch dotfiles in profiles + self._patch_keys_to_objs(self.profiles, + "dotfiles", self.get_dotfile) + + # patch action in actions + self._patch_keys_to_objs(self.dotfiles, + "actions", self._get_action_w_args) + self._patch_keys_to_objs(self.profiles, + "actions", self._get_action_w_args) + + # patch default actions in settings + self._patch_keys_to_objs([self.settings], + "default_actions", self._get_action_w_args) + if self.debug: + msg = 'default actions: {}'.format(self.settings.default_actions) + self.log.dbg(msg) + + # patch trans_w/trans_r in dotfiles + self._patch_keys_to_objs(self.dotfiles, + "trans_r", self.get_trans_r) + self._patch_keys_to_objs(self.dotfiles, + "trans_w", self.get_trans_w) + + def _patch_keys_to_objs(self, containers, keys, get_by_key): + """ + patch each object in "containers" containing + a list of keys in the attribute "keys" with + the returned object of the function "get_by_key" + """ + if not containers: + return + if self.debug: + self.log.dbg('patching {} ...'.format(keys)) + for c in containers: + objects = [] + okeys = getattr(c, keys) + if not okeys: + continue + for k in okeys: + o = get_by_key(k) + if not o: + err = 'bad key for \"{}\": {}'.format(c.key, k) + raise Exception(err) + objects.append(o) + if self.debug: + self.log.dbg('patching {}.{} with {}'.format(c, keys, objects)) + setattr(c, keys, objects) + + def new(self, src, dst, link, profile_key): + """ + import a new dotfile + @src: path in dotpath + @dst: path in FS + @link: LinkType + @profile_key: to which profile + """ + home = os.path.expanduser('~') + dst = dst.replace(home, '~', 1) + + dotfile = self._get_dotfile_by_dst(dst) + if not dotfile: + # get a new dotfile with a unique key + key = self._get_new_dotfile_key(dst) + if self.debug: + self.log.dbg('new dotfile key: {}'.format(key)) + # add the dotfile + self.cfgyaml.add_dotfile(key, src, dst, link) + dotfile = Dotfile(key, dst, src) + + key = dotfile.key + ret = self.cfgyaml.add_dotfile_to_profile(key, profile_key) + if self.debug: + self.log.dbg('new dotfile {} to profile {}'.format(key, + profile_key)) + + # reload + self.cfgyaml.save() + if self.debug: + self.log.dbg('RELOADING') + self._load() + return ret + + def _get_new_dotfile_key(self, dst): + """return a new unique dotfile key""" + path = os.path.expanduser(dst) + existing_keys = [x.key for x in self.dotfiles] + if self.settings.longkey: + return self._get_long_key(path, existing_keys) + return self._get_short_key(path, existing_keys) + + def _norm_key_elem(self, elem): + """normalize path element for sanity""" + elem = elem.lstrip('.') + elem = elem.replace(' ', '-') + return elem.lower() + + def _split_path_for_key(self, path): + """return a list of path elements, excluded home path""" + p = strip_home(path) + 
dirs = [] + while True: + p, f = os.path.split(p) + dirs.append(f) + if not p or not f: + break + dirs.reverse() + # remove empty entries + dirs = filter(None, dirs) + # normalize entries + return list(map(self._norm_key_elem, dirs)) + + def _get_long_key(self, path, keys): + """ + return a unique long key representing the + absolute path of path + """ + dirs = self._split_path_for_key(path) + prefix = self.dir_prefix if os.path.isdir(path) else self.file_prefix + key = self.key_sep.join([prefix, *dirs]) + return self._uniq_key(key, keys) + + def _get_short_key(self, path, keys): + """ + return a unique key where path + is known not to be an already existing dotfile + """ + dirs = self._split_path_for_key(path) + dirs.reverse() + prefix = self.dir_prefix if os.path.isdir(path) else self.file_prefix + entries = [] + for d in dirs: + entries.insert(0, d) + key = self.key_sep.join([prefix, *entries]) + if key not in keys: + return key + return self._uniq_key(key, keys) + + def _uniq_key(self, key, keys): + """unique dotfile key""" + newkey = key + cnt = 1 + while newkey in keys: + # if unable to get a unique path + # get a random one + newkey = self.key_sep.join([key, cnt]) + cnt += 1 + return newkey + + def _get_dotfile_by_dst(self, dst): + """get a dotfile by dst""" + try: + return next(d for d in self.dotfiles if d.dst == dst) + except StopIteration: + return None + + def save(self): + """save the config""" + return self.cfgyaml.save() + + def dump(self): + """dump the config dictionary""" + return self.cfgyaml.dump() + + def get_settings(self): + """return settings as a dict""" + return self.settings.serialize()[Settings.key_yaml] + + def get_variables(self): + """return variables""" + return self.variables + + def get_profiles(self): + """return profiles""" + return self.profiles + + def get_dotfiles(self, profile=None): + """return dotfiles dict for this profile key""" + if not profile: + return self.dotfiles + try: + return next(x.dotfiles for x in self.profiles if x.key == profile) + except StopIteration: + return [] + + def get_dotfile(self, key): + """return dotfile by key""" + try: + return next(x for x in self.dotfiles if x.key == key) + except StopIteration: + return None + + def get_action(self, key): + """return action by key""" + try: + return next(x for x in self.actions if x.key == key) + except StopIteration: + return None + + def _get_action_w_args(self, key): + """return action by key with the arguments""" + fields = shlex.split(key) + if len(fields) > 1: + # we have args + key, *args = fields + if self.debug: + self.log.dbg('action with parm: {} and {}'.format(key, args)) + action = self.get_action(key).copy(args) + else: + action = self.get_action(key) + return action + + def get_trans_r(self, key): + """return the trans_r with this key""" + try: + return next(x for x in self.trans_r if x.key == key) + except StopIteration: + return None + + def get_trans_w(self, key): + """return the trans_w with this key""" + try: + return next(x for x in self.trans_w if x.key == key) + except StopIteration: + return None diff --git a/dotdrop/cfg_yaml.py b/dotdrop/cfg_yaml.py new file mode 100644 index 0000000..29c4ee2 --- /dev/null +++ b/dotdrop/cfg_yaml.py @@ -0,0 +1,626 @@ +""" +author: deadc0de6 (https://github.com/deadc0de6) +Copyright (c) 2019, deadc0de6 + +handle lower level of the config file +""" + +import os +import yaml + +# local imports +from dotdrop.settings import Settings +from dotdrop.logger import Logger +from dotdrop.templategen import Templategen +from 
dotdrop.linktypes import LinkTypes +from dotdrop.utils import shell + + +class CfgYaml: + + # global entries + key_settings = 'config' + key_dotfiles = 'dotfiles' + key_profiles = 'profiles' + key_actions = 'actions' + key_trans_r = 'trans' + key_trans_w = 'trans_write' + key_variables = 'variables' + key_dvariables = 'dynvariables' + + action_pre = 'pre' + action_post = 'post' + + # profiles/dotfiles entries + key_profiles_dotfiles = 'dotfiles' + key_dotfile_src = 'src' + key_dotfile_dst = 'dst' + key_dotfile_link = 'link' + key_dotfile_actions = 'actions' + key_dotfile_link_children = 'link_children' + + # profile + key_profile_include = 'include' + key_profile_variables = 'variables' + key_profile_dvariables = 'dynvariables' + key_all = 'ALL' + + # import entries + key_import_actions = 'import_actions' + key_import_configs = 'import_configs' + key_import_variables = 'import_variables' + key_import_profile_dfs = 'import' + + # settings + key_settings_dotpath = 'dotpath' + key_settings_workdir = 'workdir' + key_settings_link_dotfile_default = 'link_dotfile_default' + key_imp_link = 'link_on_import' + + # link values + lnk_nolink = LinkTypes.NOLINK.name.lower() + lnk_link = LinkTypes.LINK.name.lower() + lnk_children = LinkTypes.LINK_CHILDREN.name.lower() + + def __init__(self, path, profile=None, debug=False): + """ + config parser + @path: config file path + @profile: the selected profile + @debug: debug flag + """ + self.path = os.path.abspath(path) + self.profile = profile + self.debug = debug + self.log = Logger() + self.dirty = False + + self.yaml_dict = self._load_yaml(self.path) + self._fix_deprecated(self.yaml_dict) + self._parse_main_yaml(self.yaml_dict) + if self.debug: + self.log.dbg('current dict: {}'.format(self.yaml_dict)) + + # resolve variables + allvars = self._merge_and_apply_variables() + self.variables.update(allvars) + # process imported configs + self._resolve_import_configs() + # process other imports + self._resolve_imports() + # process diverse options + self._resolve_rest() + # patch dotfiles paths + self._resolve_dotfile_paths() + + def _parse_main_yaml(self, dic): + """parse the different blocks""" + self.ori_settings = self._get_entry(self.yaml_dict, self.key_settings) + self.settings = Settings(None).serialize().get(self.key_settings) + self.settings.update(self.ori_settings) + if self.debug: + self.log.dbg('settings: {}'.format(self.settings)) + + # dotfiles + self.dotfiles = self._get_entry(self.yaml_dict, self.key_dotfiles) + if self.debug: + self.log.dbg('dotfiles: {}'.format(self.dotfiles)) + + # profiles + self.profiles = self._get_entry(self.yaml_dict, self.key_profiles) + if self.debug: + self.log.dbg('profiles: {}'.format(self.profiles)) + + # actions + self.actions = self._get_entry(self.yaml_dict, self.key_actions, + mandatory=False) + self.actions = self._patch_actions(self.actions) + if self.debug: + self.log.dbg('actions: {}'.format(self.actions)) + + # trans_r + self.trans_r = self._get_entry(self.yaml_dict, self.key_trans_r, + mandatory=False) + if self.debug: + self.log.dbg('trans_r: {}'.format(self.trans_r)) + + # trans_w + self.trans_w = self._get_entry(self.yaml_dict, self.key_trans_w, + mandatory=False) + if self.debug: + self.log.dbg('trans_w: {}'.format(self.trans_w)) + + # variables + self.variables = self._get_entry(self.yaml_dict, self.key_variables, + mandatory=False) + if self.debug: + self.log.dbg('variables: {}'.format(self.variables)) + + # dynvariables + self.dvariables = self._get_entry(self.yaml_dict, self.key_dvariables, + 
mandatory=False) + if self.debug: + self.log.dbg('dvariables: {}'.format(self.dvariables)) + + def _resolve_dotfile_paths(self): + """resolve dotfile paths""" + for dotfile in self.dotfiles.values(): + src = dotfile[self.key_dotfile_src] + src = os.path.join(self.settings[self.key_settings_dotpath], src) + dotfile[self.key_dotfile_src] = self.resolve_path(src) + dst = dotfile[self.key_dotfile_dst] + dotfile[self.key_dotfile_dst] = self.resolve_path(dst) + + def _merge_and_apply_variables(self): + """ + resolve all variables across the config + apply them to any needed entries + and return the full list of variables + """ + # first construct the list of variables + var = self._get_variables_dict(self.profile, seen=[self.profile]) + dvar = self._get_dvariables_dict(self.profile, seen=[self.profile]) + + # recursive resolve variables + allvars = var.copy() + allvars.update(dvar) + if self.debug: + self.log.dbg('all variables: {}'.format(allvars)) + + t = Templategen(variables=allvars) + for k in allvars.keys(): + val = allvars[k] + while Templategen.var_is_template(val): + val = t.generate_string(val) + allvars[k] = val + t.update_variables(allvars) + + # exec dynvariables + for k in dvar.keys(): + allvars[k] = shell(allvars[k]) + + if self.debug: + self.log.dbg('variables:') + for k, v in allvars.items(): + self.log.dbg('\t\"{}\": {}'.format(k, v)) + + if self.debug: + self.log.dbg('resolve all uses of variables in config') + + # now resolve blocks + t = Templategen(variables=allvars) + + # dotfiles entries + for k, v in self.dotfiles.items(): + # src + src = v.get(self.key_dotfile_src) + v[self.key_dotfile_src] = t.generate_string(src) + # dst + dst = v.get(self.key_dotfile_dst) + v[self.key_dotfile_dst] = t.generate_string(dst) + # actions + new = [] + for a in v.get(self.key_dotfile_actions, []): + new.append(t.generate_string(a)) + if new: + if self.debug: + self.log.dbg('resolved: {}'.format(new)) + v[self.key_dotfile_actions] = new + + # external actions paths + new = [] + for p in self.settings.get(self.key_import_actions, []): + new.append(t.generate_string(p)) + if new: + if self.debug: + self.log.dbg('resolved: {}'.format(new)) + self.settings[self.key_import_actions] = new + + # external config paths + new = [] + for p in self.settings.get(self.key_import_configs, []): + new.append(t.generate_string(p)) + if new: + if self.debug: + self.log.dbg('resolved: {}'.format(new)) + self.settings[self.key_import_configs] = new + + # external variables paths + new = [] + for p in self.settings.get(self.key_import_variables, []): + new.append(t.generate_string(p)) + if new: + if self.debug: + self.log.dbg('resolved: {}'.format(new)) + self.settings[self.key_import_variables] = new + + # external profiles dotfiles + for k, v in self.profiles.items(): + new = [] + for p in v.get(self.key_import_profile_dfs, []): + new.append(t.generate_string(p)) + if new: + if self.debug: + self.log.dbg('resolved: {}'.format(new)) + v[self.key_import_profile_dfs] = new + + return allvars + + def _patch_actions(self, actions): + """ + ensure each action is either pre or post explicitely + action entry of the form {action_key: (pre|post, action)} + """ + if not actions: + return actions + new = {} + for k, v in actions.items(): + if k == self.action_pre or k == self.action_post: + for key, action in v.items(): + new[key] = (k, action) + else: + new[k] = (self.action_pre, v) + return new + + def _get_variables_dict(self, profile, seen, sub=False): + """return enriched variables""" + variables = {} + if not sub: 
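# [editor's illustrative sketch, not part of this commit] A rough picture of
# what _get_variables_dict() returns, assuming a hypothetical config such as:
#   variables: {editor: vim}
#   profiles:
#     base: {variables: {shell: zsh}}
#     home: {include: [base], variables: {editor: nvim}}
# Calling _get_variables_dict('home', seen=['home']) would yield roughly:
#   {'profile': 'home',
#    '_dotdrop_dotpath': '<absolute dotpath>',
#    '_dotdrop_cfgpath': '<absolute config path>',
#    '_dotdrop_workdir': '<absolute workdir>',
#    'editor': 'nvim',   # the profile variable overrides the global one
#    'shell': 'zsh'}     # inherited from the included "base" profile
# The built-in entries (profile, _dotdrop_*) are only added at the top level
# (sub=False); recursive calls for included profiles skip them.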
+ # add profile variable + if profile: + variables['profile'] = profile + # add some more variables + p = self.settings.get(self.key_settings_dotpath) + p = self.resolve_path(p) + variables['_dotdrop_dotpath'] = p + variables['_dotdrop_cfgpath'] = self.resolve_path(self.path) + p = self.settings.get(self.key_settings_workdir) + p = self.resolve_path(p) + variables['_dotdrop_workdir'] = p + + # variables + variables.update(self.variables) + + if not profile or profile not in self.profiles.keys(): + return variables + + # profile entry + pentry = self.profiles.get(profile) + + # inherite profile variables + for inherited_profile in pentry.get(self.key_profile_include, []): + if inherited_profile == profile or inherited_profile in seen: + raise Exception('\"include\" loop') + seen.append(inherited_profile) + new = self._get_variables_dict(inherited_profile, seen, sub=True) + variables.update(new) + + # overwrite with profile variables + for k, v in pentry.get(self.key_profile_variables, {}).items(): + variables[k] = v + + return variables + + def _get_dvariables_dict(self, profile, seen, sub=False): + """return dynvariables""" + variables = {} + + # dynvariables + variables.update(self.dvariables) + + if not profile or profile not in self.profiles.keys(): + return variables + + # profile entry + pentry = self.profiles.get(profile) + + # inherite profile dynvariables + for inherited_profile in pentry.get(self.key_profile_include, []): + if inherited_profile == profile or inherited_profile in seen: + raise Exception('\"include loop\"') + seen.append(inherited_profile) + new = self._get_dvariables_dict(inherited_profile, seen, sub=True) + variables.update(new) + + # overwrite with profile dynvariables + for k, v in pentry.get(self.key_profile_dvariables, {}).items(): + variables[k] = v + + return variables + + def _resolve_imports(self): + """handle all the imports""" + # settings -> import_variables + imp = self.settings.get(self.key_import_variables, None) + if imp: + for p in imp: + path = self.resolve_path(p) + if self.debug: + self.log.dbg('import variables from {}'.format(path)) + self.variables = self._import_sub(path, self.key_variables, + self.variables, + mandatory=False) + self.dvariables = self._import_sub(path, self.key_dvariables, + self.dvariables, + mandatory=False) + # settings -> import_actions + imp = self.settings.get(self.key_import_actions, None) + if imp: + for p in imp: + path = self.resolve_path(p) + if self.debug: + self.log.dbg('import actions from {}'.format(path)) + self.actions = self._import_sub(path, self.key_actions, + self.actions, mandatory=False, + patch_func=self._patch_actions) + + # profiles -> import + for k, v in self.profiles.items(): + imp = v.get(self.key_import_profile_dfs, None) + if not imp: + continue + if self.debug: + self.log.dbg('import dotfiles for profile {}'.format(k)) + for p in imp: + current = v.get(self.key_dotfiles, []) + path = self.resolve_path(p) + current = self._import_sub(path, self.key_dotfiles, + current, mandatory=False) + v[self.key_dotfiles] = current + + def _resolve_import_configs(self): + """resolve import_configs""" + # settings -> import_configs + imp = self.settings.get(self.key_import_configs, None) + if not imp: + return + for p in imp: + path = self.resolve_path(p) + if self.debug: + self.log.dbg('import config from {}'.format(path)) + sub = CfgYaml(path, debug=self.debug) + # settings is ignored + self.dotfiles = self._merge_dict(self.dotfiles, sub.dotfiles) + self.profiles = self._merge_dict(self.profiles, 
sub.profiles) + self.actions = self._merge_dict(self.actions, sub.actions) + self.trans_r = self._merge_dict(self.trans_r, sub.trans_r) + self.trans_w = self._merge_dict(self.trans_w, sub.trans_w) + self.variables = self._merge_dict(self.variables, sub.variables) + self.dvariables = self._merge_dict(self.dvariables, sub.dvariables) + + def _resolve_rest(self): + """resolve some other parts of the config""" + # profile -> ALL + for k, v in self.profiles.items(): + dfs = v.get(self.key_profiles_dotfiles, None) + if not dfs: + continue + if self.debug: + self.log.dbg('add ALL to profile {}'.format(k)) + if self.key_all in dfs: + v[self.key_profiles_dotfiles] = self.dotfiles.keys() + + # profiles -> include other profile + for k, v in self.profiles.items(): + self._rec_resolve_profile_include(k) + + def _rec_resolve_profile_include(self, profile): + """recursively resolve include of other profiles's dotfiles""" + values = self.profiles[profile] + current = values.get(self.key_profiles_dotfiles, []) + inc = values.get(self.key_profile_include, None) + if not inc: + return current + seen = [] + for i in inc: + if i in seen: + raise Exception('\"include loop\"') + seen.append(i) + if i not in self.profiles.keys(): + self.log.warn('include unknown profile: {}'.format(i)) + continue + p = self.profiles[i] + others = p.get(self.key_profiles_dotfiles, []) + if self.key_profile_include in p.keys(): + others.extend(self._rec_resolve_profile_include(i)) + current.extend(others) + # unique them + values[self.key_profiles_dotfiles] = list(set(current)) + return values.get(self.key_profiles_dotfiles, []) + + def resolve_path(self, path): + """resolve a path either absolute or relative to config path""" + path = os.path.expanduser(path) + if not os.path.isabs(path): + d = os.path.dirname(self.path) + return os.path.join(d, path) + return os.path.normpath(path) + + def _import_sub(self, path, key, current, + mandatory=False, patch_func=None): + """ + import the block "key" from "path" + and merge it with "current" + patch_func is applied before merge if defined + """ + if self.debug: + self.log.dbg('import \"{}\" from \"{}\"'.format(key, path)) + self.log.dbg('current: {}'.format(current)) + extdict = self._load_yaml(path) + new = self._get_entry(extdict, key, mandatory=mandatory) + if patch_func: + new = patch_func(new) + if not new: + self.log.warn('no \"{}\" imported from \"{}\"'.format(key, path)) + return + if self.debug: + self.log.dbg('found: {}'.format(new)) + if isinstance(current, dict) and isinstance(new, dict): + # imported entries get more priority than current + current = {**current, **new} + elif isinstance(current, list) and isinstance(new, list): + current = [*current, *new] + else: + raise Exception('invalid import {} from {}'.format(key, path)) + if self.debug: + self.log.dbg('new \"{}\": {}'.format(key, current)) + return current + + def _merge_dict(self, high, low): + """merge low into high""" + return {**low, **high} + + def _get_entry(self, yaml_dict, key, mandatory=True): + """return entry from yaml dictionary""" + if key not in yaml_dict: + if mandatory: + raise Exception('invalid config: no {} found'.format(key)) + yaml_dict[key] = {} + return yaml_dict[key] + if mandatory and not yaml_dict[key]: + # ensure is not none + yaml_dict[key] = {} + return yaml_dict[key] + + def _load_yaml(self, path): + """load a yaml file to a dict""" + content = {} + if not os.path.exists(path): + raise Exception('config path not found: {}'.format(path)) + with open(path, 'r') as f: + try: + content = 
yaml.safe_load(f) + except Exception as e: + self.log.err(e) + raise Exception('invalid config: {}'.format(path)) + return content + + def _new_profile(self, key): + """add a new profile if it doesn't exist""" + if key not in self.profiles.keys(): + # update yaml_dict + self.yaml_dict[self.key_profiles][key] = { + self.key_profiles_dotfiles: [] + } + if self.debug: + self.log.dbg('adding new profile: {}'.format(key)) + self.dirty = True + + def add_dotfile_to_profile(self, dotfile_key, profile_key): + """add an existing dotfile key to a profile_key""" + self._new_profile(profile_key) + profile = self.yaml_dict[self.key_profiles][profile_key] + if dotfile_key not in profile[self.key_profiles_dotfiles]: + profile[self.key_profiles_dotfiles].append(dotfile_key) + if self.debug: + msg = 'add \"{}\" to profile \"{}\"'.format(dotfile_key, + profile_key) + msg.format(dotfile_key, profile_key) + self.log.dbg(msg) + self.dirty = True + return self.dirty + + def add_dotfile(self, key, src, dst, link): + """add a new dotfile""" + if key in self.dotfiles.keys(): + return False + if self.debug: + self.log.dbg('adding new dotfile: {}'.format(key)) + + df_dict = { + self.key_dotfile_src: src, + self.key_dotfile_dst: dst, + } + dfl = self.settings[self.key_settings_link_dotfile_default] + if str(link) != dfl: + df_dict[self.key_dotfile_link] = str(link) + self.yaml_dict[self.key_dotfiles][key] = df_dict + self.dirty = True + + def _fix_deprecated(self, yamldict): + """fix deprecated entries""" + self._fix_deprecated_link_by_default(yamldict) + self._fix_deprecated_dotfile_link(yamldict) + + def _fix_deprecated_link_by_default(self, yamldict): + """fix deprecated link_by_default""" + key = 'link_by_default' + newkey = self.key_imp_link + if self.key_settings not in yamldict: + return + if not yamldict[self.key_settings]: + return + config = yamldict[self.key_settings] + if key not in config: + return + if config[key]: + config[newkey] = self.lnk_link + else: + config[newkey] = self.lnk_nolink + del config[key] + self.log.warn('deprecated \"link_by_default\"') + self.dirty = True + + def _fix_deprecated_dotfile_link(self, yamldict): + """fix deprecated link in dotfiles""" + if self.key_dotfiles not in yamldict: + return + if not yamldict[self.key_dotfiles]: + return + for k, dotfile in yamldict[self.key_dotfiles].items(): + new = self.lnk_nolink + if self.key_dotfile_link in dotfile and \ + type(dotfile[self.key_dotfile_link]) is bool: + # patch link: + cur = dotfile[self.key_dotfile_link] + new = self.lnk_nolink + if cur: + new = self.lnk_link + dotfile[self.key_dotfile_link] = new + self.dirty = True + self.log.warn('deprecated \"link\" value') + + elif self.key_dotfile_link_children in dotfile and \ + type(dotfile[self.key_dotfile_link_children]) is bool: + # patch link_children: + cur = dotfile[self.key_dotfile_link_children] + new = self.lnk_nolink + if cur: + new = self.lnk_children + del dotfile[self.key_dotfile_link_children] + dotfile[self.key_dotfile_link] = new + self.dirty = True + self.log.warn('deprecated \"link_children\" value') + + def _clear_none(self, dic): + """recursively delete all none/empty values in a dictionary.""" + new = {} + for k, v in dic.items(): + newv = v + if isinstance(v, dict): + newv = self._clear_none(v) + if v is None: + continue + if not v: + continue + new[k] = newv + return new + + def save(self): + """save this instance and return True if saved""" + if not self.dirty: + return False + + content = self._clear_none(self.dump()) + if self.debug: + 
self.log.dbg('saving: {}'.format(content)) + with open(self.path, 'w') as f: + yaml.safe_dump(content, f, + default_flow_style=False, + indent=2) + self.dirty = False + return True + + def dump(self): + """dump the config dictionary""" + return self.yaml_dict diff --git a/dotdrop/config.py b/dotdrop/config.py deleted file mode 100644 index 657de76..0000000 --- a/dotdrop/config.py +++ /dev/null @@ -1,1215 +0,0 @@ -""" -author: deadc0de6 (https://github.com/deadc0de6) -Copyright (c) 2017, deadc0de6 - -yaml config file manager -""" - -import itertools -import os -import shlex -from functools import partial -from glob import iglob - -import yaml - -# local import -from dotdrop.dotfile import Dotfile -from dotdrop.templategen import Templategen -from dotdrop.logger import Logger -from dotdrop.action import Action, Transform -from dotdrop.utils import strip_home, shell -from dotdrop.linktypes import LinkTypes - - -class Cfg: - key_all = 'ALL' - - # settings keys - key_settings = 'config' - key_dotpath = 'dotpath' - key_backup = 'backup' - key_create = 'create' - key_banner = 'banner' - key_long = 'longkey' - key_keepdot = 'keepdot' - key_ignoreempty = 'ignoreempty' - key_showdiff = 'showdiff' - key_imp_link = 'link_on_import' - key_dotfile_link = 'link_dotfile_default' - key_workdir = 'workdir' - key_cmpignore = 'cmpignore' - key_upignore = 'upignore' - key_defactions = 'default_actions' - - # import keys - key_import_vars = 'import_variables' - key_import_actions = 'import_actions' - - key_import_configs = 'import_configs' - - # actions keys - key_actions = 'actions' - key_actions_pre = 'pre' - key_actions_post = 'post' - - # transformations keys - key_trans_r = 'trans' - key_trans_w = 'trans_write' - - # template variables - key_variables = 'variables' - # shell variables - key_dynvariables = 'dynvariables' - - # dotfiles keys - key_dotfiles = 'dotfiles' - key_dotfiles_src = 'src' - key_dotfiles_dst = 'dst' - key_dotfiles_link = 'link' - key_dotfiles_link_children = 'link_children' - key_dotfiles_noempty = 'ignoreempty' - key_dotfiles_cmpignore = 'cmpignore' - key_dotfiles_actions = 'actions' - key_dotfiles_trans_r = 'trans' - key_dotfiles_trans_w = 'trans_write' - key_dotfiles_upignore = 'upignore' - - # profiles keys - key_profiles = 'profiles' - key_profiles_dots = 'dotfiles' - key_profiles_incl = 'include' - key_profiles_imp = 'import' - - # link values - lnk_nolink = LinkTypes.NOLINK.name.lower() - lnk_link = LinkTypes.LINK.name.lower() - lnk_children = LinkTypes.LINK_CHILDREN.name.lower() - - # settings defaults - default_dotpath = 'dotfiles' - default_backup = True - default_create = True - default_banner = True - default_longkey = False - default_keepdot = False - default_showdiff = False - default_ignoreempty = False - default_link_imp = lnk_nolink - default_link = lnk_nolink - default_workdir = '~/.config/dotdrop' - - def __init__(self, cfgpath, profile=None, debug=False): - """constructor - @cfgpath: path to the config file - @profile: chosen profile - @debug: enable debug - """ - if not cfgpath: - raise ValueError('config file path undefined') - if not os.path.exists(cfgpath): - raise ValueError('config file does not exist: {}'.format(cfgpath)) - - # make sure to have an absolute path to config file - self.cfgpath = os.path.abspath(cfgpath) - self.debug = debug - self._modified = False - - # init the logger - self.log = Logger() - - # represents all entries under "config" - # linked inside the yaml dict (self.content) - self.lnk_settings = {} - - # represents all entries under 
"profiles" - # linked inside the yaml dict (self.content) - self.lnk_profiles = {} - - # represents all dotfiles - # NOT linked inside the yaml dict (self.content) - self.dotfiles = {} - - # dict of all action objects by action key - # NOT linked inside the yaml dict (self.content) - self.actions = {} - - # dict of all read transformation objects by trans key - # NOT linked inside the yaml dict (self.content) - self.trans_r = {} - - # dict of all write transformation objects by trans key - # NOT linked inside the yaml dict (self.content) - self.trans_w = {} - - # represents all dotfiles per profile by profile key - # NOT linked inside the yaml dict (self.content) - self.prodots = {} - - # represents all variables from external files - # NOT linked inside the yaml dict (self.content) - self.ext_variables = {} - self.ext_dynvariables = {} - - # cmpignore patterns - # NOT linked inside the yaml dict (self.content) - self.cmpignores = [] - - # upignore patterns - # NOT linked inside the yaml dict (self.content) - self.upignores = [] - - # default actions - # NOT linked inside the yaml dict (self.content) - self.defactions = {} - - if not self._load_config(profile=profile): - raise ValueError('config is not valid') - - def __eq__(self, other): - return self.cfgpath == other.cfgpath - - def eval_dotfiles(self, profile, variables, debug=False): - """resolve dotfiles src/dst/actions templating for this profile""" - t = Templategen(variables=variables) - dotfiles = self._get_dotfiles(profile) - tvars = t.add_tmp_vars() - for d in dotfiles: - # add dotfile variables - t.restore_vars(tvars) - newvar = d.get_vars() - t.add_tmp_vars(newvars=newvar) - # src and dst path - d.src = t.generate_string(d.src) - d.dst = t.generate_string(d.dst) - # pre actions - if self.key_actions_pre in d.actions: - for action in d.actions[self.key_actions_pre]: - action.action = t.generate_string(action.action) - # post actions - if self.key_actions_post in d.actions: - for action in d.actions[self.key_actions_post]: - action.action = t.generate_string(action.action) - return dotfiles - - def _load_config(self, profile=None): - """load the yaml file""" - self.content = self._load_yaml(self.cfgpath) - if not self._is_valid(): - return False - return self._parse(profile=profile) - - def _load_yaml(self, path): - """load a yaml file to a dict""" - content = {} - if not os.path.exists(path): - return content - with open(path, 'r') as f: - try: - content = yaml.safe_load(f) - except Exception as e: - self.log.err(e) - return {} - return content - - def _is_valid(self): - """test the yaml dict (self.content) is valid""" - if self.key_profiles not in self.content: - self.log.err('missing \"{}\" in config'.format(self.key_profiles)) - return False - if self.key_settings not in self.content: - self.log.err('missing \"{}\" in config'.format(self.key_settings)) - return False - if self.key_dotfiles not in self.content: - self.log.err('missing \"{}\" in config'.format(self.key_dotfiles)) - return False - return True - - def _get_def_link(self): - """get dotfile link entry when not specified""" - string = self.lnk_settings[self.key_dotfile_link].lower() - return self._string_to_linktype(string) - - def _string_to_linktype(self, string): - """translate string to linktype""" - if string == self.lnk_link.lower(): - return LinkTypes.LINK - elif string == self.lnk_children.lower(): - return LinkTypes.LINK_CHILDREN - return LinkTypes.NOLINK - - def _parse(self, profile=None): - """parse config file""" - # parse the settings - 
self.lnk_settings = self.content[self.key_settings] or {} - if not self._complete_settings(): - return False - - # parse the profiles - # ensures self.lnk_profiles is a dict - if not isinstance(self.content[self.key_profiles], dict): - self.content[self.key_profiles] = {} - - self.lnk_profiles = self.content[self.key_profiles] - for p in filter(bool, self.lnk_profiles.values()): - # Ensures that the dotfiles entry is an empty list when not given - # or none - p.setdefault(self.key_profiles_dots, []) - if p[self.key_profiles_dots] is None: - p[self.key_profiles_dots] = [] - - # make sure we have an absolute dotpath - self.curdotpath = self.lnk_settings[self.key_dotpath] - self.lnk_settings[self.key_dotpath] = self._abs_path(self.curdotpath) - - # make sure we have an absolute workdir - self.curworkdir = self.lnk_settings[self.key_workdir] - self.lnk_settings[self.key_workdir] = self._abs_path(self.curworkdir) - - # load external variables/dynvariables - try: - paths = self.lnk_settings[self.key_import_vars] or [] - self._load_ext_variables(paths, profile=profile) - except KeyError: - pass - - # load global upignore - if self.key_upignore in self.lnk_settings: - key = self.key_upignore - self.upignores = self.lnk_settings[key].copy() or [] - - # load global cmpignore - if self.key_cmpignore in self.lnk_settings: - key = self.key_cmpignore - self.cmpignores = self.lnk_settings[key].copy() or [] - - # parse external actions - try: - ext_actions = self.lnk_settings[self.key_import_actions] or () - for path in ext_actions: - path = self._abs_path(path) - if self.debug: - self.log.dbg('loading actions from {}'.format(path)) - content = self._load_yaml(path) - # If external actions are None, replaces them with empty dict - try: - external_actions = content[self.key_actions] or {} - self._load_actions(external_actions) - except KeyError: - pass - except KeyError: - pass - - # parse external configs - try: - ext_configs = self.lnk_settings[self.key_import_configs] or () - - try: - iglob('./*', recursive=True) - find_glob = partial(iglob, recursive=True) - except TypeError: - from platform import python_version - - msg = ('Recursive globbing is not available on Python {}: ' - .format(python_version())) - if any('**' in config for config in ext_configs): - msg += "import_configs won't work" - self.log.err(msg) - return False - - msg = 'upgrade to version >3.5 if you want to use this feature' - self.log.warn(msg) - find_glob = iglob - - ext_configs = itertools.chain.from_iterable( - find_glob(self._abs_path(config)) - for config in ext_configs - ) - for config in ext_configs: - self._merge_cfg(config) - except KeyError: - pass - - # parse local actions - # If local actions are None, replaces them with empty dict - try: - local_actions = self.content[self.key_actions] or {} - self._load_actions(local_actions) - except KeyError: - pass - - # load default actions - try: - dactions = self.lnk_settings[self.key_defactions].copy() or [] - self.defactions = self._parse_actions_list(dactions, - profile=profile) - except KeyError: - self.defactions = { - self.key_actions_pre: [], - self.key_actions_post: [], - } - - # parse read transformations - # If read transformations are None, replaces them with empty dict - try: - read_trans = self.content[self.key_trans_r] or {} - self.trans_r.update({ - k: Transform(k, v) - for k, v - in read_trans.items() - }) - except KeyError: - pass - - # parse write transformations - # If write transformations are None, replaces them with empty dict - try: - read_trans = 
self.content[self.key_trans_w] or {} - self.trans_w.update({ - k: Transform(k, v) - for k, v - in read_trans.items() - }) - except KeyError: - pass - - # parse the dotfiles and construct the dict of objects per dotfile key - # ensures the dotfiles entry is a dict - if not isinstance(self.content[self.key_dotfiles], dict): - self.content[self.key_dotfiles] = {} - - dotfiles = self.content[self.key_dotfiles] - noempty_default = self.lnk_settings[self.key_ignoreempty] - dotpath = self.lnk_settings[self.key_dotpath] - for k, v in dotfiles.items(): - src = v[self.key_dotfiles_src] - if dotpath not in src: - src = os.path.join(dotpath, src) - src = os.path.normpath(self._abs_path(src)) - dst = os.path.normpath(v[self.key_dotfiles_dst]) - - # Fail if both `link` and `link_children` present - if self.key_dotfiles_link in v \ - and self.key_dotfiles_link_children in v: - msg = 'only one of `link` or `link_children` allowed per' - msg += ' dotfile, error on dotfile "{}".' - self.log.err(msg.format(k)) - return False - - # fix it - v = self._fix_dotfile_link(k, v) - dotfiles[k] = v - - # get link type - link = self._get_def_link() - if self.key_dotfiles_link in v: - link = self._string_to_linktype(v[self.key_dotfiles_link]) - - # get ignore empty - noempty = v.get(self.key_dotfiles_noempty, noempty_default) - - # parse actions - itsactions = v.get(self.key_dotfiles_actions, []) - actions = self._parse_actions_list(itsactions, profile=profile) - if self.debug: - self.log.dbg('action for {}'.format(k)) - for t in [self.key_actions_pre, self.key_actions_post]: - for action in actions[t]: - self.log.dbg('- {}: {}'.format(t, action)) - - # parse read transformation - itstrans_r = v.get(self.key_dotfiles_trans_r) - trans_r = None - if itstrans_r: - if type(itstrans_r) is list: - msg = 'One transformation allowed per dotfile' - msg += ', error on dotfile \"{}\"' - self.log.err(msg.format(k)) - msg = 'Please modify your config file to: \"trans: {}\"' - self.log.err(msg.format(itstrans_r[0])) - return False - trans_r = self._parse_trans(itstrans_r, read=True) - if not trans_r: - msg = 'unknown trans \"{}\" for \"{}\"' - self.log.err(msg.format(itstrans_r, k)) - return False - - # parse write transformation - itstrans_w = v.get(self.key_dotfiles_trans_w) - trans_w = None - if itstrans_w: - if type(itstrans_w) is list: - msg = 'One write transformation allowed per dotfile' - msg += ', error on dotfile \"{}\"' - self.log.err(msg.format(k)) - msg = 'Please modify your config file: \"trans_write: {}\"' - self.log.err(msg.format(itstrans_w[0])) - return False - trans_w = self._parse_trans(itstrans_w, read=False) - if not trans_w: - msg = 'unknown trans_write \"{}\" for \"{}\"' - self.log.err(msg.format(itstrans_w, k)) - return False - - # disable transformation when link is true - if link != LinkTypes.NOLINK and (trans_r or trans_w): - msg = 'transformations disabled for \"{}\"'.format(dst) - msg += ' because link|link_children is enabled' - self.log.warn(msg) - trans_r = None - trans_w = None - - # parse cmpignore pattern - cmpignores = v.get(self.key_dotfiles_cmpignore, []).copy() - cmpignores.extend(self.cmpignores) - - # parse upignore pattern - upignores = v.get(self.key_dotfiles_upignore, []).copy() - upignores.extend(self.upignores) - - # create new dotfile - self.dotfiles[k] = Dotfile(k, dst, src, - link=link, actions=actions, - trans_r=trans_r, trans_w=trans_w, - cmpignore=cmpignores, noempty=noempty, - upignore=upignores) - - # assign dotfiles to each profile - self.prodots = {k: [] for k in 
self.lnk_profiles.keys()} - for name, profile in self.lnk_profiles.items(): - if not profile: - continue - dots = profile[self.key_profiles_dots] - if not dots: - continue - - if self.key_all in dots: - # add all if key ALL is used - self.prodots[name] = list(self.dotfiles.values()) - else: - # add the dotfiles - for d in dots: - if d not in self.dotfiles: - msg = 'unknown dotfile \"{}\" for {}'.format(d, k) - self.log.err(msg) - continue - self.prodots[name].append(self.dotfiles[d]) - - profile_names = self.lnk_profiles.keys() - # handle "import" (from file) for each profile - for k in profile_names: - dots = self._get_imported_dotfiles_keys(k) - for d in dots: - if d not in self.dotfiles: - msg = '(i) unknown dotfile \"{}\" for {}'.format(d, k) - self.log.err(msg) - continue - self.prodots[k].append(self.dotfiles[d]) - - # handle "include" (from other profile) for each profile - for k in profile_names: - ret, dots = self._get_included_dotfiles(k) - if not ret: - return False - self.prodots[k].extend(dots) - - # remove duplicates if any - self.prodots = {k: list(set(v)) for k, v in self.prodots.items()} - - # print dotfiles for each profile - if self.debug: - for k in self.lnk_profiles.keys(): - df = ','.join(d.key for d in self.prodots[k]) - self.log.dbg('dotfiles for \"{}\": {}'.format(k, df)) - return True - - def _merge_dict(self, ext_config, warning_prefix, self_member, - ext_member=None, traceback=False): - """Merge into self a dictionary instance members from an external Cfg. - - This method merges instance members of another Cfg instance into self. - It issues a warning for any key shared between self and the other Cfg. - It can adds an own=False porperty to any dictionary in the external - instance member before merging. - - :param ext_config: The other Cfg to merge from. - :type ext_config: Cfg - :param warnign_prefix: The prefix to th warning messages. - :type warning_prefix: str - :param self_member: The member of self which will be augmented by the - external member. Or the self_member name as a string. - :type self_member: dict or str - :param ext_member: The member of ext_config which wil be merged in - self_member. When not given, self_member is assumed to be a string, - and self_member and ext_member are supposed to have the same name. - :type ext_member: dict or None - :param traceback: Whether to add own=False to ext_member dict values - before merging in. - :type traceback: bool - - """ - if ext_member is None: - member_name = self_member - self_member = getattr(self, member_name) - ext_member = getattr(ext_config, member_name) - - common_keys = ( - key - for key in (set(self_member.keys()) - .intersection(set(ext_member.keys()))) - if not key.startswith('_') # filtering out internal variables - ) - warning_msg = ('%s {} defined both in %s and %s: {} in %s used' - % (warning_prefix, self.cfgpath, ext_config.cfgpath, - self.cfgpath)) - for key in common_keys: - self.log.warn(warning_msg.format(key, key)) - - if traceback: - # Assumes v to be a dict. 
So far it's only used for profiles, - # that are in fact dicts - merged = { - k: dict(v, own=False) - for k, v in ext_member.items() - } - else: - merged = ext_member.copy() - merged.update(self_member) - self_member.update(merged) - - return self_member - - def _merge_cfg(self, config_path): - """Merge an external config.yaml file into self.""" - # Parsing external config file - try: - ext_config = Cfg(config_path) - except ValueError: - raise ValueError( - 'external config file not found: {}'.format(config_path)) - - # Merging in members from the external config file - self._merge_dict(ext_config=ext_config, warning_prefix='Dotfile', - self_member='dotfiles') - self._merge_dict(ext_config=ext_config, warning_prefix='Profile', - self_member='lnk_profiles', traceback=True) - self._merge_dict(ext_config=ext_config, warning_prefix='Action', - self_member='actions') - self._merge_dict(ext_config=ext_config, - warning_prefix='Transformation', - self_member='trans_r') - self._merge_dict(ext_config=ext_config, - warning_prefix='Write transformation', - self_member='trans_w') - self._merge_dict(ext_config=ext_config, warning_prefix='Profile', - self_member='prodots') - - # variables are merged in ext_*variables so as not to be added in - # self.content. This needs an additional step to account for imported - # variables sharing a key with the ones defined in self.content. - variables = { - k: v - for k, v in ext_config._get_variables(None).items() - if k not in self._get_variables(None).keys() - } - dyn_variables = { - k: v - for k, v in ext_config._get_dynvariables(None).items() - if k not in self._get_dynvariables(None).keys() - } - self._merge_dict(ext_config=ext_config, warning_prefix='Variable', - self_member=self.ext_variables, - ext_member=variables) - self._merge_dict(ext_config=ext_config, - warning_prefix='Dynamic variable', - self_member=self.ext_dynvariables, - ext_member=dyn_variables) - - def _load_ext_variables(self, paths, profile=None): - """load external variables""" - variables = {} - dvariables = {} - cur_vars = self.get_variables(profile, debug=self.debug) - t = Templategen(variables=cur_vars) - for path in paths: - path = self._abs_path(path) - path = t.generate_string(path) - if self.debug: - self.log.dbg('loading variables from {}'.format(path)) - content = self._load_yaml(path) - if not content: - self.log.warn('\"{}\" does not exist'.format(path)) - continue - # variables - if self.key_variables in content: - variables.update(content[self.key_variables]) - # dynamic variables - if self.key_dynvariables in content: - dvariables.update(content[self.key_dynvariables]) - self.ext_variables = variables - if self.debug: - self.log.dbg('loaded ext variables: {}'.format(variables)) - self.ext_dynvariables = dvariables - if self.debug: - self.log.dbg('loaded ext dynvariables: {}'.format(dvariables)) - - def _load_actions(self, dic): - for k, v in dic.items(): - # loop through all actions - if k in [self.key_actions_pre, self.key_actions_post]: - # parse pre/post actions - items = dic[k].items() - for k2, v2 in items: - if k not in self.actions: - self.actions[k] = {} - a = Action(k2, k, v2) - self.actions[k][k2] = a - if self.debug: - self.log.dbg('new action: {}'.format(a)) - else: - # parse naked actions as post actions - if self.key_actions_post not in self.actions: - self.actions[self.key_actions_post] = {} - a = Action(k, '', v) - self.actions[self.key_actions_post][k] = a - if self.debug: - self.log.dbg('new action: {}'.format(a)) - - def _abs_path(self, path): - """return 
absolute path of path relative to the confpath""" - path = os.path.expanduser(path) - if not os.path.isabs(path): - d = os.path.dirname(self.cfgpath) - return os.path.join(d, path) - return path - - def _get_imported_dotfiles_keys(self, profile): - """import dotfiles from external file""" - keys = [] - if self.key_profiles_imp not in self.lnk_profiles[profile]: - return keys - variables = self.get_variables(profile, debug=self.debug) - t = Templategen(variables=variables) - paths = self.lnk_profiles[profile][self.key_profiles_imp] - for path in paths: - path = self._abs_path(path) - path = t.generate_string(path) - if self.debug: - self.log.dbg('loading dotfiles from {}'.format(path)) - content = self._load_yaml(path) - if not content: - self.log.warn('\"{}\" does not exist'.format(path)) - continue - if self.key_profiles_dots not in content: - self.log.warn('not dotfiles in \"{}\"'.format(path)) - continue - df = content[self.key_profiles_dots] - if self.debug: - self.log.dbg('imported dotfiles keys: {}'.format(df)) - keys.extend(df) - return keys - - def _get_included_dotfiles(self, profile, seen=[]): - """find all dotfiles for a specific profile - when using the include keyword""" - if profile in seen: - self.log.err('cyclic include in profile \"{}\"'.format(profile)) - return False, [] - if not self.lnk_profiles[profile]: - return True, [] - dotfiles = self.prodots[profile] - if self.key_profiles_incl not in self.lnk_profiles[profile]: - # no include found - return True, dotfiles - if not self.lnk_profiles[profile][self.key_profiles_incl]: - # empty include found - return True, dotfiles - variables = self.get_variables(profile, debug=self.debug) - t = Templategen(variables=variables) - if self.debug: - self.log.dbg('handle includes for profile \"{}\"'.format(profile)) - for other in self.lnk_profiles[profile][self.key_profiles_incl]: - # resolve include value - other = t.generate_string(other) - if other not in self.prodots: - # no such profile - self.log.warn('unknown included profile \"{}\"'.format(other)) - continue - if self.debug: - msg = 'include dotfiles from \"{}\" into \"{}\"' - self.log.dbg(msg.format(other, profile)) - lseen = seen.copy() - lseen.append(profile) - ret, recincludes = self._get_included_dotfiles(other, seen=lseen) - if not ret: - return False, [] - dotfiles.extend(recincludes) - dotfiles.extend(self.prodots[other]) - return True, dotfiles - - def _parse_actions_list(self, entries, profile=None): - """parse actions specified for an element - where entries are the ones defined for this dotfile""" - res = { - self.key_actions_pre: [], - self.key_actions_post: [], - } - vars = self.get_variables(profile, debug=self.debug) - t = Templategen(variables=vars) - for line in entries: - fields = shlex.split(line) - entry = fields[0] - args = [] - if len(fields) > 1: - tmpargs = fields[1:] - args = [] - # template args - for arg in tmpargs: - args.append(t.generate_string(arg)) - action = None - if self.key_actions_pre in self.actions and \ - entry in self.actions[self.key_actions_pre]: - kind = self.key_actions_pre - if not args: - action = self.actions[self.key_actions_pre][entry] - else: - a = self.actions[self.key_actions_pre][entry].action - action = Action(entry, kind, a, *args) - elif self.key_actions_post in self.actions and \ - entry in self.actions[self.key_actions_post]: - kind = self.key_actions_post - if not args: - action = self.actions[self.key_actions_post][entry] - else: - a = self.actions[self.key_actions_post][entry].action - action = Action(entry, 
kind, a, *args) - else: - self.log.warn('unknown action \"{}\"'.format(entry)) - continue - res[kind].append(action) - return res - - def _parse_trans(self, trans, read=True): - """parse transformation key specified for a dotfile""" - transformations = self.trans_r - if not read: - transformations = self.trans_w - if trans not in transformations.keys(): - return None - return transformations[trans] - - def _complete_settings(self): - """set settings defaults if not present""" - self._fix_deprecated() - if self.key_dotpath not in self.lnk_settings: - self.lnk_settings[self.key_dotpath] = self.default_dotpath - if self.key_backup not in self.lnk_settings: - self.lnk_settings[self.key_backup] = self.default_backup - if self.key_create not in self.lnk_settings: - self.lnk_settings[self.key_create] = self.default_create - if self.key_banner not in self.lnk_settings: - self.lnk_settings[self.key_banner] = self.default_banner - if self.key_long not in self.lnk_settings: - self.lnk_settings[self.key_long] = self.default_longkey - if self.key_keepdot not in self.lnk_settings: - self.lnk_settings[self.key_keepdot] = self.default_keepdot - if self.key_workdir not in self.lnk_settings: - self.lnk_settings[self.key_workdir] = self.default_workdir - if self.key_showdiff not in self.lnk_settings: - self.lnk_settings[self.key_showdiff] = self.default_showdiff - if self.key_ignoreempty not in self.lnk_settings: - self.lnk_settings[self.key_ignoreempty] = self.default_ignoreempty - - if self.key_dotfile_link not in self.lnk_settings: - self.lnk_settings[self.key_dotfile_link] = self.default_link - else: - key = self.lnk_settings[self.key_dotfile_link] - if key != self.lnk_link and \ - key != self.lnk_children and \ - key != self.lnk_nolink: - self.log.err('bad value for {}'.format(self.key_dotfile_link)) - return False - - if self.key_imp_link not in self.lnk_settings: - self.lnk_settings[self.key_imp_link] = self.default_link_imp - else: - key = self.lnk_settings[self.key_imp_link] - if key != self.lnk_link and \ - key != self.lnk_children and \ - key != self.lnk_nolink: - self.log.err('bad value for {}'.format(self.key_dotfile_link)) - return False - return True - - def _fix_deprecated(self): - """fix deprecated entries""" - # link_by_default - key = 'link_by_default' - newkey = self.key_imp_link - if key in self.lnk_settings: - if self.lnk_settings[key]: - self.lnk_settings[newkey] = self.lnk_link - else: - self.lnk_settings[newkey] = self.lnk_nolink - del self.lnk_settings[key] - self._modified = True - - def _fix_dotfile_link(self, key, entry): - """fix deprecated link usage in dotfile entry""" - v = entry - - if self.key_dotfiles_link not in v \ - and self.key_dotfiles_link_children not in v: - # nothing defined - return v - - new = self.lnk_nolink - if self.key_dotfiles_link in v \ - and type(v[self.key_dotfiles_link]) is bool: - # patch link: - if v[self.key_dotfiles_link]: - new = self.lnk_link - else: - new = self.lnk_nolink - self._modified = True - if self.debug: - self.log.dbg('link updated for {} to {}'.format(key, new)) - elif self.key_dotfiles_link_children in v \ - and type(v[self.key_dotfiles_link_children]) is bool: - # patch link_children: - if v[self.key_dotfiles_link_children]: - new = self.lnk_children - else: - new = self.lnk_nolink - del v[self.key_dotfiles_link_children] - self._modified = True - if self.debug: - self.log.dbg('link updated for {} to {}'.format(key, new)) - else: - # no change - new = v[self.key_dotfiles_link] - - v[self.key_dotfiles_link] = new - return v - - 
@classmethod - def _filter_not_own(cls, content): - """Filters out from a dict its dict values with own=False.""" - # This way it recursively explores only dicts. Since own=False is used - # only in profiles, which are in fact dicts, this is fine for now. - return { - k: cls._filter_not_own(v) if isinstance(v, dict) else v - for k, v in content.items() - if not isinstance(v, dict) or v.get('own', True) - } - - def _save(self, content, path): - """writes the config to file""" - ret = False - with open(path, 'w') as f: - ret = yaml.safe_dump(self._filter_not_own(content), f, - default_flow_style=False, - indent=2) - if ret: - self._modified = False - return ret - - def _norm_key_elem(self, elem): - """normalize path element for sanity""" - elem = elem.lstrip('.') - elem = elem.replace(' ', '-') - return elem.lower() - - def _get_paths(self, path): - """return a list of path elements, excluded home path""" - p = strip_home(path) - dirs = [] - while True: - p, f = os.path.split(p) - dirs.append(f) - if not p or not f: - break - dirs.reverse() - # remove empty entries - dirs = filter(None, dirs) - # normalize entries - dirs = list(map(self._norm_key_elem, dirs)) - return dirs - - def _get_long_key(self, path, keys): - """return a unique long key representing the - absolute path of path""" - dirs = self._get_paths(path) - # prepend with indicator - if os.path.isdir(path): - key = 'd_{}'.format('_'.join(dirs)) - else: - key = 'f_{}'.format('_'.join(dirs)) - return self._get_unique_key(key, keys) - - def _get_short_key(self, path, keys): - """return a unique key where path - is known not to be an already existing dotfile""" - dirs = self._get_paths(path) - dirs.reverse() - pre = 'f' - if os.path.isdir(path): - pre = 'd' - entries = [] - for d in dirs: - entries.insert(0, d) - key = '_'.join(entries) - key = '{}_{}'.format(pre, key) - if key not in keys: - return key - return self._get_unique_key(key, keys) - - def _get_unique_key(self, key, keys): - """return a unique dotfile key""" - newkey = key - cnt = 1 - while newkey in keys: - # if unable to get a unique path - # get a random one - newkey = '{}_{}'.format(key, cnt) - cnt += 1 - return newkey - - def _dotfile_exists(self, dotfile): - """return True and the existing dotfile key - if it already exists, False and a new unique key otherwise""" - try: - return True, next(key - for key, d in self.dotfiles.items() - if d.dst == dotfile.dst) - except StopIteration: - pass - # return key for this new dotfile - path = os.path.expanduser(dotfile.dst) - keys = self.dotfiles.keys() - if self.lnk_settings[self.key_long]: - return False, self._get_long_key(path, keys) - return False, self._get_short_key(path, keys) - - def new(self, src, dst, profile, link, debug=False): - """import new dotfile""" - # keep it short - home = os.path.expanduser('~') - dst = dst.replace(home, '~', 1) - dotfile = Dotfile('', dst, src) - - # adding new profile if doesn't exist - if profile not in self.lnk_profiles: - if debug: - self.log.dbg('adding profile to config') - # in the yaml - self.lnk_profiles[profile] = {self.key_profiles_dots: []} - # in the global list of dotfiles per profile - self.prodots[profile] = [] - - exists, key = self._dotfile_exists(dotfile) - if exists: - if debug: - self.log.dbg('key already exists: {}'.format(key)) - # retrieve existing dotfile - dotfile = self.dotfiles[key] - if dotfile in self.prodots[profile]: - self.log.err('\"{}\" already present'.format(dotfile.key)) - return False, dotfile - - # add for this profile - 
self.prodots[profile].append(dotfile) - - # get a pointer in the yaml profiles->this_profile - # and complete it with the new entry - pro = self.content[self.key_profiles][profile] - if self.key_all not in pro[self.key_profiles_dots]: - pro[self.key_profiles_dots].append(dotfile.key) - return True, dotfile - - if debug: - self.log.dbg('dotfile attributed key: {}'.format(key)) - # adding the new dotfile - dotfile.key = key - dotfile.link = link - if debug: - self.log.dbg('adding new dotfile: {}'.format(dotfile)) - # add the entry in the yaml file - dots = self.content[self.key_dotfiles] - dots[dotfile.key] = { - self.key_dotfiles_dst: dotfile.dst, - self.key_dotfiles_src: dotfile.src, - } - - # set the link flag - if link != self._get_def_link(): - val = link.name.lower() - dots[dotfile.key][self.key_dotfiles_link] = val - - # link it to this profile in the yaml file - pro = self.content[self.key_profiles][profile] - if self.key_all not in pro[self.key_profiles_dots]: - pro[self.key_profiles_dots].append(dotfile.key) - - # add it to the global list of dotfiles - self.dotfiles[dotfile.key] = dotfile - # add it to this profile - self.prodots[profile].append(dotfile) - - return True, dotfile - - def _get_dotfiles(self, profile): - """return a list of dotfiles for a specific profile""" - if profile not in self.prodots: - return [] - return sorted(self.prodots[profile], - key=lambda x: str(x.key)) - - def get_profiles(self): - """return all defined profiles""" - return self.lnk_profiles.keys() - - def get_settings(self): - """return all defined settings""" - settings = self.lnk_settings.copy() - # patch link entries - key = self.key_imp_link - settings[key] = self._string_to_linktype(settings[key]) - key = self.key_dotfile_link - settings[key] = self._string_to_linktype(settings[key]) - # patch defactions - key = self.key_defactions - settings[key] = self.defactions - return settings - - def get_variables(self, profile, debug=False): - """return the variables for this profile""" - # get flat variables - variables = self._get_variables(profile=profile) - - # get interpreted variables - dvariables = self._get_dynvariables(profile) - - # recursive resolve variables - allvars = variables.copy() - allvars.update(dvariables) - var = self._rec_resolve_vars(allvars) - - # execute dynvariables - for k in dvariables.keys(): - var[k] = shell(var[k]) - - if debug: - self.log.dbg('variables:') - for k, v in var.items(): - self.log.dbg('\t\"{}\": {}'.format(k, v)) - - return var - - def _rec_resolve_vars(self, variables): - """recursive resolve all variables""" - t = Templategen(variables=variables) - - for k in variables.keys(): - val = variables[k] - while Templategen.var_is_template(val): - val = t.generate_string(val) - variables[k] = val - t.update_variables(variables) - return variables - - def _get_variables(self, profile=None, sub=False): - """return the un-interpreted variables""" - variables = {} - - if not sub: - # profile variable - if profile: - variables['profile'] = profile - - # add paths variables - variables['_dotdrop_dotpath'] = self.lnk_settings[self.key_dotpath] - variables['_dotdrop_cfgpath'] = self.cfgpath - variables['_dotdrop_workdir'] = self.lnk_settings[self.key_workdir] - - # global variables - if self.key_variables in self.content: - variables.update(self.content[self.key_variables]) - - # external variables - variables.update(self.ext_variables) - - if not profile or profile not in self.lnk_profiles: - return variables - - var = self.lnk_profiles[profile] - - # inherited profile 
variables - if self.key_profiles_incl in var.keys(): - for inherited_profile in var[self.key_profiles_incl]: - inherited_vars = self._get_variables(inherited_profile, True) - variables.update(inherited_vars) - - # finally we override with profile variables - if self.key_variables in var.keys(): - for k, v in var[self.key_variables].items(): - variables[k] = v - - return variables - - def _get_dynvariables(self, profile): - """return the dyn variables""" - variables = {} - - # global dynvariables - if self.key_dynvariables in self.content: - # interpret dynamic variables - variables.update(self.content[self.key_dynvariables]) - - # external variables - variables.update(self.ext_dynvariables) - - if profile not in self.lnk_profiles: - return variables - - # profile dynvariables - var = self.lnk_profiles[profile] - if self.key_dynvariables in var.keys(): - variables.update(var[self.key_dynvariables]) - - return variables - - def dump(self): - """return a dump of the config""" - # temporary reset paths - dotpath = self.lnk_settings[self.key_dotpath] - workdir = self.lnk_settings[self.key_workdir] - self.lnk_settings[self.key_dotpath] = self.curdotpath - self.lnk_settings[self.key_workdir] = self.curworkdir - # dump - ret = yaml.safe_dump(self.content, - default_flow_style=False, - indent=2) - ret = ret.replace('{}', '') - # restore paths - self.lnk_settings[self.key_dotpath] = dotpath - self.lnk_settings[self.key_workdir] = workdir - return ret - - def is_modified(self): - """need the db to be saved""" - return self._modified - - def save(self): - """save the config to file""" - # temporary reset paths - dotpath = self.lnk_settings[self.key_dotpath] - workdir = self.lnk_settings[self.key_workdir] - self.lnk_settings[self.key_dotpath] = self.curdotpath - self.lnk_settings[self.key_workdir] = self.curworkdir - # save - ret = self._save(self.content, self.cfgpath) - # restore path - self.lnk_settings[self.key_dotpath] = dotpath - self.lnk_settings[self.key_workdir] = workdir - return ret diff --git a/dotdrop/dictparser.py b/dotdrop/dictparser.py new file mode 100644 index 0000000..3031164 --- /dev/null +++ b/dotdrop/dictparser.py @@ -0,0 +1,38 @@ +""" +author: deadc0de6 (https://github.com/deadc0de6) +Copyright (c) 2019, deadc0de6 + +dictionary parser abstract class +""" + +from dotdrop.logger import Logger + + +class DictParser: + + log = Logger() + + @classmethod + def _adjust_yaml_keys(cls, value): + """adjust value for object 'cls'""" + return value + + @classmethod + def parse(cls, key, value): + """parse (key,value) and construct object 'cls'""" + tmp = value + try: + tmp = value.copy() + except AttributeError: + pass + newv = cls._adjust_yaml_keys(tmp) + if not key: + return cls(**newv) + return cls(key=key, **newv) + + @classmethod + def parse_dict(cls, items): + """parse a dictionary and construct object 'cls'""" + if not items: + return [] + return [cls.parse(k, v) for k, v in items.items()] diff --git a/dotdrop/dotdrop.py b/dotdrop/dotdrop.py index 409f4be..b734eae 100644 --- a/dotdrop/dotdrop.py +++ b/dotdrop/dotdrop.py @@ -15,7 +15,6 @@ from dotdrop.templategen import Templategen from dotdrop.installer import Installer from dotdrop.updater import Updater from dotdrop.comparator import Comparator -from dotdrop.config import Cfg from dotdrop.utils import get_tmpdir, remove, strip_home, run from dotdrop.linktypes import LinkTypes @@ -95,15 +94,13 @@ def cmd_install(o): for dotfile in dotfiles: # add dotfile variables t.restore_vars(tvars) - newvars = dotfile.get_vars() + newvars = 
dotfile.get_dotfile_variables() t.add_tmp_vars(newvars=newvars) preactions = [] - if not o.install_temporary and dotfile.actions \ - and Cfg.key_actions_pre in dotfile.actions: - for action in dotfile.actions[Cfg.key_actions_pre]: - preactions.append(action) - defactions = o.install_default_actions[Cfg.key_actions_pre] + if not o.install_temporary: + preactions.extend(dotfile.get_pre_actions()) + defactions = o.install_default_actions_pre pre_actions_exec = action_executor(o, dotfile, preactions, defactions, t, post=False) @@ -132,10 +129,9 @@ def cmd_install(o): if os.path.exists(tmp): remove(tmp) if r: - if not o.install_temporary and \ - Cfg.key_actions_post in dotfile.actions: - defactions = o.install_default_actions[Cfg.key_actions_post] - postactions = dotfile.actions[Cfg.key_actions_post] + if not o.install_temporary: + defactions = o.install_default_actions_post + postactions = dotfile.get_post_actions() post_actions_exec = action_executor(o, dotfile, postactions, defactions, t, post=True) post_actions_exec() @@ -329,8 +325,7 @@ def cmd_importer(o): LOG.err('importing \"{}\" failed!'.format(path)) ret = False continue - retconf, dotfile = o.conf.new(src, dst, o.profile, - linktype, debug=o.debug) + retconf = o.conf.new(src, dst, linktype, o.profile) if retconf: LOG.sub('\"{}\" imported'.format(path)) cnt += 1 @@ -355,7 +350,7 @@ def cmd_list_profiles(o): def cmd_list_files(o): """list all dotfiles for a specific profile""" - if o.profile not in o.profiles: + if o.profile not in [p.key for p in o.profiles]: LOG.warn('unknown profile \"{}\"'.format(o.profile)) return what = 'Dotfile(s)' @@ -375,7 +370,7 @@ def cmd_list_files(o): def cmd_detail(o): """list details on all files for all dotfile entries""" - if o.profile not in o.profiles: + if o.profile not in [p.key for p in o.profiles]: LOG.warn('unknown profile \"{}\"'.format(o.profile)) return dotfiles = o.dotfiles @@ -394,7 +389,7 @@ def cmd_detail(o): def _detail(dotpath, dotfile): - """print details on all files under a dotfile entry""" + """display details on all files under a dotfile entry""" LOG.log('{} (dst: \"{}\", link: {})'.format(dotfile.key, dotfile.dst, dotfile.link.name.lower())) path = os.path.join(dotpath, os.path.expanduser(dotfile.src)) @@ -404,7 +399,7 @@ def _detail(dotpath, dotfile): template = 'yes' LOG.sub('{} (template:{})'.format(path, template)) else: - for root, dir, files in os.walk(path): + for root, _, files in os.walk(path): for f in files: p = os.path.join(root, f) template = 'no' @@ -433,17 +428,17 @@ def apply_trans(dotpath, dotfile, debug=False): return None if fails and new source if succeed""" src = dotfile.src new_src = '{}.{}'.format(src, TRANS_SUFFIX) - trans = dotfile.trans_r - if debug: - LOG.dbg('executing transformation {}'.format(trans)) - s = os.path.join(dotpath, src) - temp = os.path.join(dotpath, new_src) - if not trans.transform(s, temp): - msg = 'transformation \"{}\" failed for {}' - LOG.err(msg.format(trans.key, dotfile.key)) - if new_src and os.path.exists(new_src): - remove(new_src) - return None + for trans in dotfile.trans_r: + if debug: + LOG.dbg('executing transformation {}'.format(trans)) + s = os.path.join(dotpath, src) + temp = os.path.join(dotpath, new_src) + if not trans.transform(s, temp): + msg = 'transformation \"{}\" failed for {}' + LOG.err(msg.format(trans.key, dotfile.key)) + if new_src and os.path.exists(new_src): + remove(new_src) + return None return new_src @@ -456,8 +451,8 @@ def main(): """entry point""" try: o = Options() - except ValueError as e: - 
LOG.err('Config error: {}'.format(str(e))) + except Exception as e: + LOG.err('options error: {}'.format(str(e))) return False ret = True @@ -512,9 +507,8 @@ def main(): LOG.err('interrupted') ret = False - if ret and o.conf.is_modified(): + if ret and o.conf.save(): LOG.log('config file updated') - o.conf.save() return ret diff --git a/dotdrop/dotfile.py b/dotdrop/dotfile.py index b11f8ce..8c4ab24 100644 --- a/dotdrop/dotfile.py +++ b/dotdrop/dotfile.py @@ -6,15 +6,23 @@ represents a dotfile in dotdrop """ from dotdrop.linktypes import LinkTypes +from dotdrop.dictparser import DictParser +from dotdrop.action import Action -class Dotfile: +class Dotfile(DictParser): + """Represent a dotfile.""" + # dotfile keys + key_noempty = 'ignoreempty' + key_trans_r = 'trans' + key_trans_w = 'trans_write' def __init__(self, key, dst, src, - actions={}, trans_r=None, trans_w=None, + actions=[], trans_r=[], trans_w=[], link=LinkTypes.NOLINK, cmpignore=[], noempty=False, upignore=[]): - """constructor + """ + constructor @key: dotfile key @dst: dotfile dst (in user's home usually) @src: dotfile src (in dotpath) @@ -26,39 +34,73 @@ class Dotfile: @noempty: ignore empty template if True @upignore: patterns to ignore when updating """ - self.key = key - self.dst = dst - self.src = src - self.link = link - # ensure link of right type - if type(link) != LinkTypes: - raise Exception('bad value for link: {}'.format(link)) self.actions = actions + self.cmpignore = cmpignore + self.dst = dst + self.key = key + self.link = LinkTypes.get(link) + self.noempty = noempty + self.src = src self.trans_r = trans_r self.trans_w = trans_w - self.cmpignore = cmpignore - self.noempty = noempty self.upignore = upignore - def get_vars(self): - """return this dotfile templating vars""" - _vars = {} - _vars['_dotfile_abs_src'] = self.src - _vars['_dotfile_abs_dst'] = self.dst - _vars['_dotfile_key'] = self.key - _vars['_dotfile_link'] = self.link.name.lower() + def get_dotfile_variables(self): + """return this dotfile specific variables""" + return { + '_dotfile_abs_src': self.src, + '_dotfile_abs_dst': self.dst, + '_dotfile_key': self.key, + '_dotfile_link': str(self.link), + } - return _vars + def get_pre_actions(self): + """return all 'pre' actions""" + return [a for a in self.actions if a.kind == Action.pre] - def __str__(self): - msg = 'key:\"{}\", src:\"{}\", dst:\"{}\", link:\"{}\"' - return msg.format(self.key, self.src, self.dst, self.link.name.lower()) + def get_post_actions(self): + """return all 'post' actions""" + return [a for a in self.actions if a.kind == Action.post] - def __repr__(self): - return 'dotfile({})'.format(self.__str__()) + def get_trans_r(self): + """return trans_r object""" + if self.trans_r: + return self.trans_r[0] + return None + + def get_trans_w(self): + """return trans_w object""" + if self.trans_w: + return self.trans_w[0] + return None + + @classmethod + def _adjust_yaml_keys(cls, value): + """patch dict""" + value['noempty'] = value.get(cls.key_noempty, False) + value['trans_r'] = value.get(cls.key_trans_r) + if value['trans_r']: + # ensure is a list + value['trans_r'] = [value['trans_r']] + value['trans_w'] = value.get(cls.key_trans_w) + if value['trans_w']: + # ensure is a list + value['trans_w'] = [value['trans_w']] + # remove old entries + value.pop(cls.key_noempty, None) + value.pop(cls.key_trans_r, None) + value.pop(cls.key_trans_w, None) + return value def __eq__(self, other): return self.__dict__ == other.__dict__ def __hash__(self): return hash(self.dst) ^ hash(self.src) ^ 
hash(self.key) + + def __str__(self): + msg = 'key:\"{}\", src:\"{}\", dst:\"{}\", link:\"{}\"' + return msg.format(self.key, self.src, self.dst, str(self.link)) + + def __repr__(self): + return 'dotfile({!s})'.format(self) diff --git a/dotdrop/linktypes.py b/dotdrop/linktypes.py index 59da01f..68e2b3b 100644 --- a/dotdrop/linktypes.py +++ b/dotdrop/linktypes.py @@ -5,3 +5,15 @@ class LinkTypes(IntEnum): NOLINK = 0 LINK = 1 LINK_CHILDREN = 2 + + @classmethod + def get(cls, key, default=None): + try: + return key if isinstance(key, cls) else cls[key.upper()] + except KeyError: + if default: + return default + raise ValueError('bad {} value: "{}"'.format(cls.__name__, key)) + + def __str__(self): + return self.name.lower() diff --git a/dotdrop/logger.py b/dotdrop/logger.py index fec1e07..51c6e8a 100644 --- a/dotdrop/logger.py +++ b/dotdrop/logger.py @@ -16,8 +16,10 @@ class Logger: YELLOW = '\033[93m' BLUE = '\033[94m' MAGENTA = '\033[95m' + LMAGENTA = '\033[35m' RESET = '\033[0m' EMPH = '\033[33m' + BOLD = '\033[1m' def __init__(self): pass @@ -37,10 +39,14 @@ class Logger: ce = self._color(self.RESET) sys.stderr.write('{}{}{}'.format(cs, string, ce)) - def err(self, string, end='\n'): + def err(self, string, end='\n', *, throw=None): cs = self._color(self.RED) ce = self._color(self.RESET) - sys.stderr.write('{}[ERR] {} {}{}'.format(cs, string, end, ce)) + msg = '{} {}'.format(string, end) + sys.stderr.write('{}[ERR] {}{}'.format(cs, msg, ce)) + + if throw is not None: + raise throw(msg) def warn(self, string, end='\n'): cs = self._color(self.YELLOW) @@ -53,8 +59,10 @@ class Logger: func = inspect.stack()[1][3] cs = self._color(self.MAGENTA) ce = self._color(self.RESET) - line = '{}[DEBUG][{}.{}] {}{}\n' - sys.stderr.write(line.format(cs, mod, func, string, ce)) + cl = self._color(self.LMAGENTA) + bl = self._color(self.BOLD) + line = '{}{}[DEBUG][{}.{}]{}{} {}{}\n' + sys.stderr.write(line.format(bl, cl, mod, func, ce, cs, string, ce)) def dry(self, string, end='\n'): cs = self._color(self.GREEN) diff --git a/dotdrop/options.py b/dotdrop/options.py index ae30cc6..5771789 100644 --- a/dotdrop/options.py +++ b/dotdrop/options.py @@ -14,7 +14,8 @@ from docopt import docopt from dotdrop.version import __version__ as VERSION from dotdrop.linktypes import LinkTypes from dotdrop.logger import Logger -from dotdrop.config import Cfg +from dotdrop.cfg_aggregator import CfgAggregator as Cfg +from dotdrop.action import Action ENV_PROFILE = 'DOTDROP_PROFILE' ENV_CONFIG = 'DOTDROP_CONFIG' @@ -107,24 +108,23 @@ class Options(AttrMonitor): if not args: self.args = docopt(USAGE, version=VERSION) self.log = Logger() - self.debug = self.args['--verbose'] - if not self.debug and ENV_DEBUG in os.environ: - self.debug = True + self.debug = self.args['--verbose'] or ENV_DEBUG in os.environ if ENV_NODEBUG in os.environ: + # force disabling debugs self.debug = False self.profile = self.args['--profile'] self.confpath = self._get_config_path() if self.debug: self.log.dbg('config file: {}'.format(self.confpath)) - self._read_config(self.profile) + self._read_config() self._apply_args() self._fill_attr() if ENV_NOBANNER not in os.environ \ and self.banner \ and not self.args['--no-banner']: self._header() - self._print_attr() + self._debug_attr() # start monitoring for bad attribute self._set_attr_err = True @@ -167,25 +167,18 @@ class Options(AttrMonitor): return None - def _find_cfg(self, paths): - """try to find the config in the paths list""" - for path in paths: - if os.path.exists(path): - return path - return 
None - def _header(self): - """print the header""" + """display the header""" self.log.log(BANNER) self.log.log('') - def _read_config(self, profile=None): + def _read_config(self): """read the config file""" - self.conf = Cfg(self.confpath, profile=profile, debug=self.debug) + self.conf = Cfg(self.confpath, self.profile, debug=self.debug) # transform the config settings to self attribute for k, v in self.conf.get_settings().items(): if self.debug: - self.log.dbg('setting: {}={}'.format(k, v)) + self.log.dbg('new setting: {}={}'.format(k, v)) setattr(self, k, v) def _apply_args(self): @@ -212,8 +205,6 @@ class Options(AttrMonitor): self.log.err('bad option for --link: {}'.format(link)) sys.exit(USAGE) self.import_link = OPT_LINK[link] - if self.debug: - self.log.dbg('link_import value: {}'.format(self.import_link)) # "listfiles" specifics self.listfiles_templateonly = self.args['--template'] @@ -223,7 +214,10 @@ class Options(AttrMonitor): self.install_diff = not self.args['--nodiff'] self.install_showdiff = self.showdiff or self.args['--showdiff'] self.install_backup_suffix = BACKUP_SUFFIX - self.install_default_actions = self.default_actions + self.install_default_actions_pre = [a for a in self.default_actions + if a.kind == Action.pre] + self.install_default_actions_post = [a for a in self.default_actions + if a.kind == Action.post] # "compare" specifics self.compare_dopts = self.args['--dopts'] self.compare_focus = self.args['--file'] @@ -243,26 +237,24 @@ class Options(AttrMonitor): def _fill_attr(self): """create attributes from conf""" # variables - self.variables = self.conf.get_variables(self.profile, - debug=self.debug).copy() + self.variables = self.conf.get_variables() # the dotfiles - self.dotfiles = self.conf.eval_dotfiles(self.profile, self.variables, - debug=self.debug).copy() + self.dotfiles = self.conf.get_dotfiles(self.profile) # the profiles self.profiles = self.conf.get_profiles() - def _print_attr(self): - """print all of this class attributes""" + def _debug_attr(self): + """debug display all of this class attributes""" if not self.debug: return - self.log.dbg('options:') + self.log.dbg('CLI options:') for att in dir(self): if att.startswith('_'): continue val = getattr(self, att) if callable(val): continue - self.log.dbg('- {}: \"{}\"'.format(att, val)) + self.log.dbg('- {}: {}'.format(att, val)) def _attr_set(self, attr): """error when some inexistent attr is set""" diff --git a/dotdrop/profile.py b/dotdrop/profile.py new file mode 100644 index 0000000..5a1e671 --- /dev/null +++ b/dotdrop/profile.py @@ -0,0 +1,50 @@ +""" +author: deadc0de6 (https://github.com/deadc0de6) +Copyright (c) 2019, deadc0de6 + +represent a profile in dotdrop +""" + +from dotdrop.dictparser import DictParser + + +class Profile(DictParser): + + # profile keys + key_include = 'include' + key_import = 'import' + + def __init__(self, key, actions=[], dotfiles=[], variables=[]): + """ + constructor + @key: profile key + @actions: list of action keys + @dotfiles: list of dotfile keys + @variables: list of variable keys + """ + self.key = key + self.actions = actions + self.dotfiles = dotfiles + self.variables = variables + + @classmethod + def _adjust_yaml_keys(cls, value): + """patch dict""" + value.pop(cls.key_import, None) + value.pop(cls.key_include, None) + return value + + def __eq__(self, other): + return self.__dict__ == other.__dict__ + + def __hash__(self): + return (hash(self.key) ^ + hash(tuple(self.dotfiles)) ^ + hash(tuple(self.included_profiles))) + + def __str__(self): + msg = 
'key:"{}"' + return msg.format(self.key) + + def __repr__(self): + return 'profile({!s})'.format(self) diff --git a/dotdrop/settings.py b/dotdrop/settings.py new file mode 100644 index 0000000..1d2a5dc --- /dev/null +++ b/dotdrop/settings.py @@ -0,0 +1,96 @@ +""" +author: deadc0de6 (https://github.com/deadc0de6) +Copyright (c) 2019, deadc0de6 + +settings block +""" + +# local imports +from dotdrop.linktypes import LinkTypes +from dotdrop.dictparser import DictParser + + +class Settings(DictParser): + # key in yaml file + key_yaml = 'config' + + # settings item keys + key_backup = 'backup' + key_banner = 'banner' + key_cmpignore = 'cmpignore' + key_create = 'create' + key_default_actions = 'default_actions' + key_dotpath = 'dotpath' + key_ignoreempty = 'ignoreempty' + key_keepdot = 'keepdot' + key_longkey = 'longkey' + key_link_dotfile_default = 'link_dotfile_default' + key_link_on_import = 'link_on_import' + key_showdiff = 'showdiff' + key_upignore = 'upignore' + key_workdir = 'workdir' + + # import keys + key_import_actions = 'import_actions' + key_import_configs = 'import_configs' + key_import_variables = 'import_variables' + + def __init__(self, backup=True, banner=True, cmpignore=[], + create=True, default_actions=[], dotpath='dotfiles', + ignoreempty=True, import_actions=[], import_configs=[], + import_variables=[], keepdot=False, + link_dotfile_default=LinkTypes.NOLINK, + link_on_import=LinkTypes.NOLINK, longkey=False, + showdiff=False, upignore=[], workdir='~/.config/dotdrop'): + self.backup = backup + self.banner = banner + self.create = create + self.cmpignore = cmpignore + self.default_actions = default_actions + self.dotpath = dotpath + self.ignoreempty = ignoreempty + self.import_actions = import_actions + self.import_configs = import_configs + self.import_variables = import_variables + self.keepdot = keepdot + self.longkey = longkey + self.showdiff = showdiff + self.upignore = upignore + self.workdir = workdir + self.link_dotfile_default = LinkTypes.get(link_dotfile_default) + self.link_on_import = LinkTypes.get(link_on_import) + + def resolve_paths(self, resolver): + """resolve path using resolver function""" + self.dotpath = resolver(self.dotpath) + self.workdir = resolver(self.workdir) + + def _serialize_seq(self, name, dic): + """serialize attribute 'name' into 'dic'""" + seq = getattr(self, name) + dic[name] = seq + + def serialize(self): + """Return key-value pair representation of the settings""" + # Tedious, but less error-prone than introspection + dic = { + self.key_backup: self.backup, + self.key_banner: self.banner, + self.key_create: self.create, + self.key_dotpath: self.dotpath, + self.key_ignoreempty: self.ignoreempty, + self.key_keepdot: self.keepdot, + self.key_link_dotfile_default: str(self.link_dotfile_default), + self.key_link_on_import: str(self.link_on_import), + self.key_longkey: self.longkey, + self.key_showdiff: self.showdiff, + self.key_workdir: self.workdir, + } + self._serialize_seq(self.key_cmpignore, dic) + self._serialize_seq(self.key_default_actions, dic) + self._serialize_seq(self.key_import_actions, dic) + self._serialize_seq(self.key_import_configs, dic) + self._serialize_seq(self.key_import_variables, dic) + self._serialize_seq(self.key_upignore, dic) + + return {self.key_yaml: dic} diff --git a/dotdrop/templategen.py b/dotdrop/templategen.py index 3b22d80..e532a5f 100644 --- a/dotdrop/templategen.py +++ b/dotdrop/templategen.py @@ -52,6 +52,8 @@ class Templategen: self.env.globals['exists_in_path'] = jhelpers.exists_in_path 
self.env.globals['basename'] = jhelpers.basename self.env.globals['dirname'] = jhelpers.dirname + if self.debug: + self.log.dbg('template additional variables: {}'.format(variables)) def generate(self, src): """render template from path""" diff --git a/dotdrop/updater.py b/dotdrop/updater.py index 65b39e1..5524d5c 100644 --- a/dotdrop/updater.py +++ b/dotdrop/updater.py @@ -81,12 +81,11 @@ class Updater: if self._ignore([path, dtpath]): self.log.sub('\"{}\" ignored'.format(dotfile.key)) return True - if dotfile.trans_w: - # apply write transformation if any - new_path = self._apply_trans_w(path, dotfile) - if not new_path: - return False - path = new_path + # apply write transformation if any + new_path = self._apply_trans_w(path, dotfile) + if not new_path: + return False + path = new_path if os.path.isdir(path): ret = self._handle_dir(path, dtpath) else: @@ -98,7 +97,9 @@ class Updater: def _apply_trans_w(self, path, dotfile): """apply write transformation to dotfile""" - trans = dotfile.trans_w + trans = dotfile.get_trans_w() + if not trans: + return path if self.debug: self.log.dbg('executing write transformation {}'.format(trans)) tmp = utils.get_unique_tmp_name() diff --git a/dotdrop/utils.py b/dotdrop/utils.py index 70b1a61..88dbf75 100644 --- a/dotdrop/utils.py +++ b/dotdrop/utils.py @@ -66,7 +66,7 @@ def get_tmpdir(): def get_tmpfile(): """create a temporary file""" - (fd, path) = tempfile.mkstemp(prefix='dotdrop-') + (_, path) = tempfile.mkstemp(prefix='dotdrop-') return path diff --git a/dotdrop/version.py b/dotdrop/version.py index e35d6cc..a658c2b 100644 --- a/dotdrop/version.py +++ b/dotdrop/version.py @@ -3,4 +3,4 @@ author: deadc0de6 (https://github.com/deadc0de6) Copyright (c) 2018, deadc0de6 """ -__version__ = '0.28.0' +__version__ = '0.27.0' diff --git a/packages/arch-dotdrop/.SRCINFO b/packages/arch-dotdrop/.SRCINFO index 26b449c..4da2e16 100644 --- a/packages/arch-dotdrop/.SRCINFO +++ b/packages/arch-dotdrop/.SRCINFO @@ -1,6 +1,6 @@ pkgbase = dotdrop pkgdesc = Save your dotfiles once, deploy them everywhere - pkgver = 0.28.0 + pkgver = 0.27.0 pkgrel = 1 url = https://github.com/deadc0de6/dotdrop arch = any @@ -11,7 +11,7 @@ pkgbase = dotdrop depends = python-jinja depends = python-docopt depends = python-pyaml - source = git+https://github.com/deadc0de6/dotdrop.git#tag=v0.28.0 + source = git+https://github.com/deadc0de6/dotdrop.git#tag=v0.27.0 md5sums = SKIP pkgname = dotdrop diff --git a/packages/arch-dotdrop/PKGBUILD b/packages/arch-dotdrop/PKGBUILD index 5891d6a..20da6a6 100644 --- a/packages/arch-dotdrop/PKGBUILD +++ b/packages/arch-dotdrop/PKGBUILD @@ -1,7 +1,7 @@ # Maintainer: deadc0de6 pkgname=dotdrop -pkgver=0.28.0 +pkgver=0.27.0 pkgrel=1 pkgdesc="Save your dotfiles once, deploy them everywhere " arch=('any') diff --git a/scripts/change-link.py b/scripts/change-link.py index 8911d1b..f2ee974 100755 --- a/scripts/change-link.py +++ b/scripts/change-link.py @@ -42,7 +42,7 @@ def main(): ignores = args['--ignore'] with open(path, 'r') as f: - content = yaml.load(f) + content = yaml.safe_load(f) for k, v in content[key].items(): if k in ignores: continue diff --git a/tests-ng/compare.sh b/tests-ng/compare.sh index e5fadfe..0460c91 100755 --- a/tests-ng/compare.sh +++ b/tests-ng/compare.sh @@ -93,6 +93,7 @@ create_conf ${cfg} # sets token echo "[+] import" cd ${ddpath} | ${bin} import -c ${cfg} ${tmpd}/dir1 cd ${ddpath} | ${bin} import -c ${cfg} ${tmpd}/uniquefile +cat ${cfg} # let's see the dotpath #tree ${basedir}/dotfiles diff --git 
a/tests-ng/dotdrop-variables.sh b/tests-ng/dotdrop-variables.sh index 3978512..3659f5f 100755 --- a/tests-ng/dotdrop-variables.sh +++ b/tests-ng/dotdrop-variables.sh @@ -79,9 +79,9 @@ echo "cfgpath: {{@@ _dotdrop_cfgpath @@}}" >> ${tmps}/dotfiles/abc echo "workdir: {{@@ _dotdrop_workdir @@}}" >> ${tmps}/dotfiles/abc # install -cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 +cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 -V -#cat ${tmpd}/abc +cat ${tmpd}/abc grep "^dotpath: ${tmps}/dotfiles$" ${tmpd}/abc >/dev/null grep "^cfgpath: ${tmps}/config.yaml$" ${tmpd}/abc >/dev/null diff --git a/tests-ng/dotfile-variables.sh b/tests-ng/dotfile-variables.sh index e23b0aa..a33c98f 100755 --- a/tests-ng/dotfile-variables.sh +++ b/tests-ng/dotfile-variables.sh @@ -81,7 +81,7 @@ cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 -V # checks [ ! -e ${tmpd}/abc ] && echo 'dotfile not installed' && exit 1 -#cat ${tmpd}/abc +cat ${tmpd}/abc grep "src:${tmps}/dotfiles/abc" ${tmpd}/abc >/dev/null grep "dst:${tmpd}/abc" ${tmpd}/abc >/dev/null grep "key:f_abc" ${tmpd}/abc >/dev/null diff --git a/tests-ng/ext-actions.sh b/tests-ng/ext-actions.sh index 550d69b..f753dbf 100755 --- a/tests-ng/ext-actions.sh +++ b/tests-ng/ext-actions.sh @@ -96,7 +96,7 @@ _EOF echo "test" > ${tmps}/dotfiles/abc # install -cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 +cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 -V # checks [ ! -e ${tmpa}/pre ] && exit 1 diff --git a/tests-ng/import-configs.sh b/tests-ng/import-configs.sh new file mode 100755 index 0000000..1507e2d --- /dev/null +++ b/tests-ng/import-configs.sh @@ -0,0 +1,130 @@ +#!/usr/bin/env bash +# author: deadc0de6 (https://github.com/deadc0de6) +# Copyright (c) 2019, deadc0de6 +# +# import config testing +# + +# exit on first error +set -e + +# all this crap to get current path +rl="readlink -f" +if ! ${rl} "${0}" >/dev/null 2>&1; then + rl="realpath" + + if ! hash ${rl}; then + echo "\"${rl}\" not found !" && exit 1 + fi +fi +cur=$(dirname "$(${rl} "${0}")") + +#hash dotdrop >/dev/null 2>&1 +#[ "$?" != "0" ] && echo "install dotdrop to run tests" && exit 1 + +#echo "called with ${1}" + +# dotdrop path can be pass as argument +ddpath="${cur}/../" +[ "${1}" != "" ] && ddpath="${1}" +[ ! 
-d ${ddpath} ] && echo "ddpath \"${ddpath}\" is not a directory" && exit 1 + +export PYTHONPATH="${ddpath}:${PYTHONPATH}" +bin="python3 -m dotdrop.dotdrop" + +echo "dotdrop path: ${ddpath}" +echo "pythonpath: ${PYTHONPATH}" + +# get the helpers +source ${cur}/helpers + +echo -e "\e[96m\e[1m==> RUNNING $(basename $BASH_SOURCE) <==\e[0m" + +################################################################ +# this is the test +################################################################ + +# the dotfile source +tmps=`mktemp -d --suffix='-dotdrop-tests'` +mkdir -p ${tmps}/dotfiles +# the dotfile destination +tmpd=`mktemp -d --suffix='-dotdrop-tests'` + +# create the config file +cfg1="${tmps}/config1.yaml" +cfg2="${tmps}/config2.yaml" + +cat > ${cfg1} << _EOF +config: + backup: true + create: true + dotpath: dotfiles + import_configs: + - ${cfg2} +dotfiles: + f_abc: + dst: ${tmpd}/abc + src: abc + f_zzz: + dst: ${tmpd}/zzz + src: zzz + f_sub: + dst: ${tmpd}/sub + src: sub +profiles: + p0: + include: + - p2 + p1: + dotfiles: + - f_abc + p3: + dotfiles: + - f_zzz + pup: + include: + - psubsub +_EOF + +cat > ${cfg2} << _EOF +config: + backup: true + create: true + dotpath: dotfiles +dotfiles: + f_def: + dst: ${tmpd}/def + src: def + f_ghi: + dst: ${tmpd}/ghi + src: ghi +profiles: + p2: + dotfiles: + - f_def + psubsub: + dotfiles: + - f_sub +_EOF + +# create the source +mkdir -p ${tmps}/dotfiles/ +echo "abc" > ${tmps}/dotfiles/abc +echo "def" > ${tmps}/dotfiles/def +echo "ghi" > ${tmps}/dotfiles/ghi +echo "zzz" > ${tmps}/dotfiles/zzz +echo "sub" > ${tmps}/dotfiles/sub + +# install +cd ${ddpath} | ${bin} listfiles -c ${cfg1} -p p0 -V | grep f_def +cd ${ddpath} | ${bin} listfiles -c ${cfg1} -p p1 -V | grep f_abc +cd ${ddpath} | ${bin} listfiles -c ${cfg1} -p p2 -V | grep f_def +cd ${ddpath} | ${bin} listfiles -c ${cfg1} -p p3 -V | grep f_zzz +cd ${ddpath} | ${bin} listfiles -c ${cfg1} -p pup -V | grep f_sub +cd ${ddpath} | ${bin} listfiles -c ${cfg1} -p psubsub -V | grep f_sub + +## CLEANING +rm -rf ${tmps} ${tmpd} + +echo "OK" +exit 0 diff --git a/tests-ng/import-profile-dotfiles.sh b/tests-ng/import-profile-dotfiles.sh new file mode 100755 index 0000000..402b041 --- /dev/null +++ b/tests-ng/import-profile-dotfiles.sh @@ -0,0 +1,127 @@ +#!/usr/bin/env bash +# author: deadc0de6 (https://github.com/deadc0de6) +# Copyright (c) 2017, deadc0de6 +# +# test the use of the keyword "import" in profiles +# returns 1 in case of error +# + +# exit on first error +set -e + +# all this crap to get current path +rl="readlink -f" +if ! ${rl} "${0}" >/dev/null 2>&1; then + rl="realpath" + + if ! hash ${rl}; then + echo "\"${rl}\" not found !" && exit 1 + fi +fi +cur=$(dirname "$(${rl} "${0}")") + +#hash dotdrop >/dev/null 2>&1 +#[ "$?" != "0" ] && echo "install dotdrop to run tests" && exit 1 + +#echo "called with ${1}" + +# dotdrop path can be pass as argument +ddpath="${cur}/../" +[ "${1}" != "" ] && ddpath="${1}" +[ ! 
-d ${ddpath} ] && echo "ddpath \"${ddpath}\" is not a directory" && exit 1 + +export PYTHONPATH="${ddpath}:${PYTHONPATH}" +bin="python3 -m dotdrop.dotdrop" + +echo "dotdrop path: ${ddpath}" +echo "pythonpath: ${PYTHONPATH}" + +# get the helpers +source ${cur}/helpers + +echo -e "\e[96m\e[1m==> RUNNING $(basename $BASH_SOURCE) <==\e[0m" + +################################################################ +# this is the test +################################################################ + +# the dotfile source +tmps=`mktemp -d --suffix='-dotdrop-tests'` +mkdir -p ${tmps}/dotfiles +# the dotfile destination +tmpd=`mktemp -d --suffix='-dotdrop-tests'` +extdotfiles="${tmps}/df_p1.yaml" + +dynextdotfiles_name="d_uid_dynvar" +dynextdotfiles="${tmps}/ext_${dynextdotfiles_name}" + +# create the config file +cfg="${tmps}/config.yaml" + +cat > ${cfg} << _EOF +config: + backup: true + create: true + dotpath: dotfiles +dynvariables: + d_uid: "echo ${dynextdotfiles_name}" +dotfiles: + f_abc: + dst: ${tmpd}/abc + src: abc + f_def: + dst: ${tmpd}/def + src: def + f_xyz: + dst: ${tmpd}/xyz + src: xyz + f_dyn: + dst: ${tmpd}/dyn + src: dyn +profiles: + p1: + dotfiles: + - f_abc + import: + - $(basename ${extdotfiles}) + - "ext_{{@@ d_uid @@}}" +_EOF + +# create the external dotfile file +cat > ${extdotfiles} << _EOF +dotfiles: + - f_def + - f_xyz +_EOF + +cat > ${dynextdotfiles} << _EOF +dotfiles: + - f_dyn +_EOF + +# create the source +mkdir -p ${tmps}/dotfiles/ +echo "abc" > ${tmps}/dotfiles/abc +echo "def" > ${tmps}/dotfiles/def +echo "xyz" > ${tmps}/dotfiles/xyz +echo "dyn" > ${tmps}/dotfiles/dyn + +# install +cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 -V + +# checks +[ ! -e ${tmpd}/abc ] && exit 1 +[ ! -e ${tmpd}/def ] && exit 1 +[ ! -e ${tmpd}/xyz ] && exit 1 +[ ! 
-e ${tmpd}/dyn ] && exit 1 +echo 'file found' +grep 'abc' ${tmpd}/abc >/dev/null 2>&1 +grep 'def' ${tmpd}/def >/dev/null 2>&1 +grep 'xyz' ${tmpd}/xyz >/dev/null 2>&1 +grep 'dyn' ${tmpd}/dyn >/dev/null 2>&1 + +## CLEANING +rm -rf ${tmps} ${tmpd} + +echo "OK" +exit 0 diff --git a/tests-ng/import.sh b/tests-ng/import.sh index c71972a..6d7c0af 100755 --- a/tests-ng/import.sh +++ b/tests-ng/import.sh @@ -1,9 +1,8 @@ #!/usr/bin/env bash # author: deadc0de6 (https://github.com/deadc0de6) -# Copyright (c) 2017, deadc0de6 +# Copyright (c) 2019, deadc0de6 # -# test the use of the keyword "import" in profiles -# returns 1 in case of error +# test basic import # # exit on first error @@ -50,10 +49,13 @@ tmps=`mktemp -d --suffix='-dotdrop-tests'` mkdir -p ${tmps}/dotfiles # the dotfile destination tmpd=`mktemp -d --suffix='-dotdrop-tests'` -extdotfiles="${tmps}/df_p1.yaml" +#echo "dotfile destination: ${tmpd}" -dynextdotfiles_name="d_uid_dynvar" -dynextdotfiles="${tmps}/ext_${dynextdotfiles_name}" +# create the dotfile +mkdir -p ${tmpd}/adir +echo "adir/file1" > ${tmpd}/adir/file1 +echo "adir/fil2" > ${tmpd}/adir/file2 +echo "file3" > ${tmpd}/file3 # create the config file cfg="${tmps}/config.yaml" @@ -63,61 +65,30 @@ config: backup: true create: true dotpath: dotfiles -dynvariables: - d_uid: "echo ${dynextdotfiles_name}" dotfiles: - f_abc: - dst: ${tmpd}/abc - src: abc - f_def: - dst: ${tmpd}/def - src: def - f_xyz: - dst: ${tmpd}/xyz - src: xyz - f_dyn: - dst: ${tmpd}/dyn - src: dyn profiles: - p1: - dotfiles: - - f_abc - import: - - $(basename ${extdotfiles}) - - "ext_{{@@ d_uid @@}}" _EOF +#cat ${cfg} -# create the external dotfile file -cat > ${extdotfiles} << _EOF -dotfiles: - - f_def - - f_xyz -_EOF +# import +cd ${ddpath} | ${bin} import -c ${cfg} -p p1 -V ${tmpd}/adir +cd ${ddpath} | ${bin} import -c ${cfg} -p p1 -V ${tmpd}/file3 -cat > ${dynextdotfiles} << _EOF -dotfiles: - - f_dyn -_EOF +cat ${cfg} -# create the source -mkdir -p ${tmps}/dotfiles/ -echo "abc" > ${tmps}/dotfiles/abc -echo "def" > ${tmps}/dotfiles/def -echo "xyz" > ${tmps}/dotfiles/xyz -echo "dyn" > ${tmps}/dotfiles/dyn +# ensure exists and is not link +[ ! -d ${tmps}/dotfiles/${tmpd}/adir ] && echo "not a directory" && exit 1 +[ ! -e ${tmps}/dotfiles/${tmpd}/adir/file1 ] && echo "not exist" && exit 1 +[ ! -e ${tmps}/dotfiles/${tmpd}/adir/file2 ] && echo "not exist" && exit 1 +[ ! -e ${tmps}/dotfiles/${tmpd}/file3 ] && echo "not a file" && exit 1 -# install -cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 -V +cat ${cfg} | grep ${tmpd}/adir >/dev/null 2>&1 +cat ${cfg} | grep ${tmpd}/file3 >/dev/null 2>&1 -# checks -[ ! -e ${tmpd}/abc ] && exit 1 -[ ! -e ${tmpd}/def ] && exit 1 -[ ! -e ${tmpd}/xyz ] && exit 1 -[ ! 
-e ${tmpd}/dyn ] && exit 1 -grep 'abc' ${tmpd}/abc >/dev/null 2>&1 -grep 'def' ${tmpd}/def >/dev/null 2>&1 -grep 'xyz' ${tmpd}/xyz >/dev/null 2>&1 -grep 'dyn' ${tmpd}/dyn >/dev/null 2>&1 +nb=`cat ${cfg} | grep d_adir | wc -l` +[ "${nb}" != "2" ] && echo 'bad config1' && exit 1 +nb=`cat ${cfg} | grep f_file3 | wc -l` +[ "${nb}" != "2" ] && echo 'bad config2' && exit 1 ## CLEANING rm -rf ${tmps} ${tmpd} diff --git a/tests-ng/include.sh b/tests-ng/include.sh index c91c05d..e5dd12e 100755 --- a/tests-ng/include.sh +++ b/tests-ng/include.sh @@ -64,12 +64,18 @@ dotfiles: dst: ${tmpd}/abc src: abc profiles: + p0: + include: + - p3 p1: dotfiles: - f_abc p2: include: - p1 + p3: + include: + - p2 _EOF # create the source @@ -82,6 +88,14 @@ cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 # compare cd ${ddpath} | ${bin} compare -c ${cfg} -p p1 cd ${ddpath} | ${bin} compare -c ${cfg} -p p2 +cd ${ddpath} | ${bin} compare -c ${cfg} -p p3 +cd ${ddpath} | ${bin} compare -c ${cfg} -p p0 + +# list +cd ${ddpath} | ${bin} listfiles -c ${cfg} -p p1 | grep f_abc +cd ${ddpath} | ${bin} listfiles -c ${cfg} -p p2 | grep f_abc +cd ${ddpath} | ${bin} listfiles -c ${cfg} -p p3 | grep f_abc +cd ${ddpath} | ${bin} listfiles -c ${cfg} -p p0 | grep f_abc # count cnt=`cd ${ddpath} | ${bin} listfiles -c ${cfg} -p p1 -b | grep '^f_' | wc -l` diff --git a/tests.sh b/tests.sh index d59efdb..3d2088b 100755 --- a/tests.sh +++ b/tests.sh @@ -8,7 +8,7 @@ set -ev # PEP8 tests which pycodestyle 2>/dev/null [ "$?" != "0" ] && echo "Install pycodestyle" && exit 1 -pycodestyle --ignore=W605 dotdrop/ +pycodestyle --ignore=W503,W504,W605 dotdrop/ pycodestyle tests/ pycodestyle scripts/ @@ -35,7 +35,17 @@ PYTHONPATH=dotdrop ${nosebin} -s --with-coverage --cover-package=dotdrop ## execute bash script tests [ "$1" = '--python-only' ] || { - for scr in tests-ng/*.sh; do - ${scr} - done + log=`mktemp` + for scr in tests-ng/*.sh; do + ${scr} 2>&1 | tee ${log} + set +e + if grep Traceback ${log}; then + echo "crash found in logs" + rm -f ${log} + exit 1 + fi + set -e + done + rm -f ${log} } + diff --git a/tests/helpers.py b/tests/helpers.py index eeaa5bd..6656a4b 100644 --- a/tests/helpers.py +++ b/tests/helpers.py @@ -171,8 +171,9 @@ def get_dotfile_from_yaml(dic, path): """Return the dotfile from the yaml dictionary""" # path is not the file in dotpath but on the FS dotfiles = dic['dotfiles'] - src = get_path_strip_version(path) - return [d for d in dotfiles.values() if d['src'] == src][0] + # src = get_path_strip_version(path) + dotfile = [d for d in dotfiles.values() if d['dst'] == path][0] + return dotfile def yaml_dashed_list(items, indent=0): @@ -256,10 +257,10 @@ def file_in_yaml(yaml_file, path, link=False): dotfiles = yaml_conf['dotfiles'].values() - in_src = strip in (x['src'] for x in dotfiles) + in_src = any([x['src'].endswith(strip) for x in dotfiles]) in_dst = path in (os.path.expanduser(x['dst']) for x in dotfiles) if link: - has_link = get_dotfile_from_yaml(yaml_conf, path)['link'] + has_link = 'link' in get_dotfile_from_yaml(yaml_conf, path) return in_src and in_dst and has_link return in_src and in_dst diff --git a/tests/test_import.py b/tests/test_import.py index 5e712f1..25bb861 100644 --- a/tests/test_import.py +++ b/tests/test_import.py @@ -33,7 +33,7 @@ class TestImport(unittest.TestCase): self.assertTrue(os.path.exists(path)) content = '' with open(path, 'r') as f: - content = yaml.load(f) + content = yaml.safe_load(f) return content def assert_file(self, path, o, profile): @@ -45,7 +45,7 @@ class 
TestImport(unittest.TestCase): def assert_in_yaml(self, path, dic, link=False): """Make sure "path" is in the "dic" representing the yaml file""" - self.assertTrue(file_in_yaml(dic, path, link)) + self.assertTrue(file_in_yaml(dic, path, link=link)) def test_import(self): """Test the import function""" @@ -117,7 +117,7 @@ class TestImport(unittest.TestCase): o = load_options(confpath, profile) # test dotfiles in config class - self.assertTrue(profile in o.profiles) + self.assertTrue(profile in [p.key for p in o.profiles]) self.assert_file(dotfile1, o, profile) self.assert_file(dotfile2, o, profile) self.assert_file(dotfile3, o, profile) @@ -218,9 +218,10 @@ class TestImport(unittest.TestCase): self.assertTrue(os.path.exists(dotdrop_home)) self.addCleanup(clean, dotdrop_home) + dotpath_ed = 'imported' imported = { 'config': { - 'dotpath': 'imported', + 'dotpath': dotpath_ed, }, 'dotfiles': {}, 'profiles': { @@ -250,9 +251,10 @@ class TestImport(unittest.TestCase): 'dv_log_ed': 'echo 5', }, } + dotpath_ing = 'importing' importing = { 'config': { - 'dotpath': 'importing', + 'dotpath': dotpath_ing, }, 'dotfiles': {}, 'profiles': { @@ -293,7 +295,7 @@ class TestImport(unittest.TestCase): # create the importing base config file importing_path = create_fake_config(dotdrop_home, configname='config.yaml', - import_configs=('config-*.yaml',), + import_configs=['config-2.yaml'], **importing['config']) # edit the imported config @@ -326,8 +328,10 @@ class TestImport(unittest.TestCase): y = self.load_yaml(imported_path) # testing dotfiles - self.assertTrue(all(file_in_yaml(y, df) for df in dotfiles_ed)) - self.assertFalse(any(file_in_yaml(y, df) for df in dotfiles_ing)) + self.assertTrue(all(file_in_yaml(y, df) + for df in dotfiles_ed)) + self.assertFalse(any(file_in_yaml(y, df) + for df in dotfiles_ing)) # testing profiles profiles = y['profiles'].keys() @@ -355,7 +359,7 @@ class TestImport(unittest.TestCase): self.assertFalse(any(t.endswith('ing') for t in transformations)) # testing variables - variables = y['variables'].keys() + variables = self._remove_priv_vars(y['variables'].keys()) self.assertTrue(all(v.endswith('ed') for v in variables)) self.assertFalse(any(v.endswith('ing') for v in variables)) dyn_variables = y['dynvariables'].keys() @@ -366,8 +370,10 @@ class TestImport(unittest.TestCase): y = self.load_yaml(importing_path) # testing dotfiles - self.assertTrue(all(file_in_yaml(y, df) for df in dotfiles_ing)) - self.assertFalse(any(file_in_yaml(y, df) for df in dotfiles_ed)) + self.assertTrue(all(file_in_yaml(y, df) + for df in dotfiles_ing)) + self.assertFalse(any(file_in_yaml(y, df) + for df in dotfiles_ed)) # testing profiles profiles = y['profiles'].keys() @@ -395,13 +401,19 @@ class TestImport(unittest.TestCase): self.assertFalse(any(t.endswith('ed') for t in transformations)) # testing variables - variables = y['variables'].keys() + variables = self._remove_priv_vars(y['variables'].keys()) self.assertTrue(all(v.endswith('ing') for v in variables)) self.assertFalse(any(v.endswith('ed') for v in variables)) dyn_variables = y['dynvariables'].keys() self.assertTrue(all(dv.endswith('ing') for dv in dyn_variables)) self.assertFalse(any(dv.endswith('ed') for dv in dyn_variables)) + def _remove_priv_vars(self, variables_keys): + variables = [v for v in variables_keys if not v.startswith('_')] + if 'profile' in variables: + variables.remove('profile') + return variables + def main(): unittest.main() diff --git a/tests/test_install.py b/tests/test_install.py index b4cd587..80e1b4f 100644 --- 
a/tests/test_install.py +++ b/tests/test_install.py @@ -9,7 +9,7 @@ import unittest from unittest.mock import MagicMock, patch import filecmp -from dotdrop.config import Cfg +from dotdrop.cfg_aggregator import CfgAggregator as Cfg from tests.helpers import (clean, create_dir, create_fake_config, create_random_file, get_string, get_tempdir, load_options, populate_fake_config) @@ -89,7 +89,7 @@ exec bspwm f1, c1 = create_random_file(tmp) dst1 = os.path.join(dst, get_string(6)) d1 = Dotfile(get_string(5), dst1, os.path.basename(f1)) - # fake a print + # fake a __str__ self.assertTrue(str(d1) != '') f2, c2 = create_random_file(tmp) dst2 = os.path.join(dst, get_string(6)) @@ -178,7 +178,7 @@ exec bspwm dotfiles = [d1, d2, d3, d4, d5, d6, d7, d8, d9, d10, ddot] self.fake_config(confpath, dotfiles, profile, tmp, [act1], [tr]) - conf = Cfg(confpath) + conf = Cfg(confpath, profile) self.assertTrue(conf is not None) # install them @@ -305,7 +305,7 @@ exec bspwm # create the importing base config file importing_path = create_fake_config(tmp, configname='config.yaml', - import_configs=('config-*.yaml',), + import_configs=['config-2.yaml'], **importing['config']) # edit the imported config diff --git a/tests/test_update.py b/tests/test_update.py index 4b30621..2fdee34 100644 --- a/tests/test_update.py +++ b/tests/test_update.py @@ -117,7 +117,7 @@ class TestUpdate(unittest.TestCase): # retrieve the path of the sub in the dotpath d1indotpath = os.path.join(o.dotpath, dotfile.src) d1indotpath = os.path.expanduser(d1indotpath) - dotfile.trans_w = trans + dotfile.trans_w = [trans] # update template o.update_path = [d3t] diff --git a/tests/test_config.py b/tests/test_yamlcfg.py similarity index 80% rename from tests/test_config.py rename to tests/test_yamlcfg.py index 2b23d7f..a2cd152 100644 --- a/tests/test_config.py +++ b/tests/test_yamlcfg.py @@ -10,7 +10,7 @@ from unittest.mock import patch import os import yaml -from dotdrop.config import Cfg +from dotdrop.cfg_yaml import CfgYaml as Cfg from dotdrop.options import Options from dotdrop.linktypes import LinkTypes from tests.helpers import (SubsetTestCase, _fake_args, clean, @@ -41,14 +41,12 @@ class TestConfig(SubsetTestCase): conf = Cfg(confpath) self.assertTrue(conf is not None) - opts = conf.get_settings() + opts = conf.settings self.assertTrue(opts is not None) self.assertTrue(opts != {}) self.assertTrue(opts['backup'] == self.CONFIG_BACKUP) self.assertTrue(opts['create'] == self.CONFIG_CREATE) - dotpath = os.path.join(tmp, self.CONFIG_DOTPATH) - self.assertTrue(opts['dotpath'] == dotpath) - self.assertTrue(conf._is_valid()) + self.assertTrue(opts['dotpath'] == self.CONFIG_DOTPATH) self.assertTrue(conf.dump() != '') def test_def_link(self): @@ -68,8 +66,8 @@ class TestConfig(SubsetTestCase): 'link_children') self._test_link_import_fail('whatever') - @patch('dotdrop.config.open', create=True) - @patch('dotdrop.config.os.path.exists', create=True) + @patch('dotdrop.cfg_yaml.open', create=True) + @patch('dotdrop.cfg_yaml.os.path.exists', create=True) def _test_link_import(self, cfgstring, expected, cliargs, mock_exists, mock_open): data = ''' @@ -99,8 +97,8 @@ profiles: self.assertTrue(o.import_link == expected) - @patch('dotdrop.config.open', create=True) - @patch('dotdrop.config.os.path.exists', create=True) + @patch('dotdrop.cfg_yaml.open', create=True) + @patch('dotdrop.cfg_yaml.os.path.exists', create=True) def _test_link_import_fail(self, value, mock_exists, mock_open): data = ''' config: @@ -125,7 +123,7 @@ profiles: args['--profile'] = 'p1' 
args['--cfg'] = 'mocked' - with self.assertRaisesRegex(ValueError, 'config is not valid'): + with self.assertRaises(ValueError): o = Options(args=args) print(o.import_link) @@ -143,7 +141,7 @@ profiles: # edit the config with open(confpath, 'r') as f: - content = yaml.load(f) + content = yaml.safe_load(f) # adding dotfiles df1key = 'f_vimrc' @@ -171,22 +169,22 @@ profiles: self.assertTrue(conf is not None) # test profile - profiles = conf.get_profiles() + profiles = conf.profiles self.assertTrue(pf1key in profiles) self.assertTrue(pf2key in profiles) # test dotfiles - dotfiles = conf._get_dotfiles(pf1key) - self.assertTrue(df1key in [x.key for x in dotfiles]) - self.assertTrue(df2key in [x.key for x in dotfiles]) - dotfiles = conf._get_dotfiles(pf2key) - self.assertTrue(df1key in [x.key for x in dotfiles]) - self.assertFalse(df2key in [x.key for x in dotfiles]) + dotfiles = conf.profiles[pf1key]['dotfiles'] + self.assertTrue(df1key in dotfiles) + self.assertTrue(df2key in dotfiles) + dotfiles = conf.profiles[pf2key]['dotfiles'] + self.assertTrue(df1key in dotfiles) + self.assertFalse(df2key in dotfiles) # test not existing included profile # edit the config with open(confpath, 'r') as f: - content = yaml.load(f) + content = yaml.safe_load(f) content['profiles'] = { pf1key: {'dotfiles': [df2key], 'include': ['host2']}, pf2key: {'dotfiles': [df1key], 'include': ['host3']} @@ -227,22 +225,26 @@ profiles: vars_ing_file = create_yaml_keyval(vars_ing, tmp) actions_ed = { - 'pre': { - 'a_pre_action_ed': 'echo pre 22', - }, - 'post': { - 'a_post_action_ed': 'echo post 22', - }, - 'a_action_ed': 'echo 22', + 'actions': { + 'pre': { + 'a_pre_action_ed': 'echo pre 22', + }, + 'post': { + 'a_post_action_ed': 'echo post 22', + }, + 'a_action_ed': 'echo 22', + } } actions_ing = { - 'pre': { - 'a_pre_action_ing': 'echo pre aa', - }, - 'post': { - 'a_post_action_ing': 'echo post aa', - }, - 'a_action_ing': 'echo aa', + 'actions': { + 'pre': { + 'a_pre_action_ing': 'echo pre aa', + }, + 'post': { + 'a_post_action_ing': 'echo post aa', + }, + 'a_action_ing': 'echo aa', + } } actions_ed_file = create_yaml_keyval(actions_ed, tmp) actions_ing_file = create_yaml_keyval(actions_ing, tmp) @@ -328,7 +330,9 @@ profiles: # create the importing base config file importing_path = create_fake_config(tmp, configname=self.CONFIG_NAME, - import_configs=('config-*.yaml',), + import_configs=[ + self.CONFIG_NAME_2 + ], **importing['config']) # edit the imported config @@ -352,17 +356,28 @@ profiles: self.assertIsNotNone(imported_cfg) # test profiles - self.assertIsSubset(imported_cfg.lnk_profiles, - importing_cfg.lnk_profiles) + self.assertIsSubset(imported_cfg.profiles, + importing_cfg.profiles) # test dotfiles self.assertIsSubset(imported_cfg.dotfiles, importing_cfg.dotfiles) # test actions - self.assertIsSubset(imported_cfg.actions['pre'], - importing_cfg.actions['pre']) - self.assertIsSubset(imported_cfg.actions['post'], - importing_cfg.actions['post']) + pre_ed = post_ed = pre_ing = post_ing = {} + for k, v in imported_cfg.actions.items(): + kind, _ = v + if kind == 'pre': + pre_ed[k] = v + elif kind == 'post': + post_ed[k] = v + for k, v in importing_cfg.actions.items(): + kind, _ = v + if kind == 'pre': + pre_ing[k] = v + elif kind == 'post': + post_ing[k] = v + self.assertIsSubset(pre_ed, pre_ing) + self.assertIsSubset(post_ed, post_ing) # test transactions self.assertIsSubset(imported_cfg.trans_r, importing_cfg.trans_r) @@ -371,18 +386,18 @@ profiles: # test variables imported_vars = { k: v - for k, v in 
imported_cfg.get_variables(None).items() + for k, v in imported_cfg.variables.items() if not k.startswith('_') } importing_vars = { k: v - for k, v in importing_cfg.get_variables(None).items() + for k, v in importing_cfg.variables.items() if not k.startswith('_') } self.assertIsSubset(imported_vars, importing_vars) # test prodots - self.assertIsSubset(imported_cfg.prodots, importing_cfg.prodots) + self.assertIsSubset(imported_cfg.profiles, importing_cfg.profiles) def test_import_configs_override(self): """Test import_configs when some config keys overlap.""" @@ -410,22 +425,26 @@ profiles: vars_ing_file = create_yaml_keyval(vars_ing, tmp) actions_ed = { - 'pre': { - 'a_pre_action': 'echo pre 22', - }, - 'post': { - 'a_post_action': 'echo post 22', - }, - 'a_action': 'echo 22', + 'actions': { + 'pre': { + 'a_pre_action': 'echo pre 22', + }, + 'post': { + 'a_post_action': 'echo post 22', + }, + 'a_action': 'echo 22', + } } actions_ing = { - 'pre': { - 'a_pre_action': 'echo pre aa', - }, - 'post': { - 'a_post_action': 'echo post aa', - }, - 'a_action': 'echo aa', + 'actions': { + 'pre': { + 'a_pre_action': 'echo pre aa', + }, + 'post': { + 'a_post_action': 'echo post aa', + }, + 'a_action': 'echo aa', + } } actions_ed_file = create_yaml_keyval(actions_ed, tmp) actions_ing_file = create_yaml_keyval(actions_ing, tmp) @@ -542,8 +561,8 @@ profiles: self.assertIsNotNone(imported_cfg) # test profiles - self.assertIsSubset(imported_cfg.lnk_profiles, - importing_cfg.lnk_profiles) + self.assertIsSubset(imported_cfg.profiles, + importing_cfg.profiles) # test dotfiles self.assertEqual(importing_cfg.dotfiles['f_vimrc'], @@ -553,14 +572,9 @@ profiles: # test actions self.assertFalse(any( - (imported_cfg.actions['pre'][key] - == importing_cfg.actions['pre'][key]) - for key in imported_cfg.actions['pre'] - )) - self.assertFalse(any( - (imported_cfg.actions['post'][key] - == importing_cfg.actions['post'][key]) - for key in imported_cfg.actions['post'] + (imported_cfg.actions[key] + == importing_cfg.actions[key]) + for key in imported_cfg.actions )) # test transactions @@ -574,20 +588,20 @@ profiles: )) # test variables - imported_vars = imported_cfg.get_variables(None) + imported_vars = imported_cfg.variables self.assertFalse(any( imported_vars[k] == v - for k, v in importing_cfg.get_variables(None).items() + for k, v in importing_cfg.variables.items() if not k.startswith('_') )) - # test prodots - self.assertEqual(imported_cfg.prodots['host1'], - importing_cfg.prodots['host1']) - self.assertNotEqual(imported_cfg.prodots['host2'], - importing_cfg.prodots['host2']) - self.assertTrue(set(imported_cfg.prodots['host1']) - < set(importing_cfg.prodots['host2'])) + # test profiles dotfiles + self.assertEqual(imported_cfg.profiles['host1']['dotfiles'], + importing_cfg.profiles['host1']['dotfiles']) + self.assertNotEqual(imported_cfg.profiles['host2']['dotfiles'], + importing_cfg.profiles['host2']['dotfiles']) + self.assertTrue(set(imported_cfg.profiles['host1']['dotfiles']) + < set(importing_cfg.profiles['host2']['dotfiles'])) def main():