
refactor the parsing

deadc0de6
2019-05-31 18:30:19 +02:00
parent 18cc3bc2ac
commit 6e6c5fb2e3
33 changed files with 1739 additions and 1486 deletions


@@ -10,10 +10,10 @@ import subprocess
import os
# local imports
from dotdrop.logger import Logger
from dotdrop.dictparser import DictParser
class Cmd:
class Cmd(DictParser):
eq_ignore = ('log',)
def __init__(self, key, action):
@@ -23,7 +23,10 @@ class Cmd:
"""
self.key = key
self.action = action
self.log = Logger()
@classmethod
def _adjust_yaml_keys(cls, value):
return {'action': value}
def __str__(self):
return 'key:{} -> \"{}\"'.format(self.key, self.action)
@@ -50,20 +53,35 @@ class Cmd:
class Action(Cmd):
def __init__(self, key, kind, action, *args):
pre = 'pre'
post = 'post'
def __init__(self, key, kind, action):
"""constructor
@key: action key
@kind: type of action (pre or post)
@action: action string
@args: action arguments
"""
super(Action, self).__init__(key, action)
self.kind = kind
self.args = args
self.args = []
@classmethod
def parse(cls, key, value):
"""parse key value into object"""
v = {}
v['kind'], v['action'] = value
return cls(key=key, **v)
def copy(self, args):
"""return a copy of this object with arguments"""
action = Action(self.key, self.kind, self.action)
action.args = args
return action
def __str__(self):
out = '{}: \"{}\" with args: {}'
return out.format(self.key, self.action, self.args)
out = '{}: \"{}\" ({})'
return out.format(self.key, self.action, self.kind)
def __repr__(self):
return 'action({})'.format(self.__str__())
@@ -74,6 +92,7 @@ class Action(Cmd):
action = self.action
if templater:
action = templater.generate_string(self.action)
cmd = action
try:
cmd = action.format(*self.args)
except IndexError:
@@ -94,9 +113,11 @@ class Action(Cmd):
class Transform(Cmd):
def transform(self, arg0, arg1):
"""execute transformation with {0} and {1}
where {0} is the file to transform and
{1} is the result file"""
"""
execute transformation with {0} and {1}
where {0} is the file to transform
and {1} is the result file
"""
ret = 1
cmd = self.action.format(arg0, arg1)
if os.path.exists(arg1):
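As an aside, a minimal usage sketch of the refactored Action above (illustrative only, not part of the commit; it assumes this revision of dotdrop is importable): actions are parsed from (kind, action-string) pairs, and per-dotfile references carrying arguments get a copy that holds those args for execute() to format in.

from dotdrop.action import Action

# parse a (kind, action) pair as produced by the lower-level config parser
act = Action.parse('greet', (Action.post, 'echo hello {0}'))
print(act)                      # greet: "echo hello {0}" (post)

# a reference like "greet world" yields a copy carrying the arguments;
# execute() later formats the command to "echo hello world"
withargs = act.copy(['world'])
print(withargs.args)            # ['world']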

306
dotdrop/cfg_aggregator.py Normal file

@@ -0,0 +1,306 @@
"""
author: deadc0de6 (https://github.com/deadc0de6)
Copyright (c) 2019, deadc0de6
handle higher level of the config file
"""
import os
import shlex
# local imports
from dotdrop.cfg_yaml import CfgYaml
from dotdrop.dotfile import Dotfile
from dotdrop.settings import Settings
from dotdrop.profile import Profile
from dotdrop.action import Action, Transform
from dotdrop.logger import Logger
from dotdrop.utils import strip_home
class CfgAggregator:
file_prefix = 'f'
dir_prefix = 'd'
key_sep = '_'
def __init__(self, path, profile=None, debug=False):
"""
high level config parser
@path: path to the config file
@profile: selected profile
@debug: debug flag
"""
self.path = path
self.profile = profile
self.debug = debug
self.log = Logger()
self._load()
def _load(self):
"""load lower level config"""
self.cfgyaml = CfgYaml(self.path,
self.profile,
debug=self.debug)
# settings
self.settings = Settings.parse(None, self.cfgyaml.settings)
self.settings.resolve_paths(self.cfgyaml.resolve_path)
if self.debug:
self.log.dbg('settings: {}'.format(self.settings))
# dotfiles
self.dotfiles = Dotfile.parse_dict(self.cfgyaml.dotfiles)
if self.debug:
self.log.dbg('dotfiles: {}'.format(self.dotfiles))
# profiles
self.profiles = Profile.parse_dict(self.cfgyaml.profiles)
if self.debug:
self.log.dbg('profiles: {}'.format(self.profiles))
# actions
self.actions = Action.parse_dict(self.cfgyaml.actions)
if self.debug:
self.log.dbg('actions: {}'.format(self.actions))
# trans_r
self.trans_r = Transform.parse_dict(self.cfgyaml.trans_r)
if self.debug:
self.log.dbg('trans_r: {}'.format(self.trans_r))
# trans_w
self.trans_w = Transform.parse_dict(self.cfgyaml.trans_w)
if self.debug:
self.log.dbg('trans_w: {}'.format(self.trans_w))
# variables
self.variables = self.cfgyaml.variables
if self.debug:
self.log.dbg('variables: {}'.format(self.variables))
# patch dotfiles in profiles
self._patch_keys_to_objs(self.profiles,
"dotfiles", self.get_dotfile)
# patch action in actions
self._patch_keys_to_objs(self.dotfiles,
"actions", self._get_action_w_args)
self._patch_keys_to_objs(self.profiles,
"actions", self._get_action_w_args)
# patch default actions in settings
self._patch_keys_to_objs([self.settings],
"default_actions", self._get_action_w_args)
if self.debug:
msg = 'default actions: {}'.format(self.settings.default_actions)
self.log.dbg(msg)
# patch trans_w/trans_r in dotfiles
self._patch_keys_to_objs(self.dotfiles,
"trans_r", self.get_trans_r)
self._patch_keys_to_objs(self.dotfiles,
"trans_w", self.get_trans_w)
def _patch_keys_to_objs(self, containers, keys, get_by_key):
"""
for each object in "containers", replace the list of
keys stored in the attribute "keys" with the objects
returned by the function "get_by_key"
"""
if not containers:
return
if self.debug:
self.log.dbg('patching {} ...'.format(keys))
for c in containers:
objects = []
okeys = getattr(c, keys)
if not okeys:
continue
for k in okeys:
o = get_by_key(k)
if not o:
err = 'bad key for \"{}\": {}'.format(c.key, k)
raise Exception(err)
objects.append(o)
if self.debug:
self.log.dbg('patching {}.{} with {}'.format(c, keys, objects))
setattr(c, keys, objects)
def new(self, src, dst, link, profile_key):
"""
import a new dotfile
@src: path in dotpath
@dst: path in FS
@link: LinkType
@profile_key: to which profile
"""
home = os.path.expanduser('~')
dst = dst.replace(home, '~', 1)
dotfile = self._get_dotfile_by_dst(dst)
if not dotfile:
# get a new dotfile with a unique key
key = self._get_new_dotfile_key(dst)
if self.debug:
self.log.dbg('new dotfile key: {}'.format(key))
# add the dotfile
self.cfgyaml.add_dotfile(key, src, dst, link)
dotfile = Dotfile(key, dst, src)
key = dotfile.key
ret = self.cfgyaml.add_dotfile_to_profile(key, profile_key)
if self.debug:
self.log.dbg('new dotfile {} to profile {}'.format(key,
profile_key))
# reload
self.cfgyaml.save()
if self.debug:
self.log.dbg('RELOADING')
self._load()
return ret
def _get_new_dotfile_key(self, dst):
"""return a new unique dotfile key"""
path = os.path.expanduser(dst)
existing_keys = [x.key for x in self.dotfiles]
if self.settings.longkey:
return self._get_long_key(path, existing_keys)
return self._get_short_key(path, existing_keys)
def _norm_key_elem(self, elem):
"""normalize path element for sanity"""
elem = elem.lstrip('.')
elem = elem.replace(' ', '-')
return elem.lower()
def _split_path_for_key(self, path):
"""return a list of path elements, excluding the home path"""
p = strip_home(path)
dirs = []
while True:
p, f = os.path.split(p)
dirs.append(f)
if not p or not f:
break
dirs.reverse()
# remove empty entries
dirs = filter(None, dirs)
# normalize entries
return list(map(self._norm_key_elem, dirs))
def _get_long_key(self, path, keys):
"""
return a unique long key representing the
absolute path of path
"""
dirs = self._split_path_for_key(path)
prefix = self.dir_prefix if os.path.isdir(path) else self.file_prefix
key = self.key_sep.join([prefix, *dirs])
return self._uniq_key(key, keys)
def _get_short_key(self, path, keys):
"""
return a unique short key for path,
which is known not to be an existing dotfile
"""
dirs = self._split_path_for_key(path)
dirs.reverse()
prefix = self.dir_prefix if os.path.isdir(path) else self.file_prefix
entries = []
for d in dirs:
entries.insert(0, d)
key = self.key_sep.join([prefix, *entries])
if key not in keys:
return key
return self._uniq_key(key, keys)
def _uniq_key(self, key, keys):
"""unique dotfile key"""
newkey = key
cnt = 1
while newkey in keys:
# the key already exists
# append an increasing counter
newkey = self.key_sep.join([key, str(cnt)])
cnt += 1
return newkey
def _get_dotfile_by_dst(self, dst):
"""get a dotfile by dst"""
try:
return next(d for d in self.dotfiles if d.dst == dst)
except StopIteration:
return None
def save(self):
"""save the config"""
return self.cfgyaml.save()
def dump(self):
"""dump the config dictionary"""
return self.cfgyaml.dump()
def get_settings(self):
"""return settings as a dict"""
return self.settings.serialize()[Settings.key_yaml]
def get_variables(self):
"""return variables"""
return self.variables
def get_profiles(self):
"""return profiles"""
return self.profiles
def get_dotfiles(self, profile=None):
"""return dotfiles dict for this profile key"""
if not profile:
return self.dotfiles
try:
return next(x.dotfiles for x in self.profiles if x.key == profile)
except StopIteration:
return []
def get_dotfile(self, key):
"""return dotfile by key"""
try:
return next(x for x in self.dotfiles if x.key == key)
except StopIteration:
return None
def get_action(self, key):
"""return action by key"""
try:
return next(x for x in self.actions if x.key == key)
except StopIteration:
return None
def _get_action_w_args(self, key):
"""return action by key with the arguments"""
fields = shlex.split(key)
if len(fields) > 1:
# we have args
key, *args = fields
if self.debug:
self.log.dbg('action {} with args {}'.format(key, args))
action = self.get_action(key).copy(args)
else:
action = self.get_action(key)
return action
def get_trans_r(self, key):
"""return the trans_r with this key"""
try:
return next(x for x in self.trans_r if x.key == key)
except StopIteration:
return None
def get_trans_w(self, key):
"""return the trans_w with this key"""
try:
return next(x for x in self.trans_w if x.key == key)
except StopIteration:
return None
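To make the key generation above concrete, here is a standalone approximation (illustrative only): strip_home from dotdrop.utils is replaced by a simple prefix strip, and the file/dir prefix is passed explicitly instead of testing the filesystem.

import os

def norm(elem):
    # mirrors _norm_key_elem: drop leading dots, spaces to dashes, lowercase
    return elem.lstrip('.').replace(' ', '-').lower()

def long_key(path, prefix='f', sep='_'):
    # mirrors _get_long_key for a file path
    home = os.path.expanduser('~')
    rel = path[len(home):] if path.startswith(home) else path
    parts = [norm(p) for p in rel.split(os.sep) if p]
    return sep.join([prefix] + parts)

print(long_key(os.path.expanduser('~/.config/polybar/config')))
# -> f_config_polybar_config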

626
dotdrop/cfg_yaml.py Normal file

@@ -0,0 +1,626 @@
"""
author: deadc0de6 (https://github.com/deadc0de6)
Copyright (c) 2019, deadc0de6
handle lower level of the config file
"""
import os
import yaml
# local imports
from dotdrop.settings import Settings
from dotdrop.logger import Logger
from dotdrop.templategen import Templategen
from dotdrop.linktypes import LinkTypes
from dotdrop.utils import shell
class CfgYaml:
# global entries
key_settings = 'config'
key_dotfiles = 'dotfiles'
key_profiles = 'profiles'
key_actions = 'actions'
key_trans_r = 'trans'
key_trans_w = 'trans_write'
key_variables = 'variables'
key_dvariables = 'dynvariables'
action_pre = 'pre'
action_post = 'post'
# profiles/dotfiles entries
key_profiles_dotfiles = 'dotfiles'
key_dotfile_src = 'src'
key_dotfile_dst = 'dst'
key_dotfile_link = 'link'
key_dotfile_actions = 'actions'
key_dotfile_link_children = 'link_children'
# profile
key_profile_include = 'include'
key_profile_variables = 'variables'
key_profile_dvariables = 'dynvariables'
key_all = 'ALL'
# import entries
key_import_actions = 'import_actions'
key_import_configs = 'import_configs'
key_import_variables = 'import_variables'
key_import_profile_dfs = 'import'
# settings
key_settings_dotpath = 'dotpath'
key_settings_workdir = 'workdir'
key_settings_link_dotfile_default = 'link_dotfile_default'
key_imp_link = 'link_on_import'
# link values
lnk_nolink = LinkTypes.NOLINK.name.lower()
lnk_link = LinkTypes.LINK.name.lower()
lnk_children = LinkTypes.LINK_CHILDREN.name.lower()
def __init__(self, path, profile=None, debug=False):
"""
config parser
@path: config file path
@profile: the selected profile
@debug: debug flag
"""
self.path = os.path.abspath(path)
self.profile = profile
self.debug = debug
self.log = Logger()
self.dirty = False
self.yaml_dict = self._load_yaml(self.path)
self._fix_deprecated(self.yaml_dict)
self._parse_main_yaml(self.yaml_dict)
if self.debug:
self.log.dbg('current dict: {}'.format(self.yaml_dict))
# resolve variables
allvars = self._merge_and_apply_variables()
self.variables.update(allvars)
# process imported configs
self._resolve_import_configs()
# process other imports
self._resolve_imports()
# process diverse options
self._resolve_rest()
# patch dotfiles paths
self._resolve_dotfile_paths()
def _parse_main_yaml(self, dic):
"""parse the different blocks"""
self.ori_settings = self._get_entry(self.yaml_dict, self.key_settings)
self.settings = Settings(None).serialize().get(self.key_settings)
self.settings.update(self.ori_settings)
if self.debug:
self.log.dbg('settings: {}'.format(self.settings))
# dotfiles
self.dotfiles = self._get_entry(self.yaml_dict, self.key_dotfiles)
if self.debug:
self.log.dbg('dotfiles: {}'.format(self.dotfiles))
# profiles
self.profiles = self._get_entry(self.yaml_dict, self.key_profiles)
if self.debug:
self.log.dbg('profiles: {}'.format(self.profiles))
# actions
self.actions = self._get_entry(self.yaml_dict, self.key_actions,
mandatory=False)
self.actions = self._patch_actions(self.actions)
if self.debug:
self.log.dbg('actions: {}'.format(self.actions))
# trans_r
self.trans_r = self._get_entry(self.yaml_dict, self.key_trans_r,
mandatory=False)
if self.debug:
self.log.dbg('trans_r: {}'.format(self.trans_r))
# trans_w
self.trans_w = self._get_entry(self.yaml_dict, self.key_trans_w,
mandatory=False)
if self.debug:
self.log.dbg('trans_w: {}'.format(self.trans_w))
# variables
self.variables = self._get_entry(self.yaml_dict, self.key_variables,
mandatory=False)
if self.debug:
self.log.dbg('variables: {}'.format(self.variables))
# dynvariables
self.dvariables = self._get_entry(self.yaml_dict, self.key_dvariables,
mandatory=False)
if self.debug:
self.log.dbg('dvariables: {}'.format(self.dvariables))
def _resolve_dotfile_paths(self):
"""resolve dotfile paths"""
for dotfile in self.dotfiles.values():
src = dotfile[self.key_dotfile_src]
src = os.path.join(self.settings[self.key_settings_dotpath], src)
dotfile[self.key_dotfile_src] = self.resolve_path(src)
dst = dotfile[self.key_dotfile_dst]
dotfile[self.key_dotfile_dst] = self.resolve_path(dst)
def _merge_and_apply_variables(self):
"""
resolve all variables across the config
apply them to any needed entries
and return the full list of variables
"""
# first construct the list of variables
var = self._get_variables_dict(self.profile, seen=[self.profile])
dvar = self._get_dvariables_dict(self.profile, seen=[self.profile])
# recursive resolve variables
allvars = var.copy()
allvars.update(dvar)
if self.debug:
self.log.dbg('all variables: {}'.format(allvars))
t = Templategen(variables=allvars)
for k in allvars.keys():
val = allvars[k]
while Templategen.var_is_template(val):
val = t.generate_string(val)
allvars[k] = val
t.update_variables(allvars)
# exec dynvariables
for k in dvar.keys():
allvars[k] = shell(allvars[k])
if self.debug:
self.log.dbg('variables:')
for k, v in allvars.items():
self.log.dbg('\t\"{}\": {}'.format(k, v))
if self.debug:
self.log.dbg('resolve all uses of variables in config')
# now resolve blocks
t = Templategen(variables=allvars)
# dotfiles entries
for k, v in self.dotfiles.items():
# src
src = v.get(self.key_dotfile_src)
v[self.key_dotfile_src] = t.generate_string(src)
# dst
dst = v.get(self.key_dotfile_dst)
v[self.key_dotfile_dst] = t.generate_string(dst)
# actions
new = []
for a in v.get(self.key_dotfile_actions, []):
new.append(t.generate_string(a))
if new:
if self.debug:
self.log.dbg('resolved: {}'.format(new))
v[self.key_dotfile_actions] = new
# external actions paths
new = []
for p in self.settings.get(self.key_import_actions, []):
new.append(t.generate_string(p))
if new:
if self.debug:
self.log.dbg('resolved: {}'.format(new))
self.settings[self.key_import_actions] = new
# external config paths
new = []
for p in self.settings.get(self.key_import_configs, []):
new.append(t.generate_string(p))
if new:
if self.debug:
self.log.dbg('resolved: {}'.format(new))
self.settings[self.key_import_configs] = new
# external variables paths
new = []
for p in self.settings.get(self.key_import_variables, []):
new.append(t.generate_string(p))
if new:
if self.debug:
self.log.dbg('resolved: {}'.format(new))
self.settings[self.key_import_variables] = new
# external profiles dotfiles
for k, v in self.profiles.items():
new = []
for p in v.get(self.key_import_profile_dfs, []):
new.append(t.generate_string(p))
if new:
if self.debug:
self.log.dbg('resolved: {}'.format(new))
v[self.key_import_profile_dfs] = new
return allvars
def _patch_actions(self, actions):
"""
ensure each action is explicitly either pre or post
action entry of the form {action_key: (pre|post, action)}
"""
if not actions:
return actions
new = {}
for k, v in actions.items():
if k == self.action_pre or k == self.action_post:
for key, action in v.items():
new[key] = (k, action)
else:
new[k] = (self.action_pre, v)
return new
def _get_variables_dict(self, profile, seen, sub=False):
"""return enriched variables"""
variables = {}
if not sub:
# add profile variable
if profile:
variables['profile'] = profile
# add some more variables
p = self.settings.get(self.key_settings_dotpath)
p = self.resolve_path(p)
variables['_dotdrop_dotpath'] = p
variables['_dotdrop_cfgpath'] = self.resolve_path(self.path)
p = self.settings.get(self.key_settings_workdir)
p = self.resolve_path(p)
variables['_dotdrop_workdir'] = p
# variables
variables.update(self.variables)
if not profile or profile not in self.profiles.keys():
return variables
# profile entry
pentry = self.profiles.get(profile)
# inherit profile variables
for inherited_profile in pentry.get(self.key_profile_include, []):
if inherited_profile == profile or inherited_profile in seen:
raise Exception('\"include\" loop')
seen.append(inherited_profile)
new = self._get_variables_dict(inherited_profile, seen, sub=True)
variables.update(new)
# overwrite with profile variables
for k, v in pentry.get(self.key_profile_variables, {}).items():
variables[k] = v
return variables
def _get_dvariables_dict(self, profile, seen, sub=False):
"""return dynvariables"""
variables = {}
# dynvariables
variables.update(self.dvariables)
if not profile or profile not in self.profiles.keys():
return variables
# profile entry
pentry = self.profiles.get(profile)
# inherit profile dynvariables
for inherited_profile in pentry.get(self.key_profile_include, []):
if inherited_profile == profile or inherited_profile in seen:
raise Exception('\"include\" loop')
seen.append(inherited_profile)
new = self._get_dvariables_dict(inherited_profile, seen, sub=True)
variables.update(new)
# overwrite with profile dynvariables
for k, v in pentry.get(self.key_profile_dvariables, {}).items():
variables[k] = v
return variables
def _resolve_imports(self):
"""handle all the imports"""
# settings -> import_variables
imp = self.settings.get(self.key_import_variables, None)
if imp:
for p in imp:
path = self.resolve_path(p)
if self.debug:
self.log.dbg('import variables from {}'.format(path))
self.variables = self._import_sub(path, self.key_variables,
self.variables,
mandatory=False)
self.dvariables = self._import_sub(path, self.key_dvariables,
self.dvariables,
mandatory=False)
# settings -> import_actions
imp = self.settings.get(self.key_import_actions, None)
if imp:
for p in imp:
path = self.resolve_path(p)
if self.debug:
self.log.dbg('import actions from {}'.format(path))
self.actions = self._import_sub(path, self.key_actions,
self.actions, mandatory=False,
patch_func=self._patch_actions)
# profiles -> import
for k, v in self.profiles.items():
imp = v.get(self.key_import_profile_dfs, None)
if not imp:
continue
if self.debug:
self.log.dbg('import dotfiles for profile {}'.format(k))
for p in imp:
current = v.get(self.key_dotfiles, [])
path = self.resolve_path(p)
current = self._import_sub(path, self.key_dotfiles,
current, mandatory=False)
v[self.key_dotfiles] = current
def _resolve_import_configs(self):
"""resolve import_configs"""
# settings -> import_configs
imp = self.settings.get(self.key_import_configs, None)
if not imp:
return
for p in imp:
path = self.resolve_path(p)
if self.debug:
self.log.dbg('import config from {}'.format(path))
sub = CfgYaml(path, debug=self.debug)
# settings is ignored
self.dotfiles = self._merge_dict(self.dotfiles, sub.dotfiles)
self.profiles = self._merge_dict(self.profiles, sub.profiles)
self.actions = self._merge_dict(self.actions, sub.actions)
self.trans_r = self._merge_dict(self.trans_r, sub.trans_r)
self.trans_w = self._merge_dict(self.trans_w, sub.trans_w)
self.variables = self._merge_dict(self.variables, sub.variables)
self.dvariables = self._merge_dict(self.dvariables, sub.dvariables)
def _resolve_rest(self):
"""resolve some other parts of the config"""
# profile -> ALL
for k, v in self.profiles.items():
dfs = v.get(self.key_profiles_dotfiles, None)
if not dfs:
continue
if self.debug:
self.log.dbg('add ALL to profile {}'.format(k))
if self.key_all in dfs:
v[self.key_profiles_dotfiles] = self.dotfiles.keys()
# profiles -> include other profile
for k, v in self.profiles.items():
self._rec_resolve_profile_include(k)
def _rec_resolve_profile_include(self, profile):
"""recursively resolve include of other profiles' dotfiles"""
values = self.profiles[profile]
current = values.get(self.key_profiles_dotfiles, [])
inc = values.get(self.key_profile_include, None)
if not inc:
return current
seen = []
for i in inc:
if i in seen:
raise Exception('\"include\" loop')
seen.append(i)
if i not in self.profiles.keys():
self.log.warn('include unknown profile: {}'.format(i))
continue
p = self.profiles[i]
others = p.get(self.key_profiles_dotfiles, [])
if self.key_profile_include in p.keys():
others.extend(self._rec_resolve_profile_include(i))
current.extend(others)
# unique them
values[self.key_profiles_dotfiles] = list(set(current))
return values.get(self.key_profiles_dotfiles, [])
def resolve_path(self, path):
"""resolve a path either absolute or relative to config path"""
path = os.path.expanduser(path)
if not os.path.isabs(path):
d = os.path.dirname(self.path)
return os.path.join(d, path)
return os.path.normpath(path)
def _import_sub(self, path, key, current,
mandatory=False, patch_func=None):
"""
import the block "key" from "path"
and merge it with "current"
patch_func is applied before merge if defined
"""
if self.debug:
self.log.dbg('import \"{}\" from \"{}\"'.format(key, path))
self.log.dbg('current: {}'.format(current))
extdict = self._load_yaml(path)
new = self._get_entry(extdict, key, mandatory=mandatory)
if patch_func:
new = patch_func(new)
if not new:
self.log.warn('no \"{}\" imported from \"{}\"'.format(key, path))
return
if self.debug:
self.log.dbg('found: {}'.format(new))
if isinstance(current, dict) and isinstance(new, dict):
# imported entries take precedence over current ones
current = {**current, **new}
elif isinstance(current, list) and isinstance(new, list):
current = [*current, *new]
else:
raise Exception('invalid import {} from {}'.format(key, path))
if self.debug:
self.log.dbg('new \"{}\": {}'.format(key, current))
return current
def _merge_dict(self, high, low):
"""merge low into high"""
return {**low, **high}
def _get_entry(self, yaml_dict, key, mandatory=True):
"""return entry from yaml dictionary"""
if key not in yaml_dict:
if mandatory:
raise Exception('invalid config: no {} found'.format(key))
yaml_dict[key] = {}
return yaml_dict[key]
if mandatory and not yaml_dict[key]:
# ensure is not none
yaml_dict[key] = {}
return yaml_dict[key]
def _load_yaml(self, path):
"""load a yaml file to a dict"""
content = {}
if not os.path.exists(path):
raise Exception('config path not found: {}'.format(path))
with open(path, 'r') as f:
try:
content = yaml.safe_load(f)
except Exception as e:
self.log.err(e)
raise Exception('invalid config: {}'.format(path))
return content
def _new_profile(self, key):
"""add a new profile if it doesn't exist"""
if key not in self.profiles.keys():
# update yaml_dict
self.yaml_dict[self.key_profiles][key] = {
self.key_profiles_dotfiles: []
}
if self.debug:
self.log.dbg('adding new profile: {}'.format(key))
self.dirty = True
def add_dotfile_to_profile(self, dotfile_key, profile_key):
"""add an existing dotfile key to a profile_key"""
self._new_profile(profile_key)
profile = self.yaml_dict[self.key_profiles][profile_key]
if dotfile_key not in profile[self.key_profiles_dotfiles]:
profile[self.key_profiles_dotfiles].append(dotfile_key)
if self.debug:
msg = 'add \"{}\" to profile \"{}\"'.format(dotfile_key,
profile_key)
self.log.dbg(msg)
self.dirty = True
return self.dirty
def add_dotfile(self, key, src, dst, link):
"""add a new dotfile"""
if key in self.dotfiles.keys():
return False
if self.debug:
self.log.dbg('adding new dotfile: {}'.format(key))
df_dict = {
self.key_dotfile_src: src,
self.key_dotfile_dst: dst,
}
dfl = self.settings[self.key_settings_link_dotfile_default]
if str(link) != dfl:
df_dict[self.key_dotfile_link] = str(link)
self.yaml_dict[self.key_dotfiles][key] = df_dict
self.dirty = True
def _fix_deprecated(self, yamldict):
"""fix deprecated entries"""
self._fix_deprecated_link_by_default(yamldict)
self._fix_deprecated_dotfile_link(yamldict)
def _fix_deprecated_link_by_default(self, yamldict):
"""fix deprecated link_by_default"""
key = 'link_by_default'
newkey = self.key_imp_link
if self.key_settings not in yamldict:
return
if not yamldict[self.key_settings]:
return
config = yamldict[self.key_settings]
if key not in config:
return
if config[key]:
config[newkey] = self.lnk_link
else:
config[newkey] = self.lnk_nolink
del config[key]
self.log.warn('deprecated \"link_by_default\"')
self.dirty = True
def _fix_deprecated_dotfile_link(self, yamldict):
"""fix deprecated link in dotfiles"""
if self.key_dotfiles not in yamldict:
return
if not yamldict[self.key_dotfiles]:
return
for k, dotfile in yamldict[self.key_dotfiles].items():
new = self.lnk_nolink
if self.key_dotfile_link in dotfile and \
type(dotfile[self.key_dotfile_link]) is bool:
# patch link: <bool>
cur = dotfile[self.key_dotfile_link]
new = self.lnk_nolink
if cur:
new = self.lnk_link
dotfile[self.key_dotfile_link] = new
self.dirty = True
self.log.warn('deprecated \"link\" value')
elif self.key_dotfile_link_children in dotfile and \
type(dotfile[self.key_dotfile_link_children]) is bool:
# patch link_children: <bool>
cur = dotfile[self.key_dotfile_link_children]
new = self.lnk_nolink
if cur:
new = self.lnk_children
del dotfile[self.key_dotfile_link_children]
dotfile[self.key_dotfile_link] = new
self.dirty = True
self.log.warn('deprecated \"link_children\" value')
def _clear_none(self, dic):
"""recursively delete all none/empty values in a dictionary."""
new = {}
for k, v in dic.items():
newv = v
if isinstance(v, dict):
newv = self._clear_none(v)
if v is None:
continue
if not v:
continue
new[k] = newv
return new
def save(self):
"""save this instance and return True if saved"""
if not self.dirty:
return False
content = self._clear_none(self.dump())
if self.debug:
self.log.dbg('saving: {}'.format(content))
with open(self.path, 'w') as f:
yaml.safe_dump(content, f,
default_flow_style=False,
indent=2)
self.dirty = False
return True
def dump(self):
"""dump the config dictionary"""
return self.yaml_dict
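The normalization done by _patch_actions is easiest to see on a sample actions block; the snippet below is a standalone copy of that logic for illustration only (the real method lives above).

def patch_actions(actions):
    # every entry ends up as {key: (pre|post, action-string)}
    new = {}
    for k, v in actions.items():
        if k in ('pre', 'post'):
            for key, action in v.items():
                new[key] = (k, action)
        else:
            new[k] = ('pre', v)
    return new

print(patch_actions({
    'notify': 'notify-send dotdrop done',
    'post': {'reload': 'xrdb ~/.Xresources'},
}))
# {'notify': ('pre', 'notify-send dotdrop done'),
#  'reload': ('post', 'xrdb ~/.Xresources')}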

File diff suppressed because it is too large

38
dotdrop/dictparser.py Normal file

@@ -0,0 +1,38 @@
"""
author: deadc0de6 (https://github.com/deadc0de6)
Copyright (c) 2019, deadc0de6
dictionary parser abstract class
"""
from dotdrop.logger import Logger
class DictParser:
log = Logger()
@classmethod
def _adjust_yaml_keys(cls, value):
"""adjust value for object 'cls'"""
return value
@classmethod
def parse(cls, key, value):
"""parse (key,value) and construct object 'cls'"""
tmp = value
try:
tmp = value.copy()
except AttributeError:
pass
newv = cls._adjust_yaml_keys(tmp)
if not key:
return cls(**newv)
return cls(key=key, **newv)
@classmethod
def parse_dict(cls, items):
"""parse a dictionary and construct a list of 'cls' objects"""
if not items:
return []
return [cls.parse(k, v) for k, v in items.items()]
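DictParser is the small base class the refactor hangs on: parse() copies the yaml value, lets the subclass turn it into constructor kwargs via _adjust_yaml_keys(), and parse_dict() maps a whole config block to a list of objects. A minimal sketch using the Cmd subclass from action.py (illustrative only, assuming this revision is importable):

from dotdrop.action import Cmd

cmds = Cmd.parse_dict({'showdate': 'date', 'greet': 'echo hello'})
for c in cmds:
    print(c)
# key:showdate -> "date"
# key:greet -> "echo hello"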


@@ -15,7 +15,6 @@ from dotdrop.templategen import Templategen
from dotdrop.installer import Installer
from dotdrop.updater import Updater
from dotdrop.comparator import Comparator
from dotdrop.config import Cfg
from dotdrop.utils import get_tmpdir, remove, strip_home, run
from dotdrop.linktypes import LinkTypes
@@ -95,15 +94,13 @@ def cmd_install(o):
for dotfile in dotfiles:
# add dotfile variables
t.restore_vars(tvars)
newvars = dotfile.get_vars()
newvars = dotfile.get_dotfile_variables()
t.add_tmp_vars(newvars=newvars)
preactions = []
if not o.install_temporary and dotfile.actions \
and Cfg.key_actions_pre in dotfile.actions:
for action in dotfile.actions[Cfg.key_actions_pre]:
preactions.append(action)
defactions = o.install_default_actions[Cfg.key_actions_pre]
if not o.install_temporary:
preactions.extend(dotfile.get_pre_actions())
defactions = o.install_default_actions_pre
pre_actions_exec = action_executor(o, dotfile, preactions,
defactions, t, post=False)
@@ -132,10 +129,9 @@ def cmd_install(o):
if os.path.exists(tmp):
remove(tmp)
if r:
if not o.install_temporary and \
Cfg.key_actions_post in dotfile.actions:
defactions = o.install_default_actions[Cfg.key_actions_post]
postactions = dotfile.actions[Cfg.key_actions_post]
if not o.install_temporary:
defactions = o.install_default_actions_post
postactions = dotfile.get_post_actions()
post_actions_exec = action_executor(o, dotfile, postactions,
defactions, t, post=True)
post_actions_exec()
@@ -329,8 +325,7 @@ def cmd_importer(o):
LOG.err('importing \"{}\" failed!'.format(path))
ret = False
continue
retconf, dotfile = o.conf.new(src, dst, o.profile,
linktype, debug=o.debug)
retconf = o.conf.new(src, dst, linktype, o.profile)
if retconf:
LOG.sub('\"{}\" imported'.format(path))
cnt += 1
@@ -355,7 +350,7 @@ def cmd_list_profiles(o):
def cmd_list_files(o):
"""list all dotfiles for a specific profile"""
if o.profile not in o.profiles:
if o.profile not in [p.key for p in o.profiles]:
LOG.warn('unknown profile \"{}\"'.format(o.profile))
return
what = 'Dotfile(s)'
@@ -375,7 +370,7 @@ def cmd_list_files(o):
def cmd_detail(o):
"""list details on all files for all dotfile entries"""
if o.profile not in o.profiles:
if o.profile not in [p.key for p in o.profiles]:
LOG.warn('unknown profile \"{}\"'.format(o.profile))
return
dotfiles = o.dotfiles
@@ -394,7 +389,7 @@ def cmd_detail(o):
def _detail(dotpath, dotfile):
"""print details on all files under a dotfile entry"""
"""display details on all files under a dotfile entry"""
LOG.log('{} (dst: \"{}\", link: {})'.format(dotfile.key, dotfile.dst,
dotfile.link.name.lower()))
path = os.path.join(dotpath, os.path.expanduser(dotfile.src))
@@ -404,7 +399,7 @@ def _detail(dotpath, dotfile):
template = 'yes'
LOG.sub('{} (template:{})'.format(path, template))
else:
for root, dir, files in os.walk(path):
for root, _, files in os.walk(path):
for f in files:
p = os.path.join(root, f)
template = 'no'
@@ -433,17 +428,17 @@ def apply_trans(dotpath, dotfile, debug=False):
return None on failure and the new source on success"""
src = dotfile.src
new_src = '{}.{}'.format(src, TRANS_SUFFIX)
trans = dotfile.trans_r
if debug:
LOG.dbg('executing transformation {}'.format(trans))
s = os.path.join(dotpath, src)
temp = os.path.join(dotpath, new_src)
if not trans.transform(s, temp):
msg = 'transformation \"{}\" failed for {}'
LOG.err(msg.format(trans.key, dotfile.key))
if new_src and os.path.exists(new_src):
remove(new_src)
return None
for trans in dotfile.trans_r:
if debug:
LOG.dbg('executing transformation {}'.format(trans))
s = os.path.join(dotpath, src)
temp = os.path.join(dotpath, new_src)
if not trans.transform(s, temp):
msg = 'transformation \"{}\" failed for {}'
LOG.err(msg.format(trans.key, dotfile.key))
if new_src and os.path.exists(new_src):
remove(new_src)
return None
return new_src
@@ -456,8 +451,8 @@ def main():
"""entry point"""
try:
o = Options()
except ValueError as e:
LOG.err('Config error: {}'.format(str(e)))
except Exception as e:
LOG.err('options error: {}'.format(str(e)))
return False
ret = True
@@ -512,9 +507,8 @@ def main():
LOG.err('interrupted')
ret = False
if ret and o.conf.is_modified():
if ret and o.conf.save():
LOG.log('config file updated')
o.conf.save()
return ret


@@ -6,15 +6,23 @@ represents a dotfile in dotdrop
"""
from dotdrop.linktypes import LinkTypes
from dotdrop.dictparser import DictParser
from dotdrop.action import Action
class Dotfile:
class Dotfile(DictParser):
"""Represent a dotfile."""
# dotfile keys
key_noempty = 'ignoreempty'
key_trans_r = 'trans'
key_trans_w = 'trans_write'
def __init__(self, key, dst, src,
actions={}, trans_r=None, trans_w=None,
actions=[], trans_r=[], trans_w=[],
link=LinkTypes.NOLINK, cmpignore=[],
noempty=False, upignore=[]):
"""constructor
"""
constructor
@key: dotfile key
@dst: dotfile dst (in user's home usually)
@src: dotfile src (in dotpath)
@@ -26,39 +34,73 @@ class Dotfile:
@noempty: ignore empty template if True
@upignore: patterns to ignore when updating
"""
self.key = key
self.dst = dst
self.src = src
self.link = link
# ensure link of right type
if type(link) != LinkTypes:
raise Exception('bad value for link: {}'.format(link))
self.actions = actions
self.cmpignore = cmpignore
self.dst = dst
self.key = key
self.link = LinkTypes.get(link)
self.noempty = noempty
self.src = src
self.trans_r = trans_r
self.trans_w = trans_w
self.cmpignore = cmpignore
self.noempty = noempty
self.upignore = upignore
def get_vars(self):
"""return this dotfile templating vars"""
_vars = {}
_vars['_dotfile_abs_src'] = self.src
_vars['_dotfile_abs_dst'] = self.dst
_vars['_dotfile_key'] = self.key
_vars['_dotfile_link'] = self.link.name.lower()
def get_dotfile_variables(self):
"""return this dotfile specific variables"""
return {
'_dotfile_abs_src': self.src,
'_dotfile_abs_dst': self.dst,
'_dotfile_key': self.key,
'_dotfile_link': str(self.link),
}
return _vars
def get_pre_actions(self):
"""return all 'pre' actions"""
return [a for a in self.actions if a.kind == Action.pre]
def __str__(self):
msg = 'key:\"{}\", src:\"{}\", dst:\"{}\", link:\"{}\"'
return msg.format(self.key, self.src, self.dst, self.link.name.lower())
def get_post_actions(self):
"""return all 'post' actions"""
return [a for a in self.actions if a.kind == Action.post]
def __repr__(self):
return 'dotfile({})'.format(self.__str__())
def get_trans_r(self):
"""return trans_r object"""
if self.trans_r:
return self.trans_r[0]
return None
def get_trans_w(self):
"""return trans_w object"""
if self.trans_w:
return self.trans_w[0]
return None
@classmethod
def _adjust_yaml_keys(cls, value):
"""patch dict"""
value['noempty'] = value.get(cls.key_noempty, False)
value['trans_r'] = value.get(cls.key_trans_r)
if value['trans_r']:
# ensure is a list
value['trans_r'] = [value['trans_r']]
value['trans_w'] = value.get(cls.key_trans_w)
if value['trans_w']:
# ensure is a list
value['trans_w'] = [value['trans_w']]
# remove old entries
value.pop(cls.key_noempty, None)
value.pop(cls.key_trans_r, None)
value.pop(cls.key_trans_w, None)
return value
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __hash__(self):
return hash(self.dst) ^ hash(self.src) ^ hash(self.key)
def __str__(self):
msg = 'key:\"{}\", src:\"{}\", dst:\"{}\", link:\"{}\"'
return msg.format(self.key, self.src, self.dst, str(self.link))
def __repr__(self):
return 'dotfile({!s})'.format(self)
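A short sketch of the new per-kind accessors (illustrative only): actions attached to a Dotfile are now Action objects and can be filtered by kind, replacing the Cfg.key_actions_* dictionary lookups removed from dotdrop.py above.

from dotdrop.action import Action
from dotdrop.dotfile import Dotfile

pre = Action('a_mkdir', Action.pre, 'mkdir -p ~/.vim')
post = Action('a_echo', Action.post, 'echo vimrc installed')
df = Dotfile('f_vimrc', '~/.vimrc', 'vimrc', actions=[pre, post])
print(df.get_pre_actions())    # [action(a_mkdir: "mkdir -p ~/.vim" (pre))]
print(df.get_post_actions())   # [action(a_echo: "echo vimrc installed" (post))]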


@@ -5,3 +5,15 @@ class LinkTypes(IntEnum):
NOLINK = 0
LINK = 1
LINK_CHILDREN = 2
@classmethod
def get(cls, key, default=None):
try:
return key if isinstance(key, cls) else cls[key.upper()]
except KeyError:
if default:
return default
raise ValueError('bad {} value: "{}"'.format(cls.__name__, key))
def __str__(self):
return self.name.lower()
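The new helpers let link values coming from yaml (strings) and internal LinkTypes values be handled uniformly; a minimal sketch (illustrative only):

from dotdrop.linktypes import LinkTypes

print(LinkTypes.get('link_children'))   # link_children
print(LinkTypes.get(LinkTypes.LINK))    # link
print(str(LinkTypes.NOLINK))            # nolink
try:
    LinkTypes.get('bogus')
except ValueError as err:
    print(err)                          # bad LinkTypes value: "bogus"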


@@ -16,8 +16,10 @@ class Logger:
YELLOW = '\033[93m'
BLUE = '\033[94m'
MAGENTA = '\033[95m'
LMAGENTA = '\033[35m'
RESET = '\033[0m'
EMPH = '\033[33m'
BOLD = '\033[1m'
def __init__(self):
pass
@@ -37,10 +39,14 @@ class Logger:
ce = self._color(self.RESET)
sys.stderr.write('{}{}{}'.format(cs, string, ce))
def err(self, string, end='\n'):
def err(self, string, end='\n', *, throw=None):
cs = self._color(self.RED)
ce = self._color(self.RESET)
sys.stderr.write('{}[ERR] {} {}{}'.format(cs, string, end, ce))
msg = '{} {}'.format(string, end)
sys.stderr.write('{}[ERR] {}{}'.format(cs, msg, ce))
if throw is not None:
raise throw(msg)
def warn(self, string, end='\n'):
cs = self._color(self.YELLOW)
@@ -53,8 +59,10 @@ class Logger:
func = inspect.stack()[1][3]
cs = self._color(self.MAGENTA)
ce = self._color(self.RESET)
line = '{}[DEBUG][{}.{}] {}{}\n'
sys.stderr.write(line.format(cs, mod, func, string, ce))
cl = self._color(self.LMAGENTA)
bl = self._color(self.BOLD)
line = '{}{}[DEBUG][{}.{}]{}{} {}{}\n'
sys.stderr.write(line.format(bl, cl, mod, func, ce, cs, string, ce))
def dry(self, string, end='\n'):
cs = self._color(self.GREEN)


@@ -14,7 +14,8 @@ from docopt import docopt
from dotdrop.version import __version__ as VERSION
from dotdrop.linktypes import LinkTypes
from dotdrop.logger import Logger
from dotdrop.config import Cfg
from dotdrop.cfg_aggregator import CfgAggregator as Cfg
from dotdrop.action import Action
ENV_PROFILE = 'DOTDROP_PROFILE'
ENV_CONFIG = 'DOTDROP_CONFIG'
@@ -107,24 +108,23 @@ class Options(AttrMonitor):
if not args:
self.args = docopt(USAGE, version=VERSION)
self.log = Logger()
self.debug = self.args['--verbose']
if not self.debug and ENV_DEBUG in os.environ:
self.debug = True
self.debug = self.args['--verbose'] or ENV_DEBUG in os.environ
if ENV_NODEBUG in os.environ:
# force disabling debugs
self.debug = False
self.profile = self.args['--profile']
self.confpath = self._get_config_path()
if self.debug:
self.log.dbg('config file: {}'.format(self.confpath))
self._read_config(self.profile)
self._read_config()
self._apply_args()
self._fill_attr()
if ENV_NOBANNER not in os.environ \
and self.banner \
and not self.args['--no-banner']:
self._header()
self._print_attr()
self._debug_attr()
# start monitoring for bad attribute
self._set_attr_err = True
@@ -167,25 +167,18 @@ class Options(AttrMonitor):
return None
def _find_cfg(self, paths):
"""try to find the config in the paths list"""
for path in paths:
if os.path.exists(path):
return path
return None
def _header(self):
"""print the header"""
"""display the header"""
self.log.log(BANNER)
self.log.log('')
def _read_config(self, profile=None):
def _read_config(self):
"""read the config file"""
self.conf = Cfg(self.confpath, profile=profile, debug=self.debug)
self.conf = Cfg(self.confpath, self.profile, debug=self.debug)
# transform the config settings to self attribute
for k, v in self.conf.get_settings().items():
if self.debug:
self.log.dbg('setting: {}={}'.format(k, v))
self.log.dbg('new setting: {}={}'.format(k, v))
setattr(self, k, v)
def _apply_args(self):
@@ -212,8 +205,6 @@ class Options(AttrMonitor):
self.log.err('bad option for --link: {}'.format(link))
sys.exit(USAGE)
self.import_link = OPT_LINK[link]
if self.debug:
self.log.dbg('link_import value: {}'.format(self.import_link))
# "listfiles" specifics
self.listfiles_templateonly = self.args['--template']
@@ -223,7 +214,10 @@ class Options(AttrMonitor):
self.install_diff = not self.args['--nodiff']
self.install_showdiff = self.showdiff or self.args['--showdiff']
self.install_backup_suffix = BACKUP_SUFFIX
self.install_default_actions = self.default_actions
self.install_default_actions_pre = [a for a in self.default_actions
if a.kind == Action.pre]
self.install_default_actions_post = [a for a in self.default_actions
if a.kind == Action.post]
# "compare" specifics
self.compare_dopts = self.args['--dopts']
self.compare_focus = self.args['--file']
@@ -243,26 +237,24 @@ class Options(AttrMonitor):
def _fill_attr(self):
"""create attributes from conf"""
# variables
self.variables = self.conf.get_variables(self.profile,
debug=self.debug).copy()
self.variables = self.conf.get_variables()
# the dotfiles
self.dotfiles = self.conf.eval_dotfiles(self.profile, self.variables,
debug=self.debug).copy()
self.dotfiles = self.conf.get_dotfiles(self.profile)
# the profiles
self.profiles = self.conf.get_profiles()
def _print_attr(self):
"""print all of this class attributes"""
def _debug_attr(self):
"""debug display all of this class attributes"""
if not self.debug:
return
self.log.dbg('options:')
self.log.dbg('CLI options:')
for att in dir(self):
if att.startswith('_'):
continue
val = getattr(self, att)
if callable(val):
continue
self.log.dbg('- {}: \"{}\"'.format(att, val))
self.log.dbg('- {}: {}'.format(att, val))
def _attr_set(self, attr):
"""error when a nonexistent attr is set"""

50
dotdrop/profile.py Normal file

@@ -0,0 +1,50 @@
"""
author: deadc0de6 (https://github.com/deadc0de6)
Copyright (c) 2019, deadc0de6
represent a profile in dotdrop
"""
from dotdrop.dictparser import DictParser
class Profile(DictParser):
# profile keys
key_include = 'include'
key_import = 'import'
def __init__(self, key, actions=[], dotfiles=[], variables=[]):
"""
constructor
@key: profile key
@actions: list of action keys
@dotfiles: list of dotfile keys
@variables: list of variable keys
"""
self.key = key
self.actions = actions
self.dotfiles = dotfiles
self.variables = variables
@classmethod
def _adjust_yaml_keys(cls, value):
"""patch dict"""
value.pop(cls.key_import, None)
value.pop(cls.key_include, None)
return value
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __hash__(self):
return (hash(self.key) ^
hash(tuple(self.dotfiles)) ^
hash(tuple(self.included_profiles)))
def __str__(self):
msg = 'key:"{}"'
return msg.format(self.key)
def __repr__(self):
return 'profile({!s})'.format(self)
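A minimal sketch of how a profiles block becomes Profile objects (illustrative only); the include/import keys are handled earlier by CfgYaml and are simply dropped here:

from dotdrop.profile import Profile

profiles = Profile.parse_dict({
    'home': {'dotfiles': ['f_vimrc', 'f_xinitrc']},
    'work': {'dotfiles': ['f_vimrc'], 'include': ['home']},
})
print(profiles)    # [profile(key:"home"), profile(key:"work")]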

96
dotdrop/settings.py Normal file

@@ -0,0 +1,96 @@
"""
author: deadc0de6 (https://github.com/deadc0de6)
Copyright (c) 2019, deadc0de6
settings block
"""
# local imports
from dotdrop.linktypes import LinkTypes
from dotdrop.dictparser import DictParser
class Settings(DictParser):
# key in yaml file
key_yaml = 'config'
# settings item keys
key_backup = 'backup'
key_banner = 'banner'
key_cmpignore = 'cmpignore'
key_create = 'create'
key_default_actions = 'default_actions'
key_dotpath = 'dotpath'
key_ignoreempty = 'ignoreempty'
key_keepdot = 'keepdot'
key_longkey = 'longkey'
key_link_dotfile_default = 'link_dotfile_default'
key_link_on_import = 'link_on_import'
key_showdiff = 'showdiff'
key_upignore = 'upignore'
key_workdir = 'workdir'
# import keys
key_import_actions = 'import_actions'
key_import_configs = 'import_configs'
key_import_variables = 'import_variables'
def __init__(self, backup=True, banner=True, cmpignore=[],
create=True, default_actions=[], dotpath='dotfiles',
ignoreempty=True, import_actions=[], import_configs=[],
import_variables=[], keepdot=False,
link_dotfile_default=LinkTypes.NOLINK,
link_on_import=LinkTypes.NOLINK, longkey=False,
showdiff=False, upignore=[], workdir='~/.config/dotdrop'):
self.backup = backup
self.banner = banner
self.create = create
self.cmpignore = cmpignore
self.default_actions = default_actions
self.dotpath = dotpath
self.ignoreempty = ignoreempty
self.import_actions = import_actions
self.import_configs = import_configs
self.import_variables = import_variables
self.keepdot = keepdot
self.longkey = longkey
self.showdiff = showdiff
self.upignore = upignore
self.workdir = workdir
self.link_dotfile_default = LinkTypes.get(link_dotfile_default)
self.link_on_import = LinkTypes.get(link_on_import)
def resolve_paths(self, resolver):
"""resolve path using resolver function"""
self.dotpath = resolver(self.dotpath)
self.workdir = resolver(self.workdir)
def _serialize_seq(self, name, dic):
"""serialize attribute 'name' into 'dic'"""
seq = getattr(self, name)
dic[name] = seq
def serialize(self):
"""Return key-value pair representation of the settings"""
# Tedious, but less error-prone than introspection
dic = {
self.key_backup: self.backup,
self.key_banner: self.banner,
self.key_create: self.create,
self.key_dotpath: self.dotpath,
self.key_ignoreempty: self.ignoreempty,
self.key_keepdot: self.keepdot,
self.key_link_dotfile_default: str(self.link_dotfile_default),
self.key_link_on_import: str(self.link_on_import),
self.key_longkey: self.longkey,
self.key_showdiff: self.showdiff,
self.key_workdir: self.workdir,
}
self._serialize_seq(self.key_cmpignore, dic)
self._serialize_seq(self.key_default_actions, dic)
self._serialize_seq(self.key_import_actions, dic)
self._serialize_seq(self.key_import_configs, dic)
self._serialize_seq(self.key_import_variables, dic)
self._serialize_seq(self.key_upignore, dic)
return {self.key_yaml: dic}
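A minimal sketch of the Settings round-trip used by CfgAggregator.get_settings() above (illustrative only; unspecified keys fall back to the constructor defaults):

from dotdrop.settings import Settings

s = Settings.parse(None, {'dotpath': 'dotfiles', 'banner': False})
print(s.dotpath, s.banner)                          # dotfiles False
print(s.serialize()[Settings.key_yaml]['banner'])   # False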


@@ -52,6 +52,8 @@ class Templategen:
self.env.globals['exists_in_path'] = jhelpers.exists_in_path
self.env.globals['basename'] = jhelpers.basename
self.env.globals['dirname'] = jhelpers.dirname
if self.debug:
self.log.dbg('template additional variables: {}'.format(variables))
def generate(self, src):
"""render template from path"""


@@ -81,12 +81,11 @@ class Updater:
if self._ignore([path, dtpath]):
self.log.sub('\"{}\" ignored'.format(dotfile.key))
return True
if dotfile.trans_w:
# apply write transformation if any
new_path = self._apply_trans_w(path, dotfile)
if not new_path:
return False
path = new_path
# apply write transformation if any
new_path = self._apply_trans_w(path, dotfile)
if not new_path:
return False
path = new_path
if os.path.isdir(path):
ret = self._handle_dir(path, dtpath)
else:
@@ -98,7 +97,9 @@ class Updater:
def _apply_trans_w(self, path, dotfile):
"""apply write transformation to dotfile"""
trans = dotfile.trans_w
trans = dotfile.get_trans_w()
if not trans:
return path
if self.debug:
self.log.dbg('executing write transformation {}'.format(trans))
tmp = utils.get_unique_tmp_name()


@@ -66,7 +66,7 @@ def get_tmpdir():
def get_tmpfile():
"""create a temporary file"""
(fd, path) = tempfile.mkstemp(prefix='dotdrop-')
(_, path) = tempfile.mkstemp(prefix='dotdrop-')
return path


@@ -3,4 +3,4 @@ author: deadc0de6 (https://github.com/deadc0de6)
Copyright (c) 2018, deadc0de6
"""
__version__ = '0.28.0'
__version__ = '0.27.0'


@@ -1,6 +1,6 @@
pkgbase = dotdrop
pkgdesc = Save your dotfiles once, deploy them everywhere
pkgver = 0.28.0
pkgver = 0.27.0
pkgrel = 1
url = https://github.com/deadc0de6/dotdrop
arch = any
@@ -11,7 +11,7 @@ pkgbase = dotdrop
depends = python-jinja
depends = python-docopt
depends = python-pyaml
source = git+https://github.com/deadc0de6/dotdrop.git#tag=v0.28.0
source = git+https://github.com/deadc0de6/dotdrop.git#tag=v0.27.0
md5sums = SKIP
pkgname = dotdrop


@@ -1,7 +1,7 @@
# Maintainer: deadc0de6 <info@deadc0de.ch>
pkgname=dotdrop
pkgver=0.28.0
pkgver=0.27.0
pkgrel=1
pkgdesc="Save your dotfiles once, deploy them everywhere "
arch=('any')


@@ -42,7 +42,7 @@ def main():
ignores = args['--ignore']
with open(path, 'r') as f:
content = yaml.load(f)
content = yaml.safe_load(f)
for k, v in content[key].items():
if k in ignores:
continue


@@ -93,6 +93,7 @@ create_conf ${cfg} # sets token
echo "[+] import"
cd ${ddpath} | ${bin} import -c ${cfg} ${tmpd}/dir1
cd ${ddpath} | ${bin} import -c ${cfg} ${tmpd}/uniquefile
cat ${cfg}
# let's see the dotpath
#tree ${basedir}/dotfiles


@@ -79,9 +79,9 @@ echo "cfgpath: {{@@ _dotdrop_cfgpath @@}}" >> ${tmps}/dotfiles/abc
echo "workdir: {{@@ _dotdrop_workdir @@}}" >> ${tmps}/dotfiles/abc
# install
cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1
cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 -V
#cat ${tmpd}/abc
cat ${tmpd}/abc
grep "^dotpath: ${tmps}/dotfiles$" ${tmpd}/abc >/dev/null
grep "^cfgpath: ${tmps}/config.yaml$" ${tmpd}/abc >/dev/null


@@ -81,7 +81,7 @@ cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 -V
# checks
[ ! -e ${tmpd}/abc ] && echo 'dotfile not installed' && exit 1
#cat ${tmpd}/abc
cat ${tmpd}/abc
grep "src:${tmps}/dotfiles/abc" ${tmpd}/abc >/dev/null
grep "dst:${tmpd}/abc" ${tmpd}/abc >/dev/null
grep "key:f_abc" ${tmpd}/abc >/dev/null


@@ -96,7 +96,7 @@ _EOF
echo "test" > ${tmps}/dotfiles/abc
# install
cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1
cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 -V
# checks
[ ! -e ${tmpa}/pre ] && exit 1

130
tests-ng/import-configs.sh Executable file

@@ -0,0 +1,130 @@
#!/usr/bin/env bash
# author: deadc0de6 (https://github.com/deadc0de6)
# Copyright (c) 2019, deadc0de6
#
# import config testing
#
# exit on first error
set -e
# all this crap to get current path
rl="readlink -f"
if ! ${rl} "${0}" >/dev/null 2>&1; then
rl="realpath"
if ! hash ${rl}; then
echo "\"${rl}\" not found !" && exit 1
fi
fi
cur=$(dirname "$(${rl} "${0}")")
#hash dotdrop >/dev/null 2>&1
#[ "$?" != "0" ] && echo "install dotdrop to run tests" && exit 1
#echo "called with ${1}"
# dotdrop path can be passed as argument
ddpath="${cur}/../"
[ "${1}" != "" ] && ddpath="${1}"
[ ! -d ${ddpath} ] && echo "ddpath \"${ddpath}\" is not a directory" && exit 1
export PYTHONPATH="${ddpath}:${PYTHONPATH}"
bin="python3 -m dotdrop.dotdrop"
echo "dotdrop path: ${ddpath}"
echo "pythonpath: ${PYTHONPATH}"
# get the helpers
source ${cur}/helpers
echo -e "\e[96m\e[1m==> RUNNING $(basename $BASH_SOURCE) <==\e[0m"
################################################################
# this is the test
################################################################
# the dotfile source
tmps=`mktemp -d --suffix='-dotdrop-tests'`
mkdir -p ${tmps}/dotfiles
# the dotfile destination
tmpd=`mktemp -d --suffix='-dotdrop-tests'`
# create the config file
cfg1="${tmps}/config1.yaml"
cfg2="${tmps}/config2.yaml"
cat > ${cfg1} << _EOF
config:
backup: true
create: true
dotpath: dotfiles
import_configs:
- ${cfg2}
dotfiles:
f_abc:
dst: ${tmpd}/abc
src: abc
f_zzz:
dst: ${tmpd}/zzz
src: zzz
f_sub:
dst: ${tmpd}/sub
src: sub
profiles:
p0:
include:
- p2
p1:
dotfiles:
- f_abc
p3:
dotfiles:
- f_zzz
pup:
include:
- psubsub
_EOF
cat > ${cfg2} << _EOF
config:
backup: true
create: true
dotpath: dotfiles
dotfiles:
f_def:
dst: ${tmpd}/def
src: def
f_ghi:
dst: ${tmpd}/ghi
src: ghi
profiles:
p2:
dotfiles:
- f_def
psubsub:
dotfiles:
- f_sub
_EOF
# create the source
mkdir -p ${tmps}/dotfiles/
echo "abc" > ${tmps}/dotfiles/abc
echo "def" > ${tmps}/dotfiles/def
echo "ghi" > ${tmps}/dotfiles/ghi
echo "zzz" > ${tmps}/dotfiles/zzz
echo "sub" > ${tmps}/dotfiles/sub
# install
cd ${ddpath} | ${bin} listfiles -c ${cfg1} -p p0 -V | grep f_def
cd ${ddpath} | ${bin} listfiles -c ${cfg1} -p p1 -V | grep f_abc
cd ${ddpath} | ${bin} listfiles -c ${cfg1} -p p2 -V | grep f_def
cd ${ddpath} | ${bin} listfiles -c ${cfg1} -p p3 -V | grep f_zzz
cd ${ddpath} | ${bin} listfiles -c ${cfg1} -p pup -V | grep f_sub
cd ${ddpath} | ${bin} listfiles -c ${cfg1} -p psubsub -V | grep f_sub
## CLEANING
rm -rf ${tmps} ${tmpd}
echo "OK"
exit 0


@@ -0,0 +1,127 @@
#!/usr/bin/env bash
# author: deadc0de6 (https://github.com/deadc0de6)
# Copyright (c) 2017, deadc0de6
#
# test the use of the keyword "import" in profiles
# returns 1 in case of error
#
# exit on first error
set -e
# all this crap to get current path
rl="readlink -f"
if ! ${rl} "${0}" >/dev/null 2>&1; then
rl="realpath"
if ! hash ${rl}; then
echo "\"${rl}\" not found !" && exit 1
fi
fi
cur=$(dirname "$(${rl} "${0}")")
#hash dotdrop >/dev/null 2>&1
#[ "$?" != "0" ] && echo "install dotdrop to run tests" && exit 1
#echo "called with ${1}"
# dotdrop path can be passed as argument
ddpath="${cur}/../"
[ "${1}" != "" ] && ddpath="${1}"
[ ! -d ${ddpath} ] && echo "ddpath \"${ddpath}\" is not a directory" && exit 1
export PYTHONPATH="${ddpath}:${PYTHONPATH}"
bin="python3 -m dotdrop.dotdrop"
echo "dotdrop path: ${ddpath}"
echo "pythonpath: ${PYTHONPATH}"
# get the helpers
source ${cur}/helpers
echo -e "\e[96m\e[1m==> RUNNING $(basename $BASH_SOURCE) <==\e[0m"
################################################################
# this is the test
################################################################
# the dotfile source
tmps=`mktemp -d --suffix='-dotdrop-tests'`
mkdir -p ${tmps}/dotfiles
# the dotfile destination
tmpd=`mktemp -d --suffix='-dotdrop-tests'`
extdotfiles="${tmps}/df_p1.yaml"
dynextdotfiles_name="d_uid_dynvar"
dynextdotfiles="${tmps}/ext_${dynextdotfiles_name}"
# create the config file
cfg="${tmps}/config.yaml"
cat > ${cfg} << _EOF
config:
backup: true
create: true
dotpath: dotfiles
dynvariables:
d_uid: "echo ${dynextdotfiles_name}"
dotfiles:
f_abc:
dst: ${tmpd}/abc
src: abc
f_def:
dst: ${tmpd}/def
src: def
f_xyz:
dst: ${tmpd}/xyz
src: xyz
f_dyn:
dst: ${tmpd}/dyn
src: dyn
profiles:
p1:
dotfiles:
- f_abc
import:
- $(basename ${extdotfiles})
- "ext_{{@@ d_uid @@}}"
_EOF
# create the external dotfile file
cat > ${extdotfiles} << _EOF
dotfiles:
- f_def
- f_xyz
_EOF
cat > ${dynextdotfiles} << _EOF
dotfiles:
- f_dyn
_EOF
# create the source
mkdir -p ${tmps}/dotfiles/
echo "abc" > ${tmps}/dotfiles/abc
echo "def" > ${tmps}/dotfiles/def
echo "xyz" > ${tmps}/dotfiles/xyz
echo "dyn" > ${tmps}/dotfiles/dyn
# install
cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 -V
# checks
[ ! -e ${tmpd}/abc ] && exit 1
[ ! -e ${tmpd}/def ] && exit 1
[ ! -e ${tmpd}/xyz ] && exit 1
[ ! -e ${tmpd}/dyn ] && exit 1
echo 'file found'
grep 'abc' ${tmpd}/abc >/dev/null 2>&1
grep 'def' ${tmpd}/def >/dev/null 2>&1
grep 'xyz' ${tmpd}/xyz >/dev/null 2>&1
grep 'dyn' ${tmpd}/dyn >/dev/null 2>&1
## CLEANING
rm -rf ${tmps} ${tmpd}
echo "OK"
exit 0


@@ -1,9 +1,8 @@
#!/usr/bin/env bash
# author: deadc0de6 (https://github.com/deadc0de6)
# Copyright (c) 2017, deadc0de6
# Copyright (c) 2019, deadc0de6
#
# test the use of the keyword "import" in profiles
# returns 1 in case of error
# test basic import
#
# exit on first error
@@ -50,10 +49,13 @@ tmps=`mktemp -d --suffix='-dotdrop-tests'`
mkdir -p ${tmps}/dotfiles
# the dotfile destination
tmpd=`mktemp -d --suffix='-dotdrop-tests'`
extdotfiles="${tmps}/df_p1.yaml"
#echo "dotfile destination: ${tmpd}"
dynextdotfiles_name="d_uid_dynvar"
dynextdotfiles="${tmps}/ext_${dynextdotfiles_name}"
# create the dotfile
mkdir -p ${tmpd}/adir
echo "adir/file1" > ${tmpd}/adir/file1
echo "adir/fil2" > ${tmpd}/adir/file2
echo "file3" > ${tmpd}/file3
# create the config file
cfg="${tmps}/config.yaml"
@@ -63,61 +65,30 @@ config:
backup: true
create: true
dotpath: dotfiles
dynvariables:
d_uid: "echo ${dynextdotfiles_name}"
dotfiles:
f_abc:
dst: ${tmpd}/abc
src: abc
f_def:
dst: ${tmpd}/def
src: def
f_xyz:
dst: ${tmpd}/xyz
src: xyz
f_dyn:
dst: ${tmpd}/dyn
src: dyn
profiles:
p1:
dotfiles:
- f_abc
import:
- $(basename ${extdotfiles})
- "ext_{{@@ d_uid @@}}"
_EOF
#cat ${cfg}
# create the external dotfile file
cat > ${extdotfiles} << _EOF
dotfiles:
- f_def
- f_xyz
_EOF
# import
cd ${ddpath} | ${bin} import -c ${cfg} -p p1 -V ${tmpd}/adir
cd ${ddpath} | ${bin} import -c ${cfg} -p p1 -V ${tmpd}/file3
cat > ${dynextdotfiles} << _EOF
dotfiles:
- f_dyn
_EOF
cat ${cfg}
# create the source
mkdir -p ${tmps}/dotfiles/
echo "abc" > ${tmps}/dotfiles/abc
echo "def" > ${tmps}/dotfiles/def
echo "xyz" > ${tmps}/dotfiles/xyz
echo "dyn" > ${tmps}/dotfiles/dyn
# ensure exists and is not link
[ ! -d ${tmps}/dotfiles/${tmpd}/adir ] && echo "not a directory" && exit 1
[ ! -e ${tmps}/dotfiles/${tmpd}/adir/file1 ] && echo "not exist" && exit 1
[ ! -e ${tmps}/dotfiles/${tmpd}/adir/file2 ] && echo "not exist" && exit 1
[ ! -e ${tmps}/dotfiles/${tmpd}/file3 ] && echo "not a file" && exit 1
# install
cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 -V
cat ${cfg} | grep ${tmpd}/adir >/dev/null 2>&1
cat ${cfg} | grep ${tmpd}/file3 >/dev/null 2>&1
# checks
[ ! -e ${tmpd}/abc ] && exit 1
[ ! -e ${tmpd}/def ] && exit 1
[ ! -e ${tmpd}/xyz ] && exit 1
[ ! -e ${tmpd}/dyn ] && exit 1
grep 'abc' ${tmpd}/abc >/dev/null 2>&1
grep 'def' ${tmpd}/def >/dev/null 2>&1
grep 'xyz' ${tmpd}/xyz >/dev/null 2>&1
grep 'dyn' ${tmpd}/dyn >/dev/null 2>&1
nb=`cat ${cfg} | grep d_adir | wc -l`
[ "${nb}" != "2" ] && echo 'bad config1' && exit 1
nb=`cat ${cfg} | grep f_file3 | wc -l`
[ "${nb}" != "2" ] && echo 'bad config2' && exit 1
## CLEANING
rm -rf ${tmps} ${tmpd}


@@ -64,12 +64,18 @@ dotfiles:
dst: ${tmpd}/abc
src: abc
profiles:
p0:
include:
- p3
p1:
dotfiles:
- f_abc
p2:
include:
- p1
p3:
include:
- p2
_EOF
# create the source
@@ -82,6 +88,14 @@ cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1
# compare
cd ${ddpath} | ${bin} compare -c ${cfg} -p p1
cd ${ddpath} | ${bin} compare -c ${cfg} -p p2
cd ${ddpath} | ${bin} compare -c ${cfg} -p p3
cd ${ddpath} | ${bin} compare -c ${cfg} -p p0
# list
cd ${ddpath} | ${bin} listfiles -c ${cfg} -p p1 | grep f_abc
cd ${ddpath} | ${bin} listfiles -c ${cfg} -p p2 | grep f_abc
cd ${ddpath} | ${bin} listfiles -c ${cfg} -p p3 | grep f_abc
cd ${ddpath} | ${bin} listfiles -c ${cfg} -p p0 | grep f_abc
# count
cnt=`cd ${ddpath} | ${bin} listfiles -c ${cfg} -p p1 -b | grep '^f_' | wc -l`


@@ -8,7 +8,7 @@ set -ev
# PEP8 tests
which pycodestyle 2>/dev/null
[ "$?" != "0" ] && echo "Install pycodestyle" && exit 1
pycodestyle --ignore=W605 dotdrop/
pycodestyle --ignore=W503,W504,W605 dotdrop/
pycodestyle tests/
pycodestyle scripts/
@@ -35,7 +35,17 @@ PYTHONPATH=dotdrop ${nosebin} -s --with-coverage --cover-package=dotdrop
## execute bash script tests
[ "$1" = '--python-only' ] || {
for scr in tests-ng/*.sh; do
${scr}
done
log=`mktemp`
for scr in tests-ng/*.sh; do
${scr} 2>&1 | tee ${log}
set +e
if grep Traceback ${log}; then
echo "crash found in logs"
rm -f ${log}
exit 1
fi
set -e
done
rm -f ${log}
}

View File

@@ -171,8 +171,9 @@ def get_dotfile_from_yaml(dic, path):
"""Return the dotfile from the yaml dictionary"""
# path is not the file in dotpath but on the FS
dotfiles = dic['dotfiles']
src = get_path_strip_version(path)
return [d for d in dotfiles.values() if d['src'] == src][0]
# src = get_path_strip_version(path)
dotfile = [d for d in dotfiles.values() if d['dst'] == path][0]
return dotfile
def yaml_dashed_list(items, indent=0):
@@ -256,10 +257,10 @@ def file_in_yaml(yaml_file, path, link=False):
dotfiles = yaml_conf['dotfiles'].values()
in_src = strip in (x['src'] for x in dotfiles)
    in_src = any(x['src'].endswith(strip) for x in dotfiles)
in_dst = path in (os.path.expanduser(x['dst']) for x in dotfiles)
if link:
has_link = get_dotfile_from_yaml(yaml_conf, path)['link']
has_link = 'link' in get_dotfile_from_yaml(yaml_conf, path)
return in_src and in_dst and has_link
return in_src and in_dst

View File

@@ -33,7 +33,7 @@ class TestImport(unittest.TestCase):
self.assertTrue(os.path.exists(path))
content = ''
with open(path, 'r') as f:
content = yaml.load(f)
content = yaml.safe_load(f)
return content
def assert_file(self, path, o, profile):
@@ -45,7 +45,7 @@ class TestImport(unittest.TestCase):
def assert_in_yaml(self, path, dic, link=False):
"""Make sure "path" is in the "dic" representing the yaml file"""
self.assertTrue(file_in_yaml(dic, path, link))
self.assertTrue(file_in_yaml(dic, path, link=link))
def test_import(self):
"""Test the import function"""
@@ -117,7 +117,7 @@ class TestImport(unittest.TestCase):
o = load_options(confpath, profile)
# test dotfiles in config class
self.assertTrue(profile in o.profiles)
self.assertTrue(profile in [p.key for p in o.profiles])
self.assert_file(dotfile1, o, profile)
self.assert_file(dotfile2, o, profile)
self.assert_file(dotfile3, o, profile)
@@ -218,9 +218,10 @@ class TestImport(unittest.TestCase):
self.assertTrue(os.path.exists(dotdrop_home))
self.addCleanup(clean, dotdrop_home)
dotpath_ed = 'imported'
imported = {
'config': {
'dotpath': 'imported',
'dotpath': dotpath_ed,
},
'dotfiles': {},
'profiles': {
@@ -250,9 +251,10 @@ class TestImport(unittest.TestCase):
'dv_log_ed': 'echo 5',
},
}
dotpath_ing = 'importing'
importing = {
'config': {
'dotpath': 'importing',
'dotpath': dotpath_ing,
},
'dotfiles': {},
'profiles': {
@@ -293,7 +295,7 @@ class TestImport(unittest.TestCase):
# create the importing base config file
importing_path = create_fake_config(dotdrop_home,
configname='config.yaml',
import_configs=('config-*.yaml',),
import_configs=['config-2.yaml'],
**importing['config'])
# edit the imported config
@@ -326,8 +328,10 @@ class TestImport(unittest.TestCase):
y = self.load_yaml(imported_path)
# testing dotfiles
self.assertTrue(all(file_in_yaml(y, df) for df in dotfiles_ed))
self.assertFalse(any(file_in_yaml(y, df) for df in dotfiles_ing))
self.assertTrue(all(file_in_yaml(y, df)
for df in dotfiles_ed))
self.assertFalse(any(file_in_yaml(y, df)
for df in dotfiles_ing))
# testing profiles
profiles = y['profiles'].keys()
@@ -355,7 +359,7 @@ class TestImport(unittest.TestCase):
self.assertFalse(any(t.endswith('ing') for t in transformations))
# testing variables
variables = y['variables'].keys()
variables = self._remove_priv_vars(y['variables'].keys())
self.assertTrue(all(v.endswith('ed') for v in variables))
self.assertFalse(any(v.endswith('ing') for v in variables))
dyn_variables = y['dynvariables'].keys()
@@ -366,8 +370,10 @@ class TestImport(unittest.TestCase):
y = self.load_yaml(importing_path)
# testing dotfiles
self.assertTrue(all(file_in_yaml(y, df) for df in dotfiles_ing))
self.assertFalse(any(file_in_yaml(y, df) for df in dotfiles_ed))
self.assertTrue(all(file_in_yaml(y, df)
for df in dotfiles_ing))
self.assertFalse(any(file_in_yaml(y, df)
for df in dotfiles_ed))
# testing profiles
profiles = y['profiles'].keys()
@@ -395,13 +401,19 @@ class TestImport(unittest.TestCase):
self.assertFalse(any(t.endswith('ed') for t in transformations))
# testing variables
variables = y['variables'].keys()
variables = self._remove_priv_vars(y['variables'].keys())
self.assertTrue(all(v.endswith('ing') for v in variables))
self.assertFalse(any(v.endswith('ed') for v in variables))
dyn_variables = y['dynvariables'].keys()
self.assertTrue(all(dv.endswith('ing') for dv in dyn_variables))
self.assertFalse(any(dv.endswith('ed') for dv in dyn_variables))
def _remove_priv_vars(self, variables_keys):
variables = [v for v in variables_keys if not v.startswith('_')]
if 'profile' in variables:
variables.remove('profile')
return variables
def main():
unittest.main()
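
As a side note on the yaml.load to yaml.safe_load change applied throughout these tests, here is a minimal sketch of the safer loading pattern; the helper name load_config is illustrative and not part of the code above:

import yaml

def load_config(path):
    """Load a YAML file without constructing arbitrary Python objects."""
    # yaml.safe_load restricts deserialization to plain tags (dict, list,
    # str, ...), whereas a bare yaml.load(stream) is unrestricted and is
    # deprecated without an explicit Loader in recent PyYAML releases
    with open(path, 'r') as f:
        return yaml.safe_load(f)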

View File

@@ -9,7 +9,7 @@ import unittest
from unittest.mock import MagicMock, patch
import filecmp
from dotdrop.config import Cfg
from dotdrop.cfg_aggregator import CfgAggregator as Cfg
from tests.helpers import (clean, create_dir, create_fake_config,
create_random_file, get_string, get_tempdir,
load_options, populate_fake_config)
@@ -89,7 +89,7 @@ exec bspwm
f1, c1 = create_random_file(tmp)
dst1 = os.path.join(dst, get_string(6))
d1 = Dotfile(get_string(5), dst1, os.path.basename(f1))
# fake a print
# fake a __str__
self.assertTrue(str(d1) != '')
f2, c2 = create_random_file(tmp)
dst2 = os.path.join(dst, get_string(6))
@@ -178,7 +178,7 @@ exec bspwm
dotfiles = [d1, d2, d3, d4, d5, d6, d7, d8, d9, d10, ddot]
self.fake_config(confpath, dotfiles,
profile, tmp, [act1], [tr])
conf = Cfg(confpath)
conf = Cfg(confpath, profile)
self.assertTrue(conf is not None)
# install them
@@ -305,7 +305,7 @@ exec bspwm
# create the importing base config file
importing_path = create_fake_config(tmp,
configname='config.yaml',
import_configs=('config-*.yaml',),
import_configs=['config-2.yaml'],
**importing['config'])
# edit the imported config

View File

@@ -117,7 +117,7 @@ class TestUpdate(unittest.TestCase):
# retrieve the path of the sub in the dotpath
d1indotpath = os.path.join(o.dotpath, dotfile.src)
d1indotpath = os.path.expanduser(d1indotpath)
dotfile.trans_w = trans
dotfile.trans_w = [trans]
# update template
o.update_path = [d3t]

View File

@@ -10,7 +10,7 @@ from unittest.mock import patch
import os
import yaml
from dotdrop.config import Cfg
from dotdrop.cfg_yaml import CfgYaml as Cfg
from dotdrop.options import Options
from dotdrop.linktypes import LinkTypes
from tests.helpers import (SubsetTestCase, _fake_args, clean,
@@ -41,14 +41,12 @@ class TestConfig(SubsetTestCase):
conf = Cfg(confpath)
self.assertTrue(conf is not None)
opts = conf.get_settings()
opts = conf.settings
self.assertTrue(opts is not None)
self.assertTrue(opts != {})
self.assertTrue(opts['backup'] == self.CONFIG_BACKUP)
self.assertTrue(opts['create'] == self.CONFIG_CREATE)
dotpath = os.path.join(tmp, self.CONFIG_DOTPATH)
self.assertTrue(opts['dotpath'] == dotpath)
self.assertTrue(conf._is_valid())
self.assertTrue(opts['dotpath'] == self.CONFIG_DOTPATH)
self.assertTrue(conf.dump() != '')
def test_def_link(self):
@@ -68,8 +66,8 @@ class TestConfig(SubsetTestCase):
'link_children')
self._test_link_import_fail('whatever')
@patch('dotdrop.config.open', create=True)
@patch('dotdrop.config.os.path.exists', create=True)
@patch('dotdrop.cfg_yaml.open', create=True)
@patch('dotdrop.cfg_yaml.os.path.exists', create=True)
def _test_link_import(self, cfgstring, expected,
cliargs, mock_exists, mock_open):
data = '''
@@ -99,8 +97,8 @@ profiles:
self.assertTrue(o.import_link == expected)
@patch('dotdrop.config.open', create=True)
@patch('dotdrop.config.os.path.exists', create=True)
@patch('dotdrop.cfg_yaml.open', create=True)
@patch('dotdrop.cfg_yaml.os.path.exists', create=True)
def _test_link_import_fail(self, value, mock_exists, mock_open):
data = '''
config:
@@ -125,7 +123,7 @@ profiles:
args['--profile'] = 'p1'
args['--cfg'] = 'mocked'
with self.assertRaisesRegex(ValueError, 'config is not valid'):
with self.assertRaises(ValueError):
o = Options(args=args)
print(o.import_link)
@@ -143,7 +141,7 @@ profiles:
# edit the config
with open(confpath, 'r') as f:
content = yaml.load(f)
content = yaml.safe_load(f)
# adding dotfiles
df1key = 'f_vimrc'
@@ -171,22 +169,22 @@ profiles:
self.assertTrue(conf is not None)
# test profile
profiles = conf.get_profiles()
profiles = conf.profiles
self.assertTrue(pf1key in profiles)
self.assertTrue(pf2key in profiles)
# test dotfiles
dotfiles = conf._get_dotfiles(pf1key)
self.assertTrue(df1key in [x.key for x in dotfiles])
self.assertTrue(df2key in [x.key for x in dotfiles])
dotfiles = conf._get_dotfiles(pf2key)
self.assertTrue(df1key in [x.key for x in dotfiles])
self.assertFalse(df2key in [x.key for x in dotfiles])
dotfiles = conf.profiles[pf1key]['dotfiles']
self.assertTrue(df1key in dotfiles)
self.assertTrue(df2key in dotfiles)
dotfiles = conf.profiles[pf2key]['dotfiles']
self.assertTrue(df1key in dotfiles)
self.assertFalse(df2key in dotfiles)
# test not existing included profile
# edit the config
with open(confpath, 'r') as f:
content = yaml.load(f)
content = yaml.safe_load(f)
content['profiles'] = {
pf1key: {'dotfiles': [df2key], 'include': ['host2']},
pf2key: {'dotfiles': [df1key], 'include': ['host3']}
@@ -227,22 +225,26 @@ profiles:
vars_ing_file = create_yaml_keyval(vars_ing, tmp)
actions_ed = {
'pre': {
'a_pre_action_ed': 'echo pre 22',
},
'post': {
'a_post_action_ed': 'echo post 22',
},
'a_action_ed': 'echo 22',
'actions': {
'pre': {
'a_pre_action_ed': 'echo pre 22',
},
'post': {
'a_post_action_ed': 'echo post 22',
},
'a_action_ed': 'echo 22',
}
}
actions_ing = {
'pre': {
'a_pre_action_ing': 'echo pre aa',
},
'post': {
'a_post_action_ing': 'echo post aa',
},
'a_action_ing': 'echo aa',
'actions': {
'pre': {
'a_pre_action_ing': 'echo pre aa',
},
'post': {
'a_post_action_ing': 'echo post aa',
},
'a_action_ing': 'echo aa',
}
}
actions_ed_file = create_yaml_keyval(actions_ed, tmp)
actions_ing_file = create_yaml_keyval(actions_ing, tmp)
@@ -328,7 +330,9 @@ profiles:
# create the importing base config file
importing_path = create_fake_config(tmp,
configname=self.CONFIG_NAME,
import_configs=('config-*.yaml',),
import_configs=[
self.CONFIG_NAME_2
],
**importing['config'])
# edit the imported config
@@ -352,17 +356,28 @@ profiles:
self.assertIsNotNone(imported_cfg)
# test profiles
self.assertIsSubset(imported_cfg.lnk_profiles,
importing_cfg.lnk_profiles)
self.assertIsSubset(imported_cfg.profiles,
importing_cfg.profiles)
# test dotfiles
self.assertIsSubset(imported_cfg.dotfiles, importing_cfg.dotfiles)
# test actions
self.assertIsSubset(imported_cfg.actions['pre'],
importing_cfg.actions['pre'])
self.assertIsSubset(imported_cfg.actions['post'],
importing_cfg.actions['post'])
        # use distinct dicts so the four maps do not alias one another
        pre_ed, post_ed, pre_ing, post_ing = {}, {}, {}, {}
for k, v in imported_cfg.actions.items():
kind, _ = v
if kind == 'pre':
pre_ed[k] = v
elif kind == 'post':
post_ed[k] = v
for k, v in importing_cfg.actions.items():
kind, _ = v
if kind == 'pre':
pre_ing[k] = v
elif kind == 'post':
post_ing[k] = v
self.assertIsSubset(pre_ed, pre_ing)
self.assertIsSubset(post_ed, post_ing)
# test transactions
self.assertIsSubset(imported_cfg.trans_r, importing_cfg.trans_r)
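
For reference, a minimal standalone sketch of the flattened actions layout the hunk above appears to rely on, where each key maps to a (kind, command) pair; the keys and commands below are illustrative only:

# hypothetical flat mapping, inferred from the test above
actions = {
    'a_pre_action': ('pre', 'echo pre'),
    'a_post_action': ('post', 'echo post'),
}

# split into independent pre/post dicts; separate literals avoid the
# aliasing pitfall of chained assignment (a = b = {})
pre, post = {}, {}
for key, (kind, cmd) in actions.items():
    (pre if kind == 'pre' else post)[key] = (kind, cmd)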
@@ -371,18 +386,18 @@ profiles:
# test variables
imported_vars = {
k: v
for k, v in imported_cfg.get_variables(None).items()
for k, v in imported_cfg.variables.items()
if not k.startswith('_')
}
importing_vars = {
k: v
for k, v in importing_cfg.get_variables(None).items()
for k, v in importing_cfg.variables.items()
if not k.startswith('_')
}
self.assertIsSubset(imported_vars, importing_vars)
# test prodots
self.assertIsSubset(imported_cfg.prodots, importing_cfg.prodots)
self.assertIsSubset(imported_cfg.profiles, importing_cfg.profiles)
def test_import_configs_override(self):
"""Test import_configs when some config keys overlap."""
@@ -410,22 +425,26 @@ profiles:
vars_ing_file = create_yaml_keyval(vars_ing, tmp)
actions_ed = {
'pre': {
'a_pre_action': 'echo pre 22',
},
'post': {
'a_post_action': 'echo post 22',
},
'a_action': 'echo 22',
'actions': {
'pre': {
'a_pre_action': 'echo pre 22',
},
'post': {
'a_post_action': 'echo post 22',
},
'a_action': 'echo 22',
}
}
actions_ing = {
'pre': {
'a_pre_action': 'echo pre aa',
},
'post': {
'a_post_action': 'echo post aa',
},
'a_action': 'echo aa',
'actions': {
'pre': {
'a_pre_action': 'echo pre aa',
},
'post': {
'a_post_action': 'echo post aa',
},
'a_action': 'echo aa',
}
}
actions_ed_file = create_yaml_keyval(actions_ed, tmp)
actions_ing_file = create_yaml_keyval(actions_ing, tmp)
@@ -542,8 +561,8 @@ profiles:
self.assertIsNotNone(imported_cfg)
# test profiles
self.assertIsSubset(imported_cfg.lnk_profiles,
importing_cfg.lnk_profiles)
self.assertIsSubset(imported_cfg.profiles,
importing_cfg.profiles)
# test dotfiles
self.assertEqual(importing_cfg.dotfiles['f_vimrc'],
@@ -553,14 +572,9 @@ profiles:
# test actions
self.assertFalse(any(
(imported_cfg.actions['pre'][key]
== importing_cfg.actions['pre'][key])
for key in imported_cfg.actions['pre']
))
self.assertFalse(any(
(imported_cfg.actions['post'][key]
== importing_cfg.actions['post'][key])
for key in imported_cfg.actions['post']
(imported_cfg.actions[key]
== importing_cfg.actions[key])
for key in imported_cfg.actions
))
# test transactions
@@ -574,20 +588,20 @@ profiles:
))
# test variables
imported_vars = imported_cfg.get_variables(None)
imported_vars = imported_cfg.variables
self.assertFalse(any(
imported_vars[k] == v
for k, v in importing_cfg.get_variables(None).items()
for k, v in importing_cfg.variables.items()
if not k.startswith('_')
))
# test prodots
self.assertEqual(imported_cfg.prodots['host1'],
importing_cfg.prodots['host1'])
self.assertNotEqual(imported_cfg.prodots['host2'],
importing_cfg.prodots['host2'])
self.assertTrue(set(imported_cfg.prodots['host1'])
< set(importing_cfg.prodots['host2']))
# test profiles dotfiles
self.assertEqual(imported_cfg.profiles['host1']['dotfiles'],
importing_cfg.profiles['host1']['dotfiles'])
self.assertNotEqual(imported_cfg.profiles['host2']['dotfiles'],
importing_cfg.profiles['host2']['dotfiles'])
self.assertTrue(set(imported_cfg.profiles['host1']['dotfiles'])
< set(importing_cfg.profiles['host2']['dotfiles']))
def main():