Mirror of https://github.com/deadc0de6/dotdrop.git (synced 2026-02-13 01:05:13 +00:00)
linting
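This linting pass rewrites str.format() calls as f-strings and splits long statements to satisfy line-length limits. A representative before/after pair, taken from the config-parsing changes below, sketches the pattern applied throughout:

    err = 'bad link value: {}'.format(newlink)   # before
    err = f'bad link value: {newlink}'           # after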
@@ -36,13 +36,13 @@ class Cmd(DictParser):
|
|||||||
try:
|
try:
|
||||||
action = templater.generate_string(self.action)
|
action = templater.generate_string(self.action)
|
||||||
except UndefinedException as exc:
|
except UndefinedException as exc:
|
||||||
err = 'undefined variable for {}: \"{}\"'.format(self.descr, exc)
|
err = f'undefined variable for {self.descr}: \"{exc}\"'
|
||||||
self.log.warn(err)
|
self.log.warn(err)
|
||||||
return False
|
return False
|
||||||
if debug:
|
if debug:
|
||||||
self.log.dbg('{}:'.format(self.descr))
|
self.log.dbg(f'{self.descr}:')
|
||||||
self.log.dbg(' - raw \"{}\"'.format(self.action))
|
self.log.dbg(f' - raw \"{self.action}\"')
|
||||||
self.log.dbg(' - templated \"{}\"'.format(action))
|
self.log.dbg(f' - templated \"{action}\"')
|
||||||
return action
|
return action
|
||||||
|
|
||||||
def _get_args(self, templater):
|
def _get_args(self, templater):
|
||||||
@@ -69,36 +69,34 @@ class Cmd(DictParser):
|
|||||||
if debug and args:
|
if debug and args:
|
||||||
self.log.dbg('action args:')
|
self.log.dbg('action args:')
|
||||||
for cnt, arg in enumerate(args):
|
for cnt, arg in enumerate(args):
|
||||||
self.log.dbg('\targs[{}]: {}'.format(cnt, arg))
|
self.log.dbg(f'\targs[{cnt}]: {arg}')
|
||||||
try:
|
try:
|
||||||
cmd = action.format(*args)
|
cmd = action.format(*args)
|
||||||
except IndexError as exc:
|
except IndexError as exc:
|
||||||
err = 'index error for {}: \"{}\"'.format(self.descr, action)
|
err = f'index error for {self.descr}: \"{action}\"'
|
||||||
err += ' with \"{}\"'.format(args)
|
err += f' with \"{args}\"'
|
||||||
err += ': {}'.format(exc)
|
err += f': {exc}'
|
||||||
self.log.warn(err)
|
self.log.warn(err)
|
||||||
return False
|
return False
|
||||||
except KeyError as exc:
|
except KeyError as exc:
|
||||||
err = 'key error for {}: \"{}\": {}'.format(self.descr,
|
err = f'key error for {self.descr}: \"{action}\": {exc}'
|
||||||
action,
|
err += f' with \"{args}\"'
|
||||||
exc)
|
|
||||||
err += ' with \"{}\"'.format(args)
|
|
||||||
self.log.warn(err)
|
self.log.warn(err)
|
||||||
return False
|
return False
|
||||||
if self.silent:
|
if self.silent:
|
||||||
self.log.sub('executing silent action \"{}\"'.format(self.key))
|
self.log.sub(f'executing silent action \"{self.key}\"')
|
||||||
if debug:
|
if debug:
|
||||||
self.log.dbg('action cmd silenced')
|
self.log.dbg('action cmd silenced')
|
||||||
else:
|
else:
|
||||||
if debug:
|
if debug:
|
||||||
self.log.dbg('action cmd: \"{}\"'.format(cmd))
|
self.log.dbg(f'action cmd: \"{cmd}\"')
|
||||||
self.log.sub('executing \"{}\"'.format(cmd))
|
self.log.sub(f'executing \"{cmd}\"')
|
||||||
try:
|
try:
|
||||||
ret = subprocess.call(cmd, shell=True)
|
ret = subprocess.call(cmd, shell=True)
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
self.log.warn('{} interrupted'.format(self.descr))
|
self.log.warn(f'{self.descr} interrupted')
|
||||||
if ret != 0:
|
if ret != 0:
|
||||||
self.log.warn('{} returned code {}'.format(self.descr, ret))
|
self.log.warn(f'{self.descr} returned code {ret}')
|
||||||
return ret == 0
|
return ret == 0
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@@ -106,7 +104,7 @@ class Cmd(DictParser):
|
|||||||
return {'action': value}
|
return {'action': value}
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return 'key:{} -> \"{}\"'.format(self.key, self.action)
|
return f'key:{self.key} -> \"{self.action}\"'
|
||||||
|
|
||||||
|
|
||||||
class Action(Cmd):
|
class Action(Cmd):
|
||||||
@@ -144,7 +142,7 @@ class Action(Cmd):
|
|||||||
return out.format(self.key, self.kind, self.action)
|
return out.format(self.key, self.kind, self.action)
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return 'action({})'.format(self.__str__())
|
return f'action({self.__str__()})'
|
||||||
|
|
||||||
|
|
||||||
class Transform(Cmd):
|
class Transform(Cmd):
|
||||||
|
|||||||
@@ -221,7 +221,7 @@ class CfgAggregator:
|
|||||||
"""create a new dotfile"""
|
"""create a new dotfile"""
|
||||||
# get a new dotfile with a unique key
|
# get a new dotfile with a unique key
|
||||||
key = self._get_new_dotfile_key(dst)
|
key = self._get_new_dotfile_key(dst)
|
||||||
self.log.dbg('new dotfile key: {}'.format(key))
|
self.log.dbg(f'new dotfile key: {key}')
|
||||||
# add the dotfile
|
# add the dotfile
|
||||||
trans_r_key = trans_w_key = None
|
trans_r_key = trans_w_key = None
|
||||||
if trans_read:
|
if trans_read:
|
||||||
@@ -298,7 +298,7 @@ class CfgAggregator:
|
|||||||
self._patch_keys_to_objs([self.settings],
|
self._patch_keys_to_objs([self.settings],
|
||||||
"default_actions", self._get_action_w_args)
|
"default_actions", self._get_action_w_args)
|
||||||
|
|
||||||
msg = 'default actions: {}'.format(self.settings.default_actions)
|
msg = f'default actions: {self.settings.default_actions}'
|
||||||
self.log.dbg(msg)
|
self.log.dbg(msg)
|
||||||
|
|
||||||
# patch trans_w/trans_r in dotfiles
|
# patch trans_w/trans_r in dotfiles
|
||||||
@@ -318,7 +318,7 @@ class CfgAggregator:
|
|||||||
"""
|
"""
|
||||||
if not containers:
|
if not containers:
|
||||||
return
|
return
|
||||||
self.log.dbg('patching {} ...'.format(keys))
|
self.log.dbg(f'patching {keys} ...')
|
||||||
for container in containers:
|
for container in containers:
|
||||||
objects = []
|
objects = []
|
||||||
okeys = getattr(container, keys)
|
okeys = getattr(container, keys)
|
||||||
@@ -329,8 +329,8 @@ class CfgAggregator:
|
|||||||
for key in okeys:
|
for key in okeys:
|
||||||
obj = get_by_key(key)
|
obj = get_by_key(key)
|
||||||
if not obj:
|
if not obj:
|
||||||
err = '{} does not contain'.format(container)
|
err = f'{container} does not contain'
|
||||||
err += ' a {} entry named {}'.format(keys, key)
|
err += f' a {keys} entry named {key}'
|
||||||
self.log.err(err)
|
self.log.err(err)
|
||||||
raise Exception(err)
|
raise Exception(err)
|
||||||
objects.append(obj)
|
objects.append(obj)
|
||||||
|
|||||||
@@ -144,15 +144,16 @@ class CfgYaml:
|
|||||||
self.variables = {}
|
self.variables = {}
|
||||||
|
|
||||||
if not os.path.exists(self._path):
|
if not os.path.exists(self._path):
|
||||||
err = 'invalid config path: \"{}\"'.format(path)
|
err = f'invalid config path: \"{path}\"'
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg(err)
|
self._dbg(err)
|
||||||
raise YamlException(err)
|
raise YamlException(err)
|
||||||
|
|
||||||
self._dbg('START of config parsing')
|
self._dbg('START of config parsing')
|
||||||
self._dbg('reloading: {}'.format(reloading))
|
self._dbg(f'reloading: {reloading}')
|
||||||
self._dbg('profile: {}'.format(profile))
|
self._dbg(f'profile: {profile}')
|
||||||
self._dbg('included profiles: {}'.format(','.join(self._inc_profiles)))
|
pfs = ','.join(self._inc_profiles)
|
||||||
|
self._dbg(f'included profiles: {pfs}')
|
||||||
|
|
||||||
self._yaml_dict = self._load_yaml(self._path)
|
self._yaml_dict = self._load_yaml(self._path)
|
||||||
# live patch deprecated entries
|
# live patch deprecated entries
|
||||||
@@ -287,7 +288,7 @@ class CfgYaml:
|
|||||||
|
|
||||||
# end of parsing
|
# end of parsing
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('########### {} ###########'.format('final config'))
|
self._dbg('########### final config ###########')
|
||||||
self._debug_entries()
|
self._debug_entries()
|
||||||
self._dbg('END of config parsing')
|
self._dbg('END of config parsing')
|
||||||
|
|
||||||
@@ -300,10 +301,10 @@ class CfgYaml:
|
|||||||
newlink = self._template_item(link)
|
newlink = self._template_item(link)
|
||||||
# check link value
|
# check link value
|
||||||
if newlink not in self.allowed_link_val:
|
if newlink not in self.allowed_link_val:
|
||||||
err = 'bad link value: {}'.format(newlink)
|
err = f'bad link value: {newlink}'
|
||||||
self._log.err(err)
|
self._log.err(err)
|
||||||
self._log.err('allowed: {}'.format(self.allowed_link_val))
|
self._log.err(f'allowed: {self.allowed_link_val}')
|
||||||
raise YamlException('config content error: {}'.format(err))
|
raise YamlException(f'config content error: {err}')
|
||||||
return newlink
|
return newlink
|
||||||
|
|
||||||
def resolve_dotfile_src(self, src, templater=None):
|
def resolve_dotfile_src(self, src, templater=None):
|
||||||
@@ -314,7 +315,7 @@ class CfgYaml:
|
|||||||
if templater:
|
if templater:
|
||||||
new = templater.generate_string(src)
|
new = templater.generate_string(src)
|
||||||
if new != src and self._debug:
|
if new != src and self._debug:
|
||||||
msg = 'dotfile src: \"{}\" -> \"{}\"'.format(src, new)
|
msg = f'dotfile src: \"{src}\" -> \"{new}\"'
|
||||||
self._dbg(msg)
|
self._dbg(msg)
|
||||||
src = new
|
src = new
|
||||||
src = os.path.join(self.settings[self.key_settings_dotpath],
|
src = os.path.join(self.settings[self.key_settings_dotpath],
|
||||||
@@ -330,7 +331,7 @@ class CfgYaml:
|
|||||||
if templater:
|
if templater:
|
||||||
new = templater.generate_string(dst)
|
new = templater.generate_string(dst)
|
||||||
if new != dst and self._debug:
|
if new != dst and self._debug:
|
||||||
msg = 'dotfile dst: \"{}\" -> \"{}\"'.format(dst, new)
|
msg = f'dotfile dst: \"{dst}\" -> \"{new}\"'
|
||||||
self._dbg(msg)
|
self._dbg(msg)
|
||||||
dst = new
|
dst = new
|
||||||
newdst = self._norm_path(dst)
|
newdst = self._norm_path(dst)
|
||||||
@@ -361,8 +362,7 @@ class CfgYaml:
|
|||||||
pro = self._yaml_dict[self.key_profiles][profile_key]
|
pro = self._yaml_dict[self.key_profiles][profile_key]
|
||||||
pro[self.key_profile_dotfiles].append(dotfile_key)
|
pro[self.key_profile_dotfiles].append(dotfile_key)
|
||||||
if self._debug:
|
if self._debug:
|
||||||
msg = 'add \"{}\" to profile \"{}\"'.format(dotfile_key,
|
msg = f'add \"{dotfile_key}\" to profile \"{profile_key}\"'
|
||||||
profile_key)
|
|
||||||
msg.format(dotfile_key, profile_key)
|
msg.format(dotfile_key, profile_key)
|
||||||
self._dbg(msg)
|
self._dbg(msg)
|
||||||
self._dirty = True
|
self._dirty = True
|
||||||
@@ -383,9 +383,9 @@ class CfgYaml:
|
|||||||
if old == chmod:
|
if old == chmod:
|
||||||
return False
|
return False
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('update dotfile: {}'.format(key))
|
self._dbg(f'update dotfile: {key}')
|
||||||
self._dbg('old chmod value: {}'.format(old))
|
self._dbg(f'old chmod value: {old}')
|
||||||
self._dbg('new chmod value: {}'.format(chmod))
|
self._dbg(f'new chmod value: {chmod}')
|
||||||
dotfile = self._yaml_dict[self.key_dotfiles][key]
|
dotfile = self._yaml_dict[self.key_dotfiles][key]
|
||||||
if not chmod:
|
if not chmod:
|
||||||
del dotfile[self.key_dotfile_chmod]
|
del dotfile[self.key_dotfile_chmod]
|
||||||
@@ -400,13 +400,13 @@ class CfgYaml:
|
|||||||
if key in self.dotfiles.keys():
|
if key in self.dotfiles.keys():
|
||||||
return False
|
return False
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('adding new dotfile: {}'.format(key))
|
self._dbg(f'adding new dotfile: {key}')
|
||||||
self._dbg('new dotfile src: {}'.format(src))
|
self._dbg(f'new dotfile src: {src}')
|
||||||
self._dbg('new dotfile dst: {}'.format(dst))
|
self._dbg(f'new dotfile dst: {dst}')
|
||||||
self._dbg('new dotfile link: {}'.format(link))
|
self._dbg(f'new dotfile link: {link}')
|
||||||
self._dbg('new dotfile chmod: {}'.format(chmod))
|
self._dbg(f'new dotfile chmod: {chmod}')
|
||||||
self._dbg('new dotfile trans_r: {}'.format(trans_r_key))
|
self._dbg(f'new dotfile trans_r: {trans_r_key}')
|
||||||
self._dbg('new dotfile trans_w: {}'.format(trans_w_key))
|
self._dbg(f'new dotfile trans_w: {trans_w_key}')
|
||||||
|
|
||||||
# create the dotfile dict
|
# create the dotfile dict
|
||||||
df_dict = {
|
df_dict = {
|
||||||
@@ -430,7 +430,7 @@ class CfgYaml:
|
|||||||
df_dict[self.key_trans_w] = str(trans_w_key)
|
df_dict[self.key_trans_w] = str(trans_w_key)
|
||||||
|
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('dotfile dict: {}'.format(df_dict))
|
self._dbg(f'dotfile dict: {df_dict}')
|
||||||
|
|
||||||
# add to global dict
|
# add to global dict
|
||||||
self._yaml_dict[self.key_dotfiles][key] = df_dict
|
self._yaml_dict[self.key_dotfiles][key] = df_dict
|
||||||
@@ -440,26 +440,26 @@ class CfgYaml:
|
|||||||
def del_dotfile(self, key):
|
def del_dotfile(self, key):
|
||||||
"""remove this dotfile from config"""
|
"""remove this dotfile from config"""
|
||||||
if key not in self._yaml_dict[self.key_dotfiles]:
|
if key not in self._yaml_dict[self.key_dotfiles]:
|
||||||
self._log.err('key not in dotfiles: {}'.format(key))
|
self._log.err(f'key not in dotfiles: {key}')
|
||||||
return False
|
return False
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('remove dotfile: {}'.format(key))
|
self._dbg(f'remove dotfile: {key}')
|
||||||
del self._yaml_dict[self.key_dotfiles][key]
|
del self._yaml_dict[self.key_dotfiles][key]
|
||||||
if self._debug:
|
if self._debug:
|
||||||
dfs = self._yaml_dict[self.key_dotfiles]
|
dfs = self._yaml_dict[self.key_dotfiles]
|
||||||
self._dbg('new dotfiles: {}'.format(dfs))
|
self._dbg(f'new dotfiles: {dfs}')
|
||||||
self._dirty = True
|
self._dirty = True
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def del_dotfile_from_profile(self, df_key, pro_key):
|
def del_dotfile_from_profile(self, df_key, pro_key):
|
||||||
"""remove this dotfile from that profile"""
|
"""remove this dotfile from that profile"""
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('removing \"{}\" from \"{}\"'.format(df_key, pro_key))
|
self._dbg(f'removing \"{df_key}\" from \"{pro_key}\"')
|
||||||
if df_key not in self.dotfiles.keys():
|
if df_key not in self.dotfiles.keys():
|
||||||
self._log.err('key not in dotfiles: {}'.format(df_key))
|
self._log.err(f'key not in dotfiles: {df_key}')
|
||||||
return False
|
return False
|
||||||
if pro_key not in self.profiles.keys():
|
if pro_key not in self.profiles.keys():
|
||||||
self._log.err('key not in profile: {}'.format(pro_key))
|
self._log.err(f'key not in profile: {pro_key}')
|
||||||
return False
|
return False
|
||||||
# get the profile dictionary
|
# get the profile dictionary
|
||||||
profile = self._yaml_dict[self.key_profiles][pro_key]
|
profile = self._yaml_dict[self.key_profiles][pro_key]
|
||||||
@@ -470,12 +470,12 @@ class CfgYaml:
|
|||||||
return True
|
return True
|
||||||
if self._debug:
|
if self._debug:
|
||||||
dfs = profile[self.key_profile_dotfiles]
|
dfs = profile[self.key_profile_dotfiles]
|
||||||
self._dbg('{} profile dotfiles: {}'.format(pro_key, dfs))
|
self._dbg(f'{pro_key} profile dotfiles: {dfs}')
|
||||||
self._dbg('remove {} from profile {}'.format(df_key, pro_key))
|
self._dbg(f'remove {df_key} from profile {pro_key}')
|
||||||
profile[self.key_profile_dotfiles].remove(df_key)
|
profile[self.key_profile_dotfiles].remove(df_key)
|
||||||
if self._debug:
|
if self._debug:
|
||||||
dfs = profile[self.key_profile_dotfiles]
|
dfs = profile[self.key_profile_dotfiles]
|
||||||
self._dbg('{} profile dotfiles: {}'.format(pro_key, dfs))
|
self._dbg(f'{pro_key} profile dotfiles: {dfs}')
|
||||||
self._dirty = True
|
self._dirty = True
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -493,13 +493,13 @@ class CfgYaml:
|
|||||||
|
|
||||||
# save to file
|
# save to file
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('saving to {}'.format(self._path))
|
self._dbg(f'saving to {self._path}')
|
||||||
try:
|
try:
|
||||||
with open(self._path, 'w', encoding='utf8') as file:
|
with open(self._path, 'w', encoding='utf8') as file:
|
||||||
self._yaml_dump(content, file, fmt=self._config_format)
|
self._yaml_dump(content, file, fmt=self._config_format)
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
self._log.err(exc)
|
self._log.err(exc)
|
||||||
err = 'error saving config: {}'.format(self._path)
|
err = f'error saving config: {self._path}'
|
||||||
raise YamlException(err) from exc
|
raise YamlException(err) from exc
|
||||||
|
|
||||||
if self._dirty_deprecated:
|
if self._dirty_deprecated:
|
||||||
@@ -558,7 +558,7 @@ class CfgYaml:
|
|||||||
keys = dotfiles.keys()
|
keys = dotfiles.keys()
|
||||||
if len(keys) != len(list(set(keys))):
|
if len(keys) != len(list(set(keys))):
|
||||||
dups = [x for x in keys if x not in list(set(keys))]
|
dups = [x for x in keys if x not in list(set(keys))]
|
||||||
err = 'duplicate dotfile keys found: {}'.format(dups)
|
err = f'duplicate dotfile keys found: {dups}'
|
||||||
raise YamlException(err)
|
raise YamlException(err)
|
||||||
|
|
||||||
dotfiles = self._norm_dotfiles(dotfiles)
|
dotfiles = self._norm_dotfiles(dotfiles)
|
||||||
@@ -645,7 +645,7 @@ class CfgYaml:
|
|||||||
if name in current:
|
if name in current:
|
||||||
# ignore if already defined
|
# ignore if already defined
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('ignore uservariables {}'.format(name))
|
self._dbg(f'ignore uservariables {name}')
|
||||||
continue
|
continue
|
||||||
content = userinput(prompt, debug=self._debug)
|
content = userinput(prompt, debug=self._debug)
|
||||||
uvars[name] = content
|
uvars[name] = content
|
||||||
@@ -764,24 +764,24 @@ class CfgYaml:
|
|||||||
if self.key_dotfile_chmod in val:
|
if self.key_dotfile_chmod in val:
|
||||||
value = str(val[self.key_dotfile_chmod])
|
value = str(val[self.key_dotfile_chmod])
|
||||||
if len(value) < 3:
|
if len(value) < 3:
|
||||||
err = 'bad format for chmod: {}'.format(value)
|
err = f'bad format for chmod: {value}'
|
||||||
self._log.err(err)
|
self._log.err(err)
|
||||||
raise YamlException('config content error: {}'.format(err))
|
raise YamlException(f'config content error: {err}')
|
||||||
try:
|
try:
|
||||||
int(value)
|
int(value)
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
err = 'bad format for chmod: {}'.format(value)
|
err = f'bad format for chmod: {value}'
|
||||||
self._log.err(err)
|
self._log.err(err)
|
||||||
err = 'config content error: {}'.format(err)
|
err = f'config content error: {err}'
|
||||||
raise YamlException(err) from exc
|
raise YamlException(err) from exc
|
||||||
# normalize chmod value
|
# normalize chmod value
|
||||||
for chmodv in list(value):
|
for chmodv in list(value):
|
||||||
chmodint = int(chmodv)
|
chmodint = int(chmodv)
|
||||||
if chmodint < 0 or chmodint > 7:
|
if chmodint < 0 or chmodint > 7:
|
||||||
err = 'bad format for chmod: {}'.format(value)
|
err = f'bad format for chmod: {value}'
|
||||||
self._log.err(err)
|
self._log.err(err)
|
||||||
raise YamlException(
|
raise YamlException(
|
||||||
'config content error: {}'.format(err)
|
f'config content error: {err}'
|
||||||
)
|
)
|
||||||
val[self.key_dotfile_chmod] = int(value, 8)
|
val[self.key_dotfile_chmod] = int(value, 8)
|
||||||
|
|
||||||
@@ -877,7 +877,7 @@ class CfgYaml:
|
|||||||
continue
|
continue
|
||||||
if self.key_all in dfs:
|
if self.key_all in dfs:
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('add ALL to profile \"{}\"'.format(k))
|
self._dbg(f'add ALL to profile \"{k}\"')
|
||||||
val[self.key_profile_dotfiles] = self.dotfiles.keys()
|
val[self.key_profile_dotfiles] = self.dotfiles.keys()
|
||||||
|
|
||||||
def _resolve_profile_includes(self):
|
def _resolve_profile_includes(self):
|
||||||
@@ -903,17 +903,15 @@ class CfgYaml:
|
|||||||
return dotfiles, actions
|
return dotfiles, actions
|
||||||
|
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('{} includes {}'.format(profile, ','.join(includes)))
|
incs = ','.join(includes)
|
||||||
self._dbg('{} dotfiles before include: {}'.format(profile,
|
self._dbg(f'{profile} includes {incs}')
|
||||||
dotfiles))
|
self._dbg(f'{profile} dotfiles before include: {dotfiles}')
|
||||||
self._dbg('{} actions before include: {}'.format(profile,
|
self._dbg(f'{profile} actions before include: {actions}')
|
||||||
actions))
|
|
||||||
|
|
||||||
seen = []
|
seen = []
|
||||||
for i in uniq_list(includes):
|
for i in uniq_list(includes):
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('resolving includes "{}" <- "{}"'
|
self._dbg(f'resolving includes "{profile}" <- "{i}"')
|
||||||
.format(profile, i))
|
|
||||||
|
|
||||||
# ensure no include loop occurs
|
# ensure no include loop occurs
|
||||||
if i in seen:
|
if i in seen:
|
||||||
@@ -921,37 +919,36 @@ class CfgYaml:
|
|||||||
seen.append(i)
|
seen.append(i)
|
||||||
# included profile even exists
|
# included profile even exists
|
||||||
if i not in self.profiles.keys():
|
if i not in self.profiles.keys():
|
||||||
self._log.warn('include unknown profile: {}'.format(i))
|
self._log.warn(f'include unknown profile: {i}')
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# recursive resolve
|
# recursive resolve
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('recursively resolving includes for profile "{}"'
|
self._dbg(f'recursively resolving includes for profile "{i}"')
|
||||||
.format(i))
|
|
||||||
o_dfs, o_actions = self._rec_resolve_profile_include(i)
|
o_dfs, o_actions = self._rec_resolve_profile_include(i)
|
||||||
|
|
||||||
# merge dotfile keys
|
# merge dotfile keys
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('Merging dotfiles {} <- {}: {} <- {}'
|
msg = f'Merging dotfiles {profile}'
|
||||||
.format(profile, i, dotfiles, o_dfs))
|
msg += f' <- {i}: {dotfiles} <- {o_dfs}'
|
||||||
|
self._dbg(msg)
|
||||||
dotfiles.extend(o_dfs)
|
dotfiles.extend(o_dfs)
|
||||||
this_profile[self.key_profile_dotfiles] = uniq_list(dotfiles)
|
this_profile[self.key_profile_dotfiles] = uniq_list(dotfiles)
|
||||||
|
|
||||||
# merge actions keys
|
# merge actions keys
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('Merging actions {} <- {}: {} <- {}'
|
msg = f'Merging actions {profile} '
|
||||||
.format(profile, i, actions, o_actions))
|
msg += f'<- {i}: {actions} <- {o_actions}'
|
||||||
actions.extend(o_actions)
|
self._dbg(msg)
|
||||||
|
actions.extend(o_actions)
|
||||||
this_profile[self.key_profile_actions] = uniq_list(actions)
|
this_profile[self.key_profile_actions] = uniq_list(actions)
|
||||||
|
|
||||||
dotfiles = this_profile.get(self.key_profile_dotfiles, [])
|
dotfiles = this_profile.get(self.key_profile_dotfiles, [])
|
||||||
actions = this_profile.get(self.key_profile_actions, [])
|
actions = this_profile.get(self.key_profile_actions, [])
|
||||||
|
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('{} dotfiles after include: {}'.format(profile,
|
self._dbg(f'{profile} dotfiles after include: {dotfiles}')
|
||||||
dotfiles))
|
self._dbg(f'{profile} actions after include: {actions}')
|
||||||
self._dbg('{} actions after include: {}'.format(profile,
|
|
||||||
actions))
|
|
||||||
|
|
||||||
# since included items are resolved here
|
# since included items are resolved here
|
||||||
# we can clear these include
|
# we can clear these include
|
||||||
@@ -971,11 +968,11 @@ class CfgYaml:
|
|||||||
newvars = {}
|
newvars = {}
|
||||||
for path in paths:
|
for path in paths:
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('import variables from {}'.format(path))
|
self._dbg(f'import variables from {path}')
|
||||||
var = self._import_sub(path, self.key_variables,
|
var = self._import_sub(path, self.key_variables,
|
||||||
mandatory=False)
|
mandatory=False)
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('import dynvariables from {}'.format(path))
|
self._dbg(f'import dynvariables from {path}')
|
||||||
dvar = self._import_sub(path, self.key_dvariables,
|
dvar = self._import_sub(path, self.key_dvariables,
|
||||||
mandatory=False)
|
mandatory=False)
|
||||||
|
|
||||||
@@ -997,7 +994,7 @@ class CfgYaml:
|
|||||||
paths = self._resolve_paths(paths)
|
paths = self._resolve_paths(paths)
|
||||||
for path in paths:
|
for path in paths:
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('import actions from {}'.format(path))
|
self._dbg(f'import actions from {path}')
|
||||||
new = self._import_sub(path, self.key_actions,
|
new = self._import_sub(path, self.key_actions,
|
||||||
mandatory=False,
|
mandatory=False,
|
||||||
patch_func=self._norm_actions)
|
patch_func=self._norm_actions)
|
||||||
@@ -1010,7 +1007,7 @@ class CfgYaml:
|
|||||||
if not imp:
|
if not imp:
|
||||||
continue
|
continue
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('import dotfiles for profile {}'.format(k))
|
self._dbg(f'import dotfiles for profile {k}')
|
||||||
paths = self._resolve_paths(imp)
|
paths = self._resolve_paths(imp)
|
||||||
for path in paths:
|
for path in paths:
|
||||||
current = val.get(self.key_dotfiles, [])
|
current = val.get(self.key_dotfiles, [])
|
||||||
@@ -1021,9 +1018,9 @@ class CfgYaml:
|
|||||||
def _import_config(self, path):
|
def _import_config(self, path):
|
||||||
"""import config from path"""
|
"""import config from path"""
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('import config from {}'.format(path))
|
self._dbg(f'import config from {path}')
|
||||||
self._dbg('profile: {}'.format(self._profile))
|
self._dbg(f'profile: {self._profile}')
|
||||||
self._dbg('included profiles: {}'.format(self._inc_profiles))
|
self._dbg(f'included profiles: {self._inc_profiles}')
|
||||||
sub = CfgYaml(path, profile=self._profile,
|
sub = CfgYaml(path, profile=self._profile,
|
||||||
addprofiles=self._inc_profiles,
|
addprofiles=self._inc_profiles,
|
||||||
debug=self._debug,
|
debug=self._debug,
|
||||||
@@ -1065,8 +1062,7 @@ class CfgYaml:
|
|||||||
paths = self._resolve_paths(imp)
|
paths = self._resolve_paths(imp)
|
||||||
for path in paths:
|
for path in paths:
|
||||||
if path in self.imported_configs:
|
if path in self.imported_configs:
|
||||||
err = '{} imported more than once in {}'.format(path,
|
err = f'{path} imported more than once in {self._path}'
|
||||||
self._path)
|
|
||||||
raise YamlException(err)
|
raise YamlException(err)
|
||||||
self._import_config(path)
|
self._import_config(path)
|
||||||
|
|
||||||
@@ -1076,19 +1072,19 @@ class CfgYaml:
|
|||||||
patch_func is applied to each element if defined
|
patch_func is applied to each element if defined
|
||||||
"""
|
"""
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('import \"{}\" from \"{}\"'.format(key, path))
|
self._dbg(f'import \"{key}\" from \"{path}\"')
|
||||||
extdict = self._load_yaml(path)
|
extdict = self._load_yaml(path)
|
||||||
new = self._get_entry(extdict, key, mandatory=mandatory)
|
new = self._get_entry(extdict, key, mandatory=mandatory)
|
||||||
if patch_func:
|
if patch_func:
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('calling patch: {}'.format(patch_func))
|
self._dbg(f'calling patch: {patch_func}')
|
||||||
new = patch_func(new)
|
new = patch_func(new)
|
||||||
if not new and mandatory:
|
if not new and mandatory:
|
||||||
err = 'no \"{}\" imported from \"{}\"'.format(key, path)
|
err = f'no \"{key}\" imported from \"{path}\"'
|
||||||
self._log.warn(err)
|
self._log.warn(err)
|
||||||
raise YamlException(err)
|
raise YamlException(err)
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('imported \"{}\": {}'.format(key, new))
|
self._dbg(f'imported \"{key}\": {new}')
|
||||||
return new
|
return new
|
||||||
|
|
||||||
########################################################
|
########################################################
|
||||||
@@ -1106,7 +1102,7 @@ class CfgYaml:
|
|||||||
self.key_profile_dotfiles: []
|
self.key_profile_dotfiles: []
|
||||||
}
|
}
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('adding new profile: {}'.format(key))
|
self._dbg(f'adding new profile: {key}')
|
||||||
self._dirty = True
|
self._dirty = True
|
||||||
|
|
||||||
########################################################
|
########################################################
|
||||||
@@ -1159,7 +1155,7 @@ class CfgYaml:
|
|||||||
self._dirty = True
|
self._dirty = True
|
||||||
self._dirty_deprecated = True
|
self._dirty_deprecated = True
|
||||||
warn = 'deprecated \"link: <boolean>\"'
|
warn = 'deprecated \"link: <boolean>\"'
|
||||||
warn += ', updated to \"link: {}\"'.format(new)
|
warn += f', updated to \"link: {new}\"'
|
||||||
self._log.warn(warn)
|
self._log.warn(warn)
|
||||||
|
|
||||||
if self.key_dotfile_link in dotfile and \
|
if self.key_dotfile_link in dotfile and \
|
||||||
@@ -1171,7 +1167,7 @@ class CfgYaml:
|
|||||||
self._dirty = True
|
self._dirty = True
|
||||||
self._dirty_deprecated = True
|
self._dirty_deprecated = True
|
||||||
warn = 'deprecated \"link: link\"'
|
warn = 'deprecated \"link: link\"'
|
||||||
warn += ', updated to \"link: {}\"'.format(new)
|
warn += f', updated to \"link: {new}\"'
|
||||||
self._log.warn(warn)
|
self._log.warn(warn)
|
||||||
|
|
||||||
if old_key in dotfile and \
|
if old_key in dotfile and \
|
||||||
@@ -1186,7 +1182,7 @@ class CfgYaml:
|
|||||||
self._dirty = True
|
self._dirty = True
|
||||||
self._dirty_deprecated = True
|
self._dirty_deprecated = True
|
||||||
warn = 'deprecated \"link_children\" value'
|
warn = 'deprecated \"link_children\" value'
|
||||||
warn += ', updated to \"{}\"'.format(new)
|
warn += f', updated to \"{new}\"'
|
||||||
self._log.warn(warn)
|
self._log.warn(warn)
|
||||||
|
|
||||||
########################################################
|
########################################################
|
||||||
@@ -1209,22 +1205,22 @@ class CfgYaml:
|
|||||||
"""load a yaml file to a dict"""
|
"""load a yaml file to a dict"""
|
||||||
content = {}
|
content = {}
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('----------dump:{}----------'.format(path))
|
self._dbg(f'----------dump:{path}----------')
|
||||||
cfg = '\n'
|
cfg = '\n'
|
||||||
with open(path, 'r', encoding='utf8') as file:
|
with open(path, 'r', encoding='utf8') as file:
|
||||||
for line in file:
|
for line in file:
|
||||||
cfg += line
|
cfg += line
|
||||||
self._dbg(cfg.rstrip())
|
self._dbg(cfg.rstrip())
|
||||||
self._dbg('----------end:{}----------'.format(path))
|
self._dbg(f'----------end:{path}----------')
|
||||||
try:
|
try:
|
||||||
content, fmt = self._yaml_load(path)
|
content, fmt = self._yaml_load(path)
|
||||||
self._config_format = fmt
|
self._config_format = fmt
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
self._log.err(exc)
|
self._log.err(exc)
|
||||||
err = 'config yaml error: {}'.format(path)
|
err = f'config yaml error: {path}'
|
||||||
raise YamlException(err) from exc
|
raise YamlException(err) from exc
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('format: {}'.format(self._config_format))
|
self._dbg(f'format: {self._config_format}')
|
||||||
return content
|
return content
|
||||||
|
|
||||||
def _validate(self, yamldict):
|
def _validate(self, yamldict):
|
||||||
@@ -1235,9 +1231,9 @@ class CfgYaml:
|
|||||||
# check top entries
|
# check top entries
|
||||||
for entry in self.top_entries:
|
for entry in self.top_entries:
|
||||||
if entry not in yamldict:
|
if entry not in yamldict:
|
||||||
err = 'no {} entry found'.format(entry)
|
err = f'no {entry} entry found'
|
||||||
self._log.err(err)
|
self._log.err(err)
|
||||||
raise YamlException('config format error: {}'.format(err))
|
raise YamlException(f'config format error: {err}')
|
||||||
|
|
||||||
# check link_dotfile_default
|
# check link_dotfile_default
|
||||||
if self.key_settings not in yamldict:
|
if self.key_settings not in yamldict:
|
||||||
@@ -1253,10 +1249,10 @@ class CfgYaml:
|
|||||||
return
|
return
|
||||||
val = settings[self.key_settings_link_dotfile_default]
|
val = settings[self.key_settings_link_dotfile_default]
|
||||||
if val not in self.allowed_link_val:
|
if val not in self.allowed_link_val:
|
||||||
err = 'bad link value: {}'.format(val)
|
err = f'bad link value: {val}'
|
||||||
self._log.err(err)
|
self._log.err(err)
|
||||||
self._log.err('allowed: {}'.format(self.allowed_link_val))
|
self._log.err(f'allowed: {self.allowed_link_val}')
|
||||||
raise YamlException('config content error: {}'.format(err))
|
raise YamlException(f'config content error: {err}')
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _yaml_load(cls, path):
|
def _yaml_load(cls, path):
|
||||||
@@ -1352,7 +1348,7 @@ class CfgYaml:
|
|||||||
for entry in entries:
|
for entry in entries:
|
||||||
newe = self._template_item(entry)
|
newe = self._template_item(entry)
|
||||||
if self._debug and entry != newe:
|
if self._debug and entry != newe:
|
||||||
self._dbg('resolved: {} -> {}'.format(entry, newe))
|
self._dbg(f'resolved: {entry} -> {newe}')
|
||||||
new.append(newe)
|
new.append(newe)
|
||||||
return new
|
return new
|
||||||
|
|
||||||
@@ -1364,7 +1360,7 @@ class CfgYaml:
|
|||||||
for k, val in entries.items():
|
for k, val in entries.items():
|
||||||
newv = self._template_item(val)
|
newv = self._template_item(val)
|
||||||
if self._debug and val != newv:
|
if self._debug and val != newv:
|
||||||
self._dbg('resolved: {} -> {}'.format(val, newv))
|
self._dbg(f'resolved: {val} -> {newv}')
|
||||||
new[k] = newv
|
new[k] = newv
|
||||||
return new
|
return new
|
||||||
|
|
||||||
@@ -1486,7 +1482,7 @@ class CfgYaml:
|
|||||||
def _parse_extended_import_path(self, path_entry):
|
def _parse_extended_import_path(self, path_entry):
|
||||||
"""Parse an import path in a tuple (path, fatal_not_found)."""
|
"""Parse an import path in a tuple (path, fatal_not_found)."""
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('parsing path entry {}'.format(path_entry))
|
self._dbg(f'parsing path entry {path_entry}')
|
||||||
|
|
||||||
path, _, attribute = path_entry.rpartition(self.key_import_sep)
|
path, _, attribute = path_entry.rpartition(self.key_import_sep)
|
||||||
fatal_not_found = attribute != self.key_import_ignore_key
|
fatal_not_found = attribute != self.key_import_ignore_key
|
||||||
@@ -1500,18 +1496,20 @@ class CfgYaml:
|
|||||||
# str.rpartition
|
# str.rpartition
|
||||||
# In both cases, path_entry is the path we're looking for.
|
# In both cases, path_entry is the path we're looking for.
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('using attribute default values for path {}'
|
msg = 'using attribute default values'
|
||||||
.format(path_entry))
|
msg += f' for path {path_entry}'
|
||||||
|
self._dbg(msg)
|
||||||
path = path_entry
|
path = path_entry
|
||||||
fatal_not_found = self.key_import_fatal_not_found
|
fatal_not_found = self.key_import_fatal_not_found
|
||||||
elif self._debug:
|
elif self._debug:
|
||||||
self._dbg('path entry {} has fatal_not_found flag set to {}'
|
msg = f'path entry {path_entry} has fatal_not_found'
|
||||||
.format(path_entry, fatal_not_found))
|
msg += f' flag set to {fatal_not_found}'
|
||||||
|
self._dbg(msg)
|
||||||
return path, fatal_not_found
|
return path, fatal_not_found
|
||||||
|
|
||||||
def _handle_non_existing_path(self, path, fatal_not_found=True):
|
def _handle_non_existing_path(self, path, fatal_not_found=True):
|
||||||
"""Raise an exception or log a warning to handle non-existing paths."""
|
"""Raise an exception or log a warning to handle non-existing paths."""
|
||||||
error = 'bad path {}'.format(path)
|
error = f'bad path {path}'
|
||||||
if fatal_not_found:
|
if fatal_not_found:
|
||||||
raise YamlException(error)
|
raise YamlException(error)
|
||||||
self._log.warn(error)
|
self._log.warn(error)
|
||||||
@@ -1520,7 +1518,7 @@ class CfgYaml:
|
|||||||
"""Check if a path exists, raising if necessary."""
|
"""Check if a path exists, raising if necessary."""
|
||||||
if os.path.exists(path):
|
if os.path.exists(path):
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('path {} exists'.format(path))
|
self._dbg(f'path {path} exists')
|
||||||
return path
|
return path
|
||||||
|
|
||||||
self._handle_non_existing_path(path, fatal_not_found)
|
self._handle_non_existing_path(path, fatal_not_found)
|
||||||
@@ -1542,7 +1540,7 @@ class CfgYaml:
|
|||||||
paths = self._glob_path(path) if self._is_glob(path) else [path]
|
paths = self._glob_path(path) if self._is_glob(path) else [path]
|
||||||
if not paths:
|
if not paths:
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg("glob path {} didn't expand".format(path))
|
self._dbg(f"glob path {path} didn't expand")
|
||||||
self._handle_non_existing_path(path, fatal_not_found)
|
self._handle_non_existing_path(path, fatal_not_found)
|
||||||
return []
|
return []
|
||||||
|
|
||||||
@@ -1598,7 +1596,7 @@ class CfgYaml:
|
|||||||
"""return copy of entry from yaml dictionary"""
|
"""return copy of entry from yaml dictionary"""
|
||||||
if key not in dic:
|
if key not in dic:
|
||||||
if mandatory:
|
if mandatory:
|
||||||
err = 'invalid config: no entry \"{}\" found'.format(key)
|
err = f'invalid config: no entry \"{key}\" found'
|
||||||
raise YamlException(err)
|
raise YamlException(err)
|
||||||
dic[key] = {}
|
dic[key] = {}
|
||||||
return deepcopy(dic[key])
|
return deepcopy(dic[key])
|
||||||
@@ -1643,7 +1641,7 @@ class CfgYaml:
|
|||||||
def _glob_path(self, path):
|
def _glob_path(self, path):
|
||||||
"""Expand a glob."""
|
"""Expand a glob."""
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('expanding glob {}'.format(path))
|
self._dbg(f'expanding glob {path}')
|
||||||
expanded_path = os.path.expanduser(path)
|
expanded_path = os.path.expanduser(path)
|
||||||
return glob.glob(expanded_path, recursive=True)
|
return glob.glob(expanded_path, recursive=True)
|
||||||
|
|
||||||
@@ -1661,7 +1659,7 @@ class CfgYaml:
|
|||||||
path = ret
|
path = ret
|
||||||
ret = os.path.normpath(path)
|
ret = os.path.normpath(path)
|
||||||
if self._debug and path != ret:
|
if self._debug and path != ret:
|
||||||
self._dbg('normalizing: {} -> {}'.format(path, ret))
|
self._dbg(f'normalizing: {path} -> {ret}')
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
def _shell_exec_dvars(self, dic, keys=None):
|
def _shell_exec_dvars(self, dic, keys=None):
|
||||||
@@ -1672,11 +1670,11 @@ class CfgYaml:
|
|||||||
val = dic[k]
|
val = dic[k]
|
||||||
ret, out = shellrun(val, debug=self._debug)
|
ret, out = shellrun(val, debug=self._debug)
|
||||||
if not ret:
|
if not ret:
|
||||||
err = 'var \"{}: {}\" failed: {}'.format(k, val, out)
|
err = f'var \"{k}: {val}\" failed: {out}'
|
||||||
self._log.err(err)
|
self._log.err(err)
|
||||||
raise YamlException(err)
|
raise YamlException(err)
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('{}: `{}` -> {}'.format(k, val, out))
|
self._dbg(f'{k}: `{val}` -> {out}')
|
||||||
dic[k] = out
|
dic[k] = out
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
@@ -1687,7 +1685,7 @@ class CfgYaml:
|
|||||||
cur = ([int(x) for x in VERSION.split('.')])
|
cur = ([int(x) for x in VERSION.split('.')])
|
||||||
cfg = ([int(x) for x in minversion.split('.')])
|
cfg = ([int(x) for x in minversion.split('.')])
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
err = 'bad version: \"{}\" VS \"{}\"'.format(VERSION, minversion)
|
err = f'bad version: \"{VERSION}\" VS \"{minversion}\"'
|
||||||
raise YamlException(err) from exc
|
raise YamlException(err) from exc
|
||||||
if cur < cfg:
|
if cur < cfg:
|
||||||
err = 'current dotdrop version is too old for that config file.'
|
err = 'current dotdrop version is too old for that config file.'
|
||||||
@@ -1711,21 +1709,21 @@ class CfgYaml:
|
|||||||
"""pretty print dict"""
|
"""pretty print dict"""
|
||||||
if not self._debug:
|
if not self._debug:
|
||||||
return
|
return
|
||||||
self._dbg('{}:'.format(title))
|
self._dbg(f'{title}:')
|
||||||
if not elems:
|
if not elems:
|
||||||
return
|
return
|
||||||
for k, val in elems.items():
|
for k, val in elems.items():
|
||||||
if isinstance(val, dict):
|
if isinstance(val, dict):
|
||||||
self._dbg(' - \"{}\"'.format(k))
|
self._dbg(f' - \"{k}\"')
|
||||||
for subkey, sub in val.items():
|
for subkey, sub in val.items():
|
||||||
self._dbg(' * {}: \"{}\"'.format(subkey, sub))
|
self._dbg(f' * {subkey}: \"{sub}\"')
|
||||||
else:
|
else:
|
||||||
self._dbg(' - \"{}\": {}'.format(k, val))
|
self._dbg(f' - \"{k}\": {val}')
|
||||||
|
|
||||||
def _dbg(self, content):
|
def _dbg(self, content):
|
||||||
directory = os.path.basename(os.path.dirname(self._path))
|
directory = os.path.basename(os.path.dirname(self._path))
|
||||||
pre = os.path.join(directory, os.path.basename(self._path))
|
pre = os.path.join(directory, os.path.basename(self._path))
|
||||||
self._log.dbg('[{}] {}'.format(pre, content))
|
self._log.dbg(f'[{pre}] {content}')
|
||||||
|
|
||||||
def _save_uservariables(self, uvars):
|
def _save_uservariables(self, uvars):
|
||||||
"""save uservariables to file"""
|
"""save uservariables to file"""
|
||||||
@@ -1737,7 +1735,7 @@ class CfgYaml:
|
|||||||
if cnt == 0:
|
if cnt == 0:
|
||||||
name = self.save_uservariables_name.format('')
|
name = self.save_uservariables_name.format('')
|
||||||
else:
|
else:
|
||||||
name = self.save_uservariables_name.format('-{}'.format(cnt))
|
name = self.save_uservariables_name.format(f'-{cnt}')
|
||||||
cnt += 1
|
cnt += 1
|
||||||
|
|
||||||
path = os.path.join(parent, name)
|
path = os.path.join(parent, name)
|
||||||
@@ -1748,12 +1746,12 @@ class CfgYaml:
|
|||||||
content = {'variables': uvars}
|
content = {'variables': uvars}
|
||||||
try:
|
try:
|
||||||
if self._debug:
|
if self._debug:
|
||||||
self._dbg('saving uservariables values to {}'.format(path))
|
self._dbg(f'saving uservariables values to {path}')
|
||||||
with open(path, 'w', encoding='utf8') as file:
|
with open(path, 'w', encoding='utf8') as file:
|
||||||
self._yaml_dump(content, file, fmt=self._config_format)
|
self._yaml_dump(content, file, fmt=self._config_format)
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
# self._log.err(exc)
|
# self._log.err(exc)
|
||||||
err = 'error saving uservariables to {}'.format(path)
|
err = f'error saving uservariables to {path}'
|
||||||
self._log.err(err)
|
self._log.err(err)
|
||||||
raise YamlException(err) from exc
|
raise YamlException(err) from exc
|
||||||
self._log.log('uservariables values saved to {}'.format(path))
|
self._log.log(f'uservariables values saved to {path}')
|
||||||
|
|||||||
@@ -38,33 +38,28 @@ class Comparator:
|
|||||||
ignore = []
|
ignore = []
|
||||||
local_path = os.path.expanduser(local_path)
|
local_path = os.path.expanduser(local_path)
|
||||||
deployed_path = os.path.expanduser(deployed_path)
|
deployed_path = os.path.expanduser(deployed_path)
|
||||||
self.log.dbg('comparing {} and {}'.format(
|
self.log.dbg(f'comparing {local_path} and {deployed_path}')
|
||||||
local_path,
|
self.log.dbg(f'ignore pattern(s): {ignore}')
|
||||||
deployed_path,
|
|
||||||
))
|
|
||||||
self.log.dbg('ignore pattern(s): {}'.format(ignore))
|
|
||||||
|
|
||||||
# test type of file
|
# test type of file
|
||||||
if os.path.isdir(local_path) and not os.path.isdir(deployed_path):
|
if os.path.isdir(local_path) and not os.path.isdir(deployed_path):
|
||||||
return '\"{}\" is a dir while \"{}\" is a file\n'.format(
|
ret = f'\"{local_path}\" is a dir'
|
||||||
local_path,
|
ret += f' while \"{deployed_path}\" is a file\n'
|
||||||
deployed_path,
|
return ret
|
||||||
)
|
|
||||||
if not os.path.isdir(local_path) and os.path.isdir(deployed_path):
|
if not os.path.isdir(local_path) and os.path.isdir(deployed_path):
|
||||||
return '\"{}\" is a file while \"{}\" is a dir\n'.format(
|
ret = f'\"{local_path}\" is a file'
|
||||||
local_path,
|
ret += f' while \"{deployed_path}\" is a dir\n'
|
||||||
deployed_path,
|
return ret
|
||||||
)
|
|
||||||
|
|
||||||
# test content
|
# test content
|
||||||
if not os.path.isdir(local_path):
|
if not os.path.isdir(local_path):
|
||||||
self.log.dbg('{} is a file'.format(local_path))
|
self.log.dbg(f'{local_path} is a file')
|
||||||
ret = self._comp_file(local_path, deployed_path, ignore)
|
ret = self._comp_file(local_path, deployed_path, ignore)
|
||||||
if not ret:
|
if not ret:
|
||||||
ret = self._comp_mode(local_path, deployed_path, mode=mode)
|
ret = self._comp_mode(local_path, deployed_path, mode=mode)
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
self.log.dbg('{} is a directory'.format(local_path))
|
self.log.dbg(f'{local_path} is a directory')
|
||||||
|
|
||||||
ret = self._comp_dir(local_path, deployed_path, ignore)
|
ret = self._comp_dir(local_path, deployed_path, ignore)
|
||||||
if not ret:
|
if not ret:
|
||||||
@@ -82,79 +77,68 @@ class Comparator:
|
|||||||
deployed_mode = get_file_perm(deployed_path)
|
deployed_mode = get_file_perm(deployed_path)
|
||||||
if local_mode == deployed_mode:
|
if local_mode == deployed_mode:
|
||||||
return ''
|
return ''
|
||||||
msg = 'mode differ {} ({:o}) and {} ({:o})'
|
msg = f'mode differ {local_path} ({local_mode:o}) '
|
||||||
self.log.dbg(msg.format(local_path, local_mode, deployed_path,
|
msg += f'and {deployed_path} ({deployed_mode:o})'
|
||||||
deployed_mode))
|
self.log.dbg(msg)
|
||||||
ret = 'modes differ for {} ({:o}) vs {:o}\n'
|
ret = f'modes differ for {deployed_path} '
|
||||||
return ret.format(deployed_path, deployed_mode, local_mode)
|
ret += f'({deployed_mode:o}) vs {local_mode:o}\n'
|
||||||
|
return ret
|
||||||
|
|
||||||
def _comp_file(self, local_path, deployed_path, ignore):
|
def _comp_file(self, local_path, deployed_path, ignore):
|
||||||
"""compare a file"""
|
"""compare a file"""
|
||||||
self.log.dbg('compare file {} with {}'.format(
|
self.log.dbg(f'compare file {local_path} with {deployed_path}')
|
||||||
local_path,
|
|
||||||
deployed_path,
|
|
||||||
))
|
|
||||||
if (self.ignore_missing_in_dotdrop and not
|
if (self.ignore_missing_in_dotdrop and not
|
||||||
os.path.exists(local_path)) \
|
os.path.exists(local_path)) \
|
||||||
or must_ignore([local_path, deployed_path], ignore,
|
or must_ignore([local_path, deployed_path], ignore,
|
||||||
debug=self.debug):
|
debug=self.debug):
|
||||||
self.log.dbg('ignoring diff {} and {}'.format(
|
self.log.dbg(f'ignoring diff {local_path} and {deployed_path}')
|
||||||
local_path,
|
|
||||||
deployed_path,
|
|
||||||
))
|
|
||||||
return ''
|
return ''
|
||||||
return self._diff(local_path, deployed_path)
|
return self._diff(local_path, deployed_path)
|
||||||
|
|
||||||
def _comp_dir(self, local_path, deployed_path, ignore):
|
def _comp_dir(self, local_path, deployed_path, ignore):
|
||||||
"""compare a directory"""
|
"""compare a directory"""
|
||||||
self.log.dbg('compare directory {} with {}'.format(
|
self.log.dbg(f'compare directory {local_path} with {deployed_path}')
|
||||||
local_path,
|
|
||||||
deployed_path,
|
|
||||||
))
|
|
||||||
if not os.path.exists(deployed_path):
|
if not os.path.exists(deployed_path):
|
||||||
return ''
|
return ''
|
||||||
if (self.ignore_missing_in_dotdrop and not
|
if (self.ignore_missing_in_dotdrop and not
|
||||||
os.path.exists(local_path)) \
|
os.path.exists(local_path)) \
|
||||||
or must_ignore([local_path, deployed_path], ignore,
|
or must_ignore([local_path, deployed_path], ignore,
|
||||||
debug=self.debug):
|
debug=self.debug):
|
||||||
self.log.dbg('ignoring diff {} and {}'.format(
|
self.log.dbg(f'ignoring diff {local_path} and {deployed_path}')
|
||||||
local_path,
|
|
||||||
deployed_path,
|
|
||||||
))
|
|
||||||
return ''
|
return ''
|
||||||
if not os.path.isdir(deployed_path):
|
if not os.path.isdir(deployed_path):
|
||||||
return '\"{}\" is a file\n'.format(deployed_path)
|
return f'\"{deployed_path}\" is a file\n'
|
||||||
|
|
||||||
return self._compare_dirs(local_path, deployed_path, ignore)
|
return self._compare_dirs(local_path, deployed_path, ignore)
|
||||||
|
|
||||||
def _compare_dirs(self, local_path, deployed_path, ignore):
|
def _compare_dirs(self, local_path, deployed_path, ignore):
|
||||||
"""compare directories"""
|
"""compare directories"""
|
||||||
self.log.dbg('compare {} and {}'.format(local_path, deployed_path))
|
self.log.dbg(f'compare {local_path} and {deployed_path}')
|
||||||
ret = []
|
ret = []
|
||||||
comp = filecmp.dircmp(local_path, deployed_path)
|
comp = filecmp.dircmp(local_path, deployed_path)
|
||||||
|
|
||||||
# handle files only in deployed dir
|
# handle files only in deployed dir
|
||||||
self.log.dbg('files only in deployed dir: {}'.format(comp.left_only))
|
self.log.dbg(f'files only in deployed dir: {comp.left_only}')
|
||||||
for i in comp.left_only:
|
for i in comp.left_only:
|
||||||
if self.ignore_missing_in_dotdrop or \
|
if self.ignore_missing_in_dotdrop or \
|
||||||
must_ignore([os.path.join(local_path, i)],
|
must_ignore([os.path.join(local_path, i)],
|
||||||
ignore, debug=self.debug):
|
ignore, debug=self.debug):
|
||||||
continue
|
continue
|
||||||
ret.append('=> \"{}\" does not exist on destination\n'.format(i))
|
ret.append(f'=> \"{i}\" does not exist on destination\n')
|
||||||
|
|
||||||
# handle files only in dotpath dir
|
# handle files only in dotpath dir
|
||||||
self.log.dbg('files only in dotpath dir: {}'.format(comp.right_only))
|
self.log.dbg(f'files only in dotpath dir: {comp.right_only}')
|
||||||
for i in comp.right_only:
|
for i in comp.right_only:
|
||||||
if must_ignore([os.path.join(deployed_path, i)],
|
if must_ignore([os.path.join(deployed_path, i)],
|
||||||
ignore, debug=self.debug):
|
ignore, debug=self.debug):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if not self.ignore_missing_in_dotdrop:
|
if not self.ignore_missing_in_dotdrop:
|
||||||
ret.append('=> \"{}\" does not exist in dotdrop\n'.format(i))
|
ret.append(f'=> \"{i}\" does not exist in dotdrop\n')
|
||||||
|
|
||||||
# same local_path and deployed_path but different type
|
# same local_path and deployed_path but different type
|
||||||
funny = comp.common_funny
|
funny = comp.common_funny
|
||||||
self.log.dbg('files with different types: {}'.format(funny))
|
self.log.dbg(f'files with different types: {funny}')
|
||||||
for i in funny:
|
for i in funny:
|
||||||
source_file = os.path.join(local_path, i)
|
source_file = os.path.join(local_path, i)
|
||||||
deployed_file = os.path.join(deployed_path, i)
|
deployed_file = os.path.join(deployed_path, i)
|
||||||
@@ -166,13 +150,13 @@ class Comparator:
|
|||||||
continue
|
continue
|
||||||
short = os.path.basename(source_file)
|
short = os.path.basename(source_file)
|
||||||
# file vs dir
|
# file vs dir
|
||||||
ret.append('=> different type: \"{}\"\n'.format(short))
|
ret.append(f'=> different type: \"{short}\"\n')
|
||||||
|
|
||||||
# content is different
|
# content is different
|
||||||
funny = comp.diff_files
|
funny = comp.diff_files
|
||||||
funny.extend(comp.funny_files)
|
funny.extend(comp.funny_files)
|
||||||
funny = uniq_list(funny)
|
funny = uniq_list(funny)
|
||||||
self.log.dbg('files with different content: {}'.format(funny))
|
self.log.dbg(f'files with different content: {funny}')
|
||||||
for i in funny:
|
for i in funny:
|
||||||
source_file = os.path.join(local_path, i)
|
source_file = os.path.join(local_path, i)
|
||||||
deployed_file = os.path.join(deployed_path, i)
|
deployed_file = os.path.join(deployed_path, i)
|
||||||
@@ -198,5 +182,5 @@ class Comparator:
|
|||||||
diff_cmd=self.diff_cmd, debug=self.debug)
|
diff_cmd=self.diff_cmd, debug=self.debug)
|
||||||
if header:
|
if header:
|
||||||
lshort = os.path.basename(local_path)
|
lshort = os.path.basename(local_path)
|
||||||
out = '=> diff \"{}\":\n{}'.format(lshort, out)
|
out = f'=> diff \"{lshort}\":\n{out}'
|
||||||
return out
|
return out
|
||||||
|
|||||||
@@ -57,11 +57,10 @@ class Dotfile(DictParser):
|
|||||||
|
|
||||||
if self.link != LinkTypes.NOLINK and \
|
if self.link != LinkTypes.NOLINK and \
|
||||||
(
|
(
|
||||||
(trans_r and len(trans_r) > 0)
|
(trans_r and len(trans_r) > 0) or
|
||||||
or
|
|
||||||
(trans_w and len(trans_w) > 0)
|
(trans_w and len(trans_w) > 0)
|
||||||
):
|
):
|
||||||
msg = '[{}] transformations disabled'.format(key)
|
msg = f'[{key}] transformations disabled'
|
||||||
msg += ' because dotfile is linked'
|
msg += ' because dotfile is linked'
|
||||||
self.log.warn(msg)
|
self.log.warn(msg)
|
||||||
self.trans_r = []
|
self.trans_r = []
|
||||||
@@ -112,48 +111,48 @@ class Dotfile(DictParser):
|
|||||||
return hash(self.dst) ^ hash(self.src) ^ hash(self.key)
|
return hash(self.dst) ^ hash(self.src) ^ hash(self.key)
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
msg = 'key:\"{}\"'.format(self.key)
|
msg = f'key:\"{self.key}\"'
|
||||||
msg += ', src:\"{}\"'.format(self.src)
|
msg += f', src:\"{self.src}\"'
|
||||||
msg += ', dst:\"{}\"'.format(self.dst)
|
msg += f', dst:\"{self.dst}\"'
|
||||||
msg += ', link:\"{}\"'.format(str(self.link))
|
msg += f', link:\"{self.link}\"'
|
||||||
msg += ', template:{}'.format(self.template)
|
msg += f', template:{self.template}'
|
||||||
if self.chmod:
|
if self.chmod:
|
||||||
msg += ', chmod:{:o}'.format(self.chmod)
|
msg += f', chmod:{self.chmod:o}'
|
||||||
return msg
|
return msg
|
||||||
|
|
||||||
def prt(self):
|
def prt(self):
|
||||||
"""extended dotfile to str"""
|
"""extended dotfile to str"""
|
||||||
indent = ' '
|
indent = ' '
|
||||||
out = 'dotfile: \"{}\"'.format(self.key)
|
out = f'dotfile: \"{self.key}\"'
|
||||||
out += '\n{}src: \"{}\"'.format(indent, self.src)
|
out += f'\n{indent}src: \"{self.src}\"'
|
||||||
out += '\n{}dst: \"{}\"'.format(indent, self.dst)
|
out += f'\n{indent}dst: \"{self.dst}\"'
|
||||||
out += '\n{}link: \"{}\"'.format(indent, str(self.link))
|
out += f'\n{indent}link: \"{self.link}\"'
|
||||||
out += '\n{}template: \"{}\"'.format(indent, str(self.template))
|
out += f'\n{indent}template: \"{self.template}\"'
|
||||||
if self.chmod:
|
if self.chmod:
|
||||||
out += '\n{}chmod: \"{:o}\"'.format(indent, self.chmod)
|
out += f'\n{indent}chmod: \"{self.chmod:o}\"'
|
||||||
|
|
||||||
out += '\n{}pre-action:'.format(indent)
|
out += f'\n{indent}pre-action:'
|
||||||
some = self.get_pre_actions()
|
some = self.get_pre_actions()
|
||||||
if some:
|
if some:
|
||||||
for act in some:
|
for act in some:
|
||||||
out += '\n{}- {}'.format(2 * indent, act)
|
out += f'\n{2*indent}- {act}'
|
||||||
|
|
||||||
out += '\n{}post-action:'.format(indent)
|
out += f'\n{indent}post-action:'
|
||||||
some = self.get_post_actions()
|
some = self.get_post_actions()
|
||||||
if some:
|
if some:
|
||||||
for act in some:
|
for act in some:
|
||||||
out += '\n{}- {}'.format(2 * indent, act)
|
out += f'\n{2*indent}- {act}'
|
||||||
|
|
||||||
out += '\n{}trans_r:'.format(indent)
|
out += f'\n{indent}trans_r:'
|
||||||
some = self.get_trans_r()
|
some = self.get_trans_r()
|
||||||
if some:
|
if some:
|
||||||
out += '\n{}- {}'.format(2 * indent, some)
|
out += f'\n{2*indent}- {some}'
|
||||||
|
|
||||||
out += '\n{}trans_w:'.format(indent)
|
out += f'\n{indent}trans_w:'
|
||||||
some = self.get_trans_w()
|
some = self.get_trans_w()
|
||||||
if some:
|
if some:
|
||||||
out += '\n{}- {}'.format(2 * indent, some)
|
out += f'\n{2*indent}- {some}'
|
||||||
return out
|
return out
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return 'dotfile({!s})'.format(self)
|
return f'dotfile({self})'
|
||||||
|
|||||||
@@ -67,9 +67,9 @@ class Importer:
|
|||||||
-1: error
|
-1: error
|
||||||
"""
|
"""
|
||||||
path = os.path.abspath(path)
|
path = os.path.abspath(path)
|
||||||
self.log.dbg('import {}'.format(path))
|
self.log.dbg(f'import {path}')
|
||||||
if not os.path.exists(path):
|
if not os.path.exists(path):
|
||||||
self.log.err('\"{}\" does not exist, ignored!'.format(path))
|
self.log.err(f'\"{path}\" does not exist, ignored!')
|
||||||
return -1
|
return -1
|
||||||
|
|
||||||
# check transw if any
|
# check transw if any
|
||||||
@@ -110,8 +110,8 @@ class Importer:
|
|||||||
if self.safe:
|
if self.safe:
|
||||||
realdst = os.path.realpath(dst)
|
realdst = os.path.realpath(dst)
|
||||||
if dst != realdst:
|
if dst != realdst:
|
||||||
msg = '\"{}\" is a symlink, dereference it and continue?'
|
msg = f'\"{dst}\" is a symlink, dereference it and continue?'
|
||||||
if not self.log.ask(msg.format(dst)):
|
if not self.log.ask(msg):
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
# create src path
|
# create src path
|
||||||
@@ -122,7 +122,7 @@ class Importer:
|
|||||||
src = src.rstrip(os.sep)
|
src = src.rstrip(os.sep)
|
||||||
src = os.path.abspath(src)
|
src = os.path.abspath(src)
|
||||||
src = strip_home(src)
|
src = strip_home(src)
|
||||||
self.log.dbg('import src for {} as {}'.format(dst, src))
|
self.log.dbg(f'import src for {dst} as {src}')
|
||||||
# with or without dot prefix
|
# with or without dot prefix
|
||||||
strip = '.' + os.sep
|
strip = '.' + os.sep
|
||||||
if self.keepdot:
|
if self.keepdot:
|
||||||
@@ -136,13 +136,13 @@ class Importer:
|
|||||||
linktype = import_link
|
linktype = import_link
|
||||||
if linktype == LinkTypes.LINK_CHILDREN and \
|
if linktype == LinkTypes.LINK_CHILDREN and \
|
||||||
not os.path.isdir(path):
|
not os.path.isdir(path):
|
||||||
self.log.err('importing \"{}\" failed!'.format(path))
|
self.log.err(f'importing \"{path}\" failed!')
|
||||||
return -1
|
return -1
|
||||||
|
|
||||||
if self._already_exists(src, dst):
|
if self._already_exists(src, dst):
|
||||||
return -1
|
return -1
|
||||||
|
|
||||||
self.log.dbg('import dotfile: src:{} dst:{}'.format(src, dst))
|
self.log.dbg(f'import dotfile: src:{src} dst:{dst}')
|
||||||
|
|
||||||
if not self._import_file(src, dst, trans_write=trans_write):
|
if not self._import_file(src, dst, trans_write=trans_write):
|
||||||
return -1
|
return -1
|
||||||
@@ -165,7 +165,7 @@ class Importer:
|
|||||||
# handle file mode
|
# handle file mode
|
||||||
chmod = None
|
chmod = None
|
||||||
dflperm = get_default_file_perms(dst, self.umask)
|
dflperm = get_default_file_perms(dst, self.umask)
|
||||||
self.log.dbg('import chmod: {}'.format(import_mode))
|
self.log.dbg(f'import chmod: {import_mode}')
|
||||||
if import_mode or perm != dflperm:
|
if import_mode or perm != dflperm:
|
||||||
msg = 'adopt mode {:o} (umask {:o})'
|
msg = 'adopt mode {:o} (umask {:o})'
|
||||||
self.log.dbg(msg.format(perm, dflperm))
|
self.log.dbg(msg.format(perm, dflperm))
|
||||||
@@ -176,10 +176,10 @@ class Importer:
|
|||||||
trans_read=trans_r,
|
trans_read=trans_r,
|
||||||
trans_write=trans_w)
|
trans_write=trans_w)
|
||||||
if not retconf:
|
if not retconf:
|
||||||
self.log.warn('\"{}\" ignored during import'.format(path))
|
self.log.warn(f'\"{path}\" ignored during import')
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
self.log.sub('\"{}\" imported'.format(path))
|
self.log.sub(f'\"{path}\" imported')
|
||||||
return 1
|
return 1
|
||||||
|
|
||||||
def _check_existing_dotfile(self, src, dst):
|
def _check_existing_dotfile(self, src, dst):
|
||||||
@@ -196,7 +196,7 @@ class Importer:
|
|||||||
diff = cmp.compare(src, dst)
|
diff = cmp.compare(src, dst)
|
||||||
if diff != '':
|
if diff != '':
|
||||||
# files are different, dunno what to do
|
# files are different, dunno what to do
|
||||||
self.log.log('diff \"{}\" VS \"{}\"'.format(dst, src))
|
self.log.log(f'diff \"{dst}\" VS \"{src}\"')
|
||||||
self.log.emph(diff)
|
self.log.emph(diff)
|
||||||
# ask user
|
# ask user
|
||||||
msg = 'Dotfile \"{}\" already exists, overwrite?'
|
msg = 'Dotfile \"{}\" already exists, overwrite?'
|
||||||
@@ -225,18 +225,18 @@ class Importer:
|
|||||||
|
|
||||||
# create directory hierarchy
|
# create directory hierarchy
|
||||||
if self.dry:
|
if self.dry:
|
||||||
cmd = 'mkdir -p {}'.format(srcfd)
|
cmd = f'mkdir -p {srcfd}'
|
||||||
self.log.dry('would run: {}'.format(cmd))
|
self.log.dry(f'would run: {cmd}')
|
||||||
else:
|
else:
|
||||||
try:
|
try:
|
||||||
os.makedirs(srcfd, exist_ok=True)
|
os.makedirs(srcfd, exist_ok=True)
|
||||||
except OSError:
|
except OSError:
|
||||||
self.log.err('importing \"{}\" failed!'.format(dst))
|
self.log.err(f'importing \"{dst}\" failed!')
|
||||||
return False
|
return False
|
||||||
|
|
||||||
# import the file
|
# import the file
|
||||||
if self.dry:
|
if self.dry:
|
||||||
self.log.dry('would copy {} to {}'.format(dst, srcf))
|
self.log.dry(f'would copy {dst} to {srcf}')
|
||||||
else:
|
else:
|
||||||
# apply trans_w
|
# apply trans_w
|
||||||
dst = self._apply_trans_w(dst, trans_write)
|
dst = self._apply_trans_w(dst, trans_write)
|
||||||
@@ -257,7 +257,7 @@ class Importer:
|
|||||||
except shutil.Error as exc:
|
except shutil.Error as exc:
|
||||||
src = exc.args[0][0][0]
|
src = exc.args[0][0][0]
|
||||||
why = exc.args[0][0][2]
|
why = exc.args[0][0][2]
|
||||||
self.log.err('importing \"{}\" failed: {}'.format(src, why))
|
self.log.err(f'importing \"{src}\" failed: {why}')
|
||||||
|
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -290,8 +290,8 @@ class Importer:
|
|||||||
|
|
||||||
def _ignore(self, path):
|
def _ignore(self, path):
|
||||||
if must_ignore([path], self.ignore, debug=self.debug):
|
if must_ignore([path], self.ignore, debug=self.debug):
|
||||||
self.log.dbg('ignoring import of {}'.format(path))
|
self.log.dbg(f'ignoring import of {path}')
|
||||||
self.log.warn('{} ignored'.format(path))
|
self.log.warn(f'{path} ignored')
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
@@ -305,12 +305,12 @@ class Importer:
|
|||||||
"""
|
"""
|
||||||
if not trans:
|
if not trans:
|
||||||
return path
|
return path
|
||||||
self.log.dbg('executing write transformation {}'.format(trans))
|
self.log.dbg(f'executing write transformation {trans}')
|
||||||
tmp = get_unique_tmp_name()
|
tmp = get_unique_tmp_name()
|
||||||
if not trans.transform(path, tmp, debug=self.debug,
|
if not trans.transform(path, tmp, debug=self.debug,
|
||||||
templater=self.templater):
|
templater=self.templater):
|
||||||
msg = 'transformation \"{}\" failed for {}'
|
msg = f'transformation \"{trans.key}\" failed for {path}'
|
||||||
self.log.err(msg.format(trans.key, path))
|
self.log.err(msg)
|
||||||
if os.path.exists(tmp):
|
if os.path.exists(tmp):
|
||||||
removepath(tmp, logger=self.log)
|
removepath(tmp, logger=self.log)
|
||||||
return None
|
return None
|
||||||
|
|||||||
@@ -27,7 +27,7 @@ class LinkTypes(IntEnum):
|
|||||||
except KeyError as exc:
|
except KeyError as exc:
|
||||||
if default:
|
if default:
|
||||||
return default
|
return default
|
||||||
err = 'bad {} value: "{}"'.format(cls.__name__, key)
|
err = f'bad {cls.__name__} value: "{key}"'
|
||||||
raise ValueError(err) from exc
|
raise ValueError(err) from exc
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
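The LinkTypes hunk above only shows the error branch of the enum's string lookup. A rough, self-contained sketch of that lookup-with-default pattern (illustrative only: the member set, the method name and the body of the try block are assumptions, not dotdrop's exact code):

    from enum import IntEnum

    class LinkTypes(IntEnum):
        # illustrative members, not dotdrop's full set
        NOLINK = 0
        LINK = 1
        LINK_CHILDREN = 2

        @classmethod
        def get(cls, key, default=None):
            # resolve a name to a member, falling back to a default if given
            try:
                return cls[key.upper()]
            except KeyError as exc:
                if default:
                    return default
                err = f'bad {cls.__name__} value: "{key}"'
                raise ValueError(err) from exc

    assert LinkTypes.get('link') is LinkTypes.LINK
    assert LinkTypes.get('bogus', LinkTypes.LINK) is LinkTypes.LINK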
|
|||||||
@@ -31,24 +31,23 @@ class Logger:
|
|||||||
cend = self._color(self.RESET)
|
cend = self._color(self.RESET)
|
||||||
if bold:
|
if bold:
|
||||||
bold = self._color(self.BOLD)
|
bold = self._color(self.BOLD)
|
||||||
fmt = '{}{}{}{}{}'.format(pre, cstart, bold,
|
fmt = f'{pre}{cstart}{bold}{string}{cend}'
|
||||||
string, cend)
|
fmt += f'{end}{cend}'
|
||||||
fmt += '{}{}'.format(end, cend)
|
|
||||||
else:
|
else:
|
||||||
fmt = '{}{}{}{}{}'.format(pre, cstart, string, end, cend)
|
fmt = f'{pre}{cstart}{string}{end}{cend}'
|
||||||
sys.stdout.write(fmt)
|
sys.stdout.write(fmt)
|
||||||
|
|
||||||
def sub(self, string, end='\n'):
|
def sub(self, string, end='\n'):
|
||||||
"""sub log"""
|
"""sub log"""
|
||||||
cstart = self._color(self.BLUE)
|
cstart = self._color(self.BLUE)
|
||||||
cend = self._color(self.RESET)
|
cend = self._color(self.RESET)
|
||||||
sys.stdout.write('\t{}->{} {}{}'.format(cstart, cend, string, end))
|
sys.stdout.write(f'\t{cstart}->{cend} {string}{end}')
|
||||||
|
|
||||||
def emph(self, string, stdout=True):
|
def emph(self, string, stdout=True):
|
||||||
"""emphasis log"""
|
"""emphasis log"""
|
||||||
cstart = self._color(self.EMPH)
|
cstart = self._color(self.EMPH)
|
||||||
cend = self._color(self.RESET)
|
cend = self._color(self.RESET)
|
||||||
content = '{}{}{}'.format(cstart, string, cend)
|
content = f'{cstart}{string}{cend}'
|
||||||
if not stdout:
|
if not stdout:
|
||||||
sys.stderr.write(content)
|
sys.stderr.write(content)
|
||||||
else:
|
else:
|
||||||
@@ -58,14 +57,14 @@ class Logger:
|
|||||||
"""error log"""
|
"""error log"""
|
||||||
cstart = self._color(self.RED)
|
cstart = self._color(self.RED)
|
||||||
cend = self._color(self.RESET)
|
cend = self._color(self.RESET)
|
||||||
msg = '{} {}'.format(string, end)
|
msg = f'{string} {end}'
|
||||||
sys.stderr.write('{}[ERR] {}{}'.format(cstart, msg, cend))
|
sys.stderr.write(f'{cstart}[ERR] {msg}{cend}')
|
||||||
|
|
||||||
def warn(self, string, end='\n'):
|
def warn(self, string, end='\n'):
|
||||||
"""warning log"""
|
"""warning log"""
|
||||||
cstart = self._color(self.YELLOW)
|
cstart = self._color(self.YELLOW)
|
||||||
cend = self._color(self.RESET)
|
cend = self._color(self.RESET)
|
||||||
sys.stderr.write('{}[WARN] {} {}{}'.format(cstart, string, end, cend))
|
sys.stderr.write(f'{cstart}[WARN] {string} {end}{cend}')
|
||||||
|
|
||||||
def dbg(self, string, force=False):
|
def dbg(self, string, force=False):
|
||||||
"""debug log"""
|
"""debug log"""
|
||||||
@@ -78,29 +77,28 @@ class Logger:
|
|||||||
cend = self._color(self.RESET)
|
cend = self._color(self.RESET)
|
||||||
clight = self._color(self.LMAGENTA)
|
clight = self._color(self.LMAGENTA)
|
||||||
bold = self._color(self.BOLD)
|
bold = self._color(self.BOLD)
|
||||||
line = '{}{}[DEBUG][{}.{}]{}{} {}{}\n'
|
line = f'{bold}{clight}[DEBUG][{mod}.{func}]'
|
||||||
sys.stderr.write(line.format(bold, clight,
|
line += f'{cend}{cstart} {string}{cend}\n'
|
||||||
mod, func,
|
sys.stderr.write(line)
|
||||||
cend, cstart,
|
|
||||||
string, cend))
|
|
||||||
|
|
||||||
def dry(self, string, end='\n'):
|
def dry(self, string, end='\n'):
|
||||||
"""dry run log"""
|
"""dry run log"""
|
||||||
cstart = self._color(self.GREEN)
|
cstart = self._color(self.GREEN)
|
||||||
cend = self._color(self.RESET)
|
cend = self._color(self.RESET)
|
||||||
sys.stdout.write('{}[DRY] {} {}{}'.format(cstart, string, end, cend))
|
sys.stdout.write(f'{cstart}[DRY] {string} {end}{cend}')
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def raw(cls, string, end='\n'):
|
def raw(cls, string, end='\n'):
|
||||||
"""raw log"""
|
"""raw log"""
|
||||||
sys.stdout.write('{}{}'.format(string, end))
|
sys.stdout.write(f'{string}{end}')
|
||||||
|
|
||||||
def ask(self, query):
|
def ask(self, query):
|
||||||
"""ask user for confirmation"""
|
"""ask user for confirmation"""
|
||||||
cstart = self._color(self.BLUE)
|
cstart = self._color(self.BLUE)
|
||||||
cend = self._color(self.RESET)
|
cend = self._color(self.RESET)
|
||||||
query = '{}{}{}'.format(cstart, query + ' [y/N] ? ', cend)
|
question = query + ' [y/N] ? '
|
||||||
resp = input(query)
|
qmsg = f'{cstart}{question}{cend}'
|
||||||
|
resp = input(qmsg)
|
||||||
return resp == 'y'
|
return resp == 'y'
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
|
|||||||
@@ -38,9 +38,9 @@ if ENV_PROFILE in os.environ:
|
|||||||
NAME = 'dotdrop'
|
NAME = 'dotdrop'
|
||||||
CONFIGFILEYAML = 'config.yaml'
|
CONFIGFILEYAML = 'config.yaml'
|
||||||
CONFIGFILETOML = 'config.toml'
|
CONFIGFILETOML = 'config.toml'
|
||||||
HOMECFG = '~/.config/{}'.format(NAME)
|
HOMECFG = f'~/.config/{NAME}'
|
||||||
ETCXDGCFG = '/etc/xdg/{}'.format(NAME)
|
ETCXDGCFG = f'/etc/xdg/{NAME}'
|
||||||
ETCCFG = '/etc/{}'.format(NAME)
|
ETCCFG = f'/etc/{NAME}'
|
||||||
|
|
||||||
OPT_LINK = {
|
OPT_LINK = {
|
||||||
LinkTypes.NOLINK.name.lower(): LinkTypes.NOLINK,
|
LinkTypes.NOLINK.name.lower(): LinkTypes.NOLINK,
|
||||||
@@ -51,11 +51,11 @@ OPT_LINK = {
|
|||||||
BANNER = r""" _ _ _
|
BANNER = r""" _ _ _
|
||||||
__| | ___ | |_ __| |_ __ ___ _ __
|
__| | ___ | |_ __| |_ __ ___ _ __
|
||||||
/ _` |/ _ \| __/ _` | '__/ _ \| '_ |
|
/ _` |/ _ \| __/ _` | '__/ _ \| '_ |
|
||||||
\__,_|\___/ \__\__,_|_| \___/| .__/ v{}
|
\__,_|\___/ \__\__,_|_| \___/| .__/ v{VERSION}
|
||||||
|_|""".format(VERSION)
|
|_|"""
|
||||||
|
|
||||||
USAGE = """
|
USAGE = f"""
|
||||||
{}
|
{BANNER}
|
||||||
|
|
||||||
Usage:
|
Usage:
|
||||||
dotdrop install [-VbtfndDaW] [-c <path>] [-p <profile>]
|
dotdrop install [-VbtfndDaW] [-c <path>] [-p <profile>]
|
||||||
@@ -89,7 +89,7 @@ Options:
|
|||||||
-L --file-only Do not show diff but only the files that differ.
|
-L --file-only Do not show diff but only the files that differ.
|
||||||
-m --preserve-mode Insert a chmod entry in the dotfile with its mode.
|
-m --preserve-mode Insert a chmod entry in the dotfile with its mode.
|
||||||
-n --nodiff Do not diff when installing.
|
-n --nodiff Do not diff when installing.
|
||||||
-p --profile=<profile> Specify the profile to use [default: {}].
|
-p --profile=<profile> Specify the profile to use [default: {PROFILE}].
|
||||||
-P --show-patch Provide a one-liner to manually patch template.
|
-P --show-patch Provide a one-liner to manually patch template.
|
||||||
-s --as=<path> Import as a different path from actual path.
|
-s --as=<path> Import as a different path from actual path.
|
||||||
--transr=<key> Associate trans_read key on import.
|
--transr=<key> Associate trans_read key on import.
|
||||||
@@ -102,7 +102,7 @@ Options:
|
|||||||
-z --ignore-missing Ignore files in installed folders that are missing.
|
-z --ignore-missing Ignore files in installed folders that are missing.
|
||||||
-v --version Show version.
|
-v --version Show version.
|
||||||
-h --help Show this screen.
|
-h --help Show this screen.
|
||||||
""".format(BANNER, PROFILE)
|
"""
|
||||||
|
|
||||||
|
|
||||||
class AttrMonitor:
|
class AttrMonitor:
|
||||||
@@ -159,14 +159,15 @@ class Options(AttrMonitor):
|
|||||||
if not self.confpath:
|
if not self.confpath:
|
||||||
raise YamlException('no config file found')
|
raise YamlException('no config file found')
|
||||||
if not os.path.exists(self.confpath):
|
if not os.path.exists(self.confpath):
|
||||||
err = 'bad config file path: {}'.format(self.confpath)
|
err = f'bad config file path: {self.confpath}'
|
||||||
raise YamlException(err)
|
raise YamlException(err)
|
||||||
self.log.dbg('#################################################')
|
self.log.dbg('#################################################')
|
||||||
self.log.dbg('#################### DOTDROP ####################')
|
self.log.dbg('#################### DOTDROP ####################')
|
||||||
self.log.dbg('#################################################')
|
self.log.dbg('#################################################')
|
||||||
self.log.dbg('version: {}'.format(VERSION))
|
self.log.dbg(f'version: {VERSION}')
|
||||||
self.log.dbg('command: {}'.format(' '.join(sys.argv)))
|
args = ' '.join(sys.argv)
|
||||||
self.log.dbg('config file: {}'.format(self.confpath))
|
self.log.dbg(f'command: {args}')
|
||||||
|
self.log.dbg(f'config file: {self.confpath}')
|
||||||
|
|
||||||
self._read_config()
|
self._read_config()
|
||||||
self._apply_args()
|
self._apply_args()
|
||||||
@@ -290,7 +291,7 @@ class Options(AttrMonitor):
|
|||||||
self.compare_focus = self.args['--file']
|
self.compare_focus = self.args['--file']
|
||||||
self.compare_ignore = self.args['--ignore']
|
self.compare_ignore = self.args['--ignore']
|
||||||
self.compare_ignore.extend(self.cmpignore)
|
self.compare_ignore.extend(self.cmpignore)
|
||||||
self.compare_ignore.append('*{}'.format(self.install_backup_suffix))
|
self.compare_ignore.append(f'*{self.install_backup_suffix}')
|
||||||
self.compare_ignore = uniq_list(self.compare_ignore)
|
self.compare_ignore = uniq_list(self.compare_ignore)
|
||||||
self.compare_fileonly = self.args['--file-only']
|
self.compare_fileonly = self.args['--file-only']
|
||||||
self.ignore_missing_in_dotdrop = self.ignore_missing_in_dotdrop or \
|
self.ignore_missing_in_dotdrop = self.ignore_missing_in_dotdrop or \
|
||||||
@@ -303,7 +304,7 @@ class Options(AttrMonitor):
|
|||||||
self.import_mode = self.args['--preserve-mode'] or self.chmod_on_import
|
self.import_mode = self.args['--preserve-mode'] or self.chmod_on_import
|
||||||
self.import_ignore = self.args['--ignore']
|
self.import_ignore = self.args['--ignore']
|
||||||
self.import_ignore.extend(self.impignore)
|
self.import_ignore.extend(self.impignore)
|
||||||
self.import_ignore.append('*{}'.format(self.install_backup_suffix))
|
self.import_ignore.append(f'*{self.install_backup_suffix}')
|
||||||
self.import_ignore = uniq_list(self.import_ignore)
|
self.import_ignore = uniq_list(self.import_ignore)
|
||||||
self.import_transw = self.args['--transw']
|
self.import_transw = self.args['--transw']
|
||||||
self.import_transr = self.args['--transr']
|
self.import_transr = self.args['--transr']
|
||||||
@@ -314,7 +315,7 @@ class Options(AttrMonitor):
|
|||||||
self.update_iskey = self.args['--key']
|
self.update_iskey = self.args['--key']
|
||||||
self.update_ignore = self.args['--ignore']
|
self.update_ignore = self.args['--ignore']
|
||||||
self.update_ignore.extend(self.upignore)
|
self.update_ignore.extend(self.upignore)
|
||||||
self.update_ignore.append('*{}'.format(self.install_backup_suffix))
|
self.update_ignore.append(f'*{self.install_backup_suffix}')
|
||||||
self.update_ignore = uniq_list(self.update_ignore)
|
self.update_ignore = uniq_list(self.update_ignore)
|
||||||
self.update_showpatch = self.args['--show-patch']
|
self.update_showpatch = self.args['--show-patch']
|
||||||
|
|
||||||
@@ -362,7 +363,7 @@ class Options(AttrMonitor):
|
|||||||
# overwrite default import link with cli switch
|
# overwrite default import link with cli switch
|
||||||
link = self.args['--link']
|
link = self.args['--link']
|
||||||
if link not in OPT_LINK:
|
if link not in OPT_LINK:
|
||||||
self.log.err('bad option for --link: {}'.format(link))
|
self.log.err(f'bad option for --link: {link}')
|
||||||
sys.exit(USAGE)
|
sys.exit(USAGE)
|
||||||
self.import_link = OPT_LINK[link]
|
self.import_link = OPT_LINK[link]
|
||||||
|
|
||||||
@@ -411,12 +412,12 @@ class Options(AttrMonitor):
|
|||||||
if callable(val):
|
if callable(val):
|
||||||
continue
|
continue
|
||||||
if isinstance(val, list):
|
if isinstance(val, list):
|
||||||
debug_list('-> {}'.format(att), val, self.debug)
|
debug_list(f'-> {att}', val, self.debug)
|
||||||
elif isinstance(val, dict):
|
elif isinstance(val, dict):
|
||||||
debug_dict('-> {}'.format(att), val, self.debug)
|
debug_dict(f'-> {att}', val, self.debug)
|
||||||
else:
|
else:
|
||||||
self.log.dbg('-> {}: {}'.format(att, val))
|
self.log.dbg(f'-> {att}: {val}')
|
||||||
|
|
||||||
def _attr_set(self, attr):
|
def _attr_set(self, attr):
|
||||||
"""error when some inexistent attr is set"""
|
"""error when some inexistent attr is set"""
|
||||||
raise Exception('bad option: {}'.format(attr))
|
raise Exception(f'bad option: {attr}')
|
||||||
|
|||||||
@@ -55,8 +55,7 @@ class Profile(DictParser):
|
|||||||
hash(tuple(self.dotfiles)))
|
hash(tuple(self.dotfiles)))
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
msg = 'key:"{}"'
|
return f'key:"{self.key}"'
|
||||||
return msg.format(self.key)
|
|
||||||
|
|
||||||
def __repr__(self):
|
def __repr__(self):
|
||||||
return 'profile({!s})'.format(self)
|
return f'profile({self})'
|
||||||
|
|||||||
@@ -115,7 +115,7 @@ class Settings(DictParser):
|
|||||||
|
|
||||||
# check diff command
|
# check diff command
|
||||||
if not is_bin_in_path(self.diff_command):
|
if not is_bin_in_path(self.diff_command):
|
||||||
err = 'bad diff_command: {}'.format(self.diff_command)
|
err = f'bad diff_command: {self.diff_command}'
|
||||||
raise YamlException(err)
|
raise YamlException(err)
|
||||||
|
|
||||||
def _serialize_seq(self, name, dic):
|
def _serialize_seq(self, name, dic):
|
||||||
|
|||||||
@@ -73,11 +73,11 @@ class Templategen:
|
|||||||
self._load_funcs_to_dic(jhelpers, self.env.globals)
|
self._load_funcs_to_dic(jhelpers, self.env.globals)
|
||||||
if func_file:
|
if func_file:
|
||||||
for ffile in func_file:
|
for ffile in func_file:
|
||||||
self.log.dbg('load custom functions from {}'.format(ffile))
|
self.log.dbg(f'load custom functions from {ffile}')
|
||||||
self._load_path_to_dic(ffile, self.env.globals)
|
self._load_path_to_dic(ffile, self.env.globals)
|
||||||
if filter_file:
|
if filter_file:
|
||||||
for ffile in filter_file:
|
for ffile in filter_file:
|
||||||
self.log.dbg('load custom filters from {}'.format(ffile))
|
self.log.dbg(f'load custom filters from {ffile}')
|
||||||
self._load_path_to_dic(ffile, self.env.filters)
|
self._load_path_to_dic(ffile, self.env.filters)
|
||||||
if self.debug:
|
if self.debug:
|
||||||
self._debug_dict('template additional variables', variables)
|
self._debug_dict('template additional variables', variables)
|
||||||
@@ -93,7 +93,7 @@ class Templategen:
|
|||||||
try:
|
try:
|
||||||
return self._handle_file(src)
|
return self._handle_file(src)
|
||||||
except UndefinedError as exc:
|
except UndefinedError as exc:
|
||||||
err = 'undefined variable: {}'.format(exc.message)
|
err = f'undefined variable: {exc.message}'
|
||||||
raise UndefinedException(err) from exc
|
raise UndefinedException(err) from exc
|
||||||
|
|
||||||
def generate_string(self, string):
|
def generate_string(self, string):
|
||||||
@@ -107,7 +107,7 @@ class Templategen:
|
|||||||
try:
|
try:
|
||||||
return self.env.from_string(string).render(self.variables)
|
return self.env.from_string(string).render(self.variables)
|
||||||
except UndefinedError as exc:
|
except UndefinedError as exc:
|
||||||
err = 'undefined variable: {}'.format(exc.message)
|
err = f'undefined variable: {exc.message}'
|
||||||
raise UndefinedException(err) from exc
|
raise UndefinedException(err) from exc
|
||||||
|
|
||||||
def add_tmp_vars(self, newvars=None):
|
def add_tmp_vars(self, newvars=None):
|
||||||
@@ -129,7 +129,7 @@ class Templategen:
|
|||||||
def _load_path_to_dic(self, path, dic):
|
def _load_path_to_dic(self, path, dic):
|
||||||
mod = utils.get_module_from_path(path)
|
mod = utils.get_module_from_path(path)
|
||||||
if not mod:
|
if not mod:
|
||||||
self.log.warn('cannot load module \"{}\"'.format(path))
|
self.log.warn(f'cannot load module \"{path}\"')
|
||||||
return
|
return
|
||||||
self._load_funcs_to_dic(mod, dic)
|
self._load_funcs_to_dic(mod, dic)
|
||||||
|
|
||||||
@@ -139,13 +139,13 @@ class Templategen:
|
|||||||
return
|
return
|
||||||
funcs = utils.get_module_functions(mod)
|
funcs = utils.get_module_functions(mod)
|
||||||
for name, func in funcs:
|
for name, func in funcs:
|
||||||
self.log.dbg('load function \"{}\"'.format(name))
|
self.log.dbg(f'load function \"{name}\"')
|
||||||
dic[name] = func
|
dic[name] = func
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def _header(cls, prepend=''):
|
def _header(cls, prepend=''):
|
||||||
"""add a comment usually in the header of a dotfile"""
|
"""add a comment usually in the header of a dotfile"""
|
||||||
return '{}{}'.format(prepend, utils.header())
|
return f'{prepend}{utils.header()}'
|
||||||
|
|
||||||
def _handle_file(self, src):
|
def _handle_file(self, src):
|
||||||
"""generate the file content from template"""
|
"""generate the file content from template"""
|
||||||
@@ -161,8 +161,8 @@ class Templategen:
|
|||||||
self.log.dbg('using \"file\" for filetype identification')
|
self.log.dbg('using \"file\" for filetype identification')
|
||||||
filetype = filetype.strip()
|
filetype = filetype.strip()
|
||||||
istext = self._is_text(filetype)
|
istext = self._is_text(filetype)
|
||||||
self.log.dbg('filetype \"{}\": {}'.format(src, filetype))
|
self.log.dbg(f'filetype \"{src}\": {filetype}')
|
||||||
self.log.dbg('is text \"{}\": {}'.format(src, istext))
|
self.log.dbg(f'is text \"{src}\": {istext}')
|
||||||
if not istext:
|
if not istext:
|
||||||
return self._handle_bin_file(src)
|
return self._handle_bin_file(src)
|
||||||
return self._handle_text_file(src)
|
return self._handle_text_file(src)
|
||||||
@@ -220,7 +220,7 @@ class Templategen:
|
|||||||
def is_template(path, ignore=None, debug=False):
|
def is_template(path, ignore=None, debug=False):
|
||||||
"""recursively check if any file is a template within path"""
|
"""recursively check if any file is a template within path"""
|
||||||
if debug:
|
if debug:
|
||||||
LOG.dbg('is template: {}'.format(path), force=True)
|
LOG.dbg(f'is template: {path}', force=True)
|
||||||
path = os.path.expanduser(path)
|
path = os.path.expanduser(path)
|
||||||
|
|
||||||
if not os.path.exists(path):
|
if not os.path.exists(path):
|
||||||
@@ -280,8 +280,8 @@ class Templategen:
|
|||||||
"""pretty print dict"""
|
"""pretty print dict"""
|
||||||
if not self.debug:
|
if not self.debug:
|
||||||
return
|
return
|
||||||
self.log.dbg('{}:'.format(title))
|
self.log.dbg(f'{title}:')
|
||||||
if not elems:
|
if not elems:
|
||||||
return
|
return
|
||||||
for k, val in elems.items():
|
for k, val in elems.items():
|
||||||
self.log.dbg(' - \"{}\": {}'.format(k, val))
|
self.log.dbg(f' - \"{k}\": {val}')
|
||||||
|
|||||||
@@ -42,11 +42,13 @@ NOREMOVE = [os.path.normpath(p) for p in DONOTDELETE]
|
|||||||
def run(cmd, debug=False):
|
def run(cmd, debug=False):
|
||||||
"""run a command (expects a list)"""
|
"""run a command (expects a list)"""
|
||||||
if debug:
|
if debug:
|
||||||
LOG.dbg('exec: {}'.format(' '.join(cmd)), force=True)
|
fcmd = ' '.join(cmd)
|
||||||
proc = subprocess.Popen(cmd, shell=False,
|
LOG.dbg(f'exec: {fcmd}', force=True)
|
||||||
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
|
with subprocess.Popen(cmd, shell=False,
|
||||||
out, _ = proc.communicate()
|
stdout=subprocess.PIPE,
|
||||||
ret = proc.returncode
|
stderr=subprocess.STDOUT) as proc:
|
||||||
|
out, _ = proc.communicate()
|
||||||
|
ret = proc.returncode
|
||||||
out = out.splitlines(keepends=True)
|
out = out.splitlines(keepends=True)
|
||||||
lines = ''.join([x.decode('utf-8', 'replace') for x in out])
|
lines = ''.join([x.decode('utf-8', 'replace') for x in out])
|
||||||
return ret == 0, lines
|
return ret == 0, lines
|
||||||
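The run() helper above now drives Popen as a context manager (which is also why the R1732 / consider-using-with disable is dropped from tests.sh further down). A minimal standalone sketch of that pattern, using a placeholder command rather than anything from dotdrop:

    import subprocess

    def run(cmd):
        # run a command list, return (success, output) like the helper above
        with subprocess.Popen(cmd, shell=False,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.STDOUT) as proc:
            out, _ = proc.communicate()
            ret = proc.returncode
        lines = ''.join(x.decode('utf-8', 'replace')
                        for x in out.splitlines(keepends=True))
        return ret == 0, lines

    ok, output = run(['echo', 'hello'])  # placeholder command
    print(ok, output)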
@@ -66,10 +68,10 @@ def shellrun(cmd, debug=False):
|
|||||||
returns True|False, output
|
returns True|False, output
|
||||||
"""
|
"""
|
||||||
if debug:
|
if debug:
|
||||||
LOG.dbg('shell exec: \"{}\"'.format(cmd), force=True)
|
LOG.dbg(f'shell exec: \"{cmd}\"', force=True)
|
||||||
ret, out = subprocess.getstatusoutput(cmd)
|
ret, out = subprocess.getstatusoutput(cmd)
|
||||||
if debug:
|
if debug:
|
||||||
LOG.dbg('shell result ({}): {}'.format(ret, out), force=True)
|
LOG.dbg(f'shell result ({ret}): {out}', force=True)
|
||||||
return ret == 0, out
|
return ret == 0, out
|
||||||
|
|
||||||
|
|
||||||
@@ -79,11 +81,11 @@ def userinput(prompt, debug=False):
|
|||||||
return user input
|
return user input
|
||||||
"""
|
"""
|
||||||
if debug:
|
if debug:
|
||||||
LOG.dbg('get user input for \"{}\"'.format(prompt), force=True)
|
LOG.dbg(f'get user input for \"{prompt}\"', force=True)
|
||||||
pre = 'Please provide the value for \"{}\": '.format(prompt)
|
pre = f'Please provide the value for \"{prompt}\": '
|
||||||
res = input(pre)
|
res = input(pre)
|
||||||
if debug:
|
if debug:
|
||||||
LOG.dbg('user input result: {}'.format(res), force=True)
|
LOG.dbg(f'user input result: {res}', force=True)
|
||||||
return res
|
return res
|
||||||
|
|
||||||
|
|
||||||
@@ -160,13 +162,13 @@ def removepath(path, logger=None):
|
|||||||
if not path:
|
if not path:
|
||||||
return
|
return
|
||||||
if not os.path.lexists(path):
|
if not os.path.lexists(path):
|
||||||
err = 'File not found: {}'.format(path)
|
err = f'File not found: {path}'
|
||||||
if logger:
|
if logger:
|
||||||
logger.warn(err)
|
logger.warn(err)
|
||||||
return
|
return
|
||||||
raise OSError(err)
|
raise OSError(err)
|
||||||
if os.path.normpath(os.path.expanduser(path)) in NOREMOVE:
|
if os.path.normpath(os.path.expanduser(path)) in NOREMOVE:
|
||||||
err = 'Dotdrop refuses to remove {}'.format(path)
|
err = f'Dotdrop refuses to remove {path}'
|
||||||
if logger:
|
if logger:
|
||||||
logger.warn(err)
|
logger.warn(err)
|
||||||
return
|
return
|
||||||
@@ -178,7 +180,7 @@ def removepath(path, logger=None):
|
|||||||
elif os.path.isdir(path):
|
elif os.path.isdir(path):
|
||||||
shutil.rmtree(path)
|
shutil.rmtree(path)
|
||||||
else:
|
else:
|
||||||
err = 'Unsupported file type for deletion: {}'.format(path)
|
err = f'Unsupported file type for deletion: {path}'
|
||||||
raise OSError(err)
|
raise OSError(err)
|
||||||
except Exception as exc:
|
except Exception as exc:
|
||||||
err = str(exc)
|
err = str(exc)
|
||||||
@@ -226,7 +228,7 @@ def must_ignore(paths, ignores, debug=False):
|
|||||||
if not ignores:
|
if not ignores:
|
||||||
return False
|
return False
|
||||||
if debug:
|
if debug:
|
||||||
LOG.dbg('must ignore? \"{}\" against {}'.format(paths, ignores),
|
LOG.dbg(f'must ignore? \"{paths}\" against {ignores}',
|
||||||
force=True)
|
force=True)
|
||||||
ignored_negative, ignored = categorize(
|
ignored_negative, ignored = categorize(
|
||||||
lambda ign: ign.startswith('!'), ignores)
|
lambda ign: ign.startswith('!'), ignores)
|
||||||
@@ -236,7 +238,7 @@ def must_ignore(paths, ignores, debug=False):
|
|||||||
for i in ignored:
|
for i in ignored:
|
||||||
if fnmatch.fnmatch(path, i):
|
if fnmatch.fnmatch(path, i):
|
||||||
if debug:
|
if debug:
|
||||||
LOG.dbg('ignore \"{}\" match: {}'.format(i, path),
|
LOG.dbg(f'ignore \"{i}\" match: {path}',
|
||||||
force=True)
|
force=True)
|
||||||
ignore_matches.append(path)
|
ignore_matches.append(path)
|
||||||
|
|
||||||
@@ -249,24 +251,24 @@ def must_ignore(paths, ignores, debug=False):
|
|||||||
LOG.dbg(msg.format(path, nign), force=True)
|
LOG.dbg(msg.format(path, nign), force=True)
|
||||||
if fnmatch.fnmatch(path, nign):
|
if fnmatch.fnmatch(path, nign):
|
||||||
if debug:
|
if debug:
|
||||||
msg = 'negative ignore \"{}\" match: {}'.format(nign, path)
|
msg = f'negative ignore \"{nign}\" match: {path}'
|
||||||
LOG.dbg(msg, force=True)
|
LOG.dbg(msg, force=True)
|
||||||
try:
|
try:
|
||||||
ignore_matches.remove(path)
|
ignore_matches.remove(path)
|
||||||
except ValueError:
|
except ValueError:
|
||||||
LOG.warn('no files that are currently being '
|
warn = 'no files that are currently being '
|
||||||
'ignored match \"{}\". In order '
|
warn += f'ignored match \"{nign}\". In order '
|
||||||
'for a negative ignore pattern '
|
warn += 'for a negative ignore pattern '
|
||||||
'to work, it must match a file '
|
warn += 'to work, it must match a file '
|
||||||
'that is being ignored by a '
|
warn += 'that is being ignored by a '
|
||||||
'previous ignore pattern.'.format(nign)
|
warn += 'previous ignore pattern.'
|
||||||
)
|
LOG.warn(warn)
|
||||||
if ignore_matches:
|
if ignore_matches:
|
||||||
if debug:
|
if debug:
|
||||||
LOG.dbg('ignoring {}'.format(paths), force=True)
|
LOG.dbg(f'ignoring {paths}', force=True)
|
||||||
return True
|
return True
|
||||||
if debug:
|
if debug:
|
||||||
LOG.dbg('NOT ignoring {}'.format(paths), force=True)
|
LOG.dbg(f'NOT ignoring {paths}', force=True)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
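must_ignore() above matches paths against fnmatch patterns, with '!'-prefixed patterns un-ignoring files that an earlier pattern caught; the rebuilt warning fires when a negative pattern matches nothing currently ignored. A small sketch of that matching logic with made-up patterns and paths:

    import fnmatch

    ignores = ['*.cache/*', '!*.cache/keep/*']      # made-up patterns
    paths = ['.cache/a', '.cache/keep/b', 'other']  # made-up paths

    negatives = [i[1:] for i in ignores if i.startswith('!')]
    positives = [i for i in ignores if not i.startswith('!')]

    # first collect positive matches, then let negative patterns rescue paths
    matches = [p for p in paths
               if any(fnmatch.fnmatch(p, pat) for pat in positives)]
    for neg in negatives:
        matches = [p for p in matches if not fnmatch.fnmatch(p, neg)]

    print(matches)  # ['.cache/a'] -> only this path stays ignored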
@@ -284,7 +286,7 @@ def uniq_list(a_list):
|
|||||||
def patch_ignores(ignores, prefix, debug=False):
|
def patch_ignores(ignores, prefix, debug=False):
|
||||||
"""allow relative ignore pattern"""
|
"""allow relative ignore pattern"""
|
||||||
new = []
|
new = []
|
||||||
LOG.dbg('ignores before patching: {}'.format(ignores), force=debug)
|
LOG.dbg(f'ignores before patching: {ignores}', force=debug)
|
||||||
for ignore in ignores:
|
for ignore in ignores:
|
||||||
negative = ignore.startswith('!')
|
negative = ignore.startswith('!')
|
||||||
if negative:
|
if negative:
|
||||||
@@ -311,7 +313,7 @@ def patch_ignores(ignores, prefix, debug=False):
|
|||||||
new.append('!' + path)
|
new.append('!' + path)
|
||||||
else:
|
else:
|
||||||
new.append(path)
|
new.append(path)
|
||||||
LOG.dbg('ignores after patching: {}'.format(new), force=debug)
|
LOG.dbg(f'ignores after patching: {new}', force=debug)
|
||||||
return new
|
return new
|
||||||
|
|
||||||
|
|
||||||
@@ -424,7 +426,7 @@ def get_file_perm(path):
|
|||||||
def chmod(path, mode, debug=False):
|
def chmod(path, mode, debug=False):
|
||||||
"""change mode of file"""
|
"""change mode of file"""
|
||||||
if debug:
|
if debug:
|
||||||
LOG.dbg('chmod {} {}'.format(oct(mode), path), force=True)
|
LOG.dbg(f'chmod {mode:o} {path}', force=True)
|
||||||
os.chmod(path, mode)
|
os.chmod(path, mode)
|
||||||
return get_file_perm(path) == mode
|
return get_file_perm(path) == mode
|
||||||
|
|
||||||
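One side note on the chmod debug line above: oct() keeps the 0o prefix while the :o format spec drops it, so the logged mode changes shape slightly. A quick check:

    import stat

    mode = stat.S_IRWXU | stat.S_IRGRP | stat.S_IROTH  # 0o744
    print(oct(mode))    # -> 0o744
    print(f'{mode:o}')  # -> 744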
@@ -452,23 +454,23 @@ def debug_list(title, elems, debug):
|
|||||||
"""pretty print list"""
|
"""pretty print list"""
|
||||||
if not debug:
|
if not debug:
|
||||||
return
|
return
|
||||||
LOG.dbg('{}:'.format(title), force=debug)
|
LOG.dbg(f'{title}:', force=debug)
|
||||||
for elem in elems:
|
for elem in elems:
|
||||||
LOG.dbg('\t- {}'.format(elem), force=debug)
|
LOG.dbg(f'\t- {elem}', force=debug)
|
||||||
|
|
||||||
|
|
||||||
def debug_dict(title, elems, debug):
|
def debug_dict(title, elems, debug):
|
||||||
"""pretty print dict"""
|
"""pretty print dict"""
|
||||||
if not debug:
|
if not debug:
|
||||||
return
|
return
|
||||||
LOG.dbg('{}:'.format(title), force=debug)
|
LOG.dbg(f'{title}:', force=debug)
|
||||||
for k, val in elems.items():
|
for k, val in elems.items():
|
||||||
if isinstance(val, list):
|
if isinstance(val, list):
|
||||||
LOG.dbg('\t- \"{}\":'.format(k), force=debug)
|
LOG.dbg(f'\t- \"{k}\":', force=debug)
|
||||||
for i in val:
|
for i in val:
|
||||||
LOG.dbg('\t\t- {}'.format(i), force=debug)
|
LOG.dbg(f'\t\t- {i}', force=debug)
|
||||||
else:
|
else:
|
||||||
LOG.dbg('\t- \"{}\": {}'.format(k, val), force=debug)
|
LOG.dbg(f'\t- \"{k}\": {val}', force=debug)
|
||||||
|
|
||||||
|
|
||||||
def check_version():
|
def check_version():
|
||||||
@@ -493,14 +495,14 @@ def check_version():
|
|||||||
def pivot_path(path, newdir, striphome=False, logger=None):
|
def pivot_path(path, newdir, striphome=False, logger=None):
|
||||||
"""change path to be under newdir"""
|
"""change path to be under newdir"""
|
||||||
if logger:
|
if logger:
|
||||||
logger.dbg('pivot new dir: \"{}\"'.format(newdir))
|
logger.dbg(f'pivot new dir: \"{newdir}\"')
|
||||||
logger.dbg('strip home: {}'.format(striphome))
|
logger.dbg(f'strip home: {striphome}')
|
||||||
if striphome:
|
if striphome:
|
||||||
path = strip_home(path)
|
path = strip_home(path)
|
||||||
sub = path.lstrip(os.sep)
|
sub = path.lstrip(os.sep)
|
||||||
new = os.path.join(newdir, sub)
|
new = os.path.join(newdir, sub)
|
||||||
if logger:
|
if logger:
|
||||||
logger.dbg('pivot \"{}\" to \"{}\"'.format(path, new))
|
logger.dbg(f'pivot \"{path}\" to \"{new}\"')
|
||||||
return new
|
return new
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
tests.sh
@@ -18,7 +18,9 @@ pyflakes --version
|
|||||||
which pycodestyle >/dev/null 2>&1
|
which pycodestyle >/dev/null 2>&1
|
||||||
[ "$?" != "0" ] && echo "Install pycodestyle" && exit 1
|
[ "$?" != "0" ] && echo "Install pycodestyle" && exit 1
|
||||||
echo "testing with pycodestyle"
|
echo "testing with pycodestyle"
|
||||||
pycodestyle --ignore=W503,W504,W605 dotdrop/
|
# W503: Line break occurred before a binary operator
|
||||||
|
# W504: Line break occurred after a binary operator
|
||||||
|
pycodestyle --ignore=W503,W504 dotdrop/
|
||||||
pycodestyle tests/
|
pycodestyle tests/
|
||||||
pycodestyle scripts/
|
pycodestyle scripts/
|
||||||
|
|
||||||
@@ -29,6 +31,15 @@ pyflakes tests/
|
|||||||
|
|
||||||
# pylint
|
# pylint
|
||||||
echo "testing with pylint"
|
echo "testing with pylint"
|
||||||
|
# https://pylint.pycqa.org/en/latest/user_guide/checkers/features.html
|
||||||
|
# R0902: too-many-instance-attributes
|
||||||
|
# R0913: too-many-arguments
|
||||||
|
# R0903: too-few-public-methods
|
||||||
|
# R0914: too-many-locals
|
||||||
|
# R0915: too-many-statements
|
||||||
|
# R0912: too-many-branches
|
||||||
|
# R0911: too-many-return-statements
|
||||||
|
# C0209: consider-using-f-string
|
||||||
pylint \
|
pylint \
|
||||||
--disable=R0902 \
|
--disable=R0902 \
|
||||||
--disable=R0913 \
|
--disable=R0913 \
|
||||||
@@ -37,7 +48,6 @@ pylint \
|
|||||||
--disable=R0915 \
|
--disable=R0915 \
|
||||||
--disable=R0912 \
|
--disable=R0912 \
|
||||||
--disable=R0911 \
|
--disable=R0911 \
|
||||||
--disable=R1732 \
|
|
||||||
--disable=C0209 \
|
--disable=C0209 \
|
||||||
dotdrop/
|
dotdrop/
|
||||||
|
|
||||||
|
|||||||