diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 28d3d0f..8b0613c 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,3 +1,17 @@
+Content
+
+* [code base](#code-base)
+* [config parsing](#config-parsing)
+  * [lower layer](#lower-layer)
+  * [higher layer](#higher-layer)
+  * [precedence](#precedence)
+  * [variables resolution](#variables-resolution)
+  * [rules](#rules)
+* [testing](#testing)
+  * [testing with unittest](#testing-with-unittest)
+  * [testing with bash scripts](#testing-with-bash-scripts)
+* [documentation](#documentation)
+
Thanks for helping out! Feature requests, bug reports and PRs are always welcome!

@@ -6,9 +20,9 @@ This file provides a few pointers on how to contribute to dotdrop
and where to find information. For any question, feel free to open an issue.

For PR adding new features, I'd be very thankful if you could add either
-a unittest testing the added feature or a bash script test, thanks!
+a unittest testing the added feature or a bash script test (see [testing](#testing)), thanks!

-# Code base
+# code base

Dotdrop's code base is located in the [dotdrop directory](/dotdrop).

@@ -32,20 +46,24 @@ Here's an overview of the different files and their role:
* **updater.py**: the class handling the update of dotfiles for `update`
* **utils.py**: some useful methods

-## Config parsing
+# config parsing

-The configuration file (yaml) is parsed in two layers:
+The configuration file (yaml) is parsed using two layers:

- * the lower layer in `cfg_yaml.py`
- * the higher layer in `cfg_aggregator.py`
+ * first in the lower layer in [cfg_yaml.py](/dotdrop/cfg_yaml.py)
+ * then in the higher layer in [cfg_aggregator.py](/dotdrop/cfg_aggregator.py)

Only the higher layer is accessible to other classes of dotdrop.

-The lower layer part is only taking care of basic types and
-does the following:
+## lower layer
+
+This is done in [cfg_yaml.py](/dotdrop/cfg_yaml.py).
+
+The lower layer only takes care of basic types
+and does the following:

 * normalize all config entries
 * resolve paths (dotfiles src, dotpath, etc)
- * refactor actions to a common format
+ * refactor actions/transformations to a common format
 * etc
 * import any data from external files (configs, variables, etc)
 * apply variable substitutions
@@ -55,11 +73,14 @@ does the following:
 * fix any deprecated entries (link_by_default, etc)
 * clear empty entries

-In the end it makes sure the dictionary (or parts of it) accessed
-by the higher layer is clean and normalized.
+In the end it builds a cleaned and normalized dictionary to be accessed by the higher layer.
+
+## higher layer
+
+This is done in [cfg_aggregator.py](/dotdrop/cfg_aggregator.py).

The higher layer will transform the dictionary parsed by the lower layer
-into objects (profiles, dotfiles, actions, etc).
+into objects (profiles, dotfiles, actions, transformations, etc).
The higher layer has no notion of inclusion (profile included for
example) or file importing (import actions, etc) or even interpreted
variables (it only sees variables that have already been interpreted).
@@ -74,37 +95,70 @@ example) won't be *seen* by the higher layer until the config is reloaded. Consi
`dirty` flag as a sign the file needs to be written and its representation in
higher levels is not accurate anymore.

-## Variables resolution
-
-How variables are resolved (pass through jinja2's
-templating function) in the config file.
-
-* resolve `include` (the below merge is temporary just to resolve the `includes`)
-  * `variables` and `dynvariables` are first merged and recursively resolved
-  * `dynvariables` are executed
-  * they are all merged and `include` paths are resolved
-    (allows to use something like `include {{@@ os @@}}.variables.yaml`)
-* `variables` and profile's `variables` are merged
-* `dynvariables` and profile's `dynvariables` are merged
-* `dynvariables` are executed
-* they are all merged into the final *local* `variables`
-
-These are then used to resolve different elements in the config file:
-see [this](https://github.com/deadc0de6/dotdrop/wiki/config-variables#config-available-variables)
-
-Then additional variables (`import_variables` and `import_configs`) are
-then merged and take precedence over local variables.
-
-Note:
+## precedence

* `dynvariables` > `variables`
* profile `(dyn)variables` > any other `(dyn)variables`
* profile `(dyn)variables` > profile's included `(dyn)variables`
* imported `variables`/`dynvariables` > `(dyn)variables`
-* actions/transformations using variables are resolved at runtime
+
+## variables resolution
+
+This is how variables are resolved (through jinja2's
+templating) in the config file:
+
+* resolve main config file variables
+  * merge `variables` and `dynvariables` (allowing cyclic references)
+  * recursively template merged `variables` and `dynvariables`
+  * `dynvariables` are executed
+  * profile's `variables` and `dynvariables` are merged
+* resolve *included* entries (see below)
+  * paths and entries are templated
+    (allows to use something like `include {{@@ os @@}}.variables.yaml`)
+* *included* entries are processed
+  * `(dyn)variables` are all resolved in their own file
+
+Potential *included* entries:
+
+* entry *import_actions*
+* entry *import_configs*
+* entry *import_variables*
+* profile's *import*
+* profile's *include*
+
+Variables are then used to resolve different elements in the config file:
+see [this](https://github.com/deadc0de6/dotdrop/wiki/config-variables#config-available-variables)
+
+## rules
+
+* `dynvariables` are executed in their own config file
+* since `variables` and `dynvariables` are templated before the `dynvariables`
+  are executed, `dynvariables` can safely reference `variables`; however,
+  `variables` referencing `dynvariables` will end up with the *not-executed* value
+  of the referenced `dynvariables` (see the examples below)
+* a profile cannot include profiles defined above in the import tree
+* config files do not have access to variables defined above in the import tree
+* actions/transformations using variables are resolved at runtime
 (when action/transformation is executed) and not when loading the config

-# Testing
+This will result in `dvar0 = "test"` and `var0 = "echo test"` (**not** `var0 = test`):
+```yaml
+variables:
+  var0: "{{@@ dvar0 @@}}"
+dynvariables:
+  dvar0: "echo test"
+```
+
+This will result in `dvar0 = "test"` and `var0 = "test"`:
+```yaml
+variables:
+  var0: "test"
+dynvariables:
+  dvar0: "echo {{@@ var0 @@}}"
+```
+
+
+# testing

Dotdrop is tested with the use of the [tests.sh](/tests.sh) script.

@@ -129,6 +183,6 @@ for different use-cases (usually described in their filename). Each script
starts with the same boiler plate code that you can paste at the start of your
new test (see the head of the file down to `# this is the test`).

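For the unittest option, a new test can stay very small. Below is a minimal
sketch of what such a test might look like. It is purely illustrative: it
targets `uniq_list` from [utils.py](/dotdrop/utils.py) only because that is a
small, self-contained helper, and it assumes `uniq_list` simply drops duplicate
entries; the module, class and assertions for a real feature will differ.

```python
"""minimal unittest sketch for a new feature (illustrative only)"""
import unittest

# small helper from dotdrop/utils.py, used here only as an example target
from dotdrop.utils import uniq_list


class TestExample(unittest.TestCase):
    """example test case"""

    def test_uniq_list_removes_duplicates(self):
        """assumes uniq_list drops duplicate entries"""
        original = ['vim', 'zsh', 'vim', 'tmux']
        result = uniq_list(original)
        # same elements, duplicates removed
        self.assertEqual(sorted(result), sorted(set(original)))
        self.assertEqual(len(result), 3)


if __name__ == '__main__':
    unittest.main()
```

If you go for a bash script test instead, copying the boilerplate at the head
of an existing script (as described above) is the easiest starting point.
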
-# Documentation +# documentation Most of dotdrop documentation is hosted in [its wiki](https://github.com/deadc0de6/dotdrop/wiki) diff --git a/README.md b/README.md index 9194de0..85a2484 100644 --- a/README.md +++ b/README.md @@ -38,7 +38,7 @@ Features: Check also the [blog post](https://deadc0de.re/articles/dotfiles.html), the [example](#getting-started), the [wiki](https://github.com/deadc0de6/dotdrop/wiki) or -how [people are using dotdrop](https://github.com/deadc0de6/dotdrop/wiki/people-using-dotdrop) +how [people are using dotdrop](https://github.com/deadc0de6/dotdrop/wiki/meta-people-using-dotdrop) for more. Quick start: @@ -261,8 +261,8 @@ That's it, a single repository with all your dotfiles for your different hosts. You can then -* [create actions](https://github.com/deadc0de6/dotdrop/wiki/usage-actions) -* [use transformations](https://github.com/deadc0de6/dotdrop/wiki/usage-transformations) +* [create actions](https://github.com/deadc0de6/dotdrop/wiki/config-actions) +* [use transformations](https://github.com/deadc0de6/dotdrop/wiki/config-transformations) * [use variables](https://github.com/deadc0de6/dotdrop/wiki/config-variables) * [symlink dotfiles](https://github.com/deadc0de6/dotdrop/wiki/symlinked-dotfiles) * [and more](https://github.com/deadc0de6/dotdrop/wiki) diff --git a/dotdrop/action.py b/dotdrop/action.py index ccc7bf5..c31254e 100644 --- a/dotdrop/action.py +++ b/dotdrop/action.py @@ -11,6 +11,7 @@ import os # local imports from dotdrop.dictparser import DictParser +from dotdrop.exceptions import UndefinedException class Cmd(DictParser): @@ -32,7 +33,12 @@ class Cmd(DictParser): ret = 1 action = self.action if templater: - action = templater.generate_string(self.action) + try: + action = templater.generate_string(self.action) + except UndefinedException as e: + err = 'bad {}: {}'.format(self.descr, e) + self.log.warn(err) + return False if debug: self.log.dbg('{}:'.format(self.descr)) self.log.dbg(' - raw \"{}\"'.format(self.action)) @@ -42,7 +48,12 @@ class Cmd(DictParser): if self.args: args = self.args if templater: - args = [templater.generate_string(a) for a in args] + try: + args = [templater.generate_string(a) for a in args] + except UndefinedException as e: + err = 'bad arguments for {}: {}'.format(self.descr, e) + self.log.warn(err) + return False if debug and args: self.log.dbg('action args:') for cnt, arg in enumerate(args): diff --git a/dotdrop/cfg_aggregator.py b/dotdrop/cfg_aggregator.py index 8360ff4..ecc3d9c 100644 --- a/dotdrop/cfg_aggregator.py +++ b/dotdrop/cfg_aggregator.py @@ -17,6 +17,7 @@ from dotdrop.profile import Profile from dotdrop.action import Action, Transform from dotdrop.logger import Logger from dotdrop.utils import strip_home +from dotdrop.exceptions import UndefinedException TILD = '~' @@ -77,7 +78,7 @@ class CfgAggregator: self._debug_list('trans_w', self.trans_w) # variables - self.variables = self.cfgyaml.get_variables() + self.variables = self.cfgyaml.variables if self.debug: self._debug_dict('variables', self.variables) @@ -134,8 +135,9 @@ class CfgAggregator: objects.append(o) if not islist: objects = objects[0] - if self.debug: - self.log.dbg('patching {}.{} with {}'.format(c, keys, objects)) + # if self.debug: + # er = 'patching {}.{} with {}' + # self.log.dbg(er.format(c, keys, objects)) setattr(c, keys, objects) def del_dotfile(self, dotfile): @@ -281,7 +283,12 @@ class CfgAggregator: @src: dotfile src (in dotpath) @dst: dotfile dst (on filesystem) """ - src = self.cfgyaml.resolve_dotfile_src(src) + try: + src = 
self.cfgyaml.resolve_dotfile_src(src) + except UndefinedException as e: + err = 'unable to resolve {}: {}' + self.log.err(err.format(src, e)) + return None dotfiles = self.get_dotfile_by_dst(dst) for d in dotfiles: if d.src == src: diff --git a/dotdrop/cfg_yaml.py b/dotdrop/cfg_yaml.py index 5dfa50a..ce514b9 100644 --- a/dotdrop/cfg_yaml.py +++ b/dotdrop/cfg_yaml.py @@ -3,6 +3,19 @@ author: deadc0de6 (https://github.com/deadc0de6) Copyright (c) 2019, deadc0de6 handle lower level of the config file + +will provide the following dictionaries to +the upper layer: + +* self.settings +* self.dotfiles +* self.profiles +* self.actions +* self.trans_r +* self.trans_w +* self.variables + +Additionally a few methods are exported. """ import os @@ -19,7 +32,7 @@ from dotdrop.logger import Logger from dotdrop.templategen import Templategen from dotdrop.linktypes import LinkTypes from dotdrop.utils import shell, uniq_list -from dotdrop.exceptions import YamlException +from dotdrop.exceptions import YamlException, UndefinedException class CfgYaml: @@ -76,44 +89,129 @@ class CfgYaml: lnk_link = LinkTypes.LINK.name.lower() lnk_children = LinkTypes.LINK_CHILDREN.name.lower() - def __init__(self, path, profile=None, debug=False): + def __init__(self, path, profile=None, addprofiles=[], debug=False): """ config parser @path: config file path @profile: the selected profile + @addprofiles: included profiles @debug: debug flag """ - self.path = os.path.abspath(path) - self.profile = profile - self.debug = debug - self.log = Logger() + self._path = os.path.abspath(path) + self._profile = profile + self._debug = debug + self._log = Logger() # config needs to be written - self.dirty = False + self._dirty = False # indicates the config has been updated - self.dirty_deprecated = False + self._dirty_deprecated = False + # profile variables + self._profilevarskeys = [] + # included profiles + self._inc_profiles = addprofiles - if not os.path.exists(path): + # init the dictionaries + self.settings = {} + self.dotfiles = {} + self.profiles = {} + self.actions = {} + self.trans_r = {} + self.trans_w = {} + self.variables = {} + + if not os.path.exists(self._path): err = 'invalid config path: \"{}\"'.format(path) - if self.debug: - self.log.dbg(err) + if self._debug: + self._dbg(err) raise YamlException(err) - self.yaml_dict = self._load_yaml(self.path) + self._yaml_dict = self._load_yaml(self._path) # live patch deprecated entries - self._fix_deprecated(self.yaml_dict) - # parse to self variables - self._parse_main_yaml(self.yaml_dict) - if self.debug: - self.log.dbg('BEFORE normalization: {}'.format(self.yaml_dict)) + self._fix_deprecated(self._yaml_dict) - # resolve variables - self.variables, self.prokeys = self._merge_variables() + ################################################## + # parse the config and variables + ################################################## - # apply variables - self._apply_variables() + # parse the "config" block + self.settings = self._parse_blk_settings(self._yaml_dict) + + # base templater (when no vars/dvars exist) + self.variables = self._enrich_vars(self.variables, self._profile) + self._redefine_templater() + + # variables and dynvariables need to be first merged + # before being templated in order to allow cyclic + # references between them + + # parse the "variables" block + var = self._parse_blk_variables(self._yaml_dict) + self._add_variables(var, template=False) + + # parse the "dynvariables" block + dvariables = self._parse_blk_dynvariables(self._yaml_dict) + 
self._add_variables(dvariables, template=False) + + # now template variables and dynvariables from the same pool + self._rec_resolve_variables(self.variables) + # and execute dvariables + # since this is done after recursively resolving variables + # and dynvariables this means that variables referencing + # dynvariables will result with the not executed value + if dvariables.keys(): + self._shell_exec_dvars(self.variables, keys=dvariables.keys()) + # finally redefine the template + self._redefine_templater() + + if self._debug: + self._debug_dict('current variables defined', self.variables) + + # parse the "profiles" block + self.profiles = self._parse_blk_profiles(self._yaml_dict) + + # include the profile's variables/dynvariables last + # as it overwrites existing ones + self._inc_profiles, pv, pvd = self._get_profile_included_vars() + self._add_variables(pv, prio=True) + self._add_variables(pvd, shell=True, prio=True) + self._profilevarskeys.extend(pv.keys()) + self._profilevarskeys.extend(pvd.keys()) + + # template variables + self.variables = self._template_dict(self.variables) + if self._debug: + self._debug_dict('current variables defined', self.variables) + + ################################################## + # template the "include" entries + ################################################## + + self._template_include_entry() + if self._debug: + self._debug_dict('current variables defined', self.variables) + + ################################################## + # parse the other blocks + ################################################## + + # parse the "dotfiles" block + self.dotfiles = self._parse_blk_dotfiles(self._yaml_dict) + # parse the "actions" block + self.actions = self._parse_blk_actions(self._yaml_dict) + # parse the "trans_r" block + self.trans_r = self._parse_blk_trans_r(self._yaml_dict) + # parse the "trans_w" block + self.trans_w = self._parse_blk_trans_w(self._yaml_dict) + + ################################################## + # import elements + ################################################## # process imported variables (import_variables) - self._import_variables() + newvars = self._import_variables() + self._clear_profile_vars(newvars) + self._add_variables(newvars) + # process imported actions (import_actions) self._import_actions() # process imported profile dotfiles (import) @@ -123,126 +221,31 @@ class CfgYaml: # process profile include self._resolve_profile_includes() + + # add the current profile variables + _, pv, pvd = self._get_profile_included_vars() + self._add_variables(pv, prio=True) + self._add_variables(pvd, shell=True, prio=True) + self._profilevarskeys.extend(pv.keys()) + self._profilevarskeys.extend(pvd.keys()) + + # resolve variables + self._clear_profile_vars(newvars) + self._add_variables(newvars) + # process profile ALL self._resolve_profile_all() # patch dotfiles paths - self._resolve_dotfile_paths() + self._template_dotfiles_paths() - if self.debug: - self.log.dbg('AFTER normalization: {}'.format(self.yaml_dict)) - - def get_variables(self): - """retrieve all variables""" - return self.variables + if self._debug: + self._dbg('########### {} ###########'.format('final config')) + self._debug_entries() ######################################################## - # parsing + # outside available methods ######################################################## - def _parse_main_yaml(self, dic): - """parse the different blocks""" - self.ori_settings = self._get_entry(dic, self.key_settings) - self.settings = 
Settings(None).serialize().get(self.key_settings) - self.settings.update(self.ori_settings) - - # resolve minimum version - if self.key_settings_minversion in self.settings: - minversion = self.settings[self.key_settings_minversion] - self._check_minversion(minversion) - - # resolve settings paths - p = self._norm_path(self.settings[self.key_settings_dotpath]) - self.settings[self.key_settings_dotpath] = p - p = self._norm_path(self.settings[self.key_settings_workdir]) - self.settings[self.key_settings_workdir] = p - p = [ - self._norm_path(p) - for p in self.settings[Settings.key_filter_file] - ] - self.settings[Settings.key_filter_file] = p - p = [ - self._norm_path(p) - for p in self.settings[Settings.key_func_file] - ] - self.settings[Settings.key_func_file] = p - if self.debug: - self._debug_dict('settings', self.settings) - - # dotfiles - self.ori_dotfiles = self._get_entry(dic, self.key_dotfiles) - self.dotfiles = deepcopy(self.ori_dotfiles) - keys = self.dotfiles.keys() - if len(keys) != len(list(set(keys))): - dups = [x for x in keys if x not in list(set(keys))] - err = 'duplicate dotfile keys found: {}'.format(dups) - raise YamlException(err) - self.dotfiles = self._norm_dotfiles(self.dotfiles) - if self.debug: - self._debug_dict('dotfiles', self.dotfiles) - - # profiles - self.ori_profiles = self._get_entry(dic, self.key_profiles) - self.profiles = deepcopy(self.ori_profiles) - self.profiles = self._norm_profiles(self.profiles) - if self.debug: - self._debug_dict('profiles', self.profiles) - - # actions - self.ori_actions = self._get_entry(dic, self.key_actions, - mandatory=False) - self.actions = deepcopy(self.ori_actions) - self.actions = self._norm_actions(self.actions) - if self.debug: - self._debug_dict('actions', self.actions) - - # trans_r - key = self.key_trans_r - if self.old_key_trans_r in dic: - self.log.warn('\"trans\" is deprecated, please use \"trans_read\"') - dic[self.key_trans_r] = dic[self.old_key_trans_r] - del dic[self.old_key_trans_r] - self.ori_trans_r = self._get_entry(dic, key, mandatory=False) - self.trans_r = deepcopy(self.ori_trans_r) - if self.debug: - self._debug_dict('trans_r', self.trans_r) - - # trans_w - self.ori_trans_w = self._get_entry(dic, self.key_trans_w, - mandatory=False) - self.trans_w = deepcopy(self.ori_trans_w) - if self.debug: - self._debug_dict('trans_w', self.trans_w) - - # variables - self.ori_variables = self._get_entry(dic, - self.key_variables, - mandatory=False) - if self.debug: - self._debug_dict('variables', self.ori_variables) - - # dynvariables - self.ori_dvariables = self._get_entry(dic, - self.key_dvariables, - mandatory=False) - if self.debug: - self._debug_dict('dynvariables', self.ori_dvariables) - - def _resolve_dotfile_paths(self): - """resolve dotfiles paths""" - t = Templategen(variables=self.variables, - func_file=self.settings[Settings.key_func_file], - filter_file=self.settings[Settings.key_filter_file]) - - for dotfile in self.dotfiles.values(): - # src - src = dotfile[self.key_dotfile_src] - newsrc = self.resolve_dotfile_src(src, templater=t) - dotfile[self.key_dotfile_src] = newsrc - # dst - dst = dotfile[self.key_dotfile_dst] - newdst = self.resolve_dotfile_dst(dst, templater=t) - dotfile[self.key_dotfile_dst] = newdst - def resolve_dotfile_src(self, src, templater=None): """resolve dotfile src path""" newsrc = '' @@ -250,9 +253,9 @@ class CfgYaml: new = src if templater: new = templater.generate_string(src) - if new != src and self.debug: + if new != src and self._debug: msg = 'dotfile src: \"{}\" -> 
\"{}\"'.format(src, new) - self.log.dbg(msg) + self._dbg(msg) src = new src = os.path.join(self.settings[self.key_settings_dotpath], src) @@ -266,123 +269,273 @@ class CfgYaml: new = dst if templater: new = templater.generate_string(dst) - if new != dst and self.debug: + if new != dst and self._debug: msg = 'dotfile dst: \"{}\" -> \"{}\"'.format(dst, new) - self.log.dbg(msg) + self._dbg(msg) dst = new newdst = self._norm_path(dst) return newdst - def _rec_resolve_vars(self, variables): - """recursive resolve variables""" - default = self._get_variables_dict(self.profile) - t = Templategen(variables=self._merge_dict(default, variables), - func_file=self.settings[Settings.key_func_file], - filter_file=self.settings[Settings.key_filter_file]) - for k in variables.keys(): - val = variables[k] - while Templategen.var_is_template(val): - val = t.generate_string(val) - variables[k] = val - t.update_variables(variables) + def add_dotfile_to_profile(self, dotfile_key, profile_key): + """add an existing dotfile key to a profile_key""" + self._new_profile(profile_key) + profile = self._yaml_dict[self.key_profiles][profile_key] + if self.key_profile_dotfiles not in profile or \ + profile[self.key_profile_dotfiles] is None: + profile[self.key_profile_dotfiles] = [] + pdfs = profile[self.key_profile_dotfiles] + if self.key_all not in pdfs and \ + dotfile_key not in pdfs: + profile[self.key_profile_dotfiles].append(dotfile_key) + if self._debug: + msg = 'add \"{}\" to profile \"{}\"'.format(dotfile_key, + profile_key) + msg.format(dotfile_key, profile_key) + self._dbg(msg) + self._dirty = True + return self._dirty + + def get_all_dotfile_keys(self): + """return all existing dotfile keys""" + return self.dotfiles.keys() + + def add_dotfile(self, key, src, dst, link): + """add a new dotfile""" + if key in self.dotfiles.keys(): + return False + if self._debug: + self._dbg('adding new dotfile: {}'.format(key)) + self._dbg('new dotfile src: {}'.format(src)) + self._dbg('new dotfile dst: {}'.format(dst)) + + df_dict = { + self.key_dotfile_src: src, + self.key_dotfile_dst: dst, + } + dfl = self.settings[self.key_settings_link_dotfile_default] + if str(link) != dfl: + df_dict[self.key_dotfile_link] = str(link) + self._yaml_dict[self.key_dotfiles][key] = df_dict + self._dirty = True + + def del_dotfile(self, key): + """remove this dotfile from config""" + if key not in self._yaml_dict[self.key_dotfiles]: + self._log.err('key not in dotfiles: {}'.format(key)) + return False + if self._debug: + self._dbg('remove dotfile: {}'.format(key)) + del self._yaml_dict[self.key_dotfiles][key] + if self._debug: + dfs = self._yaml_dict[self.key_dotfiles] + self._dbg('new dotfiles: {}'.format(dfs)) + self._dirty = True + return True + + def del_dotfile_from_profile(self, df_key, pro_key): + """remove this dotfile from that profile""" + if df_key not in self.dotfiles.keys(): + self._log.err('key not in dotfiles: {}'.format(df_key)) + return False + if pro_key not in self.profiles.keys(): + self._log.err('key not in profile: {}'.format(pro_key)) + return False + # get the profile dictionary + profile = self._yaml_dict[self.key_profiles][pro_key] + if df_key not in profile[self.key_profile_dotfiles]: + return True + if self._debug: + dfs = profile[self.key_profile_dotfiles] + self._dbg('{} profile dotfiles: {}'.format(pro_key, dfs)) + self._dbg('remove {} from profile {}'.format(df_key, pro_key)) + profile[self.key_profile_dotfiles].remove(df_key) + if self._debug: + dfs = profile[self.key_profile_dotfiles] + self._dbg('{} profile 
dotfiles: {}'.format(pro_key, dfs)) + self._dirty = True + return True + + def save(self): + """save this instance and return True if saved""" + if not self._dirty: + return False + + content = self._prepare_to_save(self._yaml_dict) + + if self._dirty_deprecated: + # add minversion + settings = content[self.key_settings] + settings[self.key_settings_minversion] = VERSION + + # save to file + if self._debug: + self._dbg('saving to {}'.format(self._path)) + try: + with open(self._path, 'w') as f: + self._yaml_dump(content, f) + except Exception as e: + self._log.err(e) + raise YamlException('error saving config: {}'.format(self._path)) + + if self._dirty_deprecated: + warn = 'your config contained deprecated entries' + warn += ' and was updated' + self._log.warn(warn) + + self._dirty = False + self.cfg_updated = False + return True + + def dump(self): + """dump the config dictionary""" + output = io.StringIO() + content = self._prepare_to_save(self._yaml_dict.copy()) + self._yaml_dump(content, output) + return output.getvalue() + + ######################################################## + # block parsing + ######################################################## + + def _parse_blk_settings(self, dic): + """parse the "config" block""" + block = self._get_entry(dic, self.key_settings).copy() + # set defaults + settings = Settings(None).serialize().get(self.key_settings) + settings.update(block) + + # resolve minimum version + if self.key_settings_minversion in settings: + minversion = settings[self.key_settings_minversion] + self._check_minversion(minversion) + + # normalize paths + p = self._norm_path(settings[self.key_settings_dotpath]) + settings[self.key_settings_dotpath] = p + p = self._norm_path(settings[self.key_settings_workdir]) + settings[self.key_settings_workdir] = p + p = [ + self._norm_path(p) + for p in settings[Settings.key_filter_file] + ] + settings[Settings.key_filter_file] = p + p = [ + self._norm_path(p) + for p in settings[Settings.key_func_file] + ] + settings[Settings.key_func_file] = p + if self._debug: + self._debug_dict('settings block:', settings) + return settings + + def _parse_blk_dotfiles(self, dic): + """parse the "dotfiles" block""" + dotfiles = self._get_entry(dic, self.key_dotfiles).copy() + keys = dotfiles.keys() + if len(keys) != len(list(set(keys))): + dups = [x for x in keys if x not in list(set(keys))] + err = 'duplicate dotfile keys found: {}'.format(dups) + raise YamlException(err) + + dotfiles = self._norm_dotfiles(dotfiles) + if self._debug: + self._debug_dict('dotfiles block', dotfiles) + return dotfiles + + def _parse_blk_profiles(self, dic): + """parse the "profiles" block""" + profiles = self._get_entry(dic, self.key_profiles).copy() + profiles = self._norm_profiles(profiles) + if self._debug: + self._debug_dict('profiles block', profiles) + return profiles + + def _parse_blk_actions(self, dic): + """parse the "actions" block""" + actions = self._get_entry(dic, self.key_actions, + mandatory=False) + if actions: + actions = actions.copy() + actions = self._norm_actions(actions) + if self._debug: + self._debug_dict('actions block', actions) + return actions + + def _parse_blk_trans_r(self, dic): + """parse the "trans_r" block""" + key = self.key_trans_r + if self.old_key_trans_r in dic: + msg = '\"trans\" is deprecated, please use \"trans_read\"' + self._log.warn(msg) + dic[self.key_trans_r] = dic[self.old_key_trans_r] + del dic[self.old_key_trans_r] + trans_r = self._get_entry(dic, key, mandatory=False) + if trans_r: + trans_r = trans_r.copy() + 
if self._debug: + self._debug_dict('trans_r block', trans_r) + return trans_r + + def _parse_blk_trans_w(self, dic): + """parse the "trans_w" block""" + trans_w = self._get_entry(dic, self.key_trans_w, + mandatory=False) + if trans_w: + trans_w = trans_w.copy() + if self._debug: + self._debug_dict('trans_w block', trans_w) + return trans_w + + def _parse_blk_variables(self, dic): + """parse the "variables" block""" + variables = self._get_entry(dic, + self.key_variables, + mandatory=False) + if variables: + variables = variables.copy() + if self._debug: + self._debug_dict('variables block', variables) return variables - def _get_profile_included_vars(self, tvars): - """resolve profile included variables/dynvariables""" - t = Templategen(variables=tvars, - func_file=self.settings[Settings.key_func_file], - filter_file=self.settings[Settings.key_filter_file]) - - for k, v in self.profiles.items(): - if self.key_profile_include in v: - new = [] - for x in v[self.key_profile_include]: - new.append(t.generate_string(x)) - v[self.key_profile_include] = new - - # now get the included ones - pro_var = self._get_profile_included_item(self.profile, - self.key_profile_variables, - seen=[self.profile]) - pro_dvar = self._get_profile_included_item(self.profile, - self.key_profile_dvariables, - seen=[self.profile]) - - # exec incl dynvariables - self._shell_exec_dvars(pro_dvar.keys(), pro_dvar) - return pro_var, pro_dvar - - def _merge_variables(self): - """ - resolve all variables across the config - apply them to any needed entries - and return the full list of variables - """ - if self.debug: - self.log.dbg('get local variables') - - # get all variables from local and resolve - var = self._get_variables_dict(self.profile) - - # get all dynvariables from local and resolve - dvar = self._get_dvariables_dict() - - # temporarly resolve all variables for "include" - merged = self._merge_dict(dvar, var) - merged = self._rec_resolve_vars(merged) - if self.debug: - self._debug_dict('variables', merged) - # exec dynvariables - self._shell_exec_dvars(dvar.keys(), merged) - - if self.debug: - self.log.dbg('local variables resolved') - self._debug_dict('variables', merged) - - # resolve profile included variables/dynvariables - pro_var, pro_dvar = self._get_profile_included_vars(merged) - - # merge all and resolve - merged = self._merge_dict(pro_var, merged) - merged = self._merge_dict(pro_dvar, merged) - merged = self._rec_resolve_vars(merged) - - if self.debug: - self.log.dbg('resolve all uses of variables in config') - self._debug_dict('variables', merged) - - prokeys = list(pro_var.keys()) + list(pro_dvar.keys()) - return merged, prokeys - - def _apply_variables(self): - """template any needed parts of the config""" - t = Templategen(variables=self.variables, - func_file=self.settings[Settings.key_func_file], - filter_file=self.settings[Settings.key_filter_file]) + def _parse_blk_dynvariables(self, dic): + """parse the "dynvariables" block""" + dvariables = self._get_entry(dic, + self.key_dvariables, + mandatory=False) + if dvariables: + dvariables = dvariables.copy() + if self._debug: + self._debug_dict('dynvariables block', dvariables) + return dvariables + ######################################################## + # parsing helpers + ######################################################## + def _template_include_entry(self): + """template all "include" entries""" # import_actions new = [] entries = self.settings.get(self.key_import_actions, []) - new = self._template_list(t, entries) + new = 
self._template_list(entries) if new: self.settings[self.key_import_actions] = new # import_configs entries = self.settings.get(self.key_import_configs, []) - new = self._template_list(t, entries) + new = self._template_list(entries) if new: self.settings[self.key_import_configs] = new # import_variables entries = self.settings.get(self.key_import_variables, []) - new = self._template_list(t, entries) + new = self._template_list(entries) if new: self.settings[self.key_import_variables] = new # profile's import for k, v in self.profiles.items(): entries = v.get(self.key_import_profile_dfs, []) - new = self._template_list(t, entries) + new = self._template_list(entries) if new: v[self.key_import_profile_dfs] = new @@ -432,7 +585,7 @@ class CfgYaml: # fix deprecated trans key if self.old_key_trans_r in v: msg = '\"trans\" is deprecated, please use \"trans_read\"' - self.log.warn(msg) + self._log.warn(msg) v[self.key_trans_r] = v[self.old_key_trans_r] del v[self.old_key_trans_r] new[k] = v @@ -446,9 +599,35 @@ class CfgYaml: v[self.key_dotfile_noempty] = val return new - def _get_variables_dict(self, profile): + def _add_variables(self, new, shell=False, template=True, prio=False): + """ + add new variables + @shell: execute the variable through the shell + @template: template the variable + @prio: new takes priority over existing variables + """ + if not new: + return + # merge + if prio: + self.variables = self._merge_dict(new, self.variables) + else: + self.variables = self._merge_dict(self.variables, new) + # ensure enriched variables are relative to this config + self.variables = self._enrich_vars(self.variables, self._profile) + # re-create the templater + self._redefine_templater() + if template: + # rec resolve variables with new ones + self._rec_resolve_variables(self.variables) + if shell: + # shell exec + self._shell_exec_dvars(self.variables, keys=new.keys()) + # re-create the templater + self._redefine_templater() + + def _enrich_vars(self, variables, profile): """return enriched variables""" - variables = deepcopy(self.ori_variables) # add profile variable if profile: variables['profile'] = profile @@ -456,19 +635,24 @@ class CfgYaml: p = self.settings.get(self.key_settings_dotpath) p = self._norm_path(p) variables['_dotdrop_dotpath'] = p - variables['_dotdrop_cfgpath'] = self._norm_path(self.path) + variables['_dotdrop_cfgpath'] = self._norm_path(self._path) p = self.settings.get(self.key_settings_workdir) p = self._norm_path(p) variables['_dotdrop_workdir'] = p return variables - def _get_dvariables_dict(self): - """return dynvariables""" - variables = deepcopy(self.ori_dvariables) - return variables + def _get_profile_included_item(self, keyitem): + """recursively get included in profile""" + profiles = [self._profile] + self._inc_profiles + items = {} + for profile in profiles: + seen = [self._profile] + i = self.__get_profile_included_item(profile, keyitem, seen) + items = self._merge_dict(i, items) + return items - def _get_profile_included_item(self, profile, item, seen): - """recursively get included from profile""" + def __get_profile_included_item(self, profile, keyitem, seen): + """recursively get included from profile""" items = {} if not profile or profile not in self.profiles.keys(): return items @@ -476,19 +660,19 @@ class CfgYaml: # considered profile entry pentry = self.profiles.get(profile) - # recursively get from inherited profile + # recursively get from inherited profile for inherited_profile in pentry.get(self.key_profile_include, []): if inherited_profile 
== profile or inherited_profile in seen: raise YamlException('\"include\" loop') seen.append(inherited_profile) - new = self._get_profile_included_item(inherited_profile, - item, seen) - if self.debug: + new = self.__get_profile_included_item(inherited_profile, + keyitem, seen) + if self._debug: msg = 'included {} from {}: {}' - self.log.dbg(msg.format(item, inherited_profile, new)) + self._dbg(msg.format(keyitem, inherited_profile, new)) items.update(new) - cur = pentry.get(item, {}) + cur = pentry.get(keyitem, {}) return self._merge_dict(cur, items) def _resolve_profile_all(self): @@ -499,12 +683,12 @@ class CfgYaml: if not dfs: continue if self.key_all in dfs: - if self.debug: - self.log.dbg('add ALL to profile {}'.format(k)) + if self._debug: + self._dbg('add ALL to profile \"{}\"'.format(k)) v[self.key_profile_dotfiles] = self.dotfiles.keys() def _resolve_profile_includes(self): - # profiles -> include other profile + """resolve profile(s) including other profiles""" for k, v in self.profiles.items(): self._rec_resolve_profile_include(k) @@ -513,12 +697,7 @@ class CfgYaml: recursively resolve include of other profiles's: * dotfiles * actions - * variables - * dynvariables - variables/dynvariables are directly merged with the - global variables (self.variables) if these are - included in the selected profile - returns dotfiles, actions, variables, dynvariables + returns dotfiles, actions """ this_profile = self.profiles[profile] @@ -526,28 +705,22 @@ class CfgYaml: dotfiles = this_profile.get(self.key_profile_dotfiles, []) or [] actions = this_profile.get(self.key_profile_actions, []) or [] includes = this_profile.get(self.key_profile_include, []) or [] - pvars = this_profile.get(self.key_profile_variables, {}) or {} - pdvars = this_profile.get(self.key_profile_dvariables, {}) or {} if not includes: # nothing to include - return dotfiles, actions, pvars, pdvars + return dotfiles, actions - if self.debug: - self.log.dbg('{} includes {}'.format(profile, ','.join(includes))) - self.log.dbg('{} dotfiles before include: {}'.format(profile, - dotfiles)) - self.log.dbg('{} actions before include: {}'.format(profile, - actions)) - self.log.dbg('{} variables before include: {}'.format(profile, - pvars)) - self.log.dbg('{} dynvariables before include: {}'.format(profile, - pdvars)) + if self._debug: + self._dbg('{} includes {}'.format(profile, ','.join(includes))) + self._dbg('{} dotfiles before include: {}'.format(profile, + dotfiles)) + self._dbg('{} actions before include: {}'.format(profile, + actions)) seen = [] for i in uniq_list(includes): - if self.debug: - self.log.dbg('resolving includes "{}" <- "{}"' - .format(profile, i)) + if self._debug: + self._dbg('resolving includes "{}" <- "{}"' + .format(profile, i)) # ensure no include loop occurs if i in seen: @@ -555,70 +728,42 @@ class CfgYaml: seen.append(i) # included profile even exists if i not in self.profiles.keys(): - self.log.warn('include unknown profile: {}'.format(i)) + self._log.warn('include unknown profile: {}'.format(i)) continue # recursive resolve - if self.debug: - self.log.dbg('recursively resolving includes for profile "{}"' - .format(i)) - o_dfs, o_actions, o_v, o_dv = self._rec_resolve_profile_include(i) + if self._debug: + self._dbg('recursively resolving includes for profile "{}"' + .format(i)) + o_dfs, o_actions = self._rec_resolve_profile_include(i) # merge dotfile keys - if self.debug: - self.log.dbg('Merging dotfiles {} <- {}: {} <- {}' - .format(profile, i, dotfiles, o_dfs)) + if self._debug: + 
self._dbg('Merging dotfiles {} <- {}: {} <- {}' + .format(profile, i, dotfiles, o_dfs)) dotfiles.extend(o_dfs) this_profile[self.key_profile_dotfiles] = uniq_list(dotfiles) # merge actions keys - if self.debug: - self.log.dbg('Merging actions {} <- {}: {} <- {}' - .format(profile, i, actions, o_actions)) + if self._debug: + self._dbg('Merging actions {} <- {}: {} <- {}' + .format(profile, i, actions, o_actions)) actions.extend(o_actions) this_profile[self.key_profile_actions] = uniq_list(actions) - # merge variables - if self.debug: - self.log.dbg('Merging variables {} <- {}: {} <- {}' - .format(profile, i, dict(pvars), dict(o_v))) - pvars = self._merge_dict(o_v, pvars) - this_profile[self.key_profile_variables] = pvars - - # merge dynvariables - if self.debug: - self.log.dbg('Merging dynamic variables {} <- {}: {} <- {}' - .format(profile, i, dict(pdvars), - dict(o_dv))) - pdvars = self._merge_dict(o_dv, pdvars) - this_profile[self.key_profile_dvariables] = pdvars - dotfiles = this_profile.get(self.key_profile_dotfiles, []) actions = this_profile.get(self.key_profile_actions, []) - pvars = this_profile.get(self.key_profile_variables, {}) or {} - pdvars = this_profile.get(self.key_profile_dvariables, {}) or {} - if self.debug: - self.log.dbg('{} dotfiles after include: {}'.format(profile, - dotfiles)) - self.log.dbg('{} actions after include: {}'.format(profile, - actions)) - self.log.dbg('{} variables after include: {}'.format(profile, - pvars)) - self.log.dbg('{} dynvariables after include: {}'.format(profile, - pdvars)) - - if profile == self.profile: - # Only for the selected profile, we execute dynamic variables and - # we merge variables/dynvariables into the global variables - self._shell_exec_dvars(pdvars.keys(), pdvars) - self.variables = self._merge_dict(pvars, self.variables) - self.variables = self._merge_dict(pdvars, self.variables) + if self._debug: + self._dbg('{} dotfiles after include: {}'.format(profile, + dotfiles)) + self._dbg('{} actions after include: {}'.format(profile, + actions)) # since included items are resolved here # we can clear these include - self.profiles[profile][self.key_profile_include] = None - return dotfiles, actions, pvars, pdvars + self.profiles[profile][self.key_profile_include] = [] + return dotfiles, actions ######################################################## # handle imported entries @@ -630,113 +775,26 @@ class CfgYaml: if not paths: return paths = self._resolve_paths(paths) + newvars = {} for path in paths: - if self.debug: - self.log.dbg('import variables from {}'.format(path)) + if self._debug: + self._dbg('import variables from {}'.format(path)) var = self._import_sub(path, self.key_variables, mandatory=False) - if self.debug: - self.log.dbg('import dynvariables from {}'.format(path)) + if self._debug: + self._dbg('import dynvariables from {}'.format(path)) dvar = self._import_sub(path, self.key_dvariables, mandatory=False) + merged = self._merge_dict(dvar, var) - merged = self._rec_resolve_vars(merged) - # execute dvar - self._shell_exec_dvars(dvar.keys(), merged) + self._rec_resolve_variables(merged) + if dvar.keys(): + self._shell_exec_dvars(merged, keys=dvar.keys()) self._clear_profile_vars(merged) - self.variables = self._merge_dict(merged, self.variables) - - def _clear_profile_vars(self, dic): - """remove profile variables from dic if found""" - [dic.pop(k, None) for k in self.prokeys] - - def _parse_extended_import_path(self, path_entry): - """Parse an import path in a tuple (path, fatal_not_found).""" - if self.debug: - 
self.log.dbg('parsing path entry {}'.format(path_entry)) - - path, _, attribute = path_entry.rpartition(self.key_import_sep) - fatal_not_found = attribute != self.key_import_ignore_key - is_valid_attribute = attribute in ('', self.key_import_ignore_key) - if not is_valid_attribute: - # If attribute is not valid it can mean that: - # - path_entry doesn't contain the separator, and attribute is set - # to the whole path by str.rpartition - # - path_entry contains a separator, but it's in the file path, so - # attribute is set to whatever comes after the separator by - # str.rpartition - # In both cases, path_entry is the path we're looking for. - if self.debug: - self.log.dbg('using attribute default values for path {}' - .format(path_entry)) - path = path_entry - fatal_not_found = self.key_import_fatal_not_found - elif self.debug: - self.log.dbg('path entry {} has fatal_not_found flag set to {}' - .format(path_entry, fatal_not_found)) - return path, fatal_not_found - - def _handle_non_existing_path(self, path, fatal_not_found=True): - """Raise an exception or log a warning to handle non-existing paths.""" - error = 'bad path {}'.format(path) - if fatal_not_found: - raise YamlException(error) - self.log.warn(error) - - def _check_path_existence(self, path, fatal_not_found=True): - """Check if a path exists, raising if necessary.""" - if os.path.exists(path): - if self.debug: - self.log.dbg('path {} exists'.format(path)) - return path - - self._handle_non_existing_path(path, fatal_not_found) - # Explicit return for readability. Anything evaluating to false is ok. - return None - - def _process_path(self, path_entry): - """Process a path entry to a normalized form. - - This method processed a path entry. Namely it: - - Normalizes the path. - - Expands globs. - - Checks for path existence, taking in account fatal_not_found. - This method always returns a list containing only absolute paths - existing on the filesystem. If the input is not a glob, the list - contains at most one element, otheriwse it could hold more. - - :param path_entry: A path with an optional attribute. - :type path_entry: str - :return: A list of normalized existing paths, obtained from the input. - :rtype: List of str - """ - path, fatal_not_found = self._parse_extended_import_path(path_entry) - path = self._norm_path(path) - paths = self._glob_path(path) if self._is_glob(path) else [path] - if not paths: - if self.debug: - self.log.dbg("glob path {} didn't expand".format(path)) - self._handle_non_existing_path(path, fatal_not_found) - return [] - - checked_paths = (self._check_path_existence(p, fatal_not_found) - for p in paths) - return [p for p in checked_paths if p] - - def _resolve_paths(self, paths): - """Resolve a list of path to existing paths. - - This function resolves a list of paths. This means normalizing, - expanding globs and checking for existence, taking in account - fatal_not_found flags. - - :param paths: A list of paths. Might contain globs and options. - :type paths: List of str - :return: A list of processed paths. 
- :rtype: List of str - """ - processed_paths = (self._process_path(p) for p in paths) - return list(chain.from_iterable(processed_paths)) + newvars = self._merge_dict(newvars, merged) + if self._debug: + self._debug_dict('imported variables', newvars) + return newvars def _import_actions(self): """import external actions from paths""" @@ -745,8 +803,8 @@ class CfgYaml: return paths = self._resolve_paths(paths) for path in paths: - if self.debug: - self.log.dbg('import actions from {}'.format(path)) + if self._debug: + self._dbg('import actions from {}'.format(path)) new = self._import_sub(path, self.key_actions, mandatory=False, patch_func=self._norm_actions) @@ -758,8 +816,8 @@ class CfgYaml: imp = v.get(self.key_import_profile_dfs, None) if not imp: continue - if self.debug: - self.log.dbg('import dotfiles for profile {}'.format(k)) + if self._debug: + self._dbg('import dotfiles for profile {}'.format(k)) paths = self._resolve_paths(imp) for path in paths: current = v.get(self.key_dotfiles, []) @@ -769,9 +827,11 @@ class CfgYaml: def _import_config(self, path): """import config from path""" - if self.debug: - self.log.dbg('import config from {}'.format(path)) - sub = CfgYaml(path, profile=self.profile, debug=self.debug) + if self._debug: + self._dbg('import config from {}'.format(path)) + sub = CfgYaml(path, profile=self._profile, + addprofiles=self._inc_profiles, + debug=self._debug) # settings are ignored from external file # except for filter_file and func_file @@ -792,9 +852,9 @@ class CfgYaml: self.trans_w = self._merge_dict(self.trans_w, sub.trans_w) self._clear_profile_vars(sub.variables) - if self.debug: + if self._debug: self._debug_dict('add import_configs var', sub.variables) - self.variables = self._merge_dict(sub.variables, self.variables) + self._add_variables(sub.variables, prio=True) def _import_configs(self): """import configs from external files""" @@ -811,20 +871,20 @@ class CfgYaml: import the block "key" from "path" patch_func is applied to each element if defined """ - if self.debug: - self.log.dbg('import \"{}\" from \"{}\"'.format(key, path)) + if self._debug: + self._dbg('import \"{}\" from \"{}\"'.format(key, path)) extdict = self._load_yaml(path) new = self._get_entry(extdict, key, mandatory=mandatory) if patch_func: - if self.debug: - self.log.dbg('calling patch: {}'.format(patch_func)) + if self._debug: + self._dbg('calling patch: {}'.format(patch_func)) new = patch_func(new) if not new and mandatory: err = 'no \"{}\" imported from \"{}\"'.format(key, path) - self.log.warn(err) + self._log.warn(err) raise YamlException(err) - if self.debug: - self.log.dbg('imported \"{}\": {}'.format(key, new)) + if self._debug: + self._dbg('imported \"{}\": {}'.format(key, new)) return new ######################################################## @@ -835,89 +895,12 @@ class CfgYaml: """add a new profile if it doesn't exist""" if key not in self.profiles.keys(): # update yaml_dict - self.yaml_dict[self.key_profiles][key] = { + self._yaml_dict[self.key_profiles][key] = { self.key_profile_dotfiles: [] } - if self.debug: - self.log.dbg('adding new profile: {}'.format(key)) - self.dirty = True - - def add_dotfile_to_profile(self, dotfile_key, profile_key): - """add an existing dotfile key to a profile_key""" - self._new_profile(profile_key) - profile = self.yaml_dict[self.key_profiles][profile_key] - if self.key_profile_dotfiles not in profile or \ - profile[self.key_profile_dotfiles] is None: - profile[self.key_profile_dotfiles] = [] - pdfs = profile[self.key_profile_dotfiles] - 
if self.key_all not in pdfs and \ - dotfile_key not in pdfs: - profile[self.key_profile_dotfiles].append(dotfile_key) - if self.debug: - msg = 'add \"{}\" to profile \"{}\"'.format(dotfile_key, - profile_key) - msg.format(dotfile_key, profile_key) - self.log.dbg(msg) - self.dirty = True - return self.dirty - - def get_all_dotfile_keys(self): - """return all existing dotfile keys""" - return self.dotfiles.keys() - - def add_dotfile(self, key, src, dst, link): - """add a new dotfile""" - if key in self.dotfiles.keys(): - return False - if self.debug: - self.log.dbg('adding new dotfile: {}'.format(key)) - - df_dict = { - self.key_dotfile_src: src, - self.key_dotfile_dst: dst, - } - dfl = self.settings[self.key_settings_link_dotfile_default] - if str(link) != dfl: - df_dict[self.key_dotfile_link] = str(link) - self.yaml_dict[self.key_dotfiles][key] = df_dict - self.dirty = True - - def del_dotfile(self, key): - """remove this dotfile from config""" - if key not in self.yaml_dict[self.key_dotfiles]: - self.log.err('key not in dotfiles: {}'.format(key)) - return False - if self.debug: - self.log.dbg('remove dotfile: {}'.format(key)) - del self.yaml_dict[self.key_dotfiles][key] - if self.debug: - dfs = self.yaml_dict[self.key_dotfiles] - self.log.dbg('new dotfiles: {}'.format(dfs)) - self.dirty = True - return True - - def del_dotfile_from_profile(self, df_key, pro_key): - """remove this dotfile from that profile""" - if df_key not in self.dotfiles.keys(): - self.log.err('key not in dotfiles: {}'.format(df_key)) - return False - if pro_key not in self.profiles.keys(): - self.log.err('key not in profile: {}'.format(pro_key)) - return False - # get the profile dictionary - profile = self.yaml_dict[self.key_profiles][pro_key] - if df_key not in profile[self.key_profile_dotfiles]: - return True - if self.debug: - dfs = profile[self.key_profile_dotfiles] - self.log.dbg('{} profile dotfiles: {}'.format(pro_key, dfs)) - self.log.dbg('remove {} from profile {}'.format(df_key, pro_key)) - profile[self.key_profile_dotfiles].remove(df_key) - if self.debug: - dfs = profile[self.key_profile_dotfiles] - self.log.dbg('{} profile dotfiles: {}'.format(pro_key, dfs)) - self.dirty = True - return True + if self._debug: + self._dbg('adding new profile: {}'.format(key)) + self._dirty = True ######################################################## # handle deprecated entries @@ -925,8 +908,11 @@ class CfgYaml: def _fix_deprecated(self, yamldict): """fix deprecated entries""" + if not yamldict: + return self._fix_deprecated_link_by_default(yamldict) self._fix_deprecated_dotfile_link(yamldict) + return yamldict def _fix_deprecated_link_by_default(self, yamldict): """fix deprecated link_by_default""" @@ -944,9 +930,9 @@ class CfgYaml: else: config[newkey] = self.lnk_nolink del config[key] - self.log.warn('deprecated \"link_by_default\"') - self.dirty = True - self.dirty_deprecated = True + self._log.warn('deprecated \"link_by_default\"') + self._dirty = True + self._dirty_deprecated = True def _fix_deprecated_dotfile_link(self, yamldict): """fix deprecated link in dotfiles""" @@ -964,9 +950,9 @@ class CfgYaml: if cur: new = self.lnk_link dotfile[self.key_dotfile_link] = new - self.dirty = True - self.dirty_deprecated = True - self.log.warn('deprecated \"link\" value') + self._dirty = True + self._dirty_deprecated = True + self._log.warn('deprecated \"link\" value') elif self.key_dotfile_link_children in dotfile and \ type(dotfile[self.key_dotfile_link_children]) is bool: @@ -977,9 +963,9 @@ class CfgYaml: new = 
self.lnk_children del dotfile[self.key_dotfile_link_children] dotfile[self.key_dotfile_link] = new - self.dirty = True - self.dirty_deprecated = True - self.log.warn('deprecated \"link_children\" value') + self._dirty = True + self._dirty_deprecated = True + self._log.warn('deprecated \"link_children\" value') ######################################################## # yaml utils @@ -995,62 +981,23 @@ class CfgYaml: content[self.key_dotfiles] = None if self.key_profiles not in content: content[self.key_profiles] = None - return content - def save(self): - """save this instance and return True if saved""" - if not self.dirty: - return False - - content = self._prepare_to_save(self.yaml_dict) - - if self.dirty_deprecated: - # add minversion - settings = content[self.key_settings] - settings[self.key_settings_minversion] = VERSION - - # save to file - if self.debug: - self.log.dbg('saving to {}'.format(self.path)) - try: - with open(self.path, 'w') as f: - self._yaml_dump(content, f) - except Exception as e: - self.log.err(e) - raise YamlException('error saving config: {}'.format(self.path)) - - if self.dirty_deprecated: - warn = 'your config contained deprecated entries' - warn += ' and was updated' - self.log.warn(warn) - - self.dirty = False - self.cfg_updated = False - return True - - def dump(self): - """dump the config dictionary""" - output = io.StringIO() - content = self._prepare_to_save(self.yaml_dict.copy()) - self._yaml_dump(content, output) - return output.getvalue() - def _load_yaml(self, path): """load a yaml file to a dict""" content = {} - if self.debug: - self.log.dbg('----------start:{}----------'.format(path)) + if self._debug: + self._dbg('----------start:{}----------'.format(path)) cfg = '\n' with open(path, 'r') as f: for line in f: cfg += line - self.log.dbg(cfg.rstrip()) - self.log.dbg('----------end:{}----------'.format(path)) + self._dbg(cfg.rstrip()) + self._dbg('----------end:{}----------'.format(path)) try: content = self._yaml_load(path) except Exception as e: - self.log.err(e) + self._log.err(e) raise YamlException('invalid config: {}'.format(path)) return content @@ -1070,10 +1017,223 @@ class CfgYaml: y.typ = 'rt' y.dump(content, where) + ######################################################## + # templating + ######################################################## + + def _redefine_templater(self): + """create templater based on current variables""" + fufile = self.settings[Settings.key_func_file] + fifile = self.settings[Settings.key_filter_file] + self._tmpl = Templategen(variables=self.variables, + func_file=fufile, + filter_file=fifile) + + def _template_item(self, item, exc_if_fail=True): + """ + template an item using the templategen + will raise an exception if template failed and exc_if_fail + """ + if not Templategen.var_is_template(item): + return item + try: + val = item + while Templategen.var_is_template(val): + val = self._tmpl.generate_string(val) + except UndefinedException as e: + if exc_if_fail: + raise e + return val + + def _template_list(self, entries): + """template a list of entries""" + new = [] + if not entries: + return new + for e in entries: + et = self._template_item(e) + if self._debug and e != et: + self._dbg('resolved: {} -> {}'.format(e, et)) + new.append(et) + return new + + def _template_dict(self, entries): + """template a dictionary of entries""" + new = {} + if not entries: + return new + for k, v in entries.items(): + vt = self._template_item(v) + if self._debug and v != vt: + self._dbg('resolved: {} -> {}'.format(v, 
vt)) + new[k] = vt + return new + + def _template_dotfiles_paths(self): + """template dotfiles paths""" + if self._debug: + self._dbg('templating dotfiles paths') + dotfiles = self.dotfiles.copy() + + # only keep dotfiles related to the selected profile + pdfs = [] + pro = self.profiles.get(self._profile, []) + if pro: + pdfs = list(pro.get(self.key_profile_dotfiles, [])) + for addpro in self._inc_profiles: + pro = self.profiles.get(addpro, []) + if not pro: + continue + pdfsalt = pro.get(self.key_profile_dotfiles, []) + pdfs.extend(pdfsalt) + pdfs = uniq_list(pdfs) + + if self.key_all not in pdfs: + # take a subset of the dotfiles + newdotfiles = {} + for k, v in dotfiles.items(): + if k in pdfs: + newdotfiles[k] = v + dotfiles = newdotfiles + + for dotfile in dotfiles.values(): + # src + src = dotfile[self.key_dotfile_src] + newsrc = self.resolve_dotfile_src(src, templater=self._tmpl) + dotfile[self.key_dotfile_src] = newsrc + # dst + dst = dotfile[self.key_dotfile_dst] + newdst = self.resolve_dotfile_dst(dst, templater=self._tmpl) + dotfile[self.key_dotfile_dst] = newdst + + def _rec_resolve_variables(self, variables): + """recursive resolve variables""" + var = self._enrich_vars(variables, self._profile) + # use a separated templategen to handle variables + # resolved outside the main config + t = Templategen(variables=var, + func_file=self.settings[Settings.key_func_file], + filter_file=self.settings[Settings.key_filter_file]) + for k in variables.keys(): + val = variables[k] + while Templategen.var_is_template(val): + val = t.generate_string(val) + variables[k] = val + t.update_variables(variables) + if variables is self.variables: + self._redefine_templater() + + def _get_profile_included_vars(self): + """resolve profile included variables/dynvariables""" + for k, v in self.profiles.items(): + if self.key_profile_include in v and v[self.key_profile_include]: + new = [] + for x in v[self.key_profile_include]: + new.append(self._tmpl.generate_string(x)) + v[self.key_profile_include] = new + + # now get the included ones + pro_var = self._get_profile_included_item(self.key_profile_variables) + pro_dvar = self._get_profile_included_item(self.key_profile_dvariables) + + # the included profiles + inc_profiles = [] + if self._profile and self._profile in self.profiles.keys(): + pentry = self.profiles.get(self._profile) + inc_profiles = pentry.get(self.key_profile_include, []) + + # exec incl dynvariables + return inc_profiles, pro_var, pro_dvar + ######################################################## # helpers ######################################################## + def _clear_profile_vars(self, dic): + """ + remove profile variables from dic if found inplace + to avoid profile variables being overwriten + """ + if not dic: + return + [dic.pop(k, None) for k in self._profilevarskeys] + + def _parse_extended_import_path(self, path_entry): + """Parse an import path in a tuple (path, fatal_not_found).""" + if self._debug: + self._dbg('parsing path entry {}'.format(path_entry)) + + path, _, attribute = path_entry.rpartition(self.key_import_sep) + fatal_not_found = attribute != self.key_import_ignore_key + is_valid_attribute = attribute in ('', self.key_import_ignore_key) + if not is_valid_attribute: + # If attribute is not valid it can mean that: + # - path_entry doesn't contain the separator, and attribute is set + # to the whole path by str.rpartition + # - path_entry contains a separator, but it's in the file path, so + # attribute is set to whatever comes after the separator by + # 
str.rpartition
+            # In both cases, path_entry is the path we're looking for.
+            if self._debug:
+                self._dbg('using attribute default values for path {}'
+                          .format(path_entry))
+            path = path_entry
+            fatal_not_found = self.key_import_fatal_not_found
+        elif self._debug:
+            self._dbg('path entry {} has fatal_not_found flag set to {}'
+                      .format(path_entry, fatal_not_found))
+        return path, fatal_not_found
+
+    def _handle_non_existing_path(self, path, fatal_not_found=True):
+        """Raise an exception or log a warning to handle non-existing paths."""
+        error = 'bad path {}'.format(path)
+        if fatal_not_found:
+            raise YamlException(error)
+        self._log.warn(error)
+
+    def _check_path_existence(self, path, fatal_not_found=True):
+        """Check if a path exists, raising if necessary."""
+        if os.path.exists(path):
+            if self._debug:
+                self._dbg('path {} exists'.format(path))
+            return path
+
+        self._handle_non_existing_path(path, fatal_not_found)
+        # Explicit return for readability. Anything evaluating to false is ok.
+        return None
+
+    def _process_path(self, path_entry):
+        """
+        This method processes a path entry. Namely it:
+        - Normalizes the path.
+        - Expands globs.
+        - Checks for path existence, taking into account fatal_not_found.
+        This method always returns a list containing only absolute paths
+        existing on the filesystem. If the input is not a glob, the list
+        contains at most one element, otherwise it could hold more.
+        """
+        path, fatal_not_found = self._parse_extended_import_path(path_entry)
+        path = self._norm_path(path)
+        paths = self._glob_path(path) if self._is_glob(path) else [path]
+        if not paths:
+            if self._debug:
+                self._dbg("glob path {} didn't expand".format(path))
+            self._handle_non_existing_path(path, fatal_not_found)
+            return []
+
+        checked_paths = (self._check_path_existence(p, fatal_not_found)
+                         for p in paths)
+        return [p for p in checked_paths if p]
+
+    def _resolve_paths(self, paths):
+        """
+        This function resolves a list of paths. This means normalizing,
+        expanding globs and checking for existence, taking into account
+        fatal_not_found flags.
+ """ + processed_paths = (self._process_path(p) for p in paths) + return list(chain.from_iterable(processed_paths)) + def _merge_dict(self, high, low): """merge high and low dict""" if not high: @@ -1083,21 +1243,24 @@ class CfgYaml: return {**low, **high} def _get_entry(self, dic, key, mandatory=True): - """return entry from yaml dictionary""" + """return copy of entry from yaml dictionary""" if key not in dic: if mandatory: raise YamlException('invalid config: no {} found'.format(key)) dic[key] = {} - return dic[key] + return deepcopy(dic[key]) if mandatory and not dic[key]: # ensure is not none dic[key] = {} - return dic[key] + return deepcopy(dic[key]) def _clear_none(self, dic): """recursively delete all none/empty values in a dictionary.""" new = {} for k, v in dic.items(): + if k == self.key_dotfiles and v: + new[k] = v + continue newv = v if isinstance(v, dict): newv = self._clear_none(v) @@ -1119,8 +1282,8 @@ class CfgYaml: def _glob_path(self, path): """Expand a glob.""" - if self.debug: - self.log.dbg('expanding glob {}'.format(path)) + if self._debug: + self._dbg('expanding glob {}'.format(path)) expanded_path = os.path.expanduser(path) return glob.glob(expanded_path, recursive=True) @@ -1130,40 +1293,31 @@ class CfgYaml: return path path = os.path.expanduser(path) if not os.path.isabs(path): - d = os.path.dirname(self.path) + d = os.path.dirname(self._path) ret = os.path.join(d, path) - if self.debug: + if self._debug: msg = 'normalizing relative to cfg: {} -> {}' - self.log.dbg(msg.format(path, ret)) + self._dbg(msg.format(path, ret)) return ret ret = os.path.normpath(path) - if self.debug and path != ret: - self.log.dbg('normalizing: {} -> {}'.format(path, ret)) + if self._debug and path != ret: + self._dbg('normalizing: {} -> {}'.format(path, ret)) return ret - def _shell_exec_dvars(self, keys, variables): - """shell execute dynvariables""" - for k in list(keys): - ret, out = shell(variables[k], debug=self.debug) + def _shell_exec_dvars(self, dic, keys=[]): + """shell execute dynvariables in-place""" + if not keys: + keys = dic.keys() + for k in keys: + v = dic[k] + ret, out = shell(v, debug=self._debug) if not ret: - err = 'var \"{}: {}\" failed: {}'.format(k, variables[k], out) - self.log.err(err) + err = 'var \"{}: {}\" failed: {}'.format(k, v, out) + self._log.err(err) raise YamlException(err) - if self.debug: - self.log.dbg('\"{}\": {} -> {}'.format(k, variables[k], out)) - variables[k] = out - - def _template_list(self, t, entries): - """template a list of entries""" - new = [] - if not entries: - return new - for e in entries: - et = t.generate_string(e) - if self.debug and e != et: - self.log.dbg('resolved: {} -> {}'.format(e, et)) - new.append(et) - return new + if self._debug: + self._dbg('{}: `{}` -> {}'.format(k, v, out)) + dic[k] = out def _check_minversion(self, minversion): if not minversion: @@ -1179,12 +1333,29 @@ class CfgYaml: err += ' Please update.' 
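For readers skimming this hunk, the import-path helpers added above (`_parse_extended_import_path`, `_check_path_existence`, `_process_path`, `_resolve_paths`) boil down to: split an optional suffix off the entry, normalize and glob it, and keep only paths that actually exist. Below is a minimal standalone sketch of that flow; the `:` separator and the `optional` keyword are assumptions standing in for the class constants `key_import_sep` / `key_import_ignore_key`, which are defined outside this hunk.

```python
import glob
import os
from itertools import chain

# hypothetical stand-ins for the class constants referenced above
SEP = ':'
IGNORE_KEY = 'optional'


def parse_extended_import_path(path_entry):
    """split 'path[:optional]' into (path, fatal_not_found)"""
    path, _, attribute = path_entry.rpartition(SEP)
    if attribute not in ('', IGNORE_KEY):
        # no separator at all, or the separator is part of the file path
        return path_entry, True
    return path, attribute != IGNORE_KEY


def process_path(path_entry):
    """normalize a single entry, expand globs, keep existing paths only"""
    path, fatal_not_found = parse_extended_import_path(path_entry)
    path = os.path.expanduser(path)
    is_glob = any(c in path for c in '*?[')
    paths = glob.glob(path, recursive=True) if is_glob else [path]
    existing = [p for p in paths if os.path.exists(p)]
    if not existing and fatal_not_found:
        raise RuntimeError('bad path {}'.format(path))
    return existing


def resolve_paths(entries):
    """flatten the per-entry results into a single list of paths"""
    return list(chain.from_iterable(process_path(e) for e in entries))
```

With this scheme an entry suffixed with the ignore keyword simply expands to nothing when the file is missing, instead of aborting the config load.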
raise YamlException(err) + def _debug_entries(self): + """debug print all interesting entries""" + if not self._debug: + return + self._dbg('Current entries') + self._debug_dict('entry settings', self.settings) + self._debug_dict('entry dotfiles', self.dotfiles) + self._debug_dict('entry profiles', self.profiles) + self._debug_dict('entry actions', self.actions) + self._debug_dict('entry trans_r', self.trans_r) + self._debug_dict('entry trans_w', self.trans_w) + self._debug_dict('entry variables', self.variables) + def _debug_dict(self, title, elems): """pretty print dict""" - if not self.debug: + if not self._debug: return - self.log.dbg('{}:'.format(title)) + self._dbg('{}:'.format(title)) if not elems: return for k, v in elems.items(): - self.log.dbg('\t- \"{}\": {}'.format(k, v)) + self._dbg('\t- \"{}\": {}'.format(k, v)) + + def _dbg(self, content): + pre = os.path.basename(self._path) + self._log.dbg('[{}] {}'.format(pre, content)) diff --git a/dotdrop/dotdrop.py b/dotdrop/dotdrop.py index 417c095..d27a437 100644 --- a/dotdrop/dotdrop.py +++ b/dotdrop/dotdrop.py @@ -20,7 +20,7 @@ from dotdrop.comparator import Comparator from dotdrop.utils import get_tmpdir, remove, strip_home, \ run, uniq_list, patch_ignores, dependencies_met from dotdrop.linktypes import LinkTypes -from dotdrop.exceptions import YamlException +from dotdrop.exceptions import YamlException, UndefinedException LOG = Logger() TRANS_SUFFIX = 'trans' @@ -230,6 +230,7 @@ def cmd_compare(o, tmp): newvars = dotfile.get_dotfile_variables() t.add_tmp_vars(newvars=newvars) + # dotfiles does not exist / not installed if o.debug: LOG.dbg('comparing {}'.format(dotfile)) src = dotfile.src @@ -239,9 +240,9 @@ def cmd_compare(o, tmp): same = False continue + # apply transformation tmpsrc = None if dotfile.trans_r: - # apply transformation if o.debug: LOG.dbg('applying transformation before comparing') tmpsrc = apply_trans(o.dotpath, dotfile, t, debug=o.debug) @@ -261,20 +262,26 @@ def cmd_compare(o, tmp): LOG.dbg('points to itself') continue - # install dotfile to temporary dir - ret, insttmp = inst.install_to_temp(t, tmp, src, dotfile.dst) + # install dotfile to temporary dir and compare + ret, err, insttmp = inst.install_to_temp(t, tmp, src, dotfile.dst) if not ret: # failed to install to tmp + line = '=> compare {}: error' + LOG.log(line.format(dotfile.key, err)) + LOG.err(err) same = False continue ignores = list(set(o.compare_ignore + dotfile.cmpignore)) ignores = patch_ignores(ignores, dotfile.dst, debug=o.debug) diff = comp.compare(insttmp, dotfile.dst, ignore=ignores) + + # clean tmp transformed dotfile if any if tmpsrc: - # clean tmp transformed dotfile if any tmpsrc = os.path.join(o.dotpath, tmpsrc) if os.path.exists(tmpsrc): remove(tmpsrc) + + # print diff result if diff == '': if o.debug: line = '=> compare {}: diffing with \"{}\"' @@ -655,7 +662,10 @@ def main(): try: o = Options() except YamlException as e: - LOG.err('config file error: {}'.format(str(e))) + LOG.err('config error: {}'.format(str(e))) + return False + except UndefinedException as e: + LOG.err('config error: {}'.format(str(e))) return False if o.debug: diff --git a/dotdrop/exceptions.py b/dotdrop/exceptions.py index a327696..9c2df22 100644 --- a/dotdrop/exceptions.py +++ b/dotdrop/exceptions.py @@ -9,3 +9,8 @@ diverse exceptions class YamlException(Exception): """exception in CfgYaml""" pass + + +class UndefinedException(Exception): + """exception in templating""" + pass diff --git a/dotdrop/installer.py b/dotdrop/installer.py index 0cc6856..ef0e0a9 
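The dotdrop.py and exceptions.py hunks above add a second top-level failure mode: besides `YamlException` from config parsing, templating can now raise `UndefinedException`, and `main()` turns both into a plain "config error" message instead of a traceback. A tiny self-contained sketch of that pattern (the exception names mirror the hunks, `bad_loader` is hypothetical):

```python
class YamlException(Exception):
    """exception in CfgYaml"""


class UndefinedException(Exception):
    """exception in templating"""


def main(load_options):
    try:
        load_options()
    except YamlException as exc:
        print('config error: {}'.format(exc))
        return False
    except UndefinedException as exc:
        print('config error: {}'.format(exc))
        return False
    return True


def bad_loader():
    # pretend the config referenced a variable that does not exist
    raise UndefinedException("undefined variable: 'user'")


print(main(bad_loader))  # prints "config error: ..." then False
```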
100644 --- a/dotdrop/installer.py +++ b/dotdrop/installer.py @@ -12,6 +12,7 @@ import errno from dotdrop.logger import Logger from dotdrop.templategen import Templategen import dotdrop.utils as utils +from dotdrop.exceptions import UndefinedException class Installer: @@ -239,7 +240,6 @@ class Installer: actionexec = None else: if err: - return ret, err return self._log_install(ret, err) return self._log_install(installed > 0, None) @@ -325,8 +325,12 @@ class Installer: err = 'dotfile points to itself: {}'.format(dst) return False, err saved = templater.add_tmp_vars(self._get_tmp_file_vars(src, dst)) - content = templater.generate(src) - templater.restore_vars(saved) + try: + content = templater.generate(src) + except UndefinedException as e: + return False, str(e) + finally: + templater.restore_vars(saved) if noempty and utils.content_empty(content): if self.debug: self.log.dbg('ignoring empty template: {}'.format(src)) @@ -547,9 +551,10 @@ class Installer: src = os.path.expanduser(src) dst = os.path.expanduser(dst) if self.debug: - self.log.dbg('tmp install {} to {}'.format(src, dst)) + self.log.dbg('tmp install {} (defined dst: {})'.format(src, dst)) # install the dotfile to a temp directory for comparing - ret, tmpdst = self._install_to_temp(templater, src, dst, tmpdir) + r, tmpdst = self._install_to_temp(templater, src, dst, tmpdir) + ret, err = r if self.debug: self.log.dbg('tmp installed in {}'.format(tmpdst)) # reset flags @@ -557,4 +562,4 @@ class Installer: self.diff = diffsaved self.comparing = False self.create = createsaved - return ret, tmpdst + return ret, err, tmpdst diff --git a/dotdrop/options.py b/dotdrop/options.py index d36025f..6f64593 100644 --- a/dotdrop/options.py +++ b/dotdrop/options.py @@ -109,9 +109,11 @@ class Options(AttrMonitor): """constructor @args: argument dictionary (if None use sys) """ - self.args = args + self.args = {} if not args: self.args = docopt(USAGE, version=VERSION) + if args: + self.args = args.copy() self.log = Logger() self.debug = self.args['--verbose'] or ENV_DEBUG in os.environ self.dry = self.args['--dry'] @@ -122,6 +124,7 @@ class Options(AttrMonitor): self.confpath = self._get_config_path() if self.debug: self.log.dbg('version: {}'.format(VERSION)) + self.log.dbg('command: {}'.format(' '.join(sys.argv))) self.log.dbg('config file: {}'.format(self.confpath)) self._read_config() diff --git a/dotdrop/templategen.py b/dotdrop/templategen.py index 54aa90c..feb7db0 100644 --- a/dotdrop/templategen.py +++ b/dotdrop/templategen.py @@ -7,12 +7,16 @@ jinja2 template generator import os from jinja2 import Environment, FileSystemLoader, \ - ChoiceLoader, FunctionLoader, TemplateNotFound + ChoiceLoader, FunctionLoader, TemplateNotFound, \ + StrictUndefined +from jinja2.exceptions import UndefinedError + # local imports import dotdrop.utils as utils from dotdrop.logger import Logger import dotdrop.jhelpers as jhelpers +from dotdrop.exceptions import UndefinedException BLOCK_START = '{%@@' BLOCK_END = '@@%}' @@ -36,6 +40,7 @@ class Templategen: self.base = base.rstrip(os.sep) self.debug = debug self.log = Logger() + self.variables = {} loader1 = FileSystemLoader(self.base) loader2 = FunctionLoader(self._template_loader) loader = ChoiceLoader([loader1, loader2]) @@ -47,11 +52,14 @@ class Templategen: variable_start_string=VAR_START, variable_end_string=VAR_END, comment_start_string=COMMENT_START, - comment_end_string=COMMENT_END) + comment_end_string=COMMENT_END, + undefined=StrictUndefined) + # adding variables - self.env.globals['env'] = 
os.environ + self.variables['env'] = os.environ if variables: - self.env.globals.update(variables) + self.variables.update(variables) + # adding header method self.env.globals['header'] = self._header # adding helper methods @@ -72,32 +80,48 @@ class Templategen: self._debug_dict('template additional variables', variables) def generate(self, src): - """render template from path""" + """ + render template from path + may raise a UndefinedException + in case a variable is undefined + """ if not os.path.exists(src): return '' - return self._handle_file(src) + try: + return self._handle_file(src) + except UndefinedError as e: + err = 'undefined variable: {}'.format(e.message) + raise UndefinedException(err) def generate_string(self, string): - """render template from string""" + """ + render template from string + may raise a UndefinedException + in case a variable is undefined + """ if not string: return '' - return self.env.from_string(string).render() + try: + return self.env.from_string(string).render(self.variables) + except UndefinedError as e: + err = 'undefined variable: {}'.format(e.message) + raise UndefinedException(err) def add_tmp_vars(self, newvars={}): """add vars to the globals, make sure to call restore_vars""" - saved_globals = self.env.globals.copy() + saved_variables = self.variables.copy() if not newvars: - return saved_globals - self.env.globals.update(newvars) - return saved_globals + return saved_variables + self.variables.update(newvars) + return saved_variables def restore_vars(self, saved_globals): """restore globals from add_tmp_vars""" - self.env.globals = saved_globals.copy() + self.variables = saved_globals.copy() def update_variables(self, variables): """update variables""" - self.env.globals.update(variables) + self.variables.update(variables) def _load_path_to_dic(self, path, dic): mod = utils.get_module_from_path(path) @@ -160,7 +184,7 @@ class Templategen: template_rel_path = os.path.relpath(src, self.base) try: template = self.env.get_template(template_rel_path) - content = template.render() + content = template.render(self.variables) except UnicodeDecodeError: data = self._read_bad_encoded_text(src) content = self.generate_string(data) diff --git a/dotdrop/updater.py b/dotdrop/updater.py index 67b9f6a..c5cef2f 100644 --- a/dotdrop/updater.py +++ b/dotdrop/updater.py @@ -14,6 +14,7 @@ from dotdrop.logger import Logger from dotdrop.templategen import Templategen from dotdrop.utils import patch_ignores, remove, get_unique_tmp_name, \ write_to_tmpfile, must_ignore, mirror_file_rights +from dotdrop.exceptions import UndefinedException TILD = '~' @@ -186,7 +187,11 @@ class Updater: if self.debug: self.log.dbg('{} is a template'.format(dtpath)) if self.showpatch: - self._show_patch(path, dtpath) + try: + self._show_patch(path, dtpath) + except UndefinedException as e: + msg = 'unable to show patch for {}: {}'.format(path, e) + self.log.warn(msg) return False if compare and filecmp.cmp(path, dtpath, shallow=False) and \ self._same_rights(path, dtpath): diff --git a/tests-ng/actions-args-template.sh b/tests-ng/actions-args-template.sh index 42dc3c7..f8d469d 100755 --- a/tests-ng/actions-args-template.sh +++ b/tests-ng/actions-args-template.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/actions-args.sh 
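The templategen.py hunk just above is the heart of this change set: variables move from `env.globals` into a plain dict that is passed to `render()`, and `StrictUndefined` makes any unknown variable raise instead of silently rendering as an empty string; the resulting jinja2 `UndefinedError` is then wrapped into dotdrop's own `UndefinedException`. Here is a minimal sketch of that behaviour with dotdrop's variable delimiters; the re-render loop mirrors `_template_item` from the cfg_yaml.py hunk earlier, and the `VAR_START in val` check is a simplification of `Templategen.var_is_template`.

```python
from jinja2 import Environment, StrictUndefined
from jinja2.exceptions import UndefinedError


class UndefinedException(Exception):
    """raised when a template references an unknown variable"""


VAR_START, VAR_END = '{{@@', '@@}}'
env = Environment(undefined=StrictUndefined,
                  variable_start_string=VAR_START,
                  variable_end_string=VAR_END)
variables = {'os': 'linux', 'cfg': '{{@@ os @@}}.yaml'}


def generate_string(string):
    try:
        return env.from_string(string).render(variables)
    except UndefinedError as exc:
        raise UndefinedException('undefined variable: {}'.format(exc.message))


def template_item(item):
    """re-render until no template marker is left (nested variables)"""
    val = item
    while VAR_START in val:
        val = generate_string(val)
    return val


print(template_item('include {{@@ cfg @@}}'))   # include linux.yaml
# template_item('{{@@ missing @@}}') raises UndefinedException
```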
b/tests-ng/actions-args.sh index 6da4f43..b15d9d5 100755 --- a/tests-ng/actions-args.sh +++ b/tests-ng/actions-args.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/actions-default.sh b/tests-ng/actions-default.sh index 5eb4572..e144243 100755 --- a/tests-ng/actions-default.sh +++ b/tests-ng/actions-default.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/actions-pre.sh b/tests-ng/actions-pre.sh index 48dc59a..24ae575 100755 --- a/tests-ng/actions-pre.sh +++ b/tests-ng/actions-pre.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/actions-template.sh b/tests-ng/actions-template.sh index 36a8036..577a28f 100755 --- a/tests-ng/actions-template.sh +++ b/tests-ng/actions-template.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/actions.sh b/tests-ng/actions.sh index c4e40f9..20486f2 100755 --- a/tests-ng/actions.sh +++ b/tests-ng/actions.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/compare-ignore-relative.sh b/tests-ng/compare-ignore-relative.sh index 57c27a3..a8c1260 100755 --- a/tests-ng/compare-ignore-relative.sh +++ b/tests-ng/compare-ignore-relative.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" @@ -66,9 +67,9 @@ create_conf ${cfg} # sets token # import echo "[+] import" -cd ${ddpath} | ${bin} import -c ${cfg} ${tmpd}/program -cd ${ddpath} | ${bin} import -c ${cfg} ${tmpd}/config -cd ${ddpath} | ${bin} import -c ${cfg} ${tmpd}/vscode +cd ${ddpath} | ${bin} import --verbose -c ${cfg} ${tmpd}/program || exit 1 +cd ${ddpath} | ${bin} import --verbose -c ${cfg} ${tmpd}/config || exit 1 +cd ${ddpath} | ${bin} import --verbose -c ${cfg} ${tmpd}/vscode || exit 1 # add files on filesystem echo "[+] add files" diff --git a/tests-ng/compare-ignore.sh b/tests-ng/compare-ignore.sh index 98c986b..53d86c9 100755 --- a/tests-ng/compare-ignore.sh +++ b/tests-ng/compare-ignore.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: 
${PYTHONPATH}" diff --git a/tests-ng/compare.sh b/tests-ng/compare.sh index 7b8039e..15f34e4 100755 --- a/tests-ng/compare.sh +++ b/tests-ng/compare.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/corner-case.sh b/tests-ng/corner-case.sh new file mode 100755 index 0000000..1bc4e2a --- /dev/null +++ b/tests-ng/corner-case.sh @@ -0,0 +1,102 @@ +#!/usr/bin/env bash +# author: deadc0de6 (https://github.com/deadc0de6) +# Copyright (c) 2019, deadc0de6 +# +# the only purpose is to test corner-cases +# not covered by other tests like +# dry +# diff before write +# etc +# +# returns 1 in case of error +# + +# exit on first error +#set -e + +# all this crap to get current path +rl="readlink -f" +if ! ${rl} "${0}" >/dev/null 2>&1; then + rl="realpath" + + if ! hash ${rl}; then + echo "\"${rl}\" not found !" && exit 1 + fi +fi +cur=$(dirname "$(${rl} "${0}")") + +#hash dotdrop >/dev/null 2>&1 +#[ "$?" != "0" ] && echo "install dotdrop to run tests" && exit 1 + +#echo "called with ${1}" + +# dotdrop path can be pass as argument +ddpath="${cur}/../" +[ "${1}" != "" ] && ddpath="${1}" +[ ! -d ${ddpath} ] && echo "ddpath \"${ddpath}\" is not a directory" && exit 1 + +export PYTHONPATH="${ddpath}:${PYTHONPATH}" +bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true + +echo "dotdrop path: ${ddpath}" +echo "pythonpath: ${PYTHONPATH}" + +# get the helpers +source ${cur}/helpers + +echo -e "$(tput setaf 6)==> RUNNING $(basename $BASH_SOURCE) <==$(tput sgr0)" + +################################################################ +# this is the test +################################################################ + +# dotdrop directory +basedir=`mktemp -d --suffix='-dotdrop-tests' || mktemp -d` +echo "[+] dotdrop dir: ${basedir}" +echo "[+] dotpath dir: ${basedir}/dotfiles" + +# create the config file +cfg="${basedir}/config.yaml" +cat > ${cfg} << _EOF +config: + backup: true + create: true + dotpath: dotfiles +dotfiles: + f_x: + src: /tmp/x + dst: + f_y: + src: /tmp/.i-do-not-exist-dotdrop + dst: /tmp/y +profiles: + p1: + dotfiles: + - f_x + - f_y + +_EOF + +echo "[+] test install dry" +cd ${ddpath} | ${bin} install -c ${cfg} --dry -p p1 --verbose f_x +[ "$?" != "0" ] && exit 1 + +echo "[+] test install show-diff" +cd ${ddpath} | ${bin} install -c ${cfg} -p p1 --verbose f_x +[ "$?" != "0" ] && exit 1 +cd ${ddpath} | ${bin} install -D -c ${cfg} -p p1 --verbose f_x +[ "$?" != "0" ] && exit 1 + +echo "[+] test install not existing src" +cd ${ddpath} | ${bin} install -c ${cfg} --dry -p p1 --verbose f_y + +echo "[+] test install to temp" +cd ${ddpath} | ${bin} install -t -c ${cfg} -p p1 --verbose f_x +[ "$?" 
!= "0" ] && exit 1 + +## CLEANING +rm -rf ${basedir} + +echo "OK" +exit 0 diff --git a/tests-ng/deprecated-link.sh b/tests-ng/deprecated-link.sh index 262c2b5..b63238b 100755 --- a/tests-ng/deprecated-link.sh +++ b/tests-ng/deprecated-link.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/diff-cmd.sh b/tests-ng/diff-cmd.sh index 8e711c0..b09378d 100755 --- a/tests-ng/diff-cmd.sh +++ b/tests-ng/diff-cmd.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" @@ -67,6 +68,9 @@ dotfiles: profiles: _EOF +export DOTDROP_FORCE_NODEBUG=yes +export DOTDROP_NOBANNER=yes + # import echo "[+] import" cd ${ddpath} | ${bin} import -c ${cfg} ${tmpd}/singlefile @@ -74,9 +78,6 @@ cd ${ddpath} | ${bin} import -c ${cfg} ${tmpd}/singlefile # modify the file echo "modified" > ${tmpd}/singlefile -# suppressing the banner, so we can compare dotdrop diff with UNIX diff -export DOTDROP_NOBANNER=yes - # default diff (unified) echo "[+] comparing with default diff (unified)" set +e @@ -121,7 +122,9 @@ grep fakediff ${tmpd}/fake &> /dev/null || exit 1 ## CLEANING rm -rf ${basedir} ${tmpd} + unset DOTDROP_NOBANNER +unset DOTDROP_FORCE_NODEBUG echo "OK" exit 0 diff --git a/tests-ng/dir-import-update.sh b/tests-ng/dir-import-update.sh index dfd0a3f..79dcad6 100755 --- a/tests-ng/dir-import-update.sh +++ b/tests-ng/dir-import-update.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/dotdrop-variables.sh b/tests-ng/dotdrop-variables.sh index f2a7fb8..c5d7639 100755 --- a/tests-ng/dotdrop-variables.sh +++ b/tests-ng/dotdrop-variables.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/dotfile-no-src.sh b/tests-ng/dotfile-no-src.sh index 4c6ecb1..ccfaf37 100755 --- a/tests-ng/dotfile-no-src.sh +++ b/tests-ng/dotfile-no-src.sh @@ -33,6 +33,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/dotfile-sub-variables.sh b/tests-ng/dotfile-sub-variables.sh index d0d0ad2..c03a730 100755 --- a/tests-ng/dotfile-sub-variables.sh +++ b/tests-ng/dotfile-sub-variables.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/dotfile-variables.sh b/tests-ng/dotfile-variables.sh index 533a48e..cda2104 100755 --- 
a/tests-ng/dotfile-variables.sh +++ b/tests-ng/dotfile-variables.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/dotfiles-all.sh b/tests-ng/dotfiles-all.sh index 8335994..fc5fdfd 100755 --- a/tests-ng/dotfiles-all.sh +++ b/tests-ng/dotfiles-all.sh @@ -33,6 +33,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/dotfiles-dyn-paths.sh b/tests-ng/dotfiles-dyn-paths.sh index 542b92a..7450752 100755 --- a/tests-ng/dotfiles-dyn-paths.sh +++ b/tests-ng/dotfiles-dyn-paths.sh @@ -33,6 +33,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/duplicate-key.sh b/tests-ng/duplicate-key.sh index b40451f..3d11cc2 100755 --- a/tests-ng/duplicate-key.sh +++ b/tests-ng/duplicate-key.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" @@ -86,7 +87,7 @@ mkdir -p ${tmpd}/sub/sub echo "test2" > ${tmpd}/sub/sub/abc # import -cd ${ddpath} | ${bin} import -c ${cfg} -p p2 \ +cd ${ddpath} | ${bin} import --verbose -c ${cfg} -p p2 \ ${tmpd}/abc \ ${tmpd}/sub/abc \ ${tmpd}/sub/abc \ @@ -94,7 +95,7 @@ cd ${ddpath} | ${bin} import -c ${cfg} -p p2 \ ${tmpd}/sub/sub2/abc # count dotfiles for p2 -cnt=`cd ${ddpath} | ${bin} files -c ${cfg} -p p2 -b | grep '^f_' | wc -l` +cnt=`cd ${ddpath} | ${bin} files --verbose -c ${cfg} -p p2 -b | grep '^f_' | wc -l` [ "${cnt}" != "4" ] && exit 1 ## CLEANING diff --git a/tests-ng/dynactions.sh b/tests-ng/dynactions.sh index 6c9f1fd..81bb3b3 100755 --- a/tests-ng/dynactions.sh +++ b/tests-ng/dynactions.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/dyndotfilepaths.sh b/tests-ng/dyndotfilepaths.sh index f26c638..9b26d8a 100755 --- a/tests-ng/dyndotfilepaths.sh +++ b/tests-ng/dyndotfilepaths.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/dynextvariables.sh b/tests-ng/dynextvariables.sh index 05a843f..83e9799 100755 --- a/tests-ng/dynextvariables.sh +++ b/tests-ng/dynextvariables.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git 
a/tests-ng/dyninclude.sh b/tests-ng/dyninclude.sh index 34b500b..6db0155 100755 --- a/tests-ng/dyninclude.sh +++ b/tests-ng/dyninclude.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/dynvariables.sh b/tests-ng/dynvariables.sh index 8ab70e4..89527ba 100755 --- a/tests-ng/dynvariables.sh +++ b/tests-ng/dynvariables.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" @@ -68,10 +69,12 @@ config: dotpath: dotfiles variables: var1: "this is some test" + var2: "the_dvar4" dynvariables: dvar1: head -1 /proc/meminfo dvar2: "echo 'this is some test' | rev | tr ' ' ','" dvar3: ${scr} + dvar4: "echo {{@@ var2 @@}} | rev" dotfiles: f_abc: dst: ${tmpd}/abc @@ -88,17 +91,19 @@ echo "{{@@ var1 @@}}" > ${tmps}/dotfiles/abc echo "{{@@ dvar1 @@}}" >> ${tmps}/dotfiles/abc echo "{{@@ dvar2 @@}}" >> ${tmps}/dotfiles/abc echo "{{@@ dvar3 @@}}" >> ${tmps}/dotfiles/abc +echo "{{@@ dvar4 @@}}" >> ${tmps}/dotfiles/abc echo "test" >> ${tmps}/dotfiles/abc # install cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 -V -#cat ${tmpd}/abc +cat ${tmpd}/abc grep '^this is some test' ${tmpd}/abc >/dev/null grep "^MemTotal" ${tmpd}/abc >/dev/null grep '^tset,emos,si,siht' ${tmpd}/abc >/dev/null grep "^${TESTENV}" ${tmpd}/abc > /dev/null +grep '^4ravd_eht' ${tmpd}/abc >/dev/null #cat ${tmpd}/abc diff --git a/tests-ng/ext-actions.sh b/tests-ng/ext-actions.sh index f57ff17..968b01b 100755 --- a/tests-ng/ext-actions.sh +++ b/tests-ng/ext-actions.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/extvariables.sh b/tests-ng/extvariables.sh index d51814d..aae8e17 100755 --- a/tests-ng/extvariables.sh +++ b/tests-ng/extvariables.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/filter_file.sh b/tests-ng/filter_file.sh index d82edf2..def9e09 100755 --- a/tests-ng/filter_file.sh +++ b/tests-ng/filter_file.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/force-actions.sh b/tests-ng/force-actions.sh index c0342e8..242904c 100755 --- a/tests-ng/force-actions.sh +++ b/tests-ng/force-actions.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/func_file.sh b/tests-ng/func_file.sh index 
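The dynvariables.sh change above adds `var2: "the_dvar4"` and `dvar4: "echo {{@@ var2 @@}} | rev"`, then expects the installed file to contain `4ravd_eht`: the dynvariable is templated against the plain variables first and only then executed through the shell. A rough Python equivalent of what the test asserts (the real execution goes through dotdrop's `shell()` helper; `subprocess` and the `rev` utility are used here only for illustration):

```python
import subprocess

from jinja2 import Environment

env = Environment(variable_start_string='{{@@', variable_end_string='@@}}')
variables = {'var2': 'the_dvar4'}
dynvariables = {'dvar4': 'echo {{@@ var2 @@}} | rev'}

# 1. template the dynvariable against the plain variables
cmd = env.from_string(dynvariables['dvar4']).render(variables)
# 2. execute it through the shell and keep the output as its value
out = subprocess.run(cmd, shell=True, capture_output=True, text=True)
dynvariables['dvar4'] = out.stdout.strip()

print(dynvariables['dvar4'])  # 4ravd_eht
```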
7c68a1c..081f747 100755 --- a/tests-ng/func_file.sh +++ b/tests-ng/func_file.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/global-compare-ignore.sh b/tests-ng/global-compare-ignore.sh index f3dde98..056be9a 100755 --- a/tests-ng/global-compare-ignore.sh +++ b/tests-ng/global-compare-ignore.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/global-update-ignore.sh b/tests-ng/global-update-ignore.sh index 53285fb..5a387a6 100755 --- a/tests-ng/global-update-ignore.sh +++ b/tests-ng/global-update-ignore.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/globs.sh b/tests-ng/globs.sh index 6acbf18..a682961 100755 --- a/tests-ng/globs.sh +++ b/tests-ng/globs.sh @@ -35,6 +35,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/header.sh b/tests-ng/header.sh index 5c16ac3..311be3c 100755 --- a/tests-ng/header.sh +++ b/tests-ng/header.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/ignore-empty.sh b/tests-ng/ignore-empty.sh index fe8dbf1..92a3d2f 100755 --- a/tests-ng/ignore-empty.sh +++ b/tests-ng/ignore-empty.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" @@ -76,7 +77,7 @@ _EOF # create the dotfile mkdir -p ${tmps}/dotfiles/d1 -echo "{{@@ var1 @@}}" > ${tmps}/dotfiles/d1/empty +echo "{#@@ should be stripped @@#}" > ${tmps}/dotfiles/d1/empty echo "not empty" > ${tmps}/dotfiles/d1/notempty # install diff --git a/tests-ng/import-as.sh b/tests-ng/import-as.sh index 34d7f98..d5b29e9 100755 --- a/tests-ng/import-as.sh +++ b/tests-ng/import-as.sh @@ -31,6 +31,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/import-configs.sh b/tests-ng/import-configs.sh index e000de6..4c75d4c 100755 --- a/tests-ng/import-configs.sh +++ b/tests-ng/import-configs.sh @@ -31,6 +31,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m 
dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/import-duplicate.sh b/tests-ng/import-duplicate.sh index 50cd1f2..262e3a5 100755 --- a/tests-ng/import-duplicate.sh +++ b/tests-ng/import-duplicate.sh @@ -31,6 +31,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/import-link-children.sh b/tests-ng/import-link-children.sh index 8fae7c2..036e667 100755 --- a/tests-ng/import-link-children.sh +++ b/tests-ng/import-link-children.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/import-non-existing.sh b/tests-ng/import-non-existing.sh index 8b0f897..9b05efa 100755 --- a/tests-ng/import-non-existing.sh +++ b/tests-ng/import-non-existing.sh @@ -31,6 +31,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/import-profile-dotfiles.sh b/tests-ng/import-profile-dotfiles.sh index ea565e2..1215388 100755 --- a/tests-ng/import-profile-dotfiles.sh +++ b/tests-ng/import-profile-dotfiles.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/import-subfile.sh b/tests-ng/import-subfile.sh index 141cf97..b416a63 100755 --- a/tests-ng/import-subfile.sh +++ b/tests-ng/import-subfile.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/import-with-empty.sh b/tests-ng/import-with-empty.sh new file mode 100755 index 0000000..b43fb93 --- /dev/null +++ b/tests-ng/import-with-empty.sh @@ -0,0 +1,113 @@ +#!/usr/bin/env bash +# author: deadc0de6 (https://github.com/deadc0de6) +# Copyright (c) 2019, deadc0de6 +# +# test import new dotfiles with empty dst/src on existing dotfiles +# returns 1 in case of error +# + +# exit on first error +set -e + +# all this crap to get current path +rl="readlink -f" +if ! ${rl} "${0}" >/dev/null 2>&1; then + rl="realpath" + + if ! hash ${rl}; then + echo "\"${rl}\" not found !" && exit 1 + fi +fi +cur=$(dirname "$(${rl} "${0}")") + +#hash dotdrop >/dev/null 2>&1 +#[ "$?" != "0" ] && echo "install dotdrop to run tests" && exit 1 + +#echo "called with ${1}" + +# dotdrop path can be pass as argument +ddpath="${cur}/../" +[ "${1}" != "" ] && ddpath="${1}" +[ ! 
-d ${ddpath} ] && echo "ddpath \"${ddpath}\" is not a directory" && exit 1 + +export PYTHONPATH="${ddpath}:${PYTHONPATH}" +bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true + +echo "dotdrop path: ${ddpath}" +echo "pythonpath: ${PYTHONPATH}" + +# get the helpers +source ${cur}/helpers + +echo -e "$(tput setaf 6)==> RUNNING $(basename $BASH_SOURCE) <==$(tput sgr0)" + +################################################################ +# this is the test +################################################################ + +# dotdrop directory +basedir=`mktemp -d --suffix='-dotdrop-tests' || mktemp -d` +echo "[+] dotdrop dir: ${basedir}" +echo "[+] dotpath dir: ${basedir}/dotfiles" +# the temp directory +tmpd=`mktemp -d --suffix='-dotdrop-tests' || mktemp -d` + +# create a dotfile +dftoimport="${tmpd}/a_dotfile" +echo 'some content' > ${dftoimport} + +# create the config file +cfg="${basedir}/config.yaml" +cat > ${cfg} << _EOF +config: + backup: true + create: true + dotpath: dotfiles +dotfiles: + f_x: + src: /tmp/x + dst: + f_y: + src: + dst: /tmp/y + f_z: + src: + dst: + f_l: + src: + dst: + link: link + f_lc: + src: + dst: + link: link_children +profiles: + p1: + dotfiles: + - f_x + - f_y + - f_z + - f_l + - f_lc +_EOF + +echo "[+] import" +cd ${ddpath} | ${bin} import -c ${cfg} -p p1 --verbose ${dftoimport} +[ "$?" != "0" ] && exit 1 + +echo "[+] install" +cd ${ddpath} | ${bin} install -c ${cfg} -p p1 --verbose | grep '^5 dotfile(s) installed.$' +rm -f ${dftoimport} +cd ${ddpath} | ${bin} install -c ${cfg} -p p1 --verbose | grep '^6 dotfile(s) installed.$' + +nb=`cd ${ddpath} | ${bin} files -c ${cfg} -p p1 --verbose | grep '^[a-zA-Z]' | wc -l` +[ "${nb}" != "6" ] && echo 'error in dotfile list' && exit 1 + +#cat ${cfg} + +## CLEANING +rm -rf ${basedir} ${tmpd} + +echo "OK" +exit 0 diff --git a/tests-ng/import.sh b/tests-ng/import.sh index 78ab6ff..854388e 100755 --- a/tests-ng/import.sh +++ b/tests-ng/import.sh @@ -31,6 +31,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/imported-configs-variables.sh b/tests-ng/imported-configs-variables.sh index c6e0993..f852c00 100755 --- a/tests-ng/imported-configs-variables.sh +++ b/tests-ng/imported-configs-variables.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/imported-variables-from-config.sh b/tests-ng/imported-variables-from-config.sh new file mode 100755 index 0000000..004c5df --- /dev/null +++ b/tests-ng/imported-variables-from-config.sh @@ -0,0 +1,110 @@ +#!/usr/bin/env bash +# author: davla (https://github.com/davls) +# Copyright (c) 2020, davla +# +# test variables imported from config and used in the importing yaml config +# returns 1 in case of error +# + +# exit on first error +set -e + +# all this crap to get current path +rl="readlink -f" +if ! ${rl} "${0}" >/dev/null 2>&1; then + rl="realpath" + + if ! hash ${rl}; then + echo "\"${rl}\" not found !" && exit 1 + fi +fi +cur=$(dirname "$(${rl} "${0}")") + +#hash dotdrop >/dev/null 2>&1 +#[ "$?" 
!= "0" ] && echo "install dotdrop to run tests" && exit 1 + +#echo "called with ${1}" + +# dotdrop path can be pass as argument +ddpath="${cur}/../" +[ "${1}" != "" ] && ddpath="${1}" +[ ! -d ${ddpath} ] && echo "ddpath \"${ddpath}\" is not a directory" && exit 1 + +export PYTHONPATH="${ddpath}:${PYTHONPATH}" +bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true + +echo "dotdrop path: ${ddpath}" +echo "pythonpath: ${PYTHONPATH}" + +# get the helpers +source ${cur}/helpers + +echo -e "$(tput setaf 6)==> RUNNING $(basename $BASH_SOURCE) <==$(tput sgr0)" + +################################################################ +# this is the test +################################################################ + +# the dotfile source +tmps=`mktemp -d --suffix='-dotdrop-tests' || mktemp -d` +mkdir -p ${tmps}/dotfiles +# the dotfile destination +tmpd=`mktemp -d --suffix='-dotdrop-tests' || mktemp -d` + +# create the config file +cfg="${tmps}/config.yaml" +subcfg="${tmps}/subconfig.yaml" + +cat > ${cfg} << _EOF +config: + backup: true + create: true + dotpath: dotfiles + import_configs: + - ${subcfg} +dotfiles: + f_abc: + dst: ${tmpd}/abc + src: '{{@@ abc_dyn_src @@}}{{@@ abc_src @@}}' +profiles: + p1: + dotfiles: + - f_abc +_EOF +cat ${cfg} + +# create the subconfig file +cat > ${subcfg} << _EOF +config: + backup: true + create: true + dotpath: dotfiles +variables: + abc_src: c +dynvariables: + abc_dyn_src: 'echo ab' +dotfiles: [] +profiles: [] +_EOF + +# create the dotfile +dirname ${tmps}/dotfiles/abc | xargs mkdir -p +cat > ${tmps}/dotfiles/abc << _EOF +Hell yeah +_EOF + +# install +cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 -V + +# test file existence and content +[ -f "${tmpd}/abc" ] || { + echo 'Dotfile not installed' + exit 1 +} + +## CLEANING +rm -rf ${tmps} ${tmpd} + +echo "OK" +exit 0 diff --git a/tests-ng/include-actions.sh b/tests-ng/include-actions.sh index 3f292c5..abe4f75 100755 --- a/tests-ng/include-actions.sh +++ b/tests-ng/include-actions.sh @@ -33,6 +33,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/include-order.sh b/tests-ng/include-order.sh index d903f59..c6da500 100755 --- a/tests-ng/include-order.sh +++ b/tests-ng/include-order.sh @@ -33,6 +33,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/include.sh b/tests-ng/include.sh index 8f0d638..526ffa4 100755 --- a/tests-ng/include.sh +++ b/tests-ng/include.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" @@ -108,6 +109,9 @@ cd ${ddpath} | ${bin} files -c ${cfg} -p p2 | grep f_abc cd ${ddpath} | ${bin} files -c ${cfg} -p p3 | grep f_abc cd ${ddpath} | ${bin} files -c ${cfg} -p p0 | grep f_abc +cnt=`cd ${ddpath} | ${bin} files -c ${cfg} -p p0 | grep f_abc | wc -l` +[ "${cnt}" != "1" ] && echo "dotfiles displayed more than once" && exit 1 + # count cnt=`cd 
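The new imported-variables-from-config.sh test above exercises variables that are defined only in an imported sub config (`abc_src`, `abc_dyn_src`) but used in the importing config's dotfile `src`. Conceptually, the imported variables are merged into the main variable pool before dotfile paths are templated; the sketch below illustrates the expected outcome, with the merge being a deliberate simplification of the real precedence handling and `abc_dyn_src` shown after its shell command has already run.

```python
from jinja2 import Environment

# variables coming from the imported sub config
imported = {'abc_src': 'c', 'abc_dyn_src': 'ab'}
local = {}

# imported variables take precedence over local ones
pool = {**local, **imported}

env = Environment(variable_start_string='{{@@', variable_end_string='@@}}')
src = env.from_string('{{@@ abc_dyn_src @@}}{{@@ abc_src @@}}').render(pool)
print(src)  # abc
```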
${ddpath} | ${bin} files -c ${cfg} -p p1 -b | grep '^f_' | wc -l` [ "${cnt}" != "1" ] && exit 1 diff --git a/tests-ng/inst-link-default.sh b/tests-ng/inst-link-default.sh index b175f98..c3ed955 100755 --- a/tests-ng/inst-link-default.sh +++ b/tests-ng/inst-link-default.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/install-empty.sh b/tests-ng/install-empty.sh index 14e05ee..6773ca7 100755 --- a/tests-ng/install-empty.sh +++ b/tests-ng/install-empty.sh @@ -7,7 +7,7 @@ # # exit on first error -#set -e +set -e # all this crap to get current path rl="readlink -f" @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" @@ -67,17 +68,26 @@ dotfiles: f_z: src: dst: + f_l: + src: + dst: + link: link + f_lc: + src: + dst: + link: link_children profiles: p1: dotfiles: - f_x - f_y - f_z - + - f_l + - f_lc _EOF echo "[+] install" -cd ${ddpath} | ${bin} install -c ${cfg} -p p1 --verbose | grep '^3 dotfile(s) installed.$' +cd ${ddpath} | ${bin} install -c ${cfg} -p p1 --verbose | grep '^5 dotfile(s) installed.$' [ "$?" != "0" ] && exit 1 ## CLEANING diff --git a/tests-ng/install-ignore.sh b/tests-ng/install-ignore.sh index edae408..adc4298 100755 --- a/tests-ng/install-ignore.sh +++ b/tests-ng/install-ignore.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" @@ -81,7 +82,7 @@ echo "new data" > ${basedir}/dotfiles/${tmpd}/readmes/README.md # install rm -rf ${tmpd} echo "[+] install normal" -cd ${ddpath} | ${bin} install -c ${cfg} --verbose +cd ${ddpath} | ${bin} install --showdiff -c ${cfg} --verbose [ "$?" != "0" ] && exit 1 nb=`find ${tmpd} -iname 'README.md' | wc -l` echo "(1) found ${nb} README.md file(s)" diff --git a/tests-ng/install-to-temp.sh b/tests-ng/install-to-temp.sh new file mode 100755 index 0000000..902920b --- /dev/null +++ b/tests-ng/install-to-temp.sh @@ -0,0 +1,89 @@ +#!/usr/bin/env bash +# author: deadc0de6 (https://github.com/deadc0de6) +# Copyright (c) 2019, deadc0de6 +# +# test install to temp +# returns 1 in case of error +# + +# exit on first error +set -e + +# all this crap to get current path +rl="readlink -f" +if ! ${rl} "${0}" >/dev/null 2>&1; then + rl="realpath" + + if ! hash ${rl}; then + echo "\"${rl}\" not found !" && exit 1 + fi +fi +cur=$(dirname "$(${rl} "${0}")") + +#hash dotdrop >/dev/null 2>&1 +#[ "$?" != "0" ] && echo "install dotdrop to run tests" && exit 1 + +#echo "called with ${1}" + +# dotdrop path can be pass as argument +ddpath="${cur}/../" +[ "${1}" != "" ] && ddpath="${1}" +[ ! 
-d ${ddpath} ] && echo "ddpath \"${ddpath}\" is not a directory" && exit 1 + +export PYTHONPATH="${ddpath}:${PYTHONPATH}" +bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true + +echo "dotdrop path: ${ddpath}" +echo "pythonpath: ${PYTHONPATH}" + +# get the helpers +source ${cur}/helpers + +echo -e "$(tput setaf 6)==> RUNNING $(basename $BASH_SOURCE) <==$(tput sgr0)" + +################################################################ +# this is the test +################################################################ + +# dotdrop directory +basedir=`mktemp -d --suffix='-dotdrop-tests' || mktemp -d` +mkdir -p ${basedir}/dotfiles +tmpd=`mktemp -d --suffix='-dotdrop-tests' || mktemp -d` +echo "[+] dotdrop dir: ${basedir}" +echo "[+] dotpath dir: ${basedir}/dotfiles" + +# create the config file +cfg="${basedir}/config.yaml" +cat > ${cfg} << _EOF +config: + backup: true + create: true + dotpath: dotfiles +dotfiles: + f_x: + src: x + dst: ${tmpd}/x + f_y: + src: y + dst: ${tmpd}/y + link: link +profiles: + p1: + dotfiles: + - f_x + - f_y +_EOF + +echo 'test_x' > ${basedir}/dotfiles/x +echo 'test_y' > ${basedir}/dotfiles/y + +echo "[+] install" +cd ${ddpath} | ${bin} install -c ${cfg} -p p1 --showdiff --verbose --temp | grep '^2 dotfile(s) installed.$' +[ "$?" != "0" ] && exit 1 + +## CLEANING +rm -rf ${basedir} + +echo "OK" +exit 0 diff --git a/tests-ng/jhelpers.sh b/tests-ng/jhelpers.sh index 0e44adb..f502ed6 100755 --- a/tests-ng/jhelpers.sh +++ b/tests-ng/jhelpers.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/link-import-default.sh b/tests-ng/link-import-default.sh index f46908a..b20f145 100755 --- a/tests-ng/link-import-default.sh +++ b/tests-ng/link-import-default.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/link-templates-dir-home.sh b/tests-ng/link-templates-dir-home.sh index 1b7ed0e..0b58c16 100755 --- a/tests-ng/link-templates-dir-home.sh +++ b/tests-ng/link-templates-dir-home.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/link-templates-dir.sh b/tests-ng/link-templates-dir.sh index cf15d1a..8d57133 100755 --- a/tests-ng/link-templates-dir.sh +++ b/tests-ng/link-templates-dir.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/link-templates.sh b/tests-ng/link-templates.sh index bea1e53..74d7acb 100755 --- a/tests-ng/link-templates.sh +++ b/tests-ng/link-templates.sh @@ -32,6 +32,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && 
bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/link-value-tests.sh b/tests-ng/link-value-tests.sh index f1b7b01..4988e6f 100755 --- a/tests-ng/link-value-tests.sh +++ b/tests-ng/link-value-tests.sh @@ -33,6 +33,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/macro-with-globals.sh b/tests-ng/macro-with-globals.sh new file mode 100755 index 0000000..48bb1d3 --- /dev/null +++ b/tests-ng/macro-with-globals.sh @@ -0,0 +1,102 @@ +#!/usr/bin/env bash +# author: deadc0de6 (https://github.com/deadc0de6) +# Copyright (c) 2019, deadc0de6 +# +# import variables from file +# + +# exit on first error +set -e + +# all this crap to get current path +rl="readlink -f" +if ! ${rl} "${0}" >/dev/null 2>&1; then + rl="realpath" + + if ! hash ${rl}; then + echo "\"${rl}\" not found !" && exit 1 + fi +fi +cur=$(dirname "$(${rl} "${0}")") + +#hash dotdrop >/dev/null 2>&1 +#[ "$?" != "0" ] && echo "install dotdrop to run tests" && exit 1 + +#echo "called with ${1}" + +# dotdrop path can be pass as argument +ddpath="${cur}/../" +[ "${1}" != "" ] && ddpath="${1}" +[ ! -d ${ddpath} ] && echo "ddpath \"${ddpath}\" is not a directory" && exit 1 + +export PYTHONPATH="${ddpath}:${PYTHONPATH}" +bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true + +echo "dotdrop path: ${ddpath}" +echo "pythonpath: ${PYTHONPATH}" + +# get the helpers +source ${cur}/helpers + +echo -e "$(tput setaf 6)==> RUNNING $(basename $BASH_SOURCE) <==$(tput sgr0)" + +################################################################ +# this is the test +################################################################ + +# the dotfile source +tmps=`mktemp -d --suffix='-dotdrop-tests' || mktemp -d` +mkdir -p ${tmps}/dotfiles +# the dotfile destination +tmpd=`mktemp -d --suffix='-dotdrop-tests' || mktemp -d` + +# create the config file +cfg="${tmps}/config.yaml" + +cat > ${cfg} << _EOF +config: + backup: true + create: true + dotpath: dotfiles +dotfiles: + f_abc: + dst: ${tmpd}/abc + src: abc +profiles: + p0: + dotfiles: + - f_abc +variables: + global: global_var + local: local_var +_EOF + +# create the source +mkdir -p ${tmps}/dotfiles/ + +cat > ${tmps}/dotfiles/macro_file << _EOF +{%@@ macro macro(var) @@%} +{{@@ global @@}} +{{@@ var @@}} +{%@@ endmacro @@%} +_EOF + +cat > ${tmps}/dotfiles/abc << _EOF +{%@@ from 'macro_file' import macro with context @@%} +{{@@ macro(local) @@}} +_EOF + +# install +cd ${ddpath} | ${bin} install -c ${cfg} -p p0 -V + +# test file content +cat ${tmpd}/abc +grep 'global_var' ${tmpd}/abc >/dev/null 2>&1 +grep 'local_var' ${tmpd}/abc >/dev/null 2>&1 + +## CLEANING +rm -rf ${tmps} ${tmpd} + +echo "OK" +exit 0 diff --git a/tests-ng/minversion.sh b/tests-ng/minversion.sh index e827287..2442354 100755 --- a/tests-ng/minversion.sh +++ b/tests-ng/minversion.sh @@ -30,6 +30,7 @@ ddpath="${cur}/../" export PYTHONPATH="${ddpath}:${PYTHONPATH}" bin="python3 -m dotdrop.dotdrop" +hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true echo "dotdrop path: ${ddpath}" echo "pythonpath: ${PYTHONPATH}" diff --git a/tests-ng/profile-actions.sh b/tests-ng/profile-actions.sh index 2c5875d..5840021 
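macro-with-globals.sh (added above) pins down a subtle consequence of moving variables out of `env.globals`: a macro defined in another file only sees those variables when it is imported `with context`. A small jinja2 sketch of the same check, using dotdrop's block/variable delimiters and an in-memory loader instead of files on disk; the macro name `banner` is a stand-in for the test's `macro`.

```python
from jinja2 import DictLoader, Environment

templates = {
    'macro_file': ('{%@@ macro banner(var) @@%}'
                   '{{@@ global @@}} {{@@ var @@}}'
                   '{%@@ endmacro @@%}'),
    'abc': ("{%@@ from 'macro_file' import banner with context @@%}"
            '{{@@ banner(local) @@}}'),
}

env = Environment(loader=DictLoader(templates),
                  block_start_string='{%@@', block_end_string='@@%}',
                  variable_start_string='{{@@', variable_end_string='@@}}')

out = env.get_template('abc').render({'global': 'global_var',
                                      'local': 'local_var'})
assert 'global_var' in out and 'local_var' in out
print(out)
```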
--- a/tests-ng/profile-actions.sh
+++ b/tests-ng/profile-actions.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/profile-dyninclude.sh b/tests-ng/profile-dyninclude.sh
index b21022e..44db6f9 100755
--- a/tests-ng/profile-dyninclude.sh
+++ b/tests-ng/profile-dyninclude.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
@@ -90,6 +91,9 @@ dotfiles:
   f_def:
     dst: ${tmpd}/def
     src: def
+  f_ghi:
+    dst: '${tmpd}/{{@@ ghi @@}}'
+    src: ghi
 variables:
   mainvar: 'bad0'
   subvar: 'bad1'
@@ -100,8 +104,10 @@ profiles:
   subprofile:
     dotfiles:
     - f_abc
+    - f_ghi
     dynvariables:
       subdyn: 'echo subdyncontent'
+      ghi: 'echo ghi'
     variables:
       subvar: 'subcontent'
     subignore:
@@ -118,6 +124,7 @@ echo "{{@@ subdyn @@}}" >> ${tmps}/dotfiles/abc
 echo "{{@@ subvar @@}}" >> ${tmps}/dotfiles/abc
 echo "end" >> ${tmps}/dotfiles/abc
 #cat ${tmps}/dotfiles/abc
+echo "ghi content" > ${tmps}/dotfiles/ghi
 
 # install
 cd ${ddpath} | ${bin} install -f -c ${cfg} -p profile_1 --verbose
@@ -129,6 +136,7 @@ grep 'maindyncontent' ${tmpd}/abc >/dev/null || (echo "dynvariables 1 not resolv
 grep 'subcontent' ${tmpd}/abc >/dev/null || (echo "variables 2 not resolved" && exit 1)
 grep 'subdyncontent' ${tmpd}/abc >/dev/null || (echo "dynvariables 2 not resolved" && exit 1)
 #cat ${tmpd}/abc
+[ ! -e ${tmpd}/ghi ] && exit 1
 
 ## CLEANING
 rm -rf ${tmps} ${tmpd}
diff --git a/tests-ng/profile-dynvariables.sh b/tests-ng/profile-dynvariables.sh
index bdfb247..9455401 100755
--- a/tests-ng/profile-dynvariables.sh
+++ b/tests-ng/profile-dynvariables.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/profile-undefined-variables.sh b/tests-ng/profile-undefined-variables.sh
new file mode 100755
index 0000000..157be19
--- /dev/null
+++ b/tests-ng/profile-undefined-variables.sh
@@ -0,0 +1,124 @@
+#!/usr/bin/env bash
+# author: deadc0de6 (https://github.com/deadc0de6)
+# Copyright (c) 2017, deadc0de6
+#
+# test variables defined in a different profile
+# than the one selected
+# returns 1 in case of error
+#
+
+# exit on first error
+set -e
+
+# all this crap to get current path
+rl="readlink -f"
+if ! ${rl} "${0}" >/dev/null 2>&1; then
+  rl="realpath"
+
+  if ! hash ${rl}; then
+    echo "\"${rl}\" not found !" && exit 1
+  fi
+fi
+cur=$(dirname "$(${rl} "${0}")")
+
+#hash dotdrop >/dev/null 2>&1
+#[ "$?" != "0" ] && echo "install dotdrop to run tests" && exit 1
+
+#echo "called with ${1}"
+
+# dotdrop path can be pass as argument
+ddpath="${cur}/../"
+[ "${1}" != "" ] && ddpath="${1}"
+[ ! -d ${ddpath} ] && echo "ddpath \"${ddpath}\" is not a directory" && exit 1
+
+export PYTHONPATH="${ddpath}:${PYTHONPATH}"
+bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
+
+echo "dotdrop path: ${ddpath}"
+echo "pythonpath: ${PYTHONPATH}"
+
+# get the helpers
+source ${cur}/helpers
+
+echo -e "$(tput setaf 6)==> RUNNING $(basename $BASH_SOURCE) <==$(tput sgr0)"
+
+################################################################
+# this is the test
+################################################################
+
+# the dotfile source
+tmps=`mktemp -d --suffix='-dotdrop-tests' || mktemp -d`
+mkdir -p ${tmps}/dotfiles
+# the dotfile destination
+tmpd=`mktemp -d --suffix='-dotdrop-tests' || mktemp -d`
+#echo "dotfile destination: ${tmpd}"
+
+# create the config file
+cfg="${tmps}/config.yaml"
+
+cat > ${cfg} << _EOF
+config:
+  backup: true
+  create: true
+  dotpath: dotfiles
+dotfiles:
+  f_abc:
+    dst: "${tmpd}/{{@@ defined_in_main @@}}"
+    src: abc
+  f_def:
+    dst: "${tmpd}/{{@@ defined_in_alt @@}}"
+    src: def
+profiles:
+  pmain:
+    dynvariables:
+      defined_in_main: echo abc
+    dotfiles:
+    - f_abc
+  palt:
+    dynvariables:
+      defined_in_alt: echo def
+    dotfiles:
+    - f_def
+  pall:
+    dynvariables:
+      defined_in_main: echo abcall
+      defined_in_alt: echo defall
+    dotfiles:
+    - ALL
+  pinclude:
+    include:
+    - pmain
+_EOF
+#cat ${cfg}
+
+# create the dotfile
+echo "main" > ${tmps}/dotfiles/abc
+echo "alt" > ${tmps}/dotfiles/def
+
+# install pmain
+echo "install pmain"
+cd ${ddpath} | ${bin} install -f -c ${cfg} -p pmain -V
+[ ! -e ${tmpd}/abc ] && echo "dotfile not installed" && exit 1
+grep main ${tmpd}/abc
+
+# install pall
+echo "install pall"
+cd ${ddpath} | ${bin} install -f -c ${cfg} -p pall -V
+[ ! -e ${tmpd}/abcall ] && echo "dotfile not installed" && exit 1
+grep main ${tmpd}/abcall
+[ ! -e ${tmpd}/defall ] && echo "dotfile not installed" && exit 1
+grep alt ${tmpd}/defall
+
+# install pinclude
+echo "install pinclude"
+rm -f ${tmpd}/abc
+cd ${ddpath} | ${bin} install -f -c ${cfg} -p pinclude -V
+[ ! -e ${tmpd}/abc ] && echo "dotfile not installed" && exit 1
+grep main ${tmpd}/abc
+
+## CLEANING
+rm -rf ${tmps} ${tmpd}
+
+echo "OK"
+exit 0
diff --git a/tests-ng/re-import.sh b/tests-ng/re-import.sh
index 5974c31..99e1736 100755
--- a/tests-ng/re-import.sh
+++ b/tests-ng/re-import.sh
@@ -31,6 +31,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/recinclude.sh b/tests-ng/recinclude.sh
index cbef945..af63484 100755
--- a/tests-ng/recinclude.sh
+++ b/tests-ng/recinclude.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/recvariables.sh b/tests-ng/recvariables.sh
index c1a4f3d..3255718 100755
--- a/tests-ng/recvariables.sh
+++ b/tests-ng/recvariables.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/remove.sh b/tests-ng/remove.sh
index c8f5c8b..fb7c004 100755
--- a/tests-ng/remove.sh
+++ b/tests-ng/remove.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/symlink.sh b/tests-ng/symlink.sh
index 829a9e7..818cba4 100755
--- a/tests-ng/symlink.sh
+++ b/tests-ng/symlink.sh
@@ -31,6 +31,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/transformations-template.sh b/tests-ng/transformations-template.sh
index dfad1d4..4c3bafc 100755
--- a/tests-ng/transformations-template.sh
+++ b/tests-ng/transformations-template.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/transformations-with-args.sh b/tests-ng/transformations-with-args.sh
index faeffec..5815620 100755
--- a/tests-ng/transformations-with-args.sh
+++ b/tests-ng/transformations-with-args.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/transformations.sh b/tests-ng/transformations.sh
index eae4e47..0bd329f 100755
--- a/tests-ng/transformations.sh
+++ b/tests-ng/transformations.sh
@@ -34,6 +34,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/update-ignore-relative.sh b/tests-ng/update-ignore-relative.sh
index ac9ef60..09d4021 100755
--- a/tests-ng/update-ignore-relative.sh
+++ b/tests-ng/update-ignore-relative.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/update-ignore.sh b/tests-ng/update-ignore.sh
index 273482f..af9e34c 100755
--- a/tests-ng/update-ignore.sh
+++ b/tests-ng/update-ignore.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/update-rights.sh b/tests-ng/update-rights.sh
index 5ff2fa3..2e804de 100755
--- a/tests-ng/update-rights.sh
+++ b/tests-ng/update-rights.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/update-templates.sh b/tests-ng/update-templates.sh
index abdad4c..67bfcbd 100755
--- a/tests-ng/update-templates.sh
+++ b/tests-ng/update-templates.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/update-with-key.sh b/tests-ng/update-with-key.sh
index 47ba116..8c44d0e 100755
--- a/tests-ng/update-with-key.sh
+++ b/tests-ng/update-with-key.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/update.sh b/tests-ng/update.sh
index 83ca1d5..2535b3d 100755
--- a/tests-ng/update.sh
+++ b/tests-ng/update.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/variables-include.sh b/tests-ng/variables-include.sh
index 655fbb3..e87b60d 100755
--- a/tests-ng/variables-include.sh
+++ b/tests-ng/variables-include.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests-ng/variables.sh b/tests-ng/variables.sh
index b0ba712..aa0680c 100755
--- a/tests-ng/variables.sh
+++ b/tests-ng/variables.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
@@ -55,6 +56,7 @@ tmpd=`mktemp -d --suffix='-dotdrop-tests' || mktemp -d`
 
 # create the config file
 cfg="${tmps}/config.yaml"
+export dotdrop_test_dst="${tmpd}/def"
 
 cat > ${cfg} << _EOF
 config:
@@ -65,14 +67,19 @@ variables:
   var1: "this is some test"
   var2: 12
   var3: another test
+  vardst: "{{@@ env['dotdrop_test_dst'] @@}}"
 dotfiles:
   f_abc:
     dst: ${tmpd}/abc
     src: abc
+  f_def:
+    dst: "{{@@ vardst @@}}"
+    src: def
 profiles:
   p1:
     dotfiles:
    - f_abc
+    - f_def
 _EOF
 #cat ${cfg}
 
@@ -82,13 +89,19 @@ echo "{{@@ var2 @@}}" >> ${tmps}/dotfiles/abc
 echo "{{@@ var3 @@}}" >> ${tmps}/dotfiles/abc
 echo "test" >> ${tmps}/dotfiles/abc
 
-# install
-cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1
+echo "test_def" > ${tmps}/dotfiles/def
 
+# install
+cd ${ddpath} | ${bin} install -f -c ${cfg} -p p1 --verbose
+
+[ ! -e ${tmpd}/abc ] && echo "abc not installed" && exit 1
 grep '^this is some test' ${tmpd}/abc >/dev/null
 grep '^12' ${tmpd}/abc >/dev/null
 grep '^another test' ${tmpd}/abc >/dev/null
 
+[ ! -e ${tmpd}/def ] && echo "def not installed" && exit 1
+grep '^test_def' ${tmpd}/def >/dev/null
+
 #cat ${tmpd}/abc
 
 ## CLEANING
diff --git a/tests-ng/workdir.sh b/tests-ng/workdir.sh
index 1563921..dae5751 100755
--- a/tests-ng/workdir.sh
+++ b/tests-ng/workdir.sh
@@ -32,6 +32,7 @@ ddpath="${cur}/../"
 
 export PYTHONPATH="${ddpath}:${PYTHONPATH}"
 bin="python3 -m dotdrop.dotdrop"
+hash coverage 2>/dev/null && bin="coverage run -a --source=dotdrop -m dotdrop.dotdrop" || true
 
 echo "dotdrop path: ${ddpath}"
 echo "pythonpath: ${PYTHONPATH}"
diff --git a/tests.sh b/tests.sh
index ce49ee0..16be406 100755
--- a/tests.sh
+++ b/tests.sh
@@ -6,37 +6,52 @@
 set -ev
 
 # PEP8 tests
-which pycodestyle 2>/dev/null
+which pycodestyle >/dev/null 2>&1
 [ "$?" != "0" ] && echo "Install pycodestyle" && exit 1
+echo "testing with pycodestyle"
 pycodestyle --ignore=W503,W504,W605 dotdrop/
 pycodestyle tests/
 pycodestyle scripts/
 
 # pyflakes tests
+echo "testing with pyflakes"
 pyflakes dotdrop/
 pyflakes tests/
 
 # retrieve the nosetests binary
-set +e
 nosebin="nosetests"
-which ${nosebin} 2>/dev/null
+which ${nosebin} >/dev/null 2>&1
 [ "$?" != "0" ] && nosebin="nosetests3"
-which ${nosebin} 2>/dev/null
+which ${nosebin} >/dev/null 2>&1
 [ "$?" != "0" ] && echo "Install nosetests" && exit 1
-set -e
 
 # do not print debugs when running tests (faster)
 export DOTDROP_FORCE_NODEBUG=yes
 
+# coverage file location
+cur=`dirname $(readlink -f "${0}")`
+export COVERAGE_FILE="${cur}/.coverage"
+
 # execute tests with coverage
-PYTHONPATH=dotdrop ${nosebin} -s --with-coverage --cover-package=dotdrop
-#PYTHONPATH=dotdrop python3 -m pytest tests
+PYTHONPATH="dotdrop" ${nosebin} -s --with-coverage --cover-package=dotdrop
+#PYTHONPATH="dotdrop" python3 -m pytest tests
+
+# enable debug logs
+export DOTDROP_DEBUG=
+unset DOTDROP_FORCE_NODEBUG
+# do not print debugs when running tests (faster)
+#export DOTDROP_FORCE_NODEBUG=yes
 
 ## execute bash script tests
 [ "$1" = '--python-only' ] || {
+  echo "doing extended tests"
   log=`mktemp`
   for scr in tests-ng/*.sh; do
-    ${scr} > "${log}" 2>&1 &
+    if [ -z ${TRAVIS} ]; then
+      ${scr} > "${log}" 2>&1 &
+    else
+      ${scr} >/dev/null 2>&1 &
+    fi
     tail --pid="$!" -f "${log}"
     set +e
     wait "$!"
diff --git a/tests/dummy.py b/tests/dummy.py
new file mode 100644
index 0000000..b85de83
--- /dev/null
+++ b/tests/dummy.py
@@ -0,0 +1,23 @@
+"""
+author: deadc0de6 (https://github.com/deadc0de6)
+Copyright (c) 2017, deadc0de6
+basic unittest for the import function
+"""
+
+
+import unittest
+import dotdrop
+
+
+class TestDummy(unittest.TestCase):
+
+    def test_import(self):
+        self.assertTrue(dotdrop is not None)
+
+
+def main():
+    unittest.main()
+
+
+if __name__ == '__main__':
+    main()
diff --git a/tests/helpers.py b/tests/helpers.py
index d8cef2a..ed2ae0e 100644
--- a/tests/helpers.py
+++ b/tests/helpers.py
@@ -158,7 +158,6 @@ def load_options(confpath, profile):
     o.import_link = LinkTypes.NOLINK
     o.install_showdiff = True
     o.debug = True
-    o.variables = {}
     return o
 
 
diff --git a/tests/test_compare.py b/tests/test_compare.py
index 6c24db4..b436c92 100644
--- a/tests/test_compare.py
+++ b/tests/test_compare.py
@@ -36,8 +36,8 @@ class TestCompare(unittest.TestCase):
         results = {}
         for dotfile in dotfiles:
             path = os.path.expanduser(dotfile.dst)
-            ret, insttmp = inst.install_to_temp(t, tmp, dotfile.src,
-                                                dotfile.dst)
+            ret, err, insttmp = inst.install_to_temp(t, tmp, dotfile.src,
+                                                     dotfile.dst)
             if not ret:
                 results[path] = False
                 continue
diff --git a/tests/test_install.py b/tests/test_install.py
index 5ba35cb..2da99be 100644
--- a/tests/test_install.py
+++ b/tests/test_install.py
@@ -186,7 +186,6 @@ exec bspwm
         o = load_options(confpath, profile)
         o.safe = False
         o.install_showdiff = True
-        o.variables = {}
         cmd_install(o)
 
         # now compare the generated files
@@ -363,8 +362,8 @@ exec bspwm
 
         src = '/some/non/existant/file'
 
         installer = Installer()
-        logger = MagicMock()
-        installer.log.err = logger
+        # logger = MagicMock()
+        # installer.log.err = logger
         res, err = installer.link_children(templater=MagicMock(), src=src,
                                            dst='/dev/null', actionexec=None)
@@ -382,10 +381,10 @@ exec bspwm
 
         src = create_random_file(src_dir)[0]
 
-        logger = MagicMock()
+        # logger = MagicMock()
         templater = MagicMock()
         installer = Installer()
-        installer.log.err = logger
+        # installer.log.err = logger
 
         # pass src file not src dir
         res, err = installer.link_children(templater=templater, src=src,