diff --git a/.gitignore b/.gitignore index 77a09c89..bb309268 100644 --- a/.gitignore +++ b/.gitignore @@ -20,4 +20,6 @@ index.html .idea/* /polemarch.egg-info/ /polemarch/projects/ -doc/_build \ No newline at end of file +doc/_build +test_screenshot.png +.coverage.* diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index 943f23d6..1eb9822f 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -71,13 +71,6 @@ stages: - issue_building retry: 2 -.publish_template: &publishing - stage: publish - image: onegreyonewhite/tox:ubuntu - allow_failure: true - only: - - tags - # Branch tests ########################################### @@ -164,9 +157,10 @@ pages: release_pypi: stage: release only: - - tags - before_script: - - pip install vstcompile[doc] + refs: + - tags + variables: + - $PYPI_UPLOAD_PASSWORD script: - make test ENVS=build allow_failure: true @@ -190,7 +184,10 @@ publish_pypi: release_rpm: stage: release only: - - tags + refs: + - tags + variables: + - $PYPI_UPLOAD_PASSWORD script: - make test ENVS=rpm allow_failure: false @@ -199,20 +196,31 @@ release_rpm: paths: - dist/ +# DEPRECATED release_deb: stage: release only: - - tags + refs: + - tags + variables: + - $PYPI_UPLOAD_PASSWORD image: onegreyonewhite/tox:ubuntu script: - make test ENVS=deb allow_failure: false artifacts: - name: "release-rpm-${CI_BUILD_REF_NAME}.${CI_BUILD_ID}" + name: "release-deb-${CI_BUILD_REF_NAME}.${CI_BUILD_ID}" paths: - dist/ publish_release: - <<: *publishing + stage: publish + image: onegreyonewhite/tox:ubuntu + allow_failure: true + only: + refs: + - tags + variables: + - $PYPI_UPLOAD_PASSWORD script: - make test ENVS=release diff --git a/.pylintrc b/.pylintrc index 200d8da3..0c93bf1c 100644 --- a/.pylintrc +++ b/.pylintrc @@ -65,7 +65,7 @@ confidence= # --enable=similarities". 
If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" -disable=deprecated-lambda,old-style-class,no-init,expression-not-assigned,broad-except,logging-format-interpolation,model-no-explicit-unicode,too-many-ancestors,bad-continuation,bad-whitespace,redefined-builtin,missing-docstring,redefined-variable-type,no-self-use,line-too-long,suppressed-message,cmp-method,no-absolute-import,xrange-builtin,using-cmp-argument,basestring-builtin,backtick,unpacking-in-except,old-raise-syntax,getslice-method,long-builtin,print-statement,reduce-builtin,filter-builtin-not-iterating,import-star-module-level,unichr-builtin,dict-iter-method,range-builtin-not-iterating,file-builtin,old-division,standarderror-builtin,coerce-builtin,setslice-method,old-ne-operator,long-suffix,execfile-builtin,oct-method,metaclass-assignment,intern-builtin,apply-builtin,dict-view-method,raw_input-builtin,raising-string,coerce-method,unicode-builtin,next-method-called,hex-method,nonzero-method,round-builtin,cmp-builtin,reload-builtin,buffer-builtin,useless-suppression,zip-builtin-not-iterating,indexing-exception,map-builtin-not-iterating,delslice-method,old-octal-literal,input-builtin,parameter-unpacking,model-has-unicode,bare-except,too-few-public-methods,fixme,dangerous-default-value,attribute-defined-outside-init,pointless-string-statement,too-many-instance-attributes,arguments-differ,binary-op-exception,bad-classmethod-argument,locally-disabled,file-ignored,multiple-statements,superfluous-parens,bad-mcs-classmethod-argument 
+disable=too-many-branches,deprecated-lambda,old-style-class,no-init,expression-not-assigned,broad-except,logging-format-interpolation,model-no-explicit-unicode,too-many-ancestors,bad-continuation,bad-whitespace,redefined-builtin,missing-docstring,redefined-variable-type,no-self-use,line-too-long,suppressed-message,cmp-method,no-absolute-import,xrange-builtin,using-cmp-argument,basestring-builtin,backtick,unpacking-in-except,old-raise-syntax,getslice-method,long-builtin,print-statement,reduce-builtin,filter-builtin-not-iterating,import-star-module-level,unichr-builtin,dict-iter-method,range-builtin-not-iterating,file-builtin,old-division,standarderror-builtin,coerce-builtin,setslice-method,old-ne-operator,long-suffix,execfile-builtin,oct-method,metaclass-assignment,intern-builtin,apply-builtin,dict-view-method,raw_input-builtin,raising-string,coerce-method,unicode-builtin,next-method-called,hex-method,nonzero-method,round-builtin,cmp-builtin,reload-builtin,buffer-builtin,useless-suppression,zip-builtin-not-iterating,indexing-exception,map-builtin-not-iterating,delslice-method,old-octal-literal,input-builtin,parameter-unpacking,model-has-unicode,bare-except,too-few-public-methods,fixme,dangerous-default-value,attribute-defined-outside-init,pointless-string-statement,too-many-instance-attributes,arguments-differ,binary-op-exception,bad-classmethod-argument,locally-disabled,file-ignored,multiple-statements,superfluous-parens,bad-mcs-classmethod-argument [REPORTS] diff --git a/doc/api_schema.yaml b/doc/api_schema.yaml index b6041ef8..77a0f47f 100755 --- a/doc/api_schema.yaml +++ b/doc/api_schema.yaml @@ -9,9 +9,9 @@ info: name: System Administrator x-links: Request: - - url: https://gitlab.com/vstconsulting/polemarch/issues/new?issuable_template%5D=Ask&issue%5Btitle%5D=Ask%20about%20version%200.2.0 + - url: https://gitlab.com/vstconsulting/polemarch/issues/new?issuable_template%5D=Ask&issue%5Btitle%5D=Ask%20about%20version%200.2.2 name: Question - - url: 
https://gitlab.com/vstconsulting/polemarch/issues/new?issuable_template%5D=Bug&issue%5Btitle%5D=Bug%20in%20version%200.2.0 + - url: https://gitlab.com/vstconsulting/polemarch/issues/new?issuable_template%5D=Bug&issue%5Btitle%5D=Bug%20in%20version%200.2.2 name: Bug report - url: https://gitlab.com/vstconsulting/polemarch/issues/new?issuable_template%5D=Feature%20request&issue%5Btitle%5D= name: Feature request @@ -22,9 +22,9 @@ info: url: https://gitlab.com/vstconsulting/polemarch.git name: Official repository x-versions: - application: 0.2.0 - library: 0.2.0 - vstutils: 1.2.0.dev20 + application: 0.2.2 + library: 0.2.2 + vstutils: 1.2.1 django: 1.11.15 ansible: 2.6.4 version: v2 @@ -3018,6 +3018,40 @@ paths: tags: - inventory parameters: [] + /inventory/import_inventory/: + post: + operationId: inventory_import_inventory + description: '' + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/InventoryImport' + responses: + '201': + description: Action accepted. + schema: + $ref: '#/definitions/InventoryImport' + '400': + description: Validation error or some data error. + schema: + $ref: '#/definitions/Error' + '404': + description: Not found error. + schema: + $ref: '#/definitions/Error' + '403': + description: Permission denied error. + schema: + $ref: '#/definitions/Error' + '401': + description: Unauthorized access error. + schema: + $ref: '#/definitions/Error' + tags: + - inventory + parameters: [] /inventory/{pk}/: get: operationId: inventory_get @@ -6924,6 +6958,45 @@ paths: description: A unique integer value identifying this project. required: true type: integer + /project/{pk}/inventory/import_inventory/: + post: + operationId: project_inventory_import_inventory + description: Create a new inventory. + parameters: + - name: data + in: body + required: true + schema: + $ref: '#/definitions/InventoryImport' + responses: + '201': + description: Action accepted. 
+ schema: + $ref: '#/definitions/InventoryImport' + '400': + description: Validation error or some data error. + schema: + $ref: '#/definitions/Error' + '404': + description: Not found error. + schema: + $ref: '#/definitions/Error' + '403': + description: Permission denied error. + schema: + $ref: '#/definitions/Error' + '401': + description: Unauthorized access error. + schema: + $ref: '#/definitions/Error' + tags: + - project + parameters: + - name: pk + in: path + description: A unique integer value identifying this project. + required: true + type: integer /project/{pk}/inventory/{inventory_id}/: get: operationId: project_inventory_get @@ -11090,12 +11163,11 @@ paths: /team/{pk}/user/{user_id}/settings/: get: operationId: team_user_settings_get - summary: "A settings object, that allows API settings to be accessed as properties.\n\ - \ For example:" - description: " from rest_framework.settings import api_settings\n \ - \ print(api_settings.DEFAULT_RENDERER_CLASSES)\n\n Any setting with\ - \ string import paths will be automatically resolved\n and return the class,\ - \ rather than the string literal." + description: "A settings object, that allows API settings to be accessed as\ + \ properties.\n For example:\n\n from rest_framework.settings import\ + \ api_settings\n print(api_settings.DEFAULT_RENDERER_CLASSES)\n\n \ + \ Any setting with string import paths will be automatically resolved\n\ + \ and return the class, rather than the string literal." parameters: [] responses: '200': @@ -11122,12 +11194,11 @@ paths: - team post: operationId: team_user_settings_add - summary: "A settings object, that allows API settings to be accessed as properties.\n\ - \ For example:" - description: " from rest_framework.settings import api_settings\n \ - \ print(api_settings.DEFAULT_RENDERER_CLASSES)\n\n Any setting with\ - \ string import paths will be automatically resolved\n and return the class,\ - \ rather than the string literal." 
+ description: "A settings object, that allows API settings to be accessed as\ + \ properties.\n For example:\n\n from rest_framework.settings import\ + \ api_settings\n print(api_settings.DEFAULT_RENDERER_CLASSES)\n\n \ + \ Any setting with string import paths will be automatically resolved\n\ + \ and return the class, rather than the string literal." parameters: - name: data in: body @@ -11159,12 +11230,11 @@ paths: - team delete: operationId: team_user_settings_remove - summary: "A settings object, that allows API settings to be accessed as properties.\n\ - \ For example:" - description: " from rest_framework.settings import api_settings\n \ - \ print(api_settings.DEFAULT_RENDERER_CLASSES)\n\n Any setting with\ - \ string import paths will be automatically resolved\n and return the class,\ - \ rather than the string literal." + description: "A settings object, that allows API settings to be accessed as\ + \ properties.\n For example:\n\n from rest_framework.settings import\ + \ api_settings\n print(api_settings.DEFAULT_RENDERER_CLASSES)\n\n \ + \ Any setting with string import paths will be automatically resolved\n\ + \ and return the class, rather than the string literal." 
parameters: [] responses: '204': @@ -11672,6 +11742,7 @@ definitions: title: Is active type: boolean default: true + readOnly: true GroupCreateMaster: type: object properties: @@ -12044,6 +12115,25 @@ definitions: format: textarea owner: $ref: '#/definitions/User' + InventoryImport: + required: + - name + - raw_data + type: object + properties: + inventory_id: + title: Inventory id + type: integer + additionalProperties: + redirect: true + name: + title: Name + type: string + minLength: 1 + raw_data: + title: Raw data + type: string + minLength: 1 Project: type: object properties: @@ -12159,6 +12249,8 @@ definitions: type: string format: html readOnly: true + execute_view_data: + $ref: '#/definitions/Data' AnsibleModule: required: - module @@ -12173,54 +12265,12 @@ definitions: $ref: '#/definitions/Module' value_field: name view_field: path - args: - title: Args - description: module arguments - type: string - background: - title: Background - description: run asynchronously, failing after X seconds (default=N/A) - type: integer - become: - title: Become - description: run operations with become (does not imply password prompting) - type: boolean - default: false - become_method: - title: Become method - description: 'privilege escalation method to use (default=sudo), valid choices: - [ sudo | su | pbrun | pfexec | doas | dzdo | ksu | runas | pmrun | enable - | machinectl ]' - type: string - become_user: - title: Become user - description: run operations as this user (default=root) - type: string - check: - title: Check - description: don't make any changes; instead, try to predict some of the changes - that may occur - type: boolean - default: false - connection: - title: Connection - description: connection type to use (default=smart) - type: string - diff: - title: Diff - description: when changing (small) files and templates, show the differences - in those files; works great with --check - type: boolean - default: false - extra_vars: - title: Extra vars - 
description: set additional variables as key=value or YAML/JSON, if filename - prepend with @ - type: string - forks: - title: Forks - description: specify number of parallel processes to use (default=5) + verbose: + title: Verbose + description: verbose mode (-vvv for more, -vvvv to enable connection debugging) type: integer + default: 0 + maximum: 4 inventory: title: Inventory description: specify inventory host path or comma separated host list. --inventory-file @@ -12232,71 +12282,120 @@ definitions: $ref: '#/definitions/Inventory' value_field: id view_field: name - key_file: - title: Key file - description: use this file to authenticate the connection - type: string - format: secretfile - limit: - title: Limit - description: further limit selected hosts to an additional pattern - type: string list_hosts: title: List hosts description: outputs a list of matching hosts; does not execute anything else type: boolean default: false + limit: + title: Limit + description: further limit selected hosts to an additional pattern + type: string module_path: title: Module path description: prepend colon-separated path(s) to module library (default=['/home/grey/.ansible/plugins/modules', '/usr/share/ansible/plugins/modules']) type: string + extra_vars: + title: Extra vars + description: set additional variables as key=value or YAML/JSON, if filename + prepend with @ + type: string + forks: + title: Forks + description: specify number of parallel processes to use (default=5) + type: integer + vault_password_file: + title: Vault password file + description: vault password file + type: string + format: secretfile + vault_id: + title: Vault id + description: the vault identity to use + type: string one_line: title: One line description: condense output type: boolean default: false + tree: + title: Tree + description: log output to this directory + type: string + poll: + title: Poll + description: set the poll interval if using -B (default=15) + type: integer + background: + 
title: Background + description: run asynchronously, failing after X seconds (default=N/A) + type: integer + check: + title: Check + description: don't make any changes; instead, try to predict some of the changes + that may occur + type: boolean + default: false + syntax_check: + title: Syntax check + description: perform a syntax check on the playbook, but do not execute it + type: boolean + default: false + diff: + title: Diff + description: when changing (small) files and templates, show the differences + in those files; works great with --check + type: boolean + default: false playbook_dir: title: Playbook dir description: Since this tool does not use playbooks, use this as a subsitute playbook directory.This sets the relative path for many features including roles/ group_vars/ etc. type: string - poll: - title: Poll - description: set the poll interval if using -B (default=15) - type: integer + args: + title: Args + description: module arguments + type: string private_key: title: Private key description: use this file to authenticate the connection type: string format: secretfile - scp_extra_args: - title: Scp extra args - description: specify extra arguments to pass to scp only (e.g. -l) + key_file: + title: Key file + description: use this file to authenticate the connection type: string - sftp_extra_args: - title: Sftp extra args - description: specify extra arguments to pass to sftp only (e.g. -f, -l) + format: secretfile + user: + title: User + description: connect as this user (default=None) + type: string + connection: + title: Connection + description: connection type to use (default=smart) type: string + timeout: + title: Timeout + description: override the connection timeout in seconds (default=10) + type: integer ssh_common_args: title: Ssh common args description: specify common arguments to pass to sftp/scp/ssh (e.g. 
ProxyCommand) type: string + sftp_extra_args: + title: Sftp extra args + description: specify extra arguments to pass to sftp only (e.g. -f, -l) + type: string + scp_extra_args: + title: Scp extra args + description: specify extra arguments to pass to scp only (e.g. -l) + type: string ssh_extra_args: title: Ssh extra args description: specify extra arguments to pass to ssh only (e.g. -R) type: string - su: - title: Su - description: run operations with su (deprecated, use become) - type: boolean - default: false - su_user: - title: Su user - description: run operations with su as this user (default=None) (deprecated, - use become) - type: string sudo: title: Sudo description: run operations with sudo (nopasswd) (deprecated, use become) @@ -12306,38 +12405,31 @@ definitions: title: Sudo user description: desired sudo user (default=root) (deprecated, use become) type: string - syntax_check: - title: Syntax check - description: perform a syntax check on the playbook, but do not execute it + su: + title: Su + description: run operations with su (deprecated, use become) type: boolean default: false - timeout: - title: Timeout - description: override the connection timeout in seconds (default=10) - type: integer - tree: - title: Tree - description: log output to this directory - type: string - user: - title: User - description: connect as this user (default=None) + su_user: + title: Su user + description: run operations with su as this user (default=None) (deprecated, + use become) type: string - vault_id: - title: Vault id - description: the vault identity to use + become: + title: Become + description: run operations with become (does not imply password prompting) + type: boolean + default: false + become_method: + title: Become method + description: 'privilege escalation method to use (default=sudo), valid choices: + [ sudo | su | pbrun | pfexec | doas | dzdo | ksu | runas | pmrun | enable + | machinectl ]' type: string - vault_password_file: - title: Vault password 
file - description: vault password file + become_user: + title: Become user + description: run operations as this user (default=root) type: string - format: secretfile - verbose: - title: Verbose - description: verbose mode (-vvv for more, -vvvv to enable connection debugging) - type: integer - default: 0 - maximum: 4 group: title: Group type: string @@ -12373,20 +12465,62 @@ definitions: $ref: '#/definitions/Playbook' value_field: playbook view_field: name - become: - title: Become - description: run operations with become (does not imply password prompting) + verbose: + title: Verbose + description: verbose mode (-vvv for more, -vvvv to enable connection debugging) + type: integer + default: 0 + maximum: 4 + inventory: + title: Inventory + description: specify inventory host path or comma separated host list. --inventory-file + is deprecated + type: string + format: autocomplete + additionalProperties: + model: + $ref: '#/definitions/Inventory' + value_field: id + view_field: name + list_hosts: + title: List hosts + description: outputs a list of matching hosts; does not execute anything else type: boolean default: false - become_method: - title: Become method - description: 'privilege escalation method to use (default=sudo), valid choices: - [ sudo | su | pbrun | pfexec | doas | dzdo | ksu | runas | pmrun | enable - | machinectl ]' + limit: + title: Limit + description: further limit selected hosts to an additional pattern type: string - become_user: - title: Become user - description: run operations as this user (default=root) + module_path: + title: Module path + description: prepend colon-separated path(s) to module library (default=['/home/grey/.ansible/plugins/modules', + '/usr/share/ansible/plugins/modules']) + type: string + extra_vars: + title: Extra vars + description: set additional variables as key=value or YAML/JSON, if filename + prepend with @ + type: string + forks: + title: Forks + description: specify number of parallel processes to use 
(default=5) + type: integer + vault_password_file: + title: Vault password file + description: vault password file + type: string + format: secretfile + vault_id: + title: Vault id + description: the vault identity to use + type: string + tags: + title: Tags + description: only run plays and tasks tagged with these values + type: string + skip_tags: + title: Skip tags + description: only run plays and tasks whose tags do not match these values type: string check: title: Check @@ -12394,58 +12528,30 @@ definitions: that may occur type: boolean default: false - connection: - title: Connection - description: connection type to use (default=smart) - type: string + syntax_check: + title: Syntax check + description: perform a syntax check on the playbook, but do not execute it + type: boolean + default: false diff: title: Diff description: when changing (small) files and templates, show the differences in those files; works great with --check type: boolean default: false - extra_vars: - title: Extra vars - description: set additional variables as key=value or YAML/JSON, if filename - prepend with @ - type: string - flush_cache: - title: Flush cache - description: clear the fact cache for every host in inventory - type: boolean - default: false force_handlers: title: Force handlers description: run handlers even if a task fails type: boolean default: false - forks: - title: Forks - description: specify number of parallel processes to use (default=5) - type: integer - inventory: - title: Inventory - description: specify inventory host path or comma separated host list. 
--inventory-file - is deprecated - type: string - format: autocomplete - additionalProperties: - model: - $ref: '#/definitions/Inventory' - value_field: id - view_field: name - key_file: - title: Key file - description: use this file to authenticate the connection - type: string - format: secretfile - limit: - title: Limit - description: further limit selected hosts to an additional pattern - type: string - list_hosts: - title: List hosts - description: outputs a list of matching hosts; does not execute anything else + flush_cache: + title: Flush cache + description: clear the fact cache for every host in inventory + type: boolean + default: false + list_tasks: + title: List tasks + description: list all tasks that would be executed type: boolean default: false list_tags: @@ -12453,50 +12559,62 @@ definitions: description: list all available tags type: boolean default: false - list_tasks: - title: List tasks - description: list all tasks that would be executed + step: + title: Step + description: 'one-step-at-a-time: confirm each task before running' type: boolean default: false - module_path: - title: Module path - description: prepend colon-separated path(s) to module library (default=['/home/grey/.ansible/plugins/modules', - '/usr/share/ansible/plugins/modules']) + start_at_task: + title: Start at task + description: start the playbook at the task matching this name type: string private_key: title: Private key description: use this file to authenticate the connection type: string format: secretfile - scp_extra_args: - title: Scp extra args - description: specify extra arguments to pass to scp only (e.g. -l) + key_file: + title: Key file + description: use this file to authenticate the connection type: string - sftp_extra_args: - title: Sftp extra args - description: specify extra arguments to pass to sftp only (e.g. 
-f, -l) + format: secretfile + user: + title: User + description: connect as this user (default=None) type: string - skip_tags: - title: Skip tags - description: only run plays and tasks whose tags do not match these values + connection: + title: Connection + description: connection type to use (default=smart) type: string + timeout: + title: Timeout + description: override the connection timeout in seconds (default=10) + type: integer ssh_common_args: title: Ssh common args description: specify common arguments to pass to sftp/scp/ssh (e.g. ProxyCommand) type: string + sftp_extra_args: + title: Sftp extra args + description: specify extra arguments to pass to sftp only (e.g. -f, -l) + type: string + scp_extra_args: + title: Scp extra args + description: specify extra arguments to pass to scp only (e.g. -l) + type: string ssh_extra_args: title: Ssh extra args description: specify extra arguments to pass to ssh only (e.g. -R) type: string - start_at_task: - title: Start at task - description: start the playbook at the task matching this name - type: string - step: - title: Step - description: 'one-step-at-a-time: confirm each task before running' + sudo: + title: Sudo + description: run operations with sudo (nopasswd) (deprecated, use become) type: boolean default: false + sudo_user: + title: Sudo user + description: desired sudo user (default=root) (deprecated, use become) + type: string su: title: Su description: run operations with su (deprecated, use become) @@ -12507,47 +12625,21 @@ definitions: description: run operations with su as this user (default=None) (deprecated, use become) type: string - sudo: - title: Sudo - description: run operations with sudo (nopasswd) (deprecated, use become) - type: boolean - default: false - sudo_user: - title: Sudo user - description: desired sudo user (default=root) (deprecated, use become) - type: string - syntax_check: - title: Syntax check - description: perform a syntax check on the playbook, but do not execute it + 
become: + title: Become + description: run operations with become (does not imply password prompting) type: boolean default: false - tags: - title: Tags - description: only run plays and tasks tagged with these values - type: string - timeout: - title: Timeout - description: override the connection timeout in seconds (default=10) - type: integer - user: - title: User - description: connect as this user (default=None) - type: string - vault_id: - title: Vault id - description: the vault identity to use + become_method: + title: Become method + description: 'privilege escalation method to use (default=sudo), valid choices: + [ sudo | su | pbrun | pfexec | doas | dzdo | ksu | runas | pmrun | enable + | machinectl ]' type: string - vault_password_file: - title: Vault password file - description: vault password file + become_user: + title: Become user + description: run operations as this user (default=root) type: string - format: secretfile - verbose: - title: Verbose - description: verbose mode (-vvv for more, -vvvv to enable connection debugging) - type: integer - default: 0 - maximum: 4 ProjectHistory: required: - mode @@ -13203,6 +13295,10 @@ definitions: - widgetSettings type: object properties: + autoupdateInterval: + title: Autoupdateinterval + type: integer + default: 15000 chartLineSettings: $ref: '#/definitions/ChartLineSettings' widgetSettings: diff --git a/doc/gui.rst b/doc/gui.rst index 3a732d79..20f95819 100644 --- a/doc/gui.rst +++ b/doc/gui.rst @@ -69,7 +69,7 @@ As you can see, the form of new project creation consist of 5 fields: * **repo auth data** - key or password value. -After project creation you will the next page: +After project creation you will see the next page: .. image:: gui_screenshots/test_project_1.png .. 
image:: gui_screenshots/test_project_2.png @@ -598,6 +598,186 @@ As you can see there is only 1 new field on this page: * **id** - |id_field_def| +Import inventory +---------------- + +If you have some inventory file and you want to add objects from it to Polemarch +you can do it in a rather simple, convenient and quick way: let us introduce you +very useful action - "Import inventory". + +For example, let's use the following inventory file: + +.. sourcecode:: ini + + [imported-test-group] + imported-test-host ansible_host=10.10.10.17 + + [imported-test-group:vars] + ansible_user=ubuntu + ansible_ssh_private_key_file=example_key + +To import inventory you should open inventory list page: + +.. image:: gui_screenshots/import_inventory.png + +And click on "Import inventory" button. Then you will see the next page: + +.. image:: gui_screenshots/import_inventory_2.png + +As you can see, the form of "Import inventory" action consists of 2 fields: + +* **name** - name of your inventory. +* **inventory file** - value of your inventory file. + +After filling of all fields you should click on "Exec" button and then you will see +page of your imported inventory: + +.. image:: gui_screenshots/import_inventory_3.png + +This inventory includes "imported-test-group" from imported inventory file: + +.. image:: gui_screenshots/import_inventory_4.png + +And "imported-test-group" includes "imported-test-host" from imported inventory file: + +.. image:: gui_screenshots/import_inventory_5.png + +"imported-test-host" includes variable "ansible-host" from imported inventory file: + +.. image:: gui_screenshots/import_inventory_6.png + + +.polemarch.yaml +--------------- + +``.polemarch.yaml`` is a file for a quick deployment of Polemarch project. +By quick deployment of Polemarch project we mean automatic creation of some templates +for this project (during project sync) and using of additional interface for quick task execution. 
+ +``.polemarch.yaml`` is not required file for Polemarch work, +but if you want to use features of ``.polemarch.yaml``, you have to store it in +the base directory of (GIT, MANUAL, TAR) project. + +Structure of ``.polemarch.yaml`` consists of next fields: + +* **sync_on_run** - boolean, it means to get or not to get settings from ``.polemarch.yaml`` + during each project sync. +* **templates** - dictionary, consists of template objects + (their structure is similar to template's API structure except the 'name' field). +* **templates_rewrite** - boolean, it means to rewrite or not to rewrite templates in project + with names equal to templates' names from ``.polemarch.yaml``. +* **view** - dictionary, it is a description of web-form, that will be generated from ``.polemarch.yaml``. + It consists of: + + * **fields** - dictionary, it consists of objects, that describe fields properties: + + * **title**: title of field, that Polemarch will show in web-form. + * **default**: default value of field. Default: '' - for strings, 0 - for numbers. + * **format**: format of field. For today next field formats are available: string, integer, float, boolean. Default: string. + * **help**: some help text for this field. + + * **playbooks** - dictionary, it consists of objects, that describes playbook properties: + + * **title**: title of playbook, that Polemarch will use during playbook execution. + * **help**: some help text for this playbook. + +Example of ``.polemarch.yaml``: + +.. 
sourcecode:: yaml + + --- + sync_on_run: true + templates: + test-module: + notes: Module test template + kind: Module + data: + group: all + vars: {} + args: '' + module: ping + inventory: localhost, + options: + uptime: + args: uptime + module: shell + test playbook: + notes: Playbook test template + kind: Task + data: + vars: {"become": true} + playbook: main.yml + inventory: localhost, + options: + update: {"playbook": "other.yml"} + templates_rewrite: true + view: + fields: + string: + title: Field string + default: 0 + format: string + help: Some help text + integer: + title: Field integer + default: 0 + format: integer + help: Some help text + float: + title: Field float + default: 0 + format: float + help: Some help text + boolean: + title: Field boolean + default: 0 + format: boolean + help: Some help text + enum_string: + title: Field enum_string + default: 0 + format: string + help: Some help text + enum: ['Choice1', 'Choice2', 'Choice3'] + unknown: + title: Field unknown + default: 0 + format: invalid_or_unknown + help: Some help text + playbooks: + main.yml: + title: Execute title + help: Some help text + + +In GUI process of working with ``.polemarch.yaml`` will be the following: + +Firstly, you need to create a project with ``.polemarch.yaml`` +(or to add ``.polemarch.yaml`` to existing project). +For example, let's create new GIT project, that has in its base directory ``.polemarch.yaml`` file +from the example above: + +.. image:: gui_screenshots/create_project_with_polemarch_yaml.png + +In the field 'repo url' you should insert url of project that has in its base directory +``.polemarch.yaml`` file. + +After project creation you will see the ordinary project page: + +.. image:: gui_screenshots/create_project_with_polemarch_yaml_2.png + +Then you need to click on "sync" button. After project synchronization you will see the next page: + +.. image:: gui_screenshots/create_project_with_polemarch_yaml_3.png +.. 
image:: gui_screenshots/create_project_with_polemarch_yaml_4.png + +As you can see, all fields that we mentioned in the exmaple ``.polemarch.yaml`` were added +to this web-form. + +Also templates from ``.polemarch.yaml`` were added to just created Polemarch project. + +.. image:: gui_screenshots/create_project_with_polemarch_yaml_5.png + Hooks ----- diff --git a/doc/gui_screenshots/create_project_with_polemarch_yaml.png b/doc/gui_screenshots/create_project_with_polemarch_yaml.png new file mode 100644 index 00000000..375191c2 Binary files /dev/null and b/doc/gui_screenshots/create_project_with_polemarch_yaml.png differ diff --git a/doc/gui_screenshots/create_project_with_polemarch_yaml_2.png b/doc/gui_screenshots/create_project_with_polemarch_yaml_2.png new file mode 100644 index 00000000..c0b15833 Binary files /dev/null and b/doc/gui_screenshots/create_project_with_polemarch_yaml_2.png differ diff --git a/doc/gui_screenshots/create_project_with_polemarch_yaml_3.png b/doc/gui_screenshots/create_project_with_polemarch_yaml_3.png new file mode 100644 index 00000000..a0ff1709 Binary files /dev/null and b/doc/gui_screenshots/create_project_with_polemarch_yaml_3.png differ diff --git a/doc/gui_screenshots/create_project_with_polemarch_yaml_4.png b/doc/gui_screenshots/create_project_with_polemarch_yaml_4.png new file mode 100644 index 00000000..5ee086f2 Binary files /dev/null and b/doc/gui_screenshots/create_project_with_polemarch_yaml_4.png differ diff --git a/doc/gui_screenshots/create_project_with_polemarch_yaml_5.png b/doc/gui_screenshots/create_project_with_polemarch_yaml_5.png new file mode 100644 index 00000000..d4c7fc4b Binary files /dev/null and b/doc/gui_screenshots/create_project_with_polemarch_yaml_5.png differ diff --git a/doc/gui_screenshots/import_inventory.png b/doc/gui_screenshots/import_inventory.png new file mode 100644 index 00000000..659560c0 Binary files /dev/null and b/doc/gui_screenshots/import_inventory.png differ diff --git 
a/doc/gui_screenshots/import_inventory_2.png b/doc/gui_screenshots/import_inventory_2.png new file mode 100644 index 00000000..dbe8c677 Binary files /dev/null and b/doc/gui_screenshots/import_inventory_2.png differ diff --git a/doc/gui_screenshots/import_inventory_3.png b/doc/gui_screenshots/import_inventory_3.png new file mode 100644 index 00000000..580c009b Binary files /dev/null and b/doc/gui_screenshots/import_inventory_3.png differ diff --git a/doc/gui_screenshots/import_inventory_4.png b/doc/gui_screenshots/import_inventory_4.png new file mode 100644 index 00000000..62e8ef15 Binary files /dev/null and b/doc/gui_screenshots/import_inventory_4.png differ diff --git a/doc/gui_screenshots/import_inventory_5.png b/doc/gui_screenshots/import_inventory_5.png new file mode 100644 index 00000000..83d720bd Binary files /dev/null and b/doc/gui_screenshots/import_inventory_5.png differ diff --git a/doc/gui_screenshots/import_inventory_6.png b/doc/gui_screenshots/import_inventory_6.png new file mode 100644 index 00000000..5c2d759c Binary files /dev/null and b/doc/gui_screenshots/import_inventory_6.png differ diff --git a/doc/quickstart.rst b/doc/quickstart.rst index 67c8dd2c..1e6dc5aa 100644 --- a/doc/quickstart.rst +++ b/doc/quickstart.rst @@ -31,8 +31,6 @@ Install from PyPI * LDAP (```libldap2-dev```) - * python 2.7 (```libpython2.7```) - * SSHPass(```sshpass```) to get working ssh password auth during playbook execution * GIT (```git```) to get working git import diff --git a/polemarch/__init__.py b/polemarch/__init__.py index a3977b23..687fb39d 100644 --- a/polemarch/__init__.py +++ b/polemarch/__init__.py @@ -31,6 +31,6 @@ "VST_ROOT_URLCONF": os.getenv("VST_ROOT_URLCONF", 'vstutils.urls'), } -__version__ = "0.2.1" +__version__ = "0.2.2" prepare_environment(**default_settings) diff --git a/polemarch/api/v2/serializers.py b/polemarch/api/v2/serializers.py index 1fd6b696..c59ab244 100644 --- a/polemarch/api/v2/serializers.py +++ b/polemarch/api/v2/serializers.py @@ 
-96,7 +96,7 @@ def func_wrapper(*args, **kwargs): # Serializers class ActionResponseSerializer(DataSerializer, EmptySerializer): - detail = serializers.CharField() + detail = vst_fields.VSTCharField() class ExecuteResponseSerializer(ActionResponseSerializer): @@ -216,8 +216,8 @@ class Meta(vst_serializers.OneUserSerializer.Meta): class CreateUserSerializer(OneUserSerializer): - password = serializers.CharField(write_only=True) - password2 = serializers.CharField(write_only=True, label='Repeat password') + password = vst_fields.VSTCharField(write_only=True) + password2 = vst_fields.VSTCharField(write_only=True, label='Repeat password') class Meta(OneUserSerializer.Meta): fields = list(OneUserSerializer.Meta.fields) + ['password', 'password2'] @@ -410,6 +410,8 @@ def to_representation(self, instance): result = super(VariableSerializer, self).to_representation(instance) if instance.key in getattr(instance.content_object, 'HIDDEN_VARS', []): result['value'] = "[~~ENCRYPTED~~]" + elif instance.key in getattr(instance.content_object, 'BOOLEAN_VARS', []): + result['value'] = True if instance.value == 'True' else False return result @@ -513,7 +515,7 @@ class Meta: class OnePlaybookSerializer(PlaybookSerializer): - playbook = serializers.CharField(read_only=True) + playbook = vst_fields.VSTCharField(read_only=True) class Meta: model = models.Task @@ -722,7 +724,7 @@ def execute(self, request): class TemplateExecSerializer(DataSerializer): - option = serializers.CharField( + option = vst_fields.VSTCharField( help_text='Option name from template options.', min_length=0, allow_blank=True, required=False @@ -801,9 +803,9 @@ class ProjectCreateMasterSerializer(vst_serializers.VSTSerializer): types = list_to_choices(models.Project.repo_handlers.keys()) auth_types = list_to_choices(['NONE', 'KEY', 'PASSWORD']) - status = serializers.CharField(read_only=True) + status = vst_fields.VSTCharField(read_only=True) type = serializers.ChoiceField(choices=types, default='MANUAL', 
label='Repo type') - repository = serializers.CharField(default='MANUAL', label='Repo url') + repository = vst_fields.VSTCharField(default='MANUAL', label='Repo url') repo_auth = serializers.ChoiceField(choices=auth_types, default='NONE', label='Repo auth type', @@ -848,8 +850,8 @@ def create(self, validated_data): class ProjectSerializer(_InventoryOperations): - status = serializers.CharField(read_only=True) - type = serializers.CharField(read_only=True) + status = vst_fields.VSTCharField(read_only=True) + type = vst_fields.VSTCharField(read_only=True) class Meta: model = models.Project @@ -865,10 +867,11 @@ def _do_with_vars(self, *args, **kw): class OneProjectSerializer(ProjectSerializer, _InventoryOperations): - repository = serializers.CharField(default='MANUAL') + repository = vst_fields.VSTCharField(default='MANUAL') owner = UserSerializer(read_only=True) notes = vst_fields.TextareaField(required=False, allow_blank=True) readme_content = vst_fields.HtmlField(read_only=True, label='Information') + execute_view_data = vst_serializers.DataSerializer(read_only=True, allow_null=True) class Meta: model = models.Project @@ -880,7 +883,8 @@ class Meta: 'branch', 'owner', 'notes', - 'readme_content',) + 'readme_content', + 'execute_view_data',) @transaction.atomic() def sync(self): @@ -969,7 +973,7 @@ def generate_fileds(ansible_type): elif ref_type == 'int': field = serializers.IntegerField elif ref_type == 'string' or 'choice': - field = serializers.CharField + field = vst_fields.VSTCharField kwargs['allow_blank'] = True if ref == 'verbose': @@ -1061,7 +1065,7 @@ class DashboardStatisticSerializer(DataSerializer): class InventoryImportSerializer(DataSerializer): inventory_id = vst_fields.RedirectIntegerField(default=None, allow_null=True) name = serializers.CharField(required=True) - raw_data = serializers.CharField() + raw_data = vst_fields.VSTCharField() @transaction.atomic() def create(self, validated_data): @@ -1069,6 +1073,7 @@ def create(self, 
validated_data): inv_json = parser.get_inventory_data(validated_data['raw_data']) inventory = Inventory.objects.create(name=validated_data['name']) + inventory.vars = inv_json['vars'] created_hosts, created_groups = dict(), dict() for host in inv_json['hosts']: diff --git a/polemarch/api/v2/views.py b/polemarch/api/v2/views.py index 9747b895..f3dec47e 100644 --- a/polemarch/api/v2/views.py +++ b/polemarch/api/v2/views.py @@ -438,6 +438,9 @@ class InventoryViewSet(_GroupMixin): serializer_class_one = sers.OneInventorySerializer filter_class = filters.InventoryFilter copy_related = ['hosts', 'groups'] + action_serializers = { + 'import_inventory': sers.InventoryImportSerializer + } @deco.action(methods=["post"], detail=no) def import_inventory(self, request, **kwargs): diff --git a/polemarch/main/models/__init__.py b/polemarch/main/models/__init__.py index 09926ca4..32bdc1a6 100644 --- a/polemarch/main/models/__init__.py +++ b/polemarch/main/models/__init__.py @@ -63,7 +63,7 @@ def raise_linked_error(exception_class=ValidationError, **kwargs): ##################################### @receiver(signals.post_save, sender=Variable) def remove_existed(instance, **kwargs): - if 'loaddata' in sys.argv: # nocv + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv return Variable.objects.filter( object_id=instance.object_id, @@ -73,8 +73,8 @@ def remove_existed(instance, **kwargs): @receiver(signals.pre_save, sender=Variable) -def check_variables_values(instance, *args, **kw): - if 'loaddata' in sys.argv: # nocv +def check_variables_values(instance, *args, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv return content_object = instance.content_object if isinstance(content_object, PeriodicTask): @@ -87,6 +87,8 @@ def check_variables_values(instance, *args, **kw): @receiver(signals.pre_save, sender=Group) def validate_group_name(instance, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return validate_name = 
RegexValidator( regex=r'^[a-zA-Z0-9\-\._]*$', message='Name must be Alphanumeric' @@ -95,7 +97,9 @@ def validate_group_name(instance, **kwargs): @receiver(signals.m2m_changed, sender=Group.parents.through) -def check_circular_deps(instance, action, pk_set, *args, **kw): +def check_circular_deps(instance, action, pk_set, *args, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return if (action in ["pre_add", "post_add"]) and ('loaddata' not in sys.argv): if instance.id in pk_set: raise instance.CiclicDependencyError("The group can not refer to itself.") @@ -107,6 +111,8 @@ def check_circular_deps(instance, action, pk_set, *args, **kw): @receiver(signals.pre_save, sender=PeriodicTask) def validate_types(instance, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return if (instance.kind not in instance.kinds) or (instance.type not in instance.types): # Deprecated, because moved to serializers raise UnknownTypeException(instance.kind, "Unknown kind {}.") # nocv @@ -114,6 +120,8 @@ def validate_types(instance, **kwargs): @receiver(signals.pre_save, sender=PeriodicTask) def validate_crontab(instance, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return try: instance.get_schedule() except ValueError as ex: @@ -123,6 +131,8 @@ def validate_crontab(instance, **kwargs): @receiver(signals.pre_save, sender=Host) def validate_type_and_name(instance, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return if instance.type not in instance.types: # nocv # Deprecated, because moved to serializers raise UnknownTypeException(instance.type) @@ -134,6 +144,8 @@ def validate_type_and_name(instance, **kwargs): @receiver(signals.pre_save, sender=Template) def validate_template_keys(instance, **kwargs): # nocv + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return # Deprecated, because moved to serializers if instance.kind not in 
instance.template_fields.keys(): raise UnknownTypeException(instance.kind) @@ -149,6 +161,8 @@ def validate_template_keys(instance, **kwargs): # nocv @receiver(signals.pre_save, sender=Template) def validate_template_executes(instance, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return if instance.kind in ["Host", "Group"]: return # nocv errors = {} @@ -160,6 +174,8 @@ def validate_template_executes(instance, **kwargs): @receiver(signals.pre_save, sender=Template) def validate_template_args(instance, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return if instance.kind in ["Host", "Group"]: return # nocv command = "playbook" @@ -181,6 +197,8 @@ def clean_dirs(instance, **kwargs): @receiver(signals.post_save, sender=PeriodicTask) def save_to_beat(instance, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return task = settings.TASKS_HANDLERS["SCHEDUER"]["BACKEND"] manager = django_celery_beat.models.PeriodicTask.objects delete_from_beat(instance) @@ -206,6 +224,8 @@ def save_to_beat(instance, **kwargs): @receiver(signals.post_delete, sender=PeriodicTask) def delete_from_beat(instance, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return manager = django_celery_beat.models.PeriodicTask.objects celery_tasks = manager.filter(name=str(instance.id)) for task in celery_tasks: @@ -225,6 +245,8 @@ def delete_from_beat(instance, **kwargs): @receiver(signals.m2m_changed, sender=Project.inventories.through) def check_if_inventory_linked(instance, action, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return if action != "pre_remove": return removing_inventories = instance.inventories.filter(pk__in=kwargs['pk_set']) @@ -245,6 +267,8 @@ def check_if_inventory_linked(instance, action, **kwargs): @receiver(signals.pre_delete, sender=Inventory) def check_if_inventory_linked_project(instance, **kwargs): # pylint: 
disable=invalid-name + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return if instance.projects.exists(): raise_linked_error( linked_projects=list(instance.projects.values_list('id', flat=True)) @@ -253,6 +277,8 @@ def check_if_inventory_linked_project(instance, **kwargs): @receiver(signals.pre_save, sender=Hook) def check_hook(instance, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return errors = instance.handlers.validate(instance) if errors: raise ValidationError(errors) @@ -261,6 +287,8 @@ def check_hook(instance, **kwargs): @receiver([signals.post_save, signals.post_delete], sender=BaseUser, dispatch_uid='user_add_hook') def user_add_hook(instance, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return created = kwargs.get('created', None) when = None if created is None: @@ -280,6 +308,8 @@ def user_add_hook(instance, **kwargs): @receiver([signals.post_save, signals.post_delete], sender=Group) @receiver([signals.post_save, signals.post_delete], sender=Host) def polemarch_hook(instance, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return created = kwargs.get('created', None) when = "on_object_add" if isinstance(instance, Variable): @@ -296,9 +326,13 @@ def polemarch_hook(instance, **kwargs): @receiver(signals.post_save, sender=BaseUser) def create_settings_for_user(instance, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return UserSettings.objects.get_or_create(user=instance) @receiver(signals.pre_save, sender=Template) def update_ptasks_with_templates(instance, **kwargs): + if 'loaddata' in sys.argv or kwargs.get('raw', False): # nocv + return instance.periodic_task.all().update(project=instance.project) diff --git a/polemarch/main/models/projects.py b/polemarch/main/models/projects.py index e1eddfcd..9b007e5c 100644 --- a/polemarch/main/models/projects.py +++ b/polemarch/main/models/projects.py @@ -3,6 +3,7 @@ import 
os import logging +import traceback import uuid import six from docutils.core import publish_parts as rst_gen @@ -10,7 +11,7 @@ from django.conf import settings from django.db.models import Q from django.core.validators import ValidationError -from vstutils.utils import ModelHandlers +from vstutils.utils import ModelHandlers, raise_context from yaml import load try: from yaml import CLoader as Loader @@ -22,7 +23,7 @@ from ..exceptions import PMException from .base import ManyToManyFieldACL, BQuerySet, BModel from .hooks import Hook -from ..utils import AnsibleModules +from ..utils import AnsibleModules, SubCacheInterface logger = logging.getLogger("polemarch") @@ -52,6 +53,7 @@ class SyncError(PMException): pass class ReadMe(object): + __slots__ = 'path', 'ext', 'content', 'file_name' def __init__(self, project): self.path = project.path @@ -66,7 +68,7 @@ def _make_rst(self, file): return rst_gen(file.read(), writer_name='html')['html_body'] def _make_md(self, file): - return Markdown(extras=['tables']).convert(file.read()) + return Markdown(extras=['tables', 'header-ids']).convert(file.read()) def set_readme(self): if not os.path.exists(self.path): @@ -98,6 +100,14 @@ def set_readme(self): 'template_option': None } + PM_YAML_FORMATS = { + 'unknown': str, + 'string': str, + 'integer': int, + 'float': float, + 'boolean': bool, + } + def __unicode__(self): return str(self.name) # pragma: no cover @@ -128,6 +138,67 @@ def type(self): except self.variables.model.DoesNotExist: # nocv return 'MANUAL' + def get_yaml_subcache(self, suffix=''): + return SubCacheInterface(''.join(['project', str(self.id), suffix])) + + def __parse_yaml_view(self, data): + valid_formats = self.PM_YAML_FORMATS + parsed_data = dict(fields=dict(), playbooks=dict()) + # Parse fields + for fieldname, field_data in data['fields'].items(): + parsed_data['fields'][fieldname] = dict( + title=field_data.get('title', fieldname.upper()), + help=field_data.get('help', ''), + ) + field_format = 
field_data.get('format', 'string') + if field_format not in valid_formats.keys(): + field_format = 'unknown' + parsed_data['fields'][fieldname]['format'] = field_format + default_value = valid_formats[field_format](field_data.get('default', '')) + parsed_data['fields'][fieldname]['default'] = default_value + enum = field_data.get('enum', None) + if enum and isinstance(enum, (list, tuple)): + enum = list(map(valid_formats[field_format], enum)) + parsed_data['fields'][fieldname]['enum'] = enum + # Parse playbooks for execution + for playbook, pb_data in data['playbooks'].items(): + parsed_data['playbooks'][playbook] = dict( + title=pb_data.get('title', playbook.replace('.yml', '')), + help=pb_data.get('help', ''), + ) + return parsed_data + + def get_yaml(self): + yaml_path = '/'.join([self.path, '.polemarch.yaml']).replace('//', '/') + if not (os.path.exists(yaml_path) and os.path.isfile(yaml_path)): + return + cache = self.get_yaml_subcache() + cache_data = cache.get() or None + if cache_data: + return cache_data + try: + cache.clear() + with open(yaml_path, 'r') as fd: + data = load(fd.read(), Loader=Loader) + cache.set(data) + return data + except: # nocv + logger.debug(traceback.format_exc()) + return cache_data + + @property + @raise_context() + def execute_view_data(self): + cached_view_data = self.get_yaml_subcache('view').get() + if cached_view_data: + return cached_view_data + yaml_data = self.get_yaml() or {} + view_data = yaml_data.get('view', None) + if view_data: + view_data = self.__parse_yaml_view(view_data) + self.get_yaml_subcache('view').set(view_data) + return view_data + def check_path(self, inventory): if not isinstance(inventory, (six.string_types, six.text_type)): return diff --git a/polemarch/main/models/utils.py b/polemarch/main/models/utils.py index 314a63c5..53ee567e 100644 --- a/polemarch/main/models/utils.py +++ b/polemarch/main/models/utils.py @@ -56,10 +56,13 @@ def save(self): class Executor(CmdExecutor): + __slots__ = 'history', 
'counter', 'exchanger' + def __init__(self, history): super(Executor, self).__init__() self.history = history self.counter = 0 + self.exchanger = KVExchanger(self.CANCEL_PREFIX + str(self.history.id)) @property def output(self): @@ -84,7 +87,6 @@ def write_output(self, line): def execute(self, cmd, cwd): pm_ansible_path = ' '.join(self.pm_ansible()) self.history.raw_args = " ".join(cmd).replace(pm_ansible_path, '').lstrip() - self.exchanger = KVExchanger(self.CANCEL_PREFIX + str(self.history.id)) return super(Executor, self).execute(cmd, cwd) diff --git a/polemarch/main/repo/_base.py b/polemarch/main/repo/_base.py index afa6aba9..2fee7628 100644 --- a/polemarch/main/repo/_base.py +++ b/polemarch/main/repo/_base.py @@ -9,11 +9,14 @@ from six.moves.urllib.request import urlretrieve from django.db import transaction +from vstutils.utils import raise_context logger = logging.getLogger("polemarch") class _Base(object): + __slots__ = 'options', 'proj', 'path' + regex = r"(^[\w\d\.\-_]{1,})\.yml" def __init__(self, project, **options): @@ -24,7 +27,103 @@ def __init__(self, project, **options): def _set_status(self, status): self.proj.set_status(status) + @raise_context() + def _load_yaml(self): + ''' + Loading `.polemarch.yaml` data. + + :return: Data from `.polemarch.yaml` file. + :type ret: dict + ''' + self.proj.get_yaml_subcache().clear() + return self.proj.get_yaml() + + def message(self, message, level='debug'): + getattr(logger, level.lower(), logger.debug)( + 'Syncing project [{}] - {}'.format(self.proj.id, message) + ) + + def pm_handle_sync_on_run(self, feature, data): + ''' + Set sync_on_run if it is setted in `.polemarch.yaml`. 
+ + :param feature: feature name + :param data: all data from file + ''' + value = str(data[feature]) + _, created = self.proj.variables.update_or_create( + key='repo_sync_on_run', defaults=dict(value=value) + ) + self.message( + '{} repo_sync_on_run to {}'.format('Set' if created else 'Update', value) + ) + + @raise_context() + def __create_template(self, template_name, template_data): + ''' + Creates one template from `.polemarch.yaml`. + + :param template_name: Template name + :param template_data: Template data + :return: created Template object + ''' + self.message('Loading template[{}] into the project.'.format(template_name)) + return self.proj.template.create(name=template_name, **template_data) + + def pm_handle_templates(self, feature, data): + ''' + Get and create (if is not existed) templates from `.polemarch.yaml`. + + :param feature: feature name + :param data: all data from file + ''' + rewrite = data.get('templates_rewrite', False) + data = data[feature] + qs_existed = self.proj.template.filter(name__in=data.keys()) + existed = qs_existed.values_list('name', flat=True) + for template_name, template_data in data.items(): + if not rewrite and template_name in existed: + self.message('Template[{}] already in project.'.format(template_name)) + continue + self.__create_template(template_name, template_data) + + def pm_handle_view(self, feature, data): + ''' + Clear view data from cache + + :param feature: feature name + :param data: all data from file + ''' + # pylint: disable=unused-argument + self.proj.get_yaml_subcache('view').clear() + self.message(self.proj.execute_view_data, 'debug') + + def pm_handle_unknown(self, feature, data): # nocv + ''' + Logging unknowing data from `.polemarch.yaml`. 
+ ''' + self.message('{} - this feature is not realised yet.'.format(feature), 'info') + logger.debug(str(data)) + + def _handle_yaml(self, data): + """ + Loads and returns data from `.polemarch.yaml` file + + :rtype: dict + """ + for feature in data.keys(): + if feature in ['templates_rewrite', ]: + continue + self.message('Set settings from ".polemarch.yaml" - {}.'.format(feature)) + feature_name = 'pm_handle_{}'.format(feature) + getattr(self, feature_name, self.pm_handle_unknown)(feature, data) + def _set_tasks_list(self, playbooks_names): + """ + Updates playbooks in project. + + :rtype: None + """ # pylint: disable=invalid-name project = self.proj project.playbook.all().delete() @@ -40,11 +139,24 @@ def _set_tasks_list(self, playbooks_names): PlaybookModel.objects.bulk_create(playbook_objects) if playbook_objects else None def _update_tasks(self, files): + ''' + Find and update playbooks in project. + :param files: list of filenames. + :type files: list, tuple + :rtype: None + ''' reg = re.compile(self.regex) playbooks = filter(reg.match, files) self._set_tasks_list(playbooks) def _get_files(self, repo=None): + ''' + Get all files, where playbooks should be. + :param repo: Repo object + :type repo: object, None + :return: list of files in dir + :rtype: list + ''' # pylint: disable=unused-argument return os.listdir(self.path) @@ -52,15 +164,22 @@ def _operate(self, operation, **kwargs): return operation(kwargs) def _make_operations(self, operation): + ''' + Handle VCS operations and sync data from project. + + :param operation: function that should be hdandled. 
+ :return: tuple with repo-object and fetch-results + ''' self._set_status("SYNC") try: with transaction.atomic(): result = self._operate(operation) self._set_status("OK") self._update_tasks(self._get_files(result[0])) + self._handle_yaml(self._load_yaml() or dict()) except Exception as err: - logger.info(traceback.format_exc()) - logger.error("Project[{}] sync error:\n{}".format(self.proj, err)) + logger.debug(traceback.format_exc()) + self.message('Sync error: {}'.format(err), 'error') self._set_status("ERROR") raise else: @@ -90,6 +209,11 @@ def get_branch_name(self): return "NO VCS" def delete(self): + ''' + Handler, which removes project data directory. + + :return: user message + ''' if os.path.exists(self.path): if os.path.isfile(self.path): os.remove(self.path) # nocv diff --git a/polemarch/main/repo/vcs.py b/polemarch/main/repo/vcs.py index 99b25f02..525af721 100644 --- a/polemarch/main/repo/vcs.py +++ b/polemarch/main/repo/vcs.py @@ -17,6 +17,8 @@ def get_repo(self, *args, **kwargs): class Git(_VCS): + __slots__ = 'env', '_fetch_map' + _fetch_statuses = [ "NEW_TAG", "NEW_HEAD", "HEAD_UPTODATE", "TAG_UPDATE", "REJECTED", "FORCED_UPDATE", diff --git a/polemarch/main/tasks/tasks.py b/polemarch/main/tasks/tasks.py index eb1613cb..9822486f 100644 --- a/polemarch/main/tasks/tasks.py +++ b/polemarch/main/tasks/tasks.py @@ -13,6 +13,7 @@ @task(app, ignore_result=True, default_retry_delay=1, max_retries=clone_retry, bind=True) class RepoTask(BaseTask): + __slots__ = 'project', 'operation' accepted_operations = ["clone", "sync"] class RepoTaskError(TaskError): @@ -37,6 +38,8 @@ def run(self): @task(app, ignore_result=True, bind=True) class ScheduledTask(BaseTask): + __slots__ = 'job_id', + def __init__(self, app, job_id, *args, **kwargs): super(self.__class__, self).__init__(app, *args, **kwargs) self.job_id = job_id diff --git a/polemarch/main/tests/executions.py b/polemarch/main/tests/executions.py index b9b0ab83..cde9ab59 100644 --- 
a/polemarch/main/tests/executions.py +++ b/polemarch/main/tests/executions.py @@ -6,6 +6,7 @@ import git from datetime import timedelta from django.utils.timezone import now +from yaml import dump from ._base import BaseTestCase, os from ..tasks import ScheduledTask @@ -21,6 +22,90 @@ ping: ''' +test_yaml_templates = { + 'test module': { + "notes": "Module test template.", + "kind": "Module", + "data": { + "group": "all", + "vars": {}, + "args": "", + "module": "ping", + "inventory": 'localhost,' + }, + "options": { + "uptime": { + "args": "uptime", + "module": "shell" + }, + } + }, + 'test playbook': { + "notes": "Playbook test template.", + "kind": "Task", + "data": { + "vars": { + "become": True + }, + "playbook": "main.yml", + "inventory": 'localhost,' + }, + "options": { + "update": { + "playbook": "other.yml" + } + } + } +} + +test_yaml_view = { + 'fields': { + 'string': { + 'title': 'Field string', + 'default': 0, + 'format': 'string', + 'help': 'Some help text' + }, + 'integer': { + 'title': 'Field integer', + 'default': 0, + 'format': 'integer', + 'help': 'Some help text' + }, + 'float': { + 'title': 'Field float', + 'default': 0, + 'format': 'float', + 'help': 'Some help text' + }, + 'boolean': { + 'title': 'Field boolean', + 'default': 0, + 'format': 'boolean', + 'help': 'Some help text' + }, + 'enum_string': { + 'title': 'Field enum_string', + 'default': 0, + 'format': 'string', + 'help': 'Some help text', + 'enum': list(range(10)) + }, + 'unknown': { + 'title': 'Field unknown', + 'default': 0, + 'format': 'invalid_or_unknown', + 'help': 'Some help text' + }, + }, + 'playbooks': { + 'main.yml': { + 'title': 'Execute title', + 'help': 'Some help text' + } + } +} + class Object(object): pass @@ -302,7 +387,9 @@ def _check_copy_project(self, id, **kwargs): self.assertEqual(results[0]['data']['status'], 'NEW') self.assertEqual(results[1]['data']['count'], len(obj.vars)) for value in results[1]['data']['results']: - self.assertIn(value['value'], 
[obj.vars[value['key']], '[~~ENCRYPTED~~]']) + self.assertIn( + value['value'], [obj.vars[value['key']], '[~~ENCRYPTED~~]'], value + ) def project_workflow(self, repo_type, **kwargs): execute = kwargs.pop('execute', False) @@ -326,6 +413,19 @@ def get_file_path(self, name, path): return "{}/{}".format(path, name) def generate_playbook(self, path, name='test', count=1, data=test_playbook_content): + ''' + Generate playbooks in project path + + :param path: path, where playbook will appear + :type path: str,unicode + :param name: filename pattern or list of names for playbooks + :type name: str,unicode,list,tuple + :param count: count files for pattern + :type count: int + :param data: playbook data + :type data: str,bytes,unicode + :return: + ''' files = [] if isinstance(name, (list, tuple)): _files = name[:count or len(name)] @@ -438,23 +538,61 @@ def wip_manual(self, project_data): return dict(playbook_count=len(files), execute=True) def wip_git(self, project_data): + # Check brunch and revision self.assertEqual(project_data['revision'], self.revisions[-1]) self.assertEqual(project_data['branch'], 'master') + # Update branch new_branch_var = dict(key='repo_branch', value='new_branch') self.make_bulk([ self.get_mod_bulk('project', project_data['id'], new_branch_var) ]) project_data = self.sync_project(project_data['id']) + # Check updated brunch and revision self.assertEqual(project_data['revision'], self.revisions[0]) self.assertEqual(project_data['branch'], 'new_branch') + # Return old branch new_branch_var['value'] = 'master' - self.make_bulk([ - self.get_mod_bulk('project', project_data['id'], new_branch_var) - ]) - repo_autosync_var = dict(key='repo_sync_on_run', value='True') - self.make_bulk([ - self.get_mod_bulk('project', project_data['id'], repo_autosync_var) + results = self.make_bulk([ + self.get_mod_bulk('project', project_data['id'], new_branch_var), + self.get_mod_bulk( + 'project', project_data['id'], {}, + method='get', 
filters='key=repo_sync_on_run' + ), + self.get_mod_bulk('project', project_data['id'], {}, 'template', 'get'), ]) + # Check synced templates + self.assertTrue(results[1]['data']['results'][0]['value']) + self.assertEqual(results[2]['data']['count'], 2) + for template in results[2]['data']['results']: + origin_template_data = test_yaml_templates[template['name']] + for option in origin_template_data['options'].keys(): + self.assertIn(option, template['options_list']) + # Check extra execute-view data in project + extra_view_data = project_data['execute_view_data'] + for field_name in test_yaml_view['fields']: + self.assertIn(field_name, extra_view_data['fields'].keys()) + field = extra_view_data['fields'][field_name] + for required_field in ['title', 'default', 'format', 'help']: + self.assertIn(required_field, field.keys()) + self.assertEqual(field_name.split('_')[-1], field['format'], field) + default_type = (six.string_types, six.text_type) + if field['format'] == 'boolean': + default_type = bool + elif field['format'] == 'integer': + default_type = int + elif field['format'] == 'float': + default_type = float + self.assertTrue(isinstance(field['default'], default_type), field) + if field_name == 'enum_string': + self.assertIn('enum', field.keys()) + self.assertTrue(isinstance(field['enum'], (list, tuple))) + for value in field['enum']: + self.assertTrue(isinstance(value, default_type)) + for playbook_name in test_yaml_view['playbooks']: + self.assertIn(playbook_name, extra_view_data['playbooks'].keys()) + playbook = extra_view_data['playbooks'][playbook_name] + for required_field in ['title', 'help']: + self.assertIn(required_field, playbook.keys()) return dict(playbook_count=len(self.revisions), execute=True) def make_test_templates(self, project_data): @@ -775,8 +913,8 @@ def get_bulk_readme(): self.assertEqual( get_bulk_readme()['readme_content'], - "
bold" + - " \n italic
\n" + 'bold' + + ' \n italic
\n' ) with open(project.path+"/readme.rst", "w") as f: f.write("test README.rst \n **bold** \n *italic* \n") @@ -842,16 +980,28 @@ def over_download(*args, **kwargs): self.assertEqual(results[-1]['data']['results'][-2]['status'], 'ERROR') def test_project_git(self): + # Prepare .polemarch.yaml + pm_yaml = dict() + # sync_on_run + pm_yaml['sync_on_run'] = True + # templates + pm_yaml['templates'] = test_yaml_templates + pm_yaml['templates_rewrite'] = False + # fast task widget + pm_yaml['view'] = test_yaml_view # Prepare repo self.repo_dir = tempfile.mkdtemp() self.generate_playbook(self.repo_dir, ['main.yml']) + self.generate_playbook(self.repo_dir, ['.polemarch.yaml'], data=dump(pm_yaml)) repo = git.Repo.init(self.repo_dir) - repo.index.add(["main.yml"]) + repo.index.add(["main.yml", ".polemarch.yaml"]) repo.index.commit("no message") first_revision = repo.head.object.hexsha repo.create_head('new_branch') + pm_yaml['sync_on_run'] = False + self.generate_playbook(self.repo_dir, ['.polemarch.yaml'], data=dump(pm_yaml)) self.generate_playbook(self.repo_dir, ['other.yml']) - repo.index.add(["other.yml"]) + repo.index.add(["other.yml", ".polemarch.yaml"]) repo.index.commit("no message 2") second_revision = repo.head.object.hexsha diff --git a/polemarch/main/tests/hosts.py b/polemarch/main/tests/hosts.py index bb0b8bdf..9c52a801 100644 --- a/polemarch/main/tests/hosts.py +++ b/polemarch/main/tests/hosts.py @@ -274,6 +274,13 @@ def test_import_inventory(self): 'all_groups', method='get' ), + self.get_mod_bulk( + 'inventory', + '<0[data][id]>', + {}, + 'variables', + method='get' + ), ] results = self.make_bulk(bulk_data, 'put') self.assertEqual( @@ -288,7 +295,13 @@ def test_import_inventory(self): results[2]['data']['count'], len(valid_inventory['groups']) ) + self.assertEqual( + results[3]['data']['count'], + len(valid_inventory['vars']) + ) for host in results[1]['data']['results']: self.assertIn(host['name'], valid_inventory['hosts'].keys()) for group in 
results[2]['data']['results']: self.assertIn(group['name'], valid_inventory['groups'].keys()) + for variable in results[3]['data']['results']: + self.assertIn(variable['key'], valid_inventory['vars'].keys()) diff --git a/polemarch/main/unittests/ansible.py b/polemarch/main/unittests/ansible.py index 4492df08..ae33bf75 100644 --- a/polemarch/main/unittests/ansible.py +++ b/polemarch/main/unittests/ansible.py @@ -18,6 +18,9 @@ [parent-group:children] child-group + +[all:vars] +ansible_connection=ssh ''' valid_inventory = { 'groups': { @@ -61,7 +64,9 @@ 'vars': {} } }, - + 'vars': { + 'ansible_connection': 'ssh', + }, } @@ -101,3 +106,10 @@ def test_inventory_parser(self): ) for key, value in valid_inventory['hosts'][record['name']]['vars'].items(): self.assertEqual(record['vars'][key], value) + + self.assertEqual( + list(valid_inventory['vars'].keys()), + list(inv_json['vars'].keys()) + ) + for key, value in valid_inventory['vars'].items(): + self.assertEqual(inv_json['vars'][key], value) diff --git a/polemarch/main/utils.py b/polemarch/main/utils.py index 7d5807ec..3c8dd8ed 100644 --- a/polemarch/main/utils.py +++ b/polemarch/main/utils.py @@ -40,6 +40,7 @@ def project_path(): class PMObject(object): + __slots__ = '__pm_ansible__', '__django_settings__' def pm_ansible(self, *args): # pylint: disable=access-member-before-definition @@ -62,6 +63,8 @@ class CmdExecutor(PMObject): ''' Command executor with realtime output write ''' + __slots__ = 'output', '_stdout', '_stderr' + CANCEL_PREFIX = "CANCEL_EXECUTE_" newlines = ['\n', '\r\n', '\r'] @@ -165,6 +168,8 @@ class SomeTask2(BaseTask): def run(self): return "Result of task" ''' + __slots__ = 'app', 'args', 'kwargs' + def __init__(self, app, *args, **kwargs): ''' :param app: -- CeleryApp object @@ -191,6 +196,8 @@ class BaseTask(PMObject): ''' BaseTask class for all tasks. 
''' + __slots__ = 'app', 'args', 'kwargs', 'task_class' + def __init__(self, app, *args, **kwargs): ''' :param app: -- CeleryApp object @@ -213,19 +220,22 @@ def run(self): # pragma: no cover raise NotImplementedError -class AnsibleCache(PMObject): +class SubCacheInterface(PMObject): + __slots__ = 'prefix', 'timeout', 'cache' + cache_name = "subcache" + def __init__(self, prefix, timeout=86400*7): from django.core.cache import caches, InvalidCacheBackendError self.prefix = prefix self.timeout = timeout try: - self.cache = caches["ansible"] + self.cache = caches[self.cache_name] except InvalidCacheBackendError: self.cache = caches["default"] @property def key(self): - return 'ansible-{}'.format(self.prefix) + return '{}-{}'.format(self.cache_name, self.prefix) def set(self, value): self.cache.set(self.key, dump(value, Dumper=Dumper), self.timeout) @@ -238,7 +248,12 @@ def clear(self): self.set(None) +class AnsibleCache(SubCacheInterface): + cache_name = "ansible" + + class PMAnsible(PMObject): + __slots__ = () # Json regex _regex = re.compile(r"([\{\[][^\w\d\.].*[\}\]]$)", re.MULTILINE) ref_name = 'object' @@ -275,6 +290,8 @@ def clear_cache(self): class AnsibleArgumentsReference(PMAnsible): + __slots__ = 'raw_dict', 'version' + ref_name = 'reference' # Excluded args from user calls _EXCLUDE_ARGS = [ @@ -300,6 +317,7 @@ def is_valid_value(self, command, argument, value): return True def validate_args(self, command, args): + argument = None try: for argument, value in args.items(): self.is_valid_value(command, argument, value) @@ -334,6 +352,7 @@ def _extract_from_cli(self): class AnsibleModules(PMAnsible): + __slots__ = 'detailed', 'key' ref_name = 'modules' def __init__(self, detailed=False): @@ -367,8 +386,15 @@ def get(self, key=""): class AnsibleInventoryParser(PMAnsible): + __slots__ = 'path', ref_name = 'inventory_parser' + def get_ansible_cache(self): + cache = super(AnsibleInventoryParser, self).get_ansible_cache() + cache.get = lambda: None + cache.set = 
lambda value: None + return cache + def get_args(self): args = super(AnsibleInventoryParser, self).get_args() args += [self.path] diff --git a/polemarch/static/css/polemarch-gui.css b/polemarch/static/css/polemarch-gui.css index f0871310..72323b49 100644 --- a/polemarch/static/css/polemarch-gui.css +++ b/polemarch/static/css/polemarch-gui.css @@ -60,6 +60,7 @@ @media (max-width: 1050px) { .td_history_initiator, .td_project-history_initiator, + .td_history_revision, .td_project-history_revision { display: none; } diff --git a/polemarch/static/js/common.js b/polemarch/static/js/common.js index fc26252d..e946ec82 100644 --- a/polemarch/static/js/common.js +++ b/polemarch/static/js/common.js @@ -1,8 +1,8 @@ if(window.moment && window.moment.tz) -{ +{ window.moment.tz.setDefault(window.timeZone); } - + if(guiLocalSettings.get('hideMenu')) { if(window.innerWidth>767){ @@ -12,45 +12,45 @@ if(guiLocalSettings.get('hideMenu')) function setActiveMenuLiBase() { - if(/\#project/.test(window.location.href)) + if(/\#\/project/.test(window.location.href)) { $("#Projects").addClass("active active-li active-bold"); } - else if(/\#host/.test(window.location.href)) + else if(/\#\/host/.test(window.location.href)) { $("#menu-inventories").addClass("menu-treeview-active active active-li"); $("#menu-inventories-hosts").addClass("active-bold"); $("#menu-inventories").removeClass("menu-treeview"); } - else if(/\#group/.test(window.location.href)) + else if(/\#\/group/.test(window.location.href)) { $("#menu-inventories").addClass("menu-treeview-active active active-li"); $("#menu-inventories-groups").addClass("active-bold"); $("#menu-inventories").removeClass("menu-treeview"); } - else if(/\#inventory/.test(window.location.href)) + else if(/\#\/inventory/.test(window.location.href)) { $("#menu-inventories").addClass("menu-treeview-active active active-li active-bold"); $("#menu-inventories-inventories").addClass("active-bold"); $("#menu-inventories").removeClass("menu-treeview"); } - else 
if(/\#history/.test(window.location.href)){ + else if(/\#\/history/.test(window.location.href)){ $("#History").addClass("active active-li active-bold"); } - else if(/\#hook/.test(window.location.href)) + else if(/\#\/hook/.test(window.location.href)) { $("#menu-system").addClass("menu-treeview-active active active-li"); $("#menu-system-hooks").addClass("active-bold"); $("#menu-system").removeClass("menu-treeview"); } - else if(/\#team/.test(window.location.href)) + else if(/\#\/team/.test(window.location.href)) { $("#menu-system").addClass("menu-treeview-active active active-li"); $("#menu-system-teams").addClass("active-bold"); $("#menu-system").removeClass("menu-treeview"); } - else if(/\#user/.test(window.location.href) || /\#profile/.test(window.location.href)) + else if(/\#\/user/.test(window.location.href) || /\#profile/.test(window.location.href)) { $("#menu-system").addClass("menu-treeview-active active active-li"); $("#menu-system-users").addClass("active-bold"); @@ -139,4 +139,13 @@ tabSignal.connect("loading.completed", function() setActiveMenuLiBase(); // Добавляем файл тестов к списку файлов для тестов гуя -window.guiTestsFiles.push(hostname + window.guiStaticPath + 'js/tests/pmUnitTest.js') +/**/ +window.guiTestsFiles.push(hostname + window.guiStaticPath + 'js/tests/pmHook.js') +window.guiTestsFiles.push(hostname + window.guiStaticPath + 'js/tests/pmHosts.js') +window.guiTestsFiles.push(hostname + window.guiStaticPath + 'js/tests/pmUsers.js') +window.guiTestsFiles.push(hostname + window.guiStaticPath + 'js/tests/pmGroups.js') +window.guiTestsFiles.push(hostname + window.guiStaticPath + 'js/tests/pmHistory.js') +window.guiTestsFiles.push(hostname + window.guiStaticPath + 'js/tests/pmDashboard.js') +window.guiTestsFiles.push(hostname + window.guiStaticPath + 'js/tests/pmInventories.js') +window.guiTestsFiles.push(hostname + window.guiStaticPath + 'js/tests/pmProjects.js') + diff --git a/polemarch/static/js/pmGroups.js b/polemarch/static/js/pmGroups.js 
index 64471cfb..f0c11496 100644 --- a/polemarch/static/js/pmGroups.js +++ b/polemarch/static/js/pmGroups.js @@ -1,29 +1,26 @@ tabSignal.connect("openapi.schema", function(data) -{ - window.guiSchema.path["/group/{pk}/"].schema.edit.fields.children.readOnly = true -}) +{ + window.guiSchema.path["/group/{pk}/"].schema.edit.fields.children.readOnly = true -tabSignal.connect("openapi.schema", function(data) -{ // Adding links to the link scheme window.guiSchema.path["/group/{pk}/group/{group_id}/"].links['__link__group'] = "/group/{pk}/" window.guiSchema.path["/group/{pk}/group/"].sublinks_l2['__link__group'] = "/group/{pk}/" window.guiSchema.path["/group/{pk}/group/{group_id}/"].links['__link__host'] = "/host/{pk}/" window.guiSchema.path["/group/{pk}/group/"].sublinks_l2['__link__host'] = "/host/{pk}/" - + window.guiSchema.path["/inventory/{pk}/group/{group_id}/"].links['__link__group'] = "/group/{pk}/" - window.guiSchema.path["/inventory/{pk}/group/"].sublinks_l2['__link__group'] = "/group/{pk}/" - + window.guiSchema.path["/inventory/{pk}/group/"].sublinks_l2['__link__group'] = "/group/{pk}/" + window.guiSchema.path["/inventory/{pk}/group/{group_id}/"].links['__link__host'] = "/host/{pk}/" - window.guiSchema.path["/inventory/{pk}/group/"].sublinks_l2['__link__host'] = "/host/{pk}/" - + window.guiSchema.path["/inventory/{pk}/group/"].sublinks_l2['__link__host'] = "/host/{pk}/" + window.guiSchema.path["/project/{pk}/inventory/{inventory_id}/group/{group_id}/"].links['__link__group'] = "/group/{pk}/" - window.guiSchema.path["/project/{pk}/inventory/{inventory_id}/group/"].sublinks_l2['__link__group'] = "/group/{pk}/" - + window.guiSchema.path["/project/{pk}/inventory/{inventory_id}/group/"].sublinks_l2['__link__group'] = "/group/{pk}/" + window.guiSchema.path["/project/{pk}/inventory/{inventory_id}/group/{group_id}/"].links['__link__host'] = "/host/{pk}/" - window.guiSchema.path["/project/{pk}/inventory/{inventory_id}/group/"].sublinks_l2['__link__host'] = "/host/{pk}/" 
+ window.guiSchema.path["/project/{pk}/inventory/{inventory_id}/group/"].sublinks_l2['__link__host'] = "/host/{pk}/" }) tabSignal.connect("guiList.renderLine.group", function(obj){ @@ -37,26 +34,26 @@ tabSignal.connect("guiList.renderLine.group", function(obj){ if(obj.dataLine.sublinks_l2['group']) { - obj.dataLine.sublinks_l2['group'].hidden = false + obj.dataLine.sublinks_l2['group'].hidden = false } } else - { + { if(obj.dataLine.sublinks_l2['host']) { - obj.dataLine.sublinks_l2['host'].hidden = false + obj.dataLine.sublinks_l2['host'].hidden = false } - + if(obj.dataLine.sublinks_l2['group']) { - obj.dataLine.sublinks_l2['group'].hidden = true + obj.dataLine.sublinks_l2['group'].hidden = true } } - + }) tabSignal.connect("guiList.renderPage.group", function(obj){ - + if(obj.data.children) { if(obj.options.links['host']) @@ -65,19 +62,19 @@ tabSignal.connect("guiList.renderPage.group", function(obj){ } if(obj.options.links['group']) { - obj.options.links['group'].hidden = false + obj.options.links['group'].hidden = false } } else - { + { if(obj.options.links['host']) { - obj.options.links['host'].hidden = false + obj.options.links['host'].hidden = false } if(obj.options.links['group']) { - obj.options.links['group'].hidden = true + obj.options.links['group'].hidden = true } } - + }) diff --git a/polemarch/static/js/pmGuiElements.js b/polemarch/static/js/pmGuiElements.js deleted file mode 100644 index e69de29b..00000000 diff --git a/polemarch/static/js/pmHistory.js b/polemarch/static/js/pmHistory.js index f3519c89..77f2885e 100644 --- a/polemarch/static/js/pmHistory.js +++ b/polemarch/static/js/pmHistory.js @@ -1,6 +1,6 @@ gui_history = { - + loadLines : function(item_id, opt) { var thisObj = this; @@ -13,13 +13,13 @@ gui_history = { { opt.offset = 0; } - + let query = { - method: "get", - data_type: ["history", item_id, "lines"], - filters:"limit="+opt.limit+"&offset="+opt.offset - } - + method: "get", + data_type: ["history", item_id, "lines"], + 
filters:"limit="+opt.limit+"&offset="+opt.offset + } + if(opt.before !== undefined) { query.filters += "&before="+opt.before; @@ -28,7 +28,7 @@ gui_history = { { query.filters += "&after="+opt.after; } - + let def = new $.Deferred(); $.when(api.query(query)).done(function(data) { @@ -44,7 +44,7 @@ gui_history = { thisObj.model.lines_data.stdout_maxline = 0 thisObj.model.lines_data.stdout_minline = 999999999 } - + thisObj.model.lines_data.stdout_count = data.count; for(var i in data.results) { @@ -75,7 +75,7 @@ gui_history = { webGui.showErrors(e) def.reject(e) }) - + return def.promise(); }, @@ -83,7 +83,7 @@ gui_history = { { jQuery('#history-stdout').scrollTop(9999999); }, - + linePerPage:1000, loadNewLines : function(item_id, last_stdout_maxline) { @@ -98,9 +98,9 @@ gui_history = { { last_stdout_maxline = 0; } - + return $.when(this.load(item_id), this.loadLines(item_id, {after:last_stdout_maxline, limit:this.linePerPage})).always(function() - { + { var addData = false; var history_stdout = $("#history-stdout"); if(!history_stdout || !history_stdout.length) @@ -148,7 +148,7 @@ gui_history = { } }).promise() }, - + /** * Подсветка синтаксиса * @link https://habrahabr.ru/post/43030/ @@ -191,7 +191,7 @@ gui_history = { bindStdoutUpdates : function(item_id) { var thisObj = this; - tabSignal.once("spajs.open", () => { + tabSignal.once("spajs.open", () => { clearTimeout(this.model.loadNewLines_timeoutId) this.model.loadNewLines_timeoutId = undefined; }) @@ -248,16 +248,47 @@ gui_history = { } }); }); - } + }, + + onUpdateFromServer : function () + { + if(this.model.data.status == 'DELAY' || this.model.data.status == 'RUN') + { + if(this.api.actions['cancel']) + { + this.api.actions['cancel'].hidden = false; + $('.btn_cancel').addClass('hidden-false').removeClass('hidden-true'); + } + } + else + { + if(this.api.actions['cancel']) + { + this.api.actions['cancel'].hidden = true; + $('.btn_cancel').addClass('hidden-true').removeClass('hidden-false'); + } + } + + 
if(this.model.data.status == 'OK' && this.model.data.kind == 'MODULE' && this.model.data.mode == "setup") + { + this.api.sublinks['facts'].hidden = false; + $('.sublink-btn-facts').addClass('hidden-false').removeClass('hidden-true'); + } + else + { + this.api.sublinks['facts'].hidden = true + $('.sublink-btn-facts').addClass('hidden-true').removeClass('hidden-false'); + } + }, } gui_project_history = gui_history -tabSignal.connect("guiList.renderPage.history", function(params){ +tabSignal.connect("guiList.renderPage.history", function(params){ params.guiObj.bindStdoutUpdates(params.guiObj.model.data.id); -}); - - +}); + + function format_history_time(opt) { if(opt.value) @@ -278,6 +309,16 @@ function format_executor(opt) return 'system'; } +function format_revision(opt) +{ + if(opt.value) + { + return opt.value.substr(0, 8); + } + + return ""; +} + function get_prefetch_history_executor_path(data_obj) { return "/user/" @@ -319,7 +360,7 @@ function get_prefetch_history_initiator_path_2(data_obj) function addHistoryPrefetchBase(obj){ let properties = obj.definition.properties - + if(properties['executor']) { properties['executor']['prefetch'] = { @@ -374,6 +415,10 @@ function addSettingsToHistoryListsFields(obj) properties['start_time'].__func__value = 'format_history_time'; properties['stop_time'].__func__value = 'format_history_time'; properties['executor'].__func__value = 'format_executor'; + if(properties['revision']) + { + properties['revision'].__func__value = 'format_revision'; + } } function addSettingsToOneHistoryFields(obj) @@ -393,15 +438,15 @@ tabSignal.connect("openapi.schema.definition.OneHistory", addSettingsToOneHistor //tabSignal.connect("openapi.schema.definition.History", hideFields); function hideFields(obj){ - + let properties = obj.definition.properties; - + if(properties['options']) properties['options'].type = 'hidden'; if(properties['raw_args']) properties['raw_args'].type = 'hidden'; if(properties['raw_stdout']) properties['raw_stdout'].type 
= 'hidden'; if(properties['raw_inventory']) properties['raw_inventory'].type = 'hidden'; if(properties['initiator_type']) properties['initiator_type'].type = 'hidden'; - + } tabSignal.connect("openapi.schema.definition.ProjectHistory", hideFields); @@ -410,46 +455,41 @@ tabSignal.connect("openapi.schema.definition.OneHistory", hideFields); tabSignal.connect("guiList.renderLine.history", function(obj){ - - if(obj.dataLine.line.status != 'RUN' && obj.dataLine.line.status != 'DELAY') + + if(!(obj.dataLine.line.status == 'RUN' || obj.dataLine.line.status == 'DELAY')) { - if(obj.dataLine.sublinks_l2['clear']) + if(obj.dataLine.sublinks_l2['cancel']) { - obj.dataLine.sublinks_l2['clear'].hidden = false - } - } - else - { + obj.dataLine.sublinks_l2['cancel'].hidden = true + } + if(obj.dataLine.sublinks_l2['clear']) { - obj.dataLine.sublinks_l2['clear'].hidden = true + obj.dataLine.sublinks_l2['clear'].hidden = false } } - - if(obj.dataLine.line.status == 'RUN' && obj.dataLine.line.status == 'DELAY') + else { if(obj.dataLine.sublinks_l2['cancel']) { obj.dataLine.sublinks_l2['cancel'].hidden = false - } - } - else - { - if(obj.dataLine.sublinks_l2['cancel']) + } + + if(obj.dataLine.sublinks_l2['clear']) { - obj.dataLine.sublinks_l2['cancel'].hidden = true + obj.dataLine.sublinks_l2['clear'].hidden = true } } - + if(obj.dataLine.line.status == 'OK' && obj.dataLine.line.kind == 'MODULE' && obj.dataLine.line.mode == "setup") { if(obj.dataLine.sublinks_l2['facts']) { obj.dataLine.sublinks_l2['facts'].hidden = false - } + } } else - { + { if(obj.dataLine.sublinks_l2['facts']) { obj.dataLine.sublinks_l2['facts'].hidden = true @@ -458,49 +498,49 @@ tabSignal.connect("guiList.renderLine.history", function(obj){ }) tabSignal.connect("guiList.renderPage.history", function(obj){ - - if(obj.data.status != 'RUN' && obj.data.status != 'DELAY') - { - if(obj.options.actions['clear']) - { - obj.options.actions['clear'].hidden = false - } - } - else - { - if(obj.options.actions['clear']) - { 
- obj.options.actions['clear'].hidden = true - } - } - + if(obj.data.status == 'OK' && obj.data.kind == 'MODULE' && obj.data.mode == "setup") { if(obj.options.links['facts']) { obj.options.links['facts'].hidden = false - } + } } else - { + { if(obj.options.links['facts']) { obj.options.links['facts'].hidden = true } } - + + obj.options.actions['clear'].hidden = true + if(obj.data.status == 'DELAY' || obj.data.status == 'RUN') { - if(obj.options.links['cancel']) + if(obj.options.actions['cancel']) { - obj.options.links['cancel'].hidden = false - } + obj.options.actions['cancel'].hidden = false + } } else - { - if(obj.options.links['cancel']) + { + if(obj.options.actions['cancel']) { - obj.options.links['cancel'].hidden = true + obj.options.actions['cancel'].hidden = true } } }) + +/** + * Function calls action, that cleans history Stdout. + * @param action_info(object) - action object + * @param obj(object) - object of history detail page + */ +function clearHistoryStdOut(action_info, obj) +{ + return $.when(emptyAction(action_info, obj)()).done(d => { + $('#history-stdout').html(d.data.detail); + }) +} + diff --git a/polemarch/static/js/pmInventories.js b/polemarch/static/js/pmInventories.js index 2863bb0d..8fde73ae 100644 --- a/polemarch/static/js/pmInventories.js +++ b/polemarch/static/js/pmInventories.js @@ -45,3 +45,12 @@ tabSignal.connect("openapi.schema.definition.InventoryVariable", function (obj) __func__callback: 'InventoryVariable_value_callback', } }) + +tabSignal.connect("openapi.schema.definition.InventoryImport", function (obj) { + let props = obj.definition.properties; + props['inventory_id'].hidden = true; + + props['raw_data'].format = 'file'; + props['raw_data'].title = 'Inventory file'; + +}) \ No newline at end of file diff --git a/polemarch/static/js/pmItems.js b/polemarch/static/js/pmItems.js deleted file mode 100644 index 0519ecba..00000000 --- a/polemarch/static/js/pmItems.js +++ /dev/null @@ -1 +0,0 @@ - \ No newline at end of file diff 
--git a/polemarch/static/js/pmPeriodicTasks.js b/polemarch/static/js/pmPeriodicTasks.js index 7c11f1ba..a04c961c 100644 --- a/polemarch/static/js/pmPeriodicTasks.js +++ b/polemarch/static/js/pmPeriodicTasks.js @@ -285,6 +285,8 @@ function signal_gui_schema_name_periodic_task(data){ data.value.fields.template.required = false data.value.fields.template.dynamic_properties = {} data.value.fields.template.dynamic_properties.__func__callback = "OnePeriodictask_template_callback" + + data.value.fields.schedule.dynamic_properties.types["INTERVAL"] = "uptime"; } tabSignal.connect("gui.schema.name.periodic_task.edit", signal_gui_schema_name_periodic_task) diff --git a/polemarch/static/js/pmProjects.js b/polemarch/static/js/pmProjects.js index 0bd63b60..6080eb3d 100644 --- a/polemarch/static/js/pmProjects.js +++ b/polemarch/static/js/pmProjects.js @@ -96,4 +96,191 @@ tabSignal.connect("openapi.schema.definition.AnsiblePlaybook", function(obj) { list_obj: [] } } -}) \ No newline at end of file +}) + + +tabSignal.connect("openapi.schema", function(data) +{ + window.guiSchema.path["/project/{pk}/"].schema.edit.fields.execute_view_data.format = 'null' +}) + + +gui_project = { + + polemarchYamlForm:undefined, + + hasForm:function() + { + return this.model && this.model.data && this.model.data.execute_view_data != null; + }, + + executePlaybook:function(data) + { + var def = new $.Deferred(); + + let q = { + data_type:["project", this.url_vars.api_pk, "execute_playbook"], + data:data, + method:'post' + } + + $.when(this.apiQuery(q)).done(data => + { + def.resolve(data) + vstGO(['project', this.url_vars.api_pk, 'history', data.history_id]) + }).fail(e => { + this.showErrors(e, q.method) + def.reject(e) + }) + def.reject() + + return def.promise(); + }, + + renderForm:function() + { + let thisObj = this; + let extra_fields = $.extend(true, {}, this.model.data.execute_view_data.fields) + for(let i in this.model.data.execute_view_data.playbooks) + { + let val = 
this.model.data.execute_view_data.playbooks[i] + + extra_fields[i] = { + title:val.title, + text:val.title, + description: val.help || val.description, + format:'formButton', + value: i, + class:'btn-primary', + onclick:function(){ + let val = thisObj.polemarchYamlForm.getValue() + val.playbook = this.getValue() + + delete val.extra_vars[val.playbook] + val.extra_vars = JSON.stringify(val.extra_vars); + + return thisObj.executePlaybook(val) + } + } + } + + let formData = { + title:"Deploy", + form:{ + 'inventory' : { + title:'inventory', + required:true, + format:'hybrid_autocomplete', + dynamic_properties:{ + list_obj: "/project/{pk}/inventory/", + value_field: "id", + view_field: "name", + } + }, + user:{ + title:'User', + description: "connect as this user (default=None)", + format:'string', + type: "string", + }, + key_file: { + title:'Key file', + description: "use this file to authenticate the connection", + format:'secretfile', + type: "string", + dynamic_properties:{ + list_obj: "/project/{pk}/inventory/", + value_field: "id", + view_field: "name", + } + }, + extra_vars: { + title:"Execute parametrs", + format:'form', + form:extra_fields + } + } + } + + this.polemarchYamlForm = new guiElements.form(undefined, formData); + return this.polemarchYamlForm.render(); + }, +} + +guiElements.form = function(opt = {}, value, parent_object) +{ + this.name = 'form' + guiElements.base.apply(this, arguments) + + this.realElements = {}; + + this.prepareFieldOptions = function(field) + { + if(field.enum) + { + field.format = "enum" + } + + return field + } + + this.setValue = function(value) + { + this.value = value + let realElements = {}; + if(value.form) + { + for(let i in value.form) + { + let field = value.form[i] + field.name = i + + field = this.prepareFieldOptions(field) + let type = getFieldType(field) + + realElements[i] = new guiElements[type]($.extend(true, {}, field), field.value); + } + } + + this.realElements = realElements + } + + if(opt.form && !value) + 
{ + this.setValue(opt) + } + else + { + this.setValue(value) + } + + this.insertTestValue = function(value) + { + this.setValue(value); + return value; + } + + this.getValue = function() + { + let valueObj = {}; + for(let element_name in this.realElements) + { + let element = this.realElements[element_name]; + valueObj[element_name] = element.getValue(); + } + + return this.reductionToType(valueObj); + } + + this.getValidValue = function() + { + let valueObj = {}; + for(let element_name in this.realElements) + { + let element = this.realElements[element_name]; + valueObj[element_name] = element.getValidValue(); + } + + return this.reductionToType(valueObj); + } +} \ No newline at end of file diff --git a/polemarch/static/js/tests/pmDashboard.js b/polemarch/static/js/tests/pmDashboard.js new file mode 100644 index 00000000..ae4ef4c4 --- /dev/null +++ b/polemarch/static/js/tests/pmDashboard.js @@ -0,0 +1,12 @@ + +window.qunitTestsArray['pmDashboard'] = { + test:function() + { + // Проверка того что страница открывается + guiTests.openPage("/") + + guiTests.wait(300); + + + } +} \ No newline at end of file diff --git a/polemarch/static/js/tests/pmGroups.js b/polemarch/static/js/tests/pmGroups.js new file mode 100644 index 00000000..2cf2a595 --- /dev/null +++ b/polemarch/static/js/tests/pmGroups.js @@ -0,0 +1,29 @@ + +window.qunitTestsArray['guiPaths.group'] = { + test:function() + { + let path = '/group/' + let params = { + create:[ + { + is_valid:true, + data:{ + notes:{ + value:rundomString(6) + } + }, + }, + ], + update:[ + { + is_valid:true, + data:{ + notes : {value:rundomString(6)}, + name : {value:rundomString(6)}, + }, + }, + ] + } + guiTests.testForPath(path, params) + } +} \ No newline at end of file diff --git a/polemarch/static/js/tests/pmHistory.js b/polemarch/static/js/tests/pmHistory.js new file mode 100644 index 00000000..02f5f2de --- /dev/null +++ b/polemarch/static/js/tests/pmHistory.js @@ -0,0 +1,7 @@ + +window.qunitTestsArray['guiPaths.history'] = 
{ + test:function() + { + guiTests.openPage("/history/") + } +} \ No newline at end of file diff --git a/polemarch/static/js/tests/pmHook.js b/polemarch/static/js/tests/pmHook.js new file mode 100644 index 00000000..3b165139 --- /dev/null +++ b/polemarch/static/js/tests/pmHook.js @@ -0,0 +1,39 @@ + +window.qunitTestsArray['guiPaths.hook'] = { + test:function() + { + let path = '/hook/' + let params = { + create:[ + { + is_valid:false, + data:{}, + }, + { + is_valid:true, + data:{ + recipients:{ + value:rundomString(6) + } + }, + }, + ], + update:[ + /*{ + is_valid:false, + data:{ + type : {value:"SCRIPT"}, + }, + },*/ + { + is_valid:true, + data:{ + type : {value:"HTTP"}, + name : {value:rundomString(6)}, + }, + }, + ] + } + guiTests.testForPath(path, params) + } +} \ No newline at end of file diff --git a/polemarch/static/js/tests/pmHosts.js b/polemarch/static/js/tests/pmHosts.js new file mode 100644 index 00000000..0ebf10c0 --- /dev/null +++ b/polemarch/static/js/tests/pmHosts.js @@ -0,0 +1,30 @@ + +window.qunitTestsArray['guiPaths.host'] = { + test:function() + { + let path = '/host/' + let params = { + create:[ + { + is_valid:true, + data:{ + notes:{ + value:rundomString(6) + } + }, + }, + ], + update:[ + { + is_valid:true, + data:{ + notes : {value:rundomString(6)}, + name : {value:rundomString(6)}, + type : {value:"RANGE"}, + }, + }, + ] + } + guiTests.testForPath(path, params) + } +} \ No newline at end of file diff --git a/polemarch/static/js/tests/pmInventories.js b/polemarch/static/js/tests/pmInventories.js new file mode 100644 index 00000000..160603f7 --- /dev/null +++ b/polemarch/static/js/tests/pmInventories.js @@ -0,0 +1,29 @@ + +window.qunitTestsArray['guiPaths.inventory'] = { + test:function() + { + let path = '/inventory/' + let params = { + create:[ + { + is_valid:true, + data:{ + notes:{ + value:rundomString(6) + } + }, + }, + ], + update:[ + { + is_valid:true, + data:{ + notes : {value:rundomString(6)}, + name : {value:rundomString(6)}, + }, + 
}, + ] + } + guiTests.testForPath(path, params) + } +} \ No newline at end of file diff --git a/polemarch/static/js/tests/pmProjects.js b/polemarch/static/js/tests/pmProjects.js new file mode 100644 index 00000000..119e0c56 --- /dev/null +++ b/polemarch/static/js/tests/pmProjects.js @@ -0,0 +1,258 @@ + +window.qunitTestsArray['guiPaths.project'] = { + test:function() + { + let path = '/project/' + let test_name = '/project/' + let env = {} + + guiTests.openPage(path) + + // Проверка наличия элемента на странице + guiTests.hasCreateButton(1, path) + + // Проверка возможности создания объекта + guiTests.openPage(path+"new") + + + let project = { + type:{ + value: "MANUAL", + do_not_compare:true + }, + name:{ + value: "test-"+rundomString(6), + } + } + + guiTests.setValuesAndCreate(test_name, project, (data) => { + env.objectId = data.id; + }, true) + + guiTests.hasDeleteButton(true, test_name) + guiTests.hasCreateButton(false, test_name) + guiTests.hasAddButton(false, test_name) + + + guiTests.updateObject(test_name, {notes:{value:rundomString(6)}}, true) + + guiTests.openPage(test_name, env, (env) =>{ return vstMakeLocalApiUrl("project/{pk}/template/new", {api_pk:env.objectId}) }) + + // Проверка того что страница с флагом api_obj.canCreate == true открывается + syncQUnit.addTest("guiPaths['project/{pk}/template/new'] create new template", function ( assert ) + { + let done = assert.async(); + + let fieldsData = { + name:{value:rundomString(6)} + } + + // @todo добавить проверку того что поля правильно меняются от значений других полей + + let values = guiTests.setValues(assert, fieldsData) + + // Создали объект с набором случайных данных + $.when(window.curentPageObject.createAndGoEdit()).done(() => { + + guiTests.compareValues(assert, test_name, fieldsData, values) + + env.template_id = window.curentPageObject.model.data.id; + + assert.ok(true, 'guiPaths["project/{pk}/template/new"] create new template ok'); + + // @todo добавить проверку того что поля правильно 
меняются от значений других полей + + testdone(done) + }).fail((err) => { + assert.ok(false, 'guiPaths["'+test_name+'new"] create new template fail'); + testdone(done) + }) + }) + + test_name = "project/{pk}/template/{template_id}" + guiTests.openPage(test_name, env, (env) =>{ return vstMakeLocalApiUrl("project/{pk}/template/{template_id}/", {api_pk:env.objectId, api_template_id:env.template_id}) }) + + + test_name = "project/{pk}/template/{template_id}/option" + guiTests.openPage(test_name, env, (env) =>{ return vstMakeLocalApiUrl("project/{pk}/template/{template_id}/option/", {api_pk:env.objectId, api_template_id:env.template_id}) }) + guiTests.hasCreateButton(true, test_name) + guiTests.hasAddButton(false, test_name) + + test_name = "project/{pk}/template/{template_id}/option/new" + guiTests.openPage(test_name, env, (env) =>{ return vstMakeLocalApiUrl("project/{pk}/template/{template_id}/option/new", {api_pk:env.objectId, api_template_id:env.template_id}) }) + + + let option_data = { + module:{value:"shell"}, + args:{value:"uptime1"}, + group:{value:"all"}, + name:{value:"testUptime"}, + } + + guiTests.setValuesAndCreate(test_name, option_data, (data) =>{}, true) + + test_name = "project/{pk}/template/{template_id}/option/@testUptime" + guiTests.updateObject("project/{pk}/template/{template_id}/option/@testUptime", {args:{value:"uptime"}}, true); + + + test_name = "project/{pk}/template/{template_id}/option/@testUptime/variables" + guiTests.openPage(test_name, env, (env) =>{ + return vstMakeLocalApiUrl("project/{pk}/template/{template_id}/option/@testUptime/variables", {api_pk:env.objectId, api_template_id:env.template_id}) + }) + guiTests.hasCreateButton(true, test_name) + guiTests.hasAddButton(false, test_name) + + test_name = "project/{pk}/template/{template_id}/option/@testUptime/variables/new" + guiTests.openPage(test_name, env, (env) =>{ + return vstMakeLocalApiUrl("project/{pk}/template/{template_id}/option/@testUptime/variables/new", 
{api_pk:env.objectId, api_template_id:env.template_id}) + }) + + let variables_data = { + key:{value:"timeout"}, + value:{value:"30"}, + } + + guiTests.setValuesAndCreate(test_name, variables_data, (data) =>{ }, true) + + guiTests.deleteObject(test_name) + + test_name = "project/{pk}/template/{template_id}/option/@testUptime/variables" + guiTests.openPage(test_name, env, (env) =>{ + return vstMakeLocalApiUrl("project/{pk}/template/{template_id}/option/@testUptime/variables", {api_pk:env.objectId, api_template_id:env.template_id}) + }) + + test_name = "project/{pk}/template/{template_id}/option/@testUptime" + guiTests.openPage(test_name, env, (env) =>{ + return vstMakeLocalApiUrl("project/{pk}/template/{template_id}/option/@testUptime", {api_pk:env.objectId, api_template_id:env.template_id}) + }) + + guiTests.deleteObject(test_name) + + test_name = "project/{pk}/template/{template_id}/option" + guiTests.openPage(test_name, env, (env) =>{ + return vstMakeLocalApiUrl("project/{pk}/template/{template_id}/option", {api_pk:env.objectId, api_template_id:env.template_id}) + }) + + test_name = "project/{pk}/template/{template_id}" + guiTests.openPage(test_name, env, (env) =>{ + return vstMakeLocalApiUrl("project/{pk}/template/{template_id}", {api_pk:env.objectId, api_template_id:env.template_id}) + }) + + guiTests.deleteObject(test_name) + + test_name = "project/{pk}" + guiTests.openPage(test_name, env, (env) =>{ + return vstMakeLocalApiUrl("project/{pk}", {api_pk:env.objectId, api_template_id:env.template_id}) + }) + + guiTests.deleteObject(test_name) + } +} + + + +window.qunitTestsArray['guiElements.form'] = { + test:function() + { + syncQUnit.addTest('guiElements.form', function ( assert ) + { + let element; + let formData; + let done = assert.async(); + + $("#guiElementsTestForm").remove(); + $("body").append("") + + + formData = { + title:"Deploy", + form:{ + 'inventory' : { + title:'inventory', + required:true, + format:'hybrid_autocomplete', + dynamic_properties:{ + 
list_obj: "/project/{pk}/inventory/", + value_field: "id", + view_field: "name", + } + }, + user:{ + title:'User', + description: "connect as this user (default=None)", + format:'string', + type: "string", + }, + key_file: { + title:'Key file', + description: "use this file to authenticate the connection", + format:'secretfile', + type: "string", + dynamic_properties:{ + list_obj: "/project/{pk}/inventory/", + value_field: "id", + view_field: "name", + } + }, + extra_vars: { + title:"Execute parametrs", + format:'form', + form:{ + varName: { + name:'varName', + title:'Name', + default:'NameDefaultValue', + format:'string', + help:'Name', + }, + varTask: { + name:'varTask', + title:'Name', + default:'B', + format:'enum', + help:'Name', + enum:['A', 'B', 'C'], + }, + varVersion: { + name:'varVersion', + title:'Name', + default:true, + format:'boolean', + help:'Name', + }, + RunBtn: { + name:'RunBtn', + title:'abc_yml', + value:'abc.yml', + format:'button', + text:'Run abc.yml', + onclick:function(){ + + let val = element.getValue() + val.playbook = this.getValue() + + assert.ok(val.extra_vars['RunBtn'] == 'abc.yml', 'guiElements.form test RunBtn'); + assert.ok(val.playbook == 'abc.yml', 'guiElements.form test playbook'); + + assert.ok(val.extra_vars['varName'] == 'NameDefaultValue', 'guiElements.form test varName'); + + assert.ok(val.extra_vars['varVersion'] == true, 'guiElements.form test values'); + + testdone(done) + }, + class:'gui-test-form' + }, + } + } + } + } + + element = new guiElements.form(undefined, formData); + $("#guiElementsTestForm").insertTpl(element.render()) + + setTimeout(() => { + $("#guiElementsTestForm .btn_abc_yml").trigger('click') + }, 50) + }); + } +} diff --git a/polemarch/static/js/tests/pmUnitTest.js b/polemarch/static/js/tests/pmUnitTest.js deleted file mode 100644 index 92b10c5b..00000000 --- a/polemarch/static/js/tests/pmUnitTest.js +++ /dev/null @@ -1,10 +0,0 @@ -/** - * Файл вставляемый на страницу при тестировании из phantomjs - 
*/ - -/////////////////////////////////////////////// -// Функции тестирования -/////////////////////////////////////////////// - - - \ No newline at end of file diff --git a/polemarch/static/js/tests/pmUsers.js b/polemarch/static/js/tests/pmUsers.js new file mode 100644 index 00000000..cf63dd74 --- /dev/null +++ b/polemarch/static/js/tests/pmUsers.js @@ -0,0 +1,15 @@ + +window.qunitTestsArray['guiPaths.profile/settings'] = { + test:function() + { + let path = "profile/settings" + guiTests.openPage(path) + guiTests.hasElement(1, ".btn_save", path) + guiTests.hasElement(1, ".gui-field-chartLineSettings", path) + + guiTests.hasAddButton(0, path) + + $(".btn_save").trigger('click') + + } +} \ No newline at end of file diff --git a/polemarch/static/templates/pmHistory.html b/polemarch/static/templates/pmHistory.html index 91966c33..b3e5f1a8 100644 --- a/polemarch/static/templates/pmHistory.html +++ b/polemarch/static/templates/pmHistory.html @@ -36,15 +36,14 @@