Compare commits
12 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
70a24cb009 | ||
|
|
69a9e4bee4 | ||
|
|
8ef9d14a54 | ||
|
|
4422878ec7 | ||
|
|
e37bbf3be3 | ||
|
|
8e143ca8bf | ||
|
|
e5884a963b | ||
|
|
85763ccfce | ||
|
|
1ffb7ef0e7 | ||
|
|
c02ddd58a1 | ||
|
|
7abb57dcd3 | ||
|
|
963d026d06 |
@@ -2,4 +2,4 @@
|
|||||||
host=review.opendev.org
|
host=review.opendev.org
|
||||||
port=29418
|
port=29418
|
||||||
project=openstack/watcher.git
|
project=openstack/watcher.git
|
||||||
defaultbranch=stable/xena
|
defaultbranch=stable/train
|
||||||
|
|||||||
47
.zuul.yaml
47
.zuul.yaml
@@ -1,9 +1,10 @@
|
|||||||
- project:
|
- project:
|
||||||
queue: watcher
|
|
||||||
templates:
|
templates:
|
||||||
- check-requirements
|
- check-requirements
|
||||||
- openstack-cover-jobs
|
- openstack-cover-jobs
|
||||||
- openstack-python3-xena-jobs
|
- openstack-lower-constraints-jobs
|
||||||
|
- openstack-python-jobs
|
||||||
|
- openstack-python3-train-jobs
|
||||||
- publish-openstack-docs-pti
|
- publish-openstack-docs-pti
|
||||||
- release-notes-jobs-python3
|
- release-notes-jobs-python3
|
||||||
check:
|
check:
|
||||||
@@ -13,8 +14,10 @@
|
|||||||
- watcher-tempest-strategies
|
- watcher-tempest-strategies
|
||||||
- watcher-tempest-actuator
|
- watcher-tempest-actuator
|
||||||
- watcherclient-tempest-functional
|
- watcherclient-tempest-functional
|
||||||
|
- watcher-tls-test
|
||||||
- watcher-tempest-functional-ipv6-only
|
- watcher-tempest-functional-ipv6-only
|
||||||
gate:
|
gate:
|
||||||
|
queue: watcher
|
||||||
jobs:
|
jobs:
|
||||||
- watcher-tempest-functional
|
- watcher-tempest-functional
|
||||||
- watcher-tempest-functional-ipv6-only
|
- watcher-tempest-functional-ipv6-only
|
||||||
@@ -86,10 +89,21 @@
|
|||||||
tempest_concurrency: 1
|
tempest_concurrency: 1
|
||||||
tempest_test_regex: watcher_tempest_plugin.tests.scenario.test_execute_strategies
|
tempest_test_regex: watcher_tempest_plugin.tests.scenario.test_execute_strategies
|
||||||
|
|
||||||
|
- job:
|
||||||
|
name: watcher-tls-test
|
||||||
|
parent: watcher-tempest-multinode
|
||||||
|
group-vars:
|
||||||
|
subnode:
|
||||||
|
devstack_services:
|
||||||
|
tls-proxy: true
|
||||||
|
vars:
|
||||||
|
devstack_services:
|
||||||
|
tls-proxy: true
|
||||||
|
|
||||||
- job:
|
- job:
|
||||||
name: watcher-tempest-multinode
|
name: watcher-tempest-multinode
|
||||||
parent: watcher-tempest-functional
|
parent: watcher-tempest-functional
|
||||||
nodeset: openstack-two-node-focal
|
nodeset: openstack-two-node-bionic
|
||||||
roles:
|
roles:
|
||||||
- zuul: openstack/tempest
|
- zuul: openstack/tempest
|
||||||
group-vars:
|
group-vars:
|
||||||
@@ -107,6 +121,8 @@
|
|||||||
watcher-api: false
|
watcher-api: false
|
||||||
watcher-decision-engine: true
|
watcher-decision-engine: true
|
||||||
watcher-applier: false
|
watcher-applier: false
|
||||||
|
# We need to add TLS support for watcher plugin
|
||||||
|
tls-proxy: false
|
||||||
ceilometer: false
|
ceilometer: false
|
||||||
ceilometer-acompute: false
|
ceilometer-acompute: false
|
||||||
ceilometer-acentral: false
|
ceilometer-acentral: false
|
||||||
@@ -145,6 +161,7 @@
|
|||||||
timeout: 7200
|
timeout: 7200
|
||||||
required-projects: &base_required_projects
|
required-projects: &base_required_projects
|
||||||
- openstack/ceilometer
|
- openstack/ceilometer
|
||||||
|
- openstack/devstack-gate
|
||||||
- openstack/python-openstackclient
|
- openstack/python-openstackclient
|
||||||
- openstack/python-watcherclient
|
- openstack/python-watcherclient
|
||||||
- openstack/watcher
|
- openstack/watcher
|
||||||
@@ -154,6 +171,7 @@
|
|||||||
devstack_plugins:
|
devstack_plugins:
|
||||||
watcher: https://opendev.org/openstack/watcher
|
watcher: https://opendev.org/openstack/watcher
|
||||||
devstack_services:
|
devstack_services:
|
||||||
|
tls-proxy: false
|
||||||
watcher-api: true
|
watcher-api: true
|
||||||
watcher-decision-engine: true
|
watcher-decision-engine: true
|
||||||
watcher-applier: true
|
watcher-applier: true
|
||||||
@@ -162,10 +180,13 @@
|
|||||||
s-container: false
|
s-container: false
|
||||||
s-object: false
|
s-object: false
|
||||||
s-proxy: false
|
s-proxy: false
|
||||||
tempest_plugins:
|
devstack_localrc:
|
||||||
- watcher-tempest-plugin
|
TEMPEST_PLUGINS: /opt/stack/watcher-tempest-plugin
|
||||||
tempest_test_regex: watcher_tempest_plugin.tests.api
|
tempest_test_regex: watcher_tempest_plugin.tests.api
|
||||||
tox_envlist: all
|
tox_envlist: all
|
||||||
|
tox_environment:
|
||||||
|
# Do we really need to set this? It's cargo culted
|
||||||
|
PYTHONUNBUFFERED: 'true'
|
||||||
zuul_copy_output:
|
zuul_copy_output:
|
||||||
/etc/hosts: logs
|
/etc/hosts: logs
|
||||||
|
|
||||||
@@ -179,12 +200,10 @@
|
|||||||
|
|
||||||
- job:
|
- job:
|
||||||
name: watcher-grenade
|
name: watcher-grenade
|
||||||
parent: grenade
|
parent: legacy-dsvm-base
|
||||||
required-projects:
|
timeout: 10800
|
||||||
- openstack/watcher
|
run: playbooks/legacy/grenade-devstack-watcher/run.yaml
|
||||||
- openstack/python-watcherclient
|
post-run: playbooks/legacy/grenade-devstack-watcher/post.yaml
|
||||||
- openstack/watcher-tempest-plugin
|
|
||||||
vars: *base_vars
|
|
||||||
irrelevant-files:
|
irrelevant-files:
|
||||||
- ^(test-|)requirements.txt$
|
- ^(test-|)requirements.txt$
|
||||||
- ^.*\.rst$
|
- ^.*\.rst$
|
||||||
@@ -196,6 +215,12 @@
|
|||||||
- ^setup.cfg$
|
- ^setup.cfg$
|
||||||
- ^tools/.*$
|
- ^tools/.*$
|
||||||
- ^tox.ini$
|
- ^tox.ini$
|
||||||
|
required-projects:
|
||||||
|
- openstack/grenade
|
||||||
|
- openstack/devstack-gate
|
||||||
|
- openstack/watcher
|
||||||
|
- openstack/python-watcherclient
|
||||||
|
- openstack/watcher-tempest-plugin
|
||||||
|
|
||||||
- job:
|
- job:
|
||||||
# This job is used in python-watcherclient repo
|
# This job is used in python-watcherclient repo
|
||||||
|
|||||||
10
README.rst
10
README.rst
@@ -1,6 +1,6 @@
|
|||||||
=======
|
========================
|
||||||
Watcher
|
Team and repository tags
|
||||||
=======
|
========================
|
||||||
|
|
||||||
.. image:: https://governance.openstack.org/tc/badges/watcher.svg
|
.. image:: https://governance.openstack.org/tc/badges/watcher.svg
|
||||||
:target: https://governance.openstack.org/tc/reference/tags/index.html
|
:target: https://governance.openstack.org/tc/reference/tags/index.html
|
||||||
@@ -13,6 +13,10 @@ Watcher
|
|||||||
|
|
||||||
https://creativecommons.org/licenses/by/3.0/
|
https://creativecommons.org/licenses/by/3.0/
|
||||||
|
|
||||||
|
=======
|
||||||
|
Watcher
|
||||||
|
=======
|
||||||
|
|
||||||
OpenStack Watcher provides a flexible and scalable resource optimization
|
OpenStack Watcher provides a flexible and scalable resource optimization
|
||||||
service for multi-tenant OpenStack-based clouds.
|
service for multi-tenant OpenStack-based clouds.
|
||||||
Watcher provides a robust framework to realize a wide range of cloud
|
Watcher provides a robust framework to realize a wide range of cloud
|
||||||
|
|||||||
@@ -22,6 +22,9 @@
|
|||||||
# All configuration values have a default; values that are commented out
|
# All configuration values have a default; values that are commented out
|
||||||
# serve to show the default.
|
# serve to show the default.
|
||||||
|
|
||||||
|
from watcher import version as watcher_version
|
||||||
|
|
||||||
|
|
||||||
extensions = [
|
extensions = [
|
||||||
'openstackdocstheme',
|
'openstackdocstheme',
|
||||||
'os_api_ref',
|
'os_api_ref',
|
||||||
@@ -43,13 +46,21 @@ project = u'Infrastructure Optimization API Reference'
|
|||||||
copyright = u'2010-present, OpenStack Foundation'
|
copyright = u'2010-present, OpenStack Foundation'
|
||||||
|
|
||||||
# openstackdocstheme options
|
# openstackdocstheme options
|
||||||
openstackdocs_repo_name = 'openstack/watcher'
|
repository_name = 'openstack/watcher'
|
||||||
openstackdocs_auto_name = False
|
bug_project = 'watcher'
|
||||||
openstackdocs_bug_project = 'watcher'
|
bug_tag = ''
|
||||||
openstackdocs_bug_tag = ''
|
|
||||||
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
|
# |version| and |release|, also used in various other places throughout the
|
||||||
|
# built documents.
|
||||||
|
#
|
||||||
|
# The full version, including alpha/beta/rc tags.
|
||||||
|
release = watcher_version.version_info.release_string()
|
||||||
|
# The short X.Y version.
|
||||||
|
version = watcher_version.version_string
|
||||||
|
|
||||||
# The name of the Pygments (syntax highlighting) style to use.
|
# The name of the Pygments (syntax highlighting) style to use.
|
||||||
pygments_style = 'native'
|
pygments_style = 'sphinx'
|
||||||
|
|
||||||
# -- Options for HTML output --------------------------------------------------
|
# -- Options for HTML output --------------------------------------------------
|
||||||
|
|
||||||
@@ -64,6 +75,10 @@ html_theme_options = {
|
|||||||
"sidebar_mode": "toc",
|
"sidebar_mode": "toc",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||||
|
# using the given strftime format.
|
||||||
|
html_last_updated_fmt = '%Y-%m-%d %H:%M'
|
||||||
|
|
||||||
# -- Options for LaTeX output -------------------------------------------------
|
# -- Options for LaTeX output -------------------------------------------------
|
||||||
|
|
||||||
# Grouping the document tree into LaTeX files. List of tuples
|
# Grouping the document tree into LaTeX files. List of tuples
|
||||||
|
|||||||
@@ -16,4 +16,3 @@ Watcher API
|
|||||||
.. include:: watcher-api-v1-services.inc
|
.. include:: watcher-api-v1-services.inc
|
||||||
.. include:: watcher-api-v1-scoring_engines.inc
|
.. include:: watcher-api-v1-scoring_engines.inc
|
||||||
.. include:: watcher-api-v1-datamodel.inc
|
.. include:: watcher-api-v1-datamodel.inc
|
||||||
.. include:: watcher-api-v1-webhooks.inc
|
|
||||||
|
|||||||
@@ -4,8 +4,6 @@
|
|||||||
Data Model
|
Data Model
|
||||||
==========
|
==========
|
||||||
|
|
||||||
.. versionadded:: 1.3
|
|
||||||
|
|
||||||
``Data Model`` is very important for Watcher to generate resource
|
``Data Model`` is very important for Watcher to generate resource
|
||||||
optimization solutions. Users can easily view the data model by the
|
optimization solutions. Users can easily view the data model by the
|
||||||
API.
|
API.
|
||||||
@@ -20,7 +18,7 @@ Returns the information about Data Model.
|
|||||||
|
|
||||||
Normal response codes: 200
|
Normal response codes: 200
|
||||||
|
|
||||||
Error codes: 400,401,406
|
Error codes: 400,401
|
||||||
|
|
||||||
Request
|
Request
|
||||||
-------
|
-------
|
||||||
|
|||||||
@@ -1,26 +0,0 @@
|
|||||||
.. -*- rst -*-
|
|
||||||
|
|
||||||
========
|
|
||||||
Webhooks
|
|
||||||
========
|
|
||||||
|
|
||||||
.. versionadded:: 1.4
|
|
||||||
|
|
||||||
Triggers an event based Audit.
|
|
||||||
|
|
||||||
|
|
||||||
Trigger EVENT Audit
|
|
||||||
===================
|
|
||||||
|
|
||||||
.. rest_method:: POST /v1/webhooks/{audit_ident}
|
|
||||||
|
|
||||||
Normal response codes: 202
|
|
||||||
|
|
||||||
Error codes: 400,404
|
|
||||||
|
|
||||||
Request
|
|
||||||
-------
|
|
||||||
|
|
||||||
.. rest_parameters:: parameters.yaml
|
|
||||||
|
|
||||||
- audit_ident: audit_ident
|
|
||||||
@@ -298,7 +298,7 @@ function start_watcher_api {
|
|||||||
service_protocol="http"
|
service_protocol="http"
|
||||||
fi
|
fi
|
||||||
if [[ "$WATCHER_USE_WSGI_MODE" == "uwsgi" ]]; then
|
if [[ "$WATCHER_USE_WSGI_MODE" == "uwsgi" ]]; then
|
||||||
run_process "watcher-api" "$(which uwsgi) --procname-prefix watcher-api --ini $WATCHER_UWSGI_CONF"
|
run_process "watcher-api" "$WATCHER_BIN_DIR/uwsgi --ini $WATCHER_UWSGI_CONF"
|
||||||
watcher_url=$service_protocol://$SERVICE_HOST/infra-optim
|
watcher_url=$service_protocol://$SERVICE_HOST/infra-optim
|
||||||
else
|
else
|
||||||
watcher_url=$service_protocol://$SERVICE_HOST:$service_port
|
watcher_url=$service_protocol://$SERVICE_HOST:$service_port
|
||||||
|
|||||||
@@ -13,6 +13,8 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
import importlib
|
import importlib
|
||||||
import inspect
|
import inspect
|
||||||
|
|
||||||
|
|||||||
@@ -1,10 +1,11 @@
|
|||||||
# The order of packages is significant, because pip processes them in the order
|
# The order of packages is significant, because pip processes them in the order
|
||||||
# of appearance. Changing the order has an impact on the overall integration
|
# of appearance. Changing the order has an impact on the overall integration
|
||||||
# process, which may cause wedges in the gate later.
|
# process, which may cause wedges in the gate later.
|
||||||
openstackdocstheme>=2.2.1 # Apache-2.0
|
openstackdocstheme>=1.20.0 # Apache-2.0
|
||||||
sphinx>=2.0.0,!=2.1.0 # BSD
|
sphinx>=1.6.5,!=1.6.6,!=1.6.7,<2.0.0;python_version=='2.7' # BSD
|
||||||
|
sphinx>=1.6.5,!=1.6.6,!=1.6.7,!=2.1.0;python_version>='3.4' # BSD
|
||||||
sphinxcontrib-pecanwsme>=0.8.0 # Apache-2.0
|
sphinxcontrib-pecanwsme>=0.8.0 # Apache-2.0
|
||||||
sphinxcontrib-svg2pdfconverter>=0.1.0 # BSD
|
sphinxcontrib-svg2pdfconverter>=0.1.0 # BSD
|
||||||
reno>=3.1.0 # Apache-2.0
|
reno>=2.7.0 # Apache-2.0
|
||||||
sphinxcontrib-apidoc>=0.2.0 # BSD
|
sphinxcontrib-apidoc>=0.2.0 # BSD
|
||||||
os-api-ref>=1.4.0 # Apache-2.0
|
os-api-ref>=1.4.0 # Apache-2.0
|
||||||
|
|||||||
@@ -8,7 +8,6 @@ Administrator Guide
|
|||||||
apache-mod-wsgi
|
apache-mod-wsgi
|
||||||
gmr
|
gmr
|
||||||
policy
|
policy
|
||||||
|
ways-to-install
|
||||||
../strategies/index
|
../strategies/index
|
||||||
../datasources/index
|
../datasources/index
|
||||||
../contributor/notifications
|
|
||||||
../contributor/concurrency
|
|
||||||
|
|||||||
@@ -17,14 +17,6 @@
|
|||||||
Policies
|
Policies
|
||||||
========
|
========
|
||||||
|
|
||||||
.. warning::
|
|
||||||
|
|
||||||
JSON formatted policy file is deprecated since Watcher 6.0.0 (Wallaby).
|
|
||||||
This `oslopolicy-convert-json-to-yaml`__ tool will migrate your existing
|
|
||||||
JSON-formatted policy file to YAML in a backward-compatible way.
|
|
||||||
|
|
||||||
.. __: https://docs.openstack.org/oslo.policy/latest/cli/oslopolicy-convert-json-to-yaml.html
|
|
||||||
|
|
||||||
Watcher's public API calls may be restricted to certain sets of users using a
|
Watcher's public API calls may be restricted to certain sets of users using a
|
||||||
policy configuration file. This document explains exactly how policies are
|
policy configuration file. This document explains exactly how policies are
|
||||||
configured and what they apply to.
|
configured and what they apply to.
|
||||||
|
|||||||
@@ -281,13 +281,11 @@ previously created :ref:`Audit template <audit_template_definition>`:
|
|||||||
:width: 100%
|
:width: 100%
|
||||||
|
|
||||||
The :ref:`Administrator <administrator_definition>` also can specify type of
|
The :ref:`Administrator <administrator_definition>` also can specify type of
|
||||||
Audit and interval (in case of CONTINUOUS type). There is three types of Audit:
|
Audit and interval (in case of CONTINUOUS type). There is two types of Audit:
|
||||||
ONESHOT, CONTINUOUS and EVENT. ONESHOT Audit is launched once and if it
|
ONESHOT and CONTINUOUS. Oneshot Audit is launched once and if it succeeded
|
||||||
succeeded executed new action plan list will be provided; CONTINUOUS Audit
|
executed new action plan list will be provided. Continuous Audit creates
|
||||||
creates action plans with specified interval (in seconds or cron format, cron
|
action plans with specified interval (in seconds); if action plan
|
||||||
inteval can be used like: `*/5 * * * *`), if action plan
|
has been created, all previous action plans get CANCELLED state.
|
||||||
has been created, all previous action plans get CANCELLED state;
|
|
||||||
EVENT audit is launched when receiving webhooks API.
|
|
||||||
|
|
||||||
A message is sent on the :ref:`AMQP bus <amqp_bus_definition>` which triggers
|
A message is sent on the :ref:`AMQP bus <amqp_bus_definition>` which triggers
|
||||||
the Audit in the
|
the Audit in the
|
||||||
|
|||||||
@@ -14,6 +14,7 @@
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
from watcher import version as watcher_version
|
||||||
from watcher import objects
|
from watcher import objects
|
||||||
|
|
||||||
objects.register_all()
|
objects.register_all()
|
||||||
@@ -35,6 +36,7 @@ extensions = [
|
|||||||
'sphinxcontrib.httpdomain',
|
'sphinxcontrib.httpdomain',
|
||||||
'sphinxcontrib.pecanwsme.rest',
|
'sphinxcontrib.pecanwsme.rest',
|
||||||
'stevedore.sphinxext',
|
'stevedore.sphinxext',
|
||||||
|
'wsmeext.sphinxext',
|
||||||
'ext.term',
|
'ext.term',
|
||||||
'ext.versioned_notifications',
|
'ext.versioned_notifications',
|
||||||
'oslo_config.sphinxconfiggen',
|
'oslo_config.sphinxconfiggen',
|
||||||
@@ -59,6 +61,16 @@ master_doc = 'index'
|
|||||||
project = u'Watcher'
|
project = u'Watcher'
|
||||||
copyright = u'OpenStack Foundation'
|
copyright = u'OpenStack Foundation'
|
||||||
|
|
||||||
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
|
# |version| and |release|, also used in various other places throughout the
|
||||||
|
# built documents.
|
||||||
|
#
|
||||||
|
# The short X.Y version.
|
||||||
|
# The full version, including alpha/beta/rc tags.
|
||||||
|
release = watcher_version.version_info.release_string()
|
||||||
|
# The short X.Y version.
|
||||||
|
version = watcher_version.version_string
|
||||||
|
|
||||||
# A list of ignored prefixes for module index sorting.
|
# A list of ignored prefixes for module index sorting.
|
||||||
modindex_common_prefix = ['watcher.']
|
modindex_common_prefix = ['watcher.']
|
||||||
|
|
||||||
@@ -83,7 +95,7 @@ add_module_names = True
|
|||||||
suppress_warnings = ['app.add_directive']
|
suppress_warnings = ['app.add_directive']
|
||||||
|
|
||||||
# The name of the Pygments (syntax highlighting) style to use.
|
# The name of the Pygments (syntax highlighting) style to use.
|
||||||
pygments_style = 'native'
|
pygments_style = 'sphinx'
|
||||||
|
|
||||||
# -- Options for man page output --------------------------------------------
|
# -- Options for man page output --------------------------------------------
|
||||||
|
|
||||||
@@ -114,13 +126,12 @@ html_theme = 'openstackdocs'
|
|||||||
# Output file base name for HTML help builder.
|
# Output file base name for HTML help builder.
|
||||||
htmlhelp_basename = '%sdoc' % project
|
htmlhelp_basename = '%sdoc' % project
|
||||||
|
|
||||||
|
html_last_updated_fmt = '%Y-%m-%d %H:%M'
|
||||||
|
|
||||||
#openstackdocstheme options
|
#openstackdocstheme options
|
||||||
openstackdocs_repo_name = 'openstack/watcher'
|
repository_name = 'openstack/watcher'
|
||||||
openstackdocs_pdf_link = True
|
bug_project = 'watcher'
|
||||||
openstackdocs_auto_name = False
|
bug_tag = ''
|
||||||
openstackdocs_bug_project = 'watcher'
|
|
||||||
openstackdocs_bug_tag = ''
|
|
||||||
|
|
||||||
# Grouping the document tree into LaTeX files. List of tuples
|
# Grouping the document tree into LaTeX files. List of tuples
|
||||||
# (source start file, target name, title, author, documentclass
|
# (source start file, target name, title, author, documentclass
|
||||||
@@ -128,7 +139,7 @@ openstackdocs_bug_tag = ''
|
|||||||
latex_documents = [
|
latex_documents = [
|
||||||
('index',
|
('index',
|
||||||
'doc-watcher.tex',
|
'doc-watcher.tex',
|
||||||
u'Watcher Documentation',
|
u'%s Documentation' % project,
|
||||||
u'OpenStack Foundation', 'manual'),
|
u'OpenStack Foundation', 'manual'),
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|||||||
@@ -1,248 +0,0 @@
|
|||||||
===========
|
|
||||||
Concurrency
|
|
||||||
===========
|
|
||||||
|
|
||||||
Introduction
|
|
||||||
************
|
|
||||||
|
|
||||||
Modern processors typically contain multiple cores all capable of executing
|
|
||||||
instructions in parallel. Ensuring applications can fully utilize modern
|
|
||||||
underlying hardware requires developing with these concepts in mind. The
|
|
||||||
OpenStack foundation maintains a number of libraries to facilitate this
|
|
||||||
utilization, combined with constructs like CPython's GIL_ the proper use of
|
|
||||||
these concepts becomes more straightforward compared to other programming
|
|
||||||
languages.
|
|
||||||
|
|
||||||
The primary libraries maintained by OpenStack to facilitate concurrency are
|
|
||||||
futurist_ and taskflow_. Here futurist is a more straightforward and
|
|
||||||
lightweight library while taskflow is more advanced supporting features like
|
|
||||||
rollback mechanisms. Within Watcher both libraries are used to facilitate
|
|
||||||
concurrency.
|
|
||||||
|
|
||||||
.. _GIL: https://wiki.python.org/moin/GlobalInterpreterLock
|
|
||||||
.. _futurist: https://docs.openstack.org/futurist/latest/
|
|
||||||
.. _taskflow: https://docs.openstack.org/taskflow/latest/
|
|
||||||
|
|
||||||
Threadpool
|
|
||||||
**********
|
|
||||||
|
|
||||||
A threadpool is a collection of one or more threads typically called *workers*
|
|
||||||
to which tasks can be submitted. These submitted tasks will be scheduled by a
|
|
||||||
threadpool and subsequently executed. In the case of Python tasks typically are
|
|
||||||
bounded or unbounded methods while other programming languages like Java
|
|
||||||
require implementing an interface.
|
|
||||||
|
|
||||||
The order and amount of concurrency with which these tasks are executed is up
|
|
||||||
to the threadpool to decide. Some libraries like taskflow allow for either
|
|
||||||
strong or loose ordering of tasks while others like futurist might only support
|
|
||||||
loose ordering. Taskflow supports building tree-based hierarchies of dependent
|
|
||||||
tasks for example.
|
|
||||||
|
|
||||||
Upon submission of a task to a threadpool a so called future_ is returned.
|
|
||||||
These objects allow to determine information about the task such as if it is
|
|
||||||
currently being executed or if it has finished execution. When the task has
|
|
||||||
finished execution the future can also be used to retrieve what was returned by
|
|
||||||
the method.
|
|
||||||
|
|
||||||
Some libraries like futurist provide synchronization primitives for collections
|
|
||||||
of futures such as wait_for_any_. The following sections will cover different
|
|
||||||
types of concurrency used in various services of Watcher.
|
|
||||||
|
|
||||||
.. _future: https://docs.python.org/3/library/concurrent.futures.html
|
|
||||||
.. _wait_for_any: https://docs.openstack.org/futurist/latest/reference/index.html#waiters
|
|
||||||
|
|
||||||
|
|
||||||
Decision engine concurrency
|
|
||||||
***************************
|
|
||||||
|
|
||||||
The concurrency in the decision engine is governed by two independent
|
|
||||||
threadpools. Both of these threadpools are GreenThreadPoolExecutor_ from the
|
|
||||||
futurist_ library. One of these is used automatically and most contributors
|
|
||||||
will not interact with it while developing new features. The other threadpool
|
|
||||||
can frequently be used while developing new features or updating existing ones.
|
|
||||||
It is known as the DecisionEngineThreadpool and allows to achieve performance
|
|
||||||
improvements in network or I/O bound operations.
|
|
||||||
|
|
||||||
.. _GreenThreadPoolExecutor: https://docs.openstack.org/futurist/latest/reference/index.html#executors
|
|
||||||
|
|
||||||
AuditEndpoint
|
|
||||||
#############
|
|
||||||
|
|
||||||
The first threadpool is used to allow multiple audits to be run in parallel.
|
|
||||||
In practice, however, only one audit can be run in parallel. This is due to
|
|
||||||
the data model used by audits being a singleton. To prevent audits destroying
|
|
||||||
each others data model one must wait for the other to complete before being
|
|
||||||
allowed to access this data model. A performance improvement could be achieved
|
|
||||||
by being more intelligent in the use, caching and construction of these
|
|
||||||
data models.
|
|
||||||
|
|
||||||
DecisionEngineThreadPool
|
|
||||||
########################
|
|
||||||
|
|
||||||
The second threadpool is used for generic tasks, typically networking and I/O
|
|
||||||
could benefit the most of this threadpool. Upon execution of an audit this
|
|
||||||
threadpool can be utilized to retrieve information from the Nova compute
|
|
||||||
service for instance. This second threadpool is a singleton and is shared
|
|
||||||
amongst concurrently running audits as a result the amount of workers is static
|
|
||||||
and independent from the amount of workers in the first threadpool. The use of
|
|
||||||
the :class:`~.DecisionEngineThreadpool` while building the Nova compute data
|
|
||||||
model is demonstrated to show how it can effectively be used.
|
|
||||||
|
|
||||||
In the following example a reference to the
|
|
||||||
:class:`~.DecisionEngineThreadpool` is stored in ``self.executor``. Here two
|
|
||||||
tasks are submitted one with function ``self._collect_aggregates`` and the
|
|
||||||
other function ``self._collect_zones``. With both ``self.executor.submit``
|
|
||||||
calls subsequent arguments are passed to the function. All subsequent arguments
|
|
||||||
are passed to the function being submitted as task following the common
|
|
||||||
``(fn, *args, **kwargs)`` signature. One of the original signatures would be
|
|
||||||
``def _collect_aggregates(host_aggregates, compute_nodes)`` for example.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
zone_aggregate_futures = {
|
|
||||||
self.executor.submit(
|
|
||||||
self._collect_aggregates, host_aggregates, compute_nodes),
|
|
||||||
self.executor.submit(
|
|
||||||
self._collect_zones, availability_zones, compute_nodes)
|
|
||||||
}
|
|
||||||
waiters.wait_for_all(zone_aggregate_futures)
|
|
||||||
|
|
||||||
The last statement of the example above waits on all futures to complete.
|
|
||||||
Similarly, ``waiters.wait_for_any`` will wait for any future of the specified
|
|
||||||
collection to complete. To simplify the usage of ``wait_for_any`` the
|
|
||||||
:class:`~.DecisiongEngineThreadpool` defines a ``do_while_futures`` method.
|
|
||||||
This method will iterate in a do_while loop over a collection of futures until
|
|
||||||
all of them have completed. The advantage of ``do_while_futures`` is that it
|
|
||||||
allows to immediately call a method as soon as a future finishes. The arguments
|
|
||||||
for this callback method can be supplied when calling ``do_while_futures``,
|
|
||||||
however, the first argument to the callback is always the future itself! If
|
|
||||||
the collection of futures can safely be modified ``do_while_futures_modify``
|
|
||||||
can be used and should have slightly better performance. The following example
|
|
||||||
will show how ``do_while_futures`` is used in the decision engine.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
# For every compute node from compute_nodes submit a task to gather the node it's information.
|
|
||||||
# List comprehension is used to store all the futures of the submitted tasks in node_futures.
|
|
||||||
node_futures = [self.executor.submit(
|
|
||||||
self.nova_helper.get_compute_node_by_name,
|
|
||||||
node, servers=True, detailed=True)
|
|
||||||
for node in compute_nodes]
|
|
||||||
LOG.debug("submitted {0} jobs".format(len(compute_nodes)))
|
|
||||||
|
|
||||||
future_instances = []
|
|
||||||
# do_while iterate over node_futures and upon completion of a future call
|
|
||||||
# self._compute_node_future with the future and future_instances as arguments.
|
|
||||||
self.executor.do_while_futures_modify(
|
|
||||||
node_futures, self._compute_node_future, future_instances)
|
|
||||||
|
|
||||||
# Wait for all instance jobs to finish
|
|
||||||
waiters.wait_for_all(future_instances)
|
|
||||||
|
|
||||||
Finally, let's demonstrate how powerful this ``do_while_futures`` can be by
|
|
||||||
showing what the ``compute_node_future`` callback does. First, it retrieves the
|
|
||||||
result from the future and adds the compute node to the data model. Afterwards,
|
|
||||||
it checks if the compute node has any associated instances and if so it submits
|
|
||||||
an additional task to the :class:`~.DecisionEngineThreadpool`. The future is
|
|
||||||
appended to the ``future_instances`` so ``waiters.wait_for_all`` can be called
|
|
||||||
on this list. This is important as otherwise the building of the data model
|
|
||||||
might return before all tasks for instances have finished.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
# Get the result from the future.
|
|
||||||
node_info = future.result()[0]
|
|
||||||
|
|
||||||
# Filter out baremetal nodes.
|
|
||||||
if node_info.hypervisor_type == 'ironic':
|
|
||||||
LOG.debug("filtering out baremetal node: %s", node_info)
|
|
||||||
return
|
|
||||||
|
|
||||||
# Add the compute node to the data model.
|
|
||||||
self.add_compute_node(node_info)
|
|
||||||
# Get the instances from the compute node.
|
|
||||||
instances = getattr(node_info, "servers", None)
|
|
||||||
# Do not submit job if there are no instances on compute node.
|
|
||||||
if instances is None:
|
|
||||||
LOG.info("No instances on compute_node: {0}".format(node_info))
|
|
||||||
return
|
|
||||||
# Submit a job to retrieve detailed information about the instances.
|
|
||||||
future_instances.append(
|
|
||||||
self.executor.submit(
|
|
||||||
self.add_instance_node, node_info, instances)
|
|
||||||
)
|
|
||||||
|
|
||||||
Without ``do_while_futures`` an additional ``waiters.wait_for_all`` would be
|
|
||||||
required in between the compute node tasks and the instance tasks. This would
|
|
||||||
cause the progress of the decision engine to stall as less and less tasks
|
|
||||||
remain active before the instance tasks could be submitted. This demonstrates
|
|
||||||
how ``do_while_futures`` can be used to achieve more constant utilization of
|
|
||||||
the underlying hardware.
|
|
||||||
|
|
||||||
Applier concurrency
|
|
||||||
*******************
|
|
||||||
|
|
||||||
The applier does not use the futurist_ GreenThreadPoolExecutor_ directly but
|
|
||||||
instead uses taskflow_. However, taskflow still utilizes a greenthreadpool.
|
|
||||||
This threadpool is initialized in the workflow engine called
|
|
||||||
:class:`~.DefaultWorkFlowEngine`. Currently Watcher supports one workflow
|
|
||||||
engine but the base class allows contributors to develop other workflow engines
|
|
||||||
as well. In taskflow tasks are created using different types of flows such as a
|
|
||||||
linear, unordered or a graph flow. The linear and graph flow allow for strong
|
|
||||||
ordering between individual tasks and it is for this reason that the workflow
|
|
||||||
engine utilizes a graph flow. The creation of tasks, subsequently linking them
|
|
||||||
into a graph like structure and submitting them is shown below.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
self.execution_rule = self.get_execution_rule(actions)
|
|
||||||
flow = gf.Flow("watcher_flow")
|
|
||||||
actions_uuid = {}
|
|
||||||
for a in actions:
|
|
||||||
task = TaskFlowActionContainer(a, self)
|
|
||||||
flow.add(task)
|
|
||||||
actions_uuid[a.uuid] = task
|
|
||||||
|
|
||||||
for a in actions:
|
|
||||||
for parent_id in a.parents:
|
|
||||||
flow.link(actions_uuid[parent_id], actions_uuid[a.uuid],
|
|
||||||
decider=self.decider)
|
|
||||||
|
|
||||||
e = engines.load(
|
|
||||||
flow, executor='greenthreaded', engine='parallel',
|
|
||||||
max_workers=self.config.max_workers)
|
|
||||||
e.run()
|
|
||||||
|
|
||||||
return flow
|
|
||||||
|
|
||||||
In the applier tasks are contained in a :class:`~.TaskFlowActionContainer`
|
|
||||||
which allows them to trigger events in the workflow engine. This way the
|
|
||||||
workflow engine can halt or take other actions while the action plan is being
|
|
||||||
executed based on the success or failure of individual actions. However, the
|
|
||||||
base workflow engine simply uses these notifies to store the result of
|
|
||||||
individual actions in the database. Additionally, since taskflow uses a graph
|
|
||||||
flow if any of the tasks would fail all childs of this tasks not be executed
|
|
||||||
while ``do_revert`` will be triggered for all parents.
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
class TaskFlowActionContainer(...):
|
|
||||||
...
|
|
||||||
def do_execute(self, *args, **kwargs):
|
|
||||||
...
|
|
||||||
result = self.action.execute()
|
|
||||||
if result is True:
|
|
||||||
return self.engine.notify(self._db_action,
|
|
||||||
objects.action.State.SUCCEEDED)
|
|
||||||
else:
|
|
||||||
self.engine.notify(self._db_action,
|
|
||||||
objects.action.State.FAILED)
|
|
||||||
|
|
||||||
class BaseWorkFlowEngine(...):
|
|
||||||
...
|
|
||||||
def notify(self, action, state):
|
|
||||||
db_action = objects.Action.get_by_uuid(self.context, action.uuid,
|
|
||||||
eager=True)
|
|
||||||
db_action.state = state
|
|
||||||
db_action.save()
|
|
||||||
return db_action
|
|
||||||
@@ -1,111 +1,71 @@
|
|||||||
============================
|
..
|
||||||
So You Want to Contribute...
|
Except where otherwise noted, this document is licensed under Creative
|
||||||
============================
|
Commons Attribution 3.0 License. You can view the license at:
|
||||||
|
|
||||||
For general information on contributing to OpenStack, please check out the
|
https://creativecommons.org/licenses/by/3.0/
|
||||||
`contributor guide <https://docs.openstack.org/contributors/>`_ to get started.
|
|
||||||
It covers all the basics that are common to all OpenStack projects:
|
|
||||||
the accounts you need, the basics of interacting with our Gerrit review system,
|
|
||||||
how we communicate as a community, etc.
|
|
||||||
|
|
||||||
Below will cover the more project specific information you need to get started
|
.. _contributing:
|
||||||
with Watcher.
|
|
||||||
|
|
||||||
Communication
|
=======================
|
||||||
~~~~~~~~~~~~~~
|
Contributing to Watcher
|
||||||
.. This would be a good place to put the channel you chat in as a project; when/
|
=======================
|
||||||
where your meeting is, the tags you prepend to your ML threads, etc.
|
|
||||||
|
If you're interested in contributing to the Watcher project,
|
||||||
|
the following will help get you started.
|
||||||
|
|
||||||
|
Contributor License Agreement
|
||||||
|
-----------------------------
|
||||||
|
|
||||||
|
.. index::
|
||||||
|
single: license; agreement
|
||||||
|
|
||||||
|
In order to contribute to the Watcher project, you need to have
|
||||||
|
signed OpenStack's contributor's agreement.
|
||||||
|
|
||||||
|
.. seealso::
|
||||||
|
|
||||||
|
* https://docs.openstack.org/infra/manual/developers.html
|
||||||
|
* https://wiki.openstack.org/CLA
|
||||||
|
|
||||||
|
LaunchPad Project
|
||||||
|
-----------------
|
||||||
|
|
||||||
|
Most of the tools used for OpenStack depend on a launchpad.net ID for
|
||||||
|
authentication. After signing up for a launchpad account, join the
|
||||||
|
"openstack" team to have access to the mailing list and receive
|
||||||
|
notifications of important events.
|
||||||
|
|
||||||
|
.. seealso::
|
||||||
|
|
||||||
|
* https://launchpad.net
|
||||||
|
* https://launchpad.net/watcher
|
||||||
|
* https://launchpad.net/openstack
|
||||||
|
|
||||||
|
|
||||||
|
Project Hosting Details
|
||||||
|
-----------------------
|
||||||
|
|
||||||
|
Bug tracker
|
||||||
|
https://launchpad.net/watcher
|
||||||
|
|
||||||
|
Mailing list (prefix subjects with ``[watcher]`` for faster responses)
|
||||||
|
http://lists.openstack.org/pipermail/openstack-discuss/
|
||||||
|
|
||||||
|
Wiki
|
||||||
|
https://wiki.openstack.org/Watcher
|
||||||
|
|
||||||
|
Code Hosting
|
||||||
|
https://opendev.org/openstack/watcher
|
||||||
|
|
||||||
|
Code Review
|
||||||
|
https://review.opendev.org/#/q/status:open+project:openstack/watcher,n,z
|
||||||
|
|
||||||
IRC Channel
|
IRC Channel
|
||||||
``#openstack-watcher`` (changelog_)
|
``#openstack-watcher`` (changelog_)
|
||||||
|
|
||||||
Mailing list (prefix subjects with ``[watcher]``)
|
|
||||||
http://lists.openstack.org/pipermail/openstack-discuss/
|
|
||||||
|
|
||||||
Weekly Meetings
|
Weekly Meetings
|
||||||
Bi-weekly, on Wednesdays at 08:00 UTC on odd weeks in the
|
Bi-weekly, on Wednesdays at 08:00 UTC on odd weeks in the
|
||||||
``#openstack-meeting-alt`` IRC channel (`meetings logs`_)
|
``#openstack-meeting-alt`` IRC channel (`meetings logs`_)
|
||||||
|
|
||||||
Meeting Agenda
|
|
||||||
https://wiki.openstack.org/wiki/Watcher_Meeting_Agenda
|
|
||||||
|
|
||||||
.. _changelog: http://eavesdrop.openstack.org/irclogs/%23openstack-watcher/
|
.. _changelog: http://eavesdrop.openstack.org/irclogs/%23openstack-watcher/
|
||||||
.. _meetings logs: http://eavesdrop.openstack.org/meetings/watcher/
|
.. _meetings logs: http://eavesdrop.openstack.org/meetings/watcher/
|
||||||
|
|
||||||
Contacting the Core Team
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
.. This section should list the core team, their irc nicks, emails, timezones etc.
|
|
||||||
If all this info is maintained elsewhere (i.e. a wiki), you can link to that
|
|
||||||
instead of enumerating everyone here.
|
|
||||||
|
|
||||||
+--------------------+---------------+------------------------------------+
|
|
||||||
| Name | IRC | Email |
|
|
||||||
+====================+===============+====================================+
|
|
||||||
| `Li Canwei`_ | licanwei | li.canwei2@zte.com.cn |
|
|
||||||
+--------------------+---------------+------------------------------------+
|
|
||||||
| `chen ke`_ | chenke | chen.ke14@zte.com.cn |
|
|
||||||
+--------------------+---------------+------------------------------------+
|
|
||||||
| `Corne Lukken`_ | dantalion | info@dantalion.nl |
|
|
||||||
+--------------------+---------------+------------------------------------+
|
|
||||||
| `su zhengwei`_ | suzhengwei | sugar-2008@163.com |
|
|
||||||
+--------------------+---------------+------------------------------------+
|
|
||||||
| `Yumeng Bao`_ | Yumeng | yumeng_bao@yahoo.com |
|
|
||||||
+--------------------+---------------+------------------------------------+
|
|
||||||
|
|
||||||
.. _Corne Lukken: https://launchpad.net/~dantalion
|
|
||||||
.. _Li Canwei: https://launchpad.net/~li-canwei2
|
|
||||||
.. _su zhengwei: https://launchpad.net/~sue.sam
|
|
||||||
.. _Yumeng Bao: https://launchpad.net/~yumeng-bao
|
|
||||||
.. _chen ke: https://launchpad.net/~chenker
|
|
||||||
|
|
||||||
New Feature Planning
|
|
||||||
~~~~~~~~~~~~~~~~~~~~
|
|
||||||
.. This section is for talking about the process to get a new feature in. Some
|
|
||||||
projects use blueprints, some want specs, some want both! Some projects
|
|
||||||
stick to a strict schedule when selecting what new features will be reviewed
|
|
||||||
for a release.
|
|
||||||
|
|
||||||
New feature will be discussed via IRC or ML (with [Watcher] prefix).
|
|
||||||
Watcher team uses blueprints in `Launchpad`_ to manage the new features.
|
|
||||||
|
|
||||||
.. _Launchpad: https://launchpad.net/watcher
|
|
||||||
|
|
||||||
Task Tracking
|
|
||||||
~~~~~~~~~~~~~~
|
|
||||||
.. This section is about where you track tasks- launchpad? storyboard?
|
|
||||||
is there more than one launchpad project? what's the name of the project
|
|
||||||
group in storyboard?
|
|
||||||
|
|
||||||
We track our tasks in Launchpad.
|
|
||||||
If you're looking for some smaller, easier work item to pick up and get started
|
|
||||||
on, search for the 'low-hanging-fruit' tag.
|
|
||||||
|
|
||||||
.. NOTE: If your tag is not 'low-hanging-fruit' please change the text above.
|
|
||||||
|
|
||||||
Reporting a Bug
|
|
||||||
~~~~~~~~~~~~~~~
|
|
||||||
.. Pretty self explanatory section, link directly to where people should report bugs for
|
|
||||||
your project.
|
|
||||||
|
|
||||||
You found an issue and want to make sure we are aware of it? You can do so
|
|
||||||
`HERE`_.
|
|
||||||
|
|
||||||
.. _HERE: https://bugs.launchpad.net/watcher
|
|
||||||
|
|
||||||
Getting Your Patch Merged
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
.. This section should have info about what it takes to get something merged.
|
|
||||||
Do you require one or two +2's before +W? Do some of your repos require
|
|
||||||
unit test changes with all patches? etc.
|
|
||||||
|
|
||||||
Due to the small number of core reviewers of the Watcher project,
|
|
||||||
we only need one +2 before +W (merge). All patches except for documentation
|
|
||||||
or typo fixes must include unit tests.
|
|
||||||
|
|
||||||
Project Team Lead Duties
|
|
||||||
------------------------
|
|
||||||
.. this section is where you can put PTL specific duties not already listed in
|
|
||||||
the common PTL guide (linked below) or if you already have them written
|
|
||||||
up elsewhere, you can link to that doc here.
|
|
||||||
|
|
||||||
All common PTL duties are enumerated here in the `PTL guide <https://docs.openstack.org/project-team-guide/ptl.html>`_.
|
|
||||||
|
|||||||
@@ -47,8 +47,6 @@ unavailable as well as `instance_l3_cpu_cache`::
|
|||||||
[[local|localrc]]
|
[[local|localrc]]
|
||||||
enable_plugin watcher https://opendev.org/openstack/watcher
|
enable_plugin watcher https://opendev.org/openstack/watcher
|
||||||
|
|
||||||
enable_plugin watcher-dashboard https://opendev.org/openstack/watcher-dashboard
|
|
||||||
|
|
||||||
enable_plugin ceilometer https://opendev.org/openstack/ceilometer.git
|
enable_plugin ceilometer https://opendev.org/openstack/ceilometer.git
|
||||||
CEILOMETER_BACKEND=gnocchi
|
CEILOMETER_BACKEND=gnocchi
|
||||||
|
|
||||||
|
|||||||
@@ -1,12 +1,8 @@
|
|||||||
==================
|
|
||||||
Contribution Guide
|
|
||||||
==================
|
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 2
|
:maxdepth: 1
|
||||||
|
|
||||||
contributing
|
|
||||||
environment
|
environment
|
||||||
devstack
|
devstack
|
||||||
|
notifications
|
||||||
testing
|
testing
|
||||||
rally_link
|
rally_link
|
||||||
|
|||||||
@@ -1,7 +1,3 @@
|
|||||||
============
|
|
||||||
Plugin Guide
|
|
||||||
============
|
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 1
|
:maxdepth: 1
|
||||||
|
|
||||||
|
|||||||
@@ -56,6 +56,9 @@ Here is an example showing how you can write a plugin called ``NewStrategy``:
|
|||||||
# filepath: thirdparty/new.py
|
# filepath: thirdparty/new.py
|
||||||
# import path: thirdparty.new
|
# import path: thirdparty.new
|
||||||
import abc
|
import abc
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
from watcher._i18n import _
|
from watcher._i18n import _
|
||||||
from watcher.decision_engine.strategy.strategies import base
|
from watcher.decision_engine.strategy.strategies import base
|
||||||
|
|
||||||
|
|||||||
@@ -4,9 +4,9 @@
|
|||||||
|
|
||||||
https://creativecommons.org/licenses/by/3.0/
|
https://creativecommons.org/licenses/by/3.0/
|
||||||
|
|
||||||
=================
|
=======
|
||||||
Developer Testing
|
Testing
|
||||||
=================
|
=======
|
||||||
|
|
||||||
.. _unit_tests:
|
.. _unit_tests:
|
||||||
|
|
||||||
@@ -15,7 +15,7 @@ Unit tests
|
|||||||
|
|
||||||
All unit tests should be run using `tox`_. Before running the unit tests, you
|
All unit tests should be run using `tox`_. Before running the unit tests, you
|
||||||
should download the latest `watcher`_ from GitHub. To run the same unit
|
should download the latest `watcher`_ from GitHub. To run the same unit
|
||||||
tests that are executing onto `Gerrit`_ which includes ``py36``, ``py37`` and
|
tests that are executing onto `Gerrit`_ which includes ``py35``, ``py27`` and
|
||||||
``pep8``, you can issue the following command::
|
``pep8``, you can issue the following command::
|
||||||
|
|
||||||
$ git clone https://opendev.org/openstack/watcher
|
$ git clone https://opendev.org/openstack/watcher
|
||||||
@@ -26,8 +26,8 @@ tests that are executing onto `Gerrit`_ which includes ``py36``, ``py37`` and
|
|||||||
If you only want to run one of the aforementioned, you can then issue one of
|
If you only want to run one of the aforementioned, you can then issue one of
|
||||||
the following::
|
the following::
|
||||||
|
|
||||||
$ tox -e py36
|
$ tox -e py35
|
||||||
$ tox -e py37
|
$ tox -e py27
|
||||||
$ tox -e pep8
|
$ tox -e pep8
|
||||||
|
|
||||||
.. _tox: https://tox.readthedocs.org/
|
.. _tox: https://tox.readthedocs.org/
|
||||||
@@ -38,7 +38,7 @@ If you only want to run specific unit test code and don't like to waste time
|
|||||||
waiting for all unit tests to execute, you can add parameters ``--`` followed
|
waiting for all unit tests to execute, you can add parameters ``--`` followed
|
||||||
by a regex string::
|
by a regex string::
|
||||||
|
|
||||||
$ tox -e py37 -- watcher.tests.api
|
$ tox -e py27 -- watcher.tests.api
|
||||||
|
|
||||||
.. _tempest_tests:
|
.. _tempest_tests:
|
||||||
|
|
||||||
|
|||||||
@@ -32,21 +32,91 @@ specific prior release.
|
|||||||
.. _python-watcherclient: https://opendev.org/openstack/python-watcherclient/
|
.. _python-watcherclient: https://opendev.org/openstack/python-watcherclient/
|
||||||
.. _watcher-dashboard: https://opendev.org/openstack/watcher-dashboard/
|
.. _watcher-dashboard: https://opendev.org/openstack/watcher-dashboard/
|
||||||
|
|
||||||
|
Developer Guide
|
||||||
|
===============
|
||||||
|
|
||||||
|
Introduction
|
||||||
|
------------
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
glossary
|
||||||
|
architecture
|
||||||
|
contributor/contributing
|
||||||
|
|
||||||
|
|
||||||
|
Getting Started
|
||||||
|
---------------
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
contributor/index
|
||||||
|
|
||||||
|
Installation
|
||||||
|
============
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 2
|
:maxdepth: 2
|
||||||
|
|
||||||
architecture
|
|
||||||
contributor/index
|
|
||||||
install/index
|
install/index
|
||||||
|
|
||||||
|
Admin Guide
|
||||||
|
===========
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 2
|
||||||
|
|
||||||
admin/index
|
admin/index
|
||||||
|
|
||||||
|
User Guide
|
||||||
|
==========
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 2
|
||||||
|
|
||||||
user/index
|
user/index
|
||||||
configuration/index
|
|
||||||
contributor/plugin/index
|
API References
|
||||||
man/index
|
==============
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 1
|
:maxdepth: 1
|
||||||
|
|
||||||
API Reference <https://docs.openstack.org/api-ref/resource-optimization/>
|
API Reference <https://docs.openstack.org/api-ref/resource-optimization/>
|
||||||
Watcher API Microversion History </contributor/api_microversion_history>
|
Watcher API Microversion History </contributor/api_microversion_history>
|
||||||
glossary
|
|
||||||
|
Plugins
|
||||||
|
-------
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
contributor/plugin/index
|
||||||
|
|
||||||
|
Watcher Configuration Options
|
||||||
|
=============================
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:maxdepth: 2
|
||||||
|
|
||||||
|
configuration/index
|
||||||
|
|
||||||
|
Watcher Manual Pages
|
||||||
|
====================
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:glob:
|
||||||
|
:maxdepth: 1
|
||||||
|
|
||||||
|
man/index
|
||||||
|
|
||||||
|
|
||||||
|
.. only:: html
|
||||||
|
|
||||||
|
Indices and tables
|
||||||
|
==================
|
||||||
|
|
||||||
|
* :ref:`genindex`
|
||||||
|
* :ref:`modindex`
|
||||||
|
* :ref:`search`
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
=============
|
===================================
|
||||||
Install Guide
|
Infrastructure Optimization service
|
||||||
=============
|
===================================
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 2
|
:maxdepth: 2
|
||||||
|
|||||||
@@ -1,7 +1,3 @@
|
|||||||
====================
|
|
||||||
Watcher Manual Pages
|
|
||||||
====================
|
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:glob:
|
:glob:
|
||||||
:maxdepth: 1
|
:maxdepth: 1
|
||||||
|
|||||||
@@ -1,195 +0,0 @@
|
|||||||
..
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
not use this file except in compliance with the License. You may obtain
|
|
||||||
a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
||||||
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
||||||
License for the specific language governing permissions and limitations
|
|
||||||
under the License.
|
|
||||||
|
|
||||||
|
|
||||||
======================
|
|
||||||
Audit using Aodh alarm
|
|
||||||
======================
|
|
||||||
|
|
||||||
An audit with EVENT type can be triggered by a special alarm. This guide walks
|
|
||||||
you through the steps to build an event-driven optimization solution by
|
|
||||||
integrating Watcher with Ceilometer/Aodh.
|
|
||||||
|
|
||||||
Step 1: Create an audit with EVENT type
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
The first step is to create an audit with EVENT type,
|
|
||||||
you can create an audit template firstly:
|
|
||||||
|
|
||||||
.. code-block:: bash
|
|
||||||
|
|
||||||
$ openstack optimize audittemplate create your_template_name <your_goal> \
|
|
||||||
--strategy <your_strategy>
|
|
||||||
|
|
||||||
or create an audit directly with special goal and strategy:
|
|
||||||
|
|
||||||
.. code-block:: bash
|
|
||||||
|
|
||||||
$ openstack optimize audit create --goal <your_goal> \
|
|
||||||
--strategy <your_strategy> --audit_type EVENT
|
|
||||||
|
|
||||||
This is an example for creating an audit with dummy strategy:
|
|
||||||
|
|
||||||
.. code-block:: bash
|
|
||||||
|
|
||||||
$ openstack optimize audit create --goal dummy \
|
|
||||||
--strategy dummy --audit_type EVENT
|
|
||||||
+---------------+--------------------------------------+
|
|
||||||
| Field | Value |
|
|
||||||
+---------------+--------------------------------------+
|
|
||||||
| UUID | a3326a6a-c18e-4e8e-adba-d0c61ad404c5 |
|
|
||||||
| Name | dummy-2020-01-14T03:21:19.168467 |
|
|
||||||
| Created At | 2020-01-14T03:21:19.200279+00:00 |
|
|
||||||
| Updated At | None |
|
|
||||||
| Deleted At | None |
|
|
||||||
| State | PENDING |
|
|
||||||
| Audit Type | EVENT |
|
|
||||||
| Parameters | {u'para2': u'hello', u'para1': 3.2} |
|
|
||||||
| Interval | None |
|
|
||||||
| Goal | dummy |
|
|
||||||
| Strategy | dummy |
|
|
||||||
| Audit Scope | [] |
|
|
||||||
| Auto Trigger | False |
|
|
||||||
| Next Run Time | None |
|
|
||||||
| Hostname | None |
|
|
||||||
| Start Time | None |
|
|
||||||
| End Time | None |
|
|
||||||
| Force | False |
|
|
||||||
+---------------+--------------------------------------+
|
|
||||||
|
|
||||||
We need to build Aodh action url using Watcher webhook API.
|
|
||||||
For convenience we export the url into an environment variable:
|
|
||||||
|
|
||||||
.. code-block:: bash
|
|
||||||
|
|
||||||
$ export AUDIT_UUID=a3326a6a-c18e-4e8e-adba-d0c61ad404c5
|
|
||||||
$ export ALARM_URL="trust+http://localhost/infra-optim/v1/webhooks/$AUDIT_UUID"
|
|
||||||
|
|
||||||
Step 2: Create Aodh Alarm
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Once we have the audit created, we can continue to create Aodh alarm and
|
|
||||||
set the alarm action to Watcher webhook API. The alarm type can be event(
|
|
||||||
i.e. ``compute.instance.create.end``) or gnocchi_resources_threshold(i.e.
|
|
||||||
``cpu_util``); for more information, refer to alarm-creation_
|
|
||||||
|
|
||||||
For example:
|
|
||||||
|
|
||||||
.. code-block:: bash
|
|
||||||
|
|
||||||
$ openstack alarm create \
|
|
||||||
--type event --name instance_create \
|
|
||||||
--event-type "compute.instance.create.end" \
|
|
||||||
--enable True --repeat-actions False \
|
|
||||||
--alarm-action $ALARM_URL
|
|
||||||
+---------------------------+------------------------------------------------------------------------------------------+
|
|
||||||
| Field | Value |
|
|
||||||
+---------------------------+------------------------------------------------------------------------------------------+
|
|
||||||
| alarm_actions | [u'trust+http://localhost/infra-optim/v1/webhooks/a3326a6a-c18e-4e8e-adba-d0c61ad404c5'] |
|
|
||||||
| alarm_id | b9e381fc-8e3e-4943-82ee-647e7a2ef644 |
|
|
||||||
| description | Alarm when compute.instance.create.end event occurred. |
|
|
||||||
| enabled | True |
|
|
||||||
| event_type | compute.instance.create.end |
|
|
||||||
| insufficient_data_actions | [] |
|
|
||||||
| name | instance_create |
|
|
||||||
| ok_actions | [] |
|
|
||||||
| project_id | 728d66e18c914af1a41e2a585cf766af |
|
|
||||||
| query | |
|
|
||||||
| repeat_actions | False |
|
|
||||||
| severity | low |
|
|
||||||
| state | insufficient data |
|
|
||||||
| state_reason | Not evaluated yet |
|
|
||||||
| state_timestamp | 2020-01-14T03:56:26.894416 |
|
|
||||||
| time_constraints | [] |
|
|
||||||
| timestamp | 2020-01-14T03:56:26.894416 |
|
|
||||||
| type | event |
|
|
||||||
| user_id | 88c40156af7445cc80580a1e7e3ba308 |
|
|
||||||
+---------------------------+------------------------------------------------------------------------------------------+
|
|
||||||
|
|
||||||
.. _alarm-creation: https://docs.openstack.org/aodh/latest/admin/telemetry-alarms.html#alarm-creation
|
|
||||||
|
|
||||||
Step 3: Trigger the alarm
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
In this example, you can create a new instance to trigger the alarm.
|
|
||||||
The alarm state will transition from ``insufficient data`` to ``alarm``.
|
|
||||||
|
|
||||||
.. code-block:: bash
|
|
||||||
|
|
||||||
$ openstack alarm show b9e381fc-8e3e-4943-82ee-647e7a2ef644
|
|
||||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------+
|
|
||||||
| Field | Value |
|
|
||||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------+
|
|
||||||
| alarm_actions | [u'trust+http://localhost/infra-optim/v1/webhooks/a3326a6a-c18e-4e8e-adba-d0c61ad404c5'] |
|
|
||||||
| alarm_id | b9e381fc-8e3e-4943-82ee-647e7a2ef644 |
|
|
||||||
| description | Alarm when compute.instance.create.end event occurred. |
|
|
||||||
| enabled | True |
|
|
||||||
| event_type | compute.instance.create.end |
|
|
||||||
| insufficient_data_actions | [] |
|
|
||||||
| name | instance_create |
|
|
||||||
| ok_actions | [] |
|
|
||||||
| project_id | 728d66e18c914af1a41e2a585cf766af |
|
|
||||||
| query | |
|
|
||||||
| repeat_actions | False |
|
|
||||||
| severity | low |
|
|
||||||
| state | alarm |
|
|
||||||
| state_reason | Event <id=67dd0afa-2082-45a4-8825-9573b2cc60e5,event_type=compute.instance.create.end> hits the query <query=[]>. |
|
|
||||||
| state_timestamp | 2020-01-14T03:56:26.894416 |
|
|
||||||
| time_constraints | [] |
|
|
||||||
| timestamp | 2020-01-14T06:17:40.350649 |
|
|
||||||
| type | event |
|
|
||||||
| user_id | 88c40156af7445cc80580a1e7e3ba308 |
|
|
||||||
+---------------------------+-------------------------------------------------------------------------------------------------------------------+
|
|
||||||
|
|
||||||
Step 4: Verify the audit
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
You can verify that the audit state is ``SUCCEEDED``:
|
|
||||||
|
|
||||||
.. code-block:: bash
|
|
||||||
|
|
||||||
$ openstack optimize audit show a3326a6a-c18e-4e8e-adba-d0c61ad404c5
|
|
||||||
+---------------+--------------------------------------+
|
|
||||||
| Field | Value |
|
|
||||||
+---------------+--------------------------------------+
|
|
||||||
| UUID | a3326a6a-c18e-4e8e-adba-d0c61ad404c5 |
|
|
||||||
| Name | dummy-2020-01-14T03:21:19.168467 |
|
|
||||||
| Created At | 2020-01-14T03:21:19+00:00 |
|
|
||||||
| Updated At | 2020-01-14T06:26:40+00:00 |
|
|
||||||
| Deleted At | None |
|
|
||||||
| State | SUCCEEDED |
|
|
||||||
| Audit Type | EVENT |
|
|
||||||
| Parameters | {u'para2': u'hello', u'para1': 3.2} |
|
|
||||||
| Interval | None |
|
|
||||||
| Goal | dummy |
|
|
||||||
| Strategy | dummy |
|
|
||||||
| Audit Scope | [] |
|
|
||||||
| Auto Trigger | False |
|
|
||||||
| Next Run Time | None |
|
|
||||||
| Hostname | ubuntudbs |
|
|
||||||
| Start Time | None |
|
|
||||||
| End Time | None |
|
|
||||||
| Force | False |
|
|
||||||
+---------------+--------------------------------------+
|
|
||||||
|
|
||||||
and you can use the following command to check if the action plan
|
|
||||||
was created:
|
|
||||||
|
|
||||||
.. code-block:: bash
|
|
||||||
|
|
||||||
$ openstack optimize actionplan list --audit a3326a6a-c18e-4e8e-adba-d0c61ad404c5
|
|
||||||
+--------------------------------------+--------------------------------------+-------------+------------+-----------------+
|
|
||||||
| UUID | Audit | State | Updated At | Global efficacy |
|
|
||||||
+--------------------------------------+--------------------------------------+-------------+------------+-----------------+
|
|
||||||
| 673b3fcb-8c16-4a41-9ee3-2956d9f6ca9e | a3326a6a-c18e-4e8e-adba-d0c61ad404c5 | RECOMMENDED | None | |
|
|
||||||
+--------------------------------------+--------------------------------------+-------------+------------+-----------------+
|
|
||||||
@@ -1,10 +1,4 @@
|
|||||||
==========
|
|
||||||
User Guide
|
|
||||||
==========
|
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 2
|
:maxdepth: 2
|
||||||
|
|
||||||
ways-to-install
|
|
||||||
user-guide
|
user-guide
|
||||||
event_type_audit
|
|
||||||
|
|||||||
@@ -4,6 +4,8 @@
|
|||||||
|
|
||||||
https://creativecommons.org/licenses/by/3.0/
|
https://creativecommons.org/licenses/by/3.0/
|
||||||
|
|
||||||
|
.. _user-guide:
|
||||||
|
|
||||||
==================
|
==================
|
||||||
Watcher User Guide
|
Watcher User Guide
|
||||||
==================
|
==================
|
||||||
@@ -58,8 +60,8 @@ plugin installation guide`_.
|
|||||||
.. _`OpenStack CLI`: https://docs.openstack.org/python-openstackclient/latest/cli/man/openstack.html
|
.. _`OpenStack CLI`: https://docs.openstack.org/python-openstackclient/latest/cli/man/openstack.html
|
||||||
.. _`Watcher CLI`: https://docs.openstack.org/python-watcherclient/latest/cli/index.html
|
.. _`Watcher CLI`: https://docs.openstack.org/python-watcherclient/latest/cli/index.html
|
||||||
|
|
||||||
Watcher CLI Command
|
Seeing what the Watcher CLI can do ?
|
||||||
-------------------
|
------------------------------------
|
||||||
We can see all of the commands available with Watcher CLI by running the
|
We can see all of the commands available with Watcher CLI by running the
|
||||||
watcher binary without options.
|
watcher binary without options.
|
||||||
|
|
||||||
@@ -67,8 +69,8 @@ watcher binary without options.
|
|||||||
|
|
||||||
$ openstack help optimize
|
$ openstack help optimize
|
||||||
|
|
||||||
Running an audit of the cluster
|
How do I run an audit of my cluster ?
|
||||||
-------------------------------
|
-------------------------------------
|
||||||
|
|
||||||
First, you need to find the :ref:`goal <goal_definition>` you want to achieve:
|
First, you need to find the :ref:`goal <goal_definition>` you want to achieve:
|
||||||
|
|
||||||
|
|||||||
165
lower-constraints.txt
Normal file
165
lower-constraints.txt
Normal file
@@ -0,0 +1,165 @@
|
|||||||
|
alabaster==0.7.10
|
||||||
|
alembic==0.9.8
|
||||||
|
amqp==2.2.2
|
||||||
|
appdirs==1.4.3
|
||||||
|
APScheduler==3.5.1
|
||||||
|
asn1crypto==0.24.0
|
||||||
|
automaton==1.14.0
|
||||||
|
Babel==2.5.3
|
||||||
|
beautifulsoup4==4.6.0
|
||||||
|
cachetools==2.0.1
|
||||||
|
certifi==2018.1.18
|
||||||
|
cffi==1.11.5
|
||||||
|
chardet==3.0.4
|
||||||
|
cliff==2.11.0
|
||||||
|
cmd2==0.8.1
|
||||||
|
contextlib2==0.5.5
|
||||||
|
coverage==4.5.1
|
||||||
|
croniter==0.3.20
|
||||||
|
cryptography==2.1.4
|
||||||
|
debtcollector==1.19.0
|
||||||
|
decorator==4.2.1
|
||||||
|
deprecation==2.0
|
||||||
|
doc8==0.8.0
|
||||||
|
docutils==0.14
|
||||||
|
dogpile.cache==0.6.5
|
||||||
|
dulwich==0.19.0
|
||||||
|
enum34==1.1.6
|
||||||
|
enum-compat==0.0.2
|
||||||
|
eventlet==0.20.0
|
||||||
|
extras==1.0.0
|
||||||
|
fasteners==0.14.1
|
||||||
|
fixtures==3.0.0
|
||||||
|
flake8==2.5.5
|
||||||
|
freezegun==0.3.10
|
||||||
|
future==0.16.0
|
||||||
|
futurist==1.8.0
|
||||||
|
gitdb2==2.0.3
|
||||||
|
GitPython==2.1.8
|
||||||
|
gnocchiclient==7.0.1
|
||||||
|
greenlet==0.4.13
|
||||||
|
hacking==0.12.0
|
||||||
|
idna==2.6
|
||||||
|
imagesize==1.0.0
|
||||||
|
iso8601==0.1.12
|
||||||
|
Jinja2==2.10
|
||||||
|
jmespath==0.9.3
|
||||||
|
jsonpatch==1.21
|
||||||
|
jsonpointer==2.0
|
||||||
|
jsonschema==2.6.0
|
||||||
|
keystoneauth1==3.4.0
|
||||||
|
keystonemiddleware==4.21.0
|
||||||
|
kombu==4.1.0
|
||||||
|
linecache2==1.0.0
|
||||||
|
logutils==0.3.5
|
||||||
|
lxml==4.1.1
|
||||||
|
Mako==1.0.7
|
||||||
|
MarkupSafe==1.0
|
||||||
|
mccabe==0.2.1
|
||||||
|
microversion_parse==0.2.1
|
||||||
|
mock==2.0.0
|
||||||
|
monotonic==1.4
|
||||||
|
mox3==0.25.0
|
||||||
|
msgpack==0.5.6
|
||||||
|
munch==2.2.0
|
||||||
|
netaddr==0.7.19
|
||||||
|
netifaces==0.10.6
|
||||||
|
networkx==1.11
|
||||||
|
openstackdocstheme==1.20.0
|
||||||
|
openstacksdk==0.12.0
|
||||||
|
os-api-ref===1.4.0
|
||||||
|
os-client-config==1.29.0
|
||||||
|
os-service-types==1.2.0
|
||||||
|
os-testr==1.0.0
|
||||||
|
osc-lib==1.10.0
|
||||||
|
os-resource-classes==0.4.0
|
||||||
|
oslo.cache==1.29.0
|
||||||
|
oslo.concurrency==3.26.0
|
||||||
|
oslo.config==5.2.0
|
||||||
|
oslo.context==2.21.0
|
||||||
|
oslo.db==4.35.0
|
||||||
|
oslo.i18n==3.20.0
|
||||||
|
oslo.log==3.37.0
|
||||||
|
oslo.messaging==8.1.2
|
||||||
|
oslo.middleware==3.35.0
|
||||||
|
oslo.policy==1.34.0
|
||||||
|
oslo.reports==1.27.0
|
||||||
|
oslo.serialization==2.25.0
|
||||||
|
oslo.service==1.30.0
|
||||||
|
oslo.upgradecheck==0.1.0
|
||||||
|
oslo.utils==3.36.0
|
||||||
|
oslo.versionedobjects==1.32.0
|
||||||
|
oslotest==3.3.0
|
||||||
|
packaging==17.1
|
||||||
|
Paste==2.0.3
|
||||||
|
PasteDeploy==1.5.2
|
||||||
|
pbr==3.1.1
|
||||||
|
pecan==1.3.2
|
||||||
|
pep8==1.5.7
|
||||||
|
pika==0.10.0
|
||||||
|
pika-pool==0.1.3
|
||||||
|
prettytable==0.7.2
|
||||||
|
psutil==5.4.3
|
||||||
|
pycadf==2.7.0
|
||||||
|
pycparser==2.18
|
||||||
|
pyflakes==0.8.1
|
||||||
|
Pygments==2.2.0
|
||||||
|
pyinotify==0.9.6
|
||||||
|
pyOpenSSL==17.5.0
|
||||||
|
pyparsing==2.2.0
|
||||||
|
pyperclip==1.6.0
|
||||||
|
python-ceilometerclient==2.9.0
|
||||||
|
python-cinderclient==3.5.0
|
||||||
|
python-dateutil==2.7.0
|
||||||
|
python-editor==1.0.3
|
||||||
|
python-glanceclient==2.9.1
|
||||||
|
python-ironicclient==2.5.0
|
||||||
|
python-keystoneclient==3.15.0
|
||||||
|
python-mimeparse==1.6.0
|
||||||
|
python-monascaclient==1.12.0
|
||||||
|
python-neutronclient==6.7.0
|
||||||
|
python-novaclient==14.1.0
|
||||||
|
python-openstackclient==3.14.0
|
||||||
|
python-subunit==1.2.0
|
||||||
|
pytz==2018.3
|
||||||
|
PyYAML==3.12
|
||||||
|
reno==2.7.0
|
||||||
|
repoze.lru==0.7
|
||||||
|
requests==2.18.4
|
||||||
|
requestsexceptions==1.4.0
|
||||||
|
restructuredtext-lint==1.1.3
|
||||||
|
rfc3986==1.1.0
|
||||||
|
Routes==2.4.1
|
||||||
|
simplegeneric==0.8.1
|
||||||
|
simplejson==3.13.2
|
||||||
|
six==1.11.0
|
||||||
|
smmap2==2.0.3
|
||||||
|
snowballstemmer==1.2.1
|
||||||
|
Sphinx==1.6.5
|
||||||
|
sphinxcontrib-httpdomain==1.6.1
|
||||||
|
sphinxcontrib-pecanwsme==0.8.0
|
||||||
|
sphinxcontrib-websupport==1.0.1
|
||||||
|
SQLAlchemy==1.2.5
|
||||||
|
sqlalchemy-migrate==0.11.0
|
||||||
|
sqlparse==0.2.4
|
||||||
|
statsd==3.2.2
|
||||||
|
stestr==2.0.0
|
||||||
|
stevedore==1.28.0
|
||||||
|
taskflow==3.1.0
|
||||||
|
Tempita==0.5.2
|
||||||
|
tenacity==4.9.0
|
||||||
|
testresources==2.0.1
|
||||||
|
testscenarios==0.5.0
|
||||||
|
testtools==2.3.0
|
||||||
|
traceback2==1.4.0
|
||||||
|
tzlocal==1.5.1
|
||||||
|
ujson==1.35
|
||||||
|
unittest2==1.1.0
|
||||||
|
urllib3==1.22
|
||||||
|
vine==1.1.4
|
||||||
|
waitress==1.1.0
|
||||||
|
warlock==1.3.0
|
||||||
|
WebOb==1.8.5
|
||||||
|
WebTest==2.0.29
|
||||||
|
wrapt==1.10.11
|
||||||
|
WSME==0.9.2
|
||||||
15
playbooks/legacy/grenade-devstack-watcher/post.yaml
Normal file
15
playbooks/legacy/grenade-devstack-watcher/post.yaml
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
- hosts: primary
|
||||||
|
tasks:
|
||||||
|
|
||||||
|
- name: Copy files from {{ ansible_user_dir }}/workspace/ on node
|
||||||
|
synchronize:
|
||||||
|
src: '{{ ansible_user_dir }}/workspace/'
|
||||||
|
dest: '{{ zuul.executor.log_root }}'
|
||||||
|
mode: pull
|
||||||
|
copy_links: true
|
||||||
|
verify_host: true
|
||||||
|
rsync_opts:
|
||||||
|
- --include=/logs/**
|
||||||
|
- --include=*/
|
||||||
|
- --exclude=*
|
||||||
|
- --prune-empty-dirs
|
||||||
60
playbooks/legacy/grenade-devstack-watcher/run.yaml
Normal file
60
playbooks/legacy/grenade-devstack-watcher/run.yaml
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
- hosts: all
|
||||||
|
name: legacy-grenade-dsvm-watcher
|
||||||
|
tasks:
|
||||||
|
|
||||||
|
- name: Ensure legacy workspace directory
|
||||||
|
file:
|
||||||
|
path: '{{ ansible_user_dir }}/workspace'
|
||||||
|
state: directory
|
||||||
|
|
||||||
|
- shell:
|
||||||
|
cmd: |
|
||||||
|
set -e
|
||||||
|
set -x
|
||||||
|
cat > clonemap.yaml << EOF
|
||||||
|
clonemap:
|
||||||
|
- name: openstack/devstack-gate
|
||||||
|
dest: devstack-gate
|
||||||
|
EOF
|
||||||
|
/usr/zuul-env/bin/zuul-cloner -m clonemap.yaml --cache-dir /opt/git \
|
||||||
|
https://opendev.org \
|
||||||
|
openstack/devstack-gate
|
||||||
|
executable: /bin/bash
|
||||||
|
chdir: '{{ ansible_user_dir }}/workspace'
|
||||||
|
environment: '{{ zuul | zuul_legacy_vars }}'
|
||||||
|
|
||||||
|
- shell:
|
||||||
|
cmd: |
|
||||||
|
set -e
|
||||||
|
set -x
|
||||||
|
export PYTHONUNBUFFERED=true
|
||||||
|
|
||||||
|
export PROJECTS="openstack/grenade $PROJECTS"
|
||||||
|
export PROJECTS="openstack/watcher $PROJECTS"
|
||||||
|
export PROJECTS="openstack/watcher-tempest-plugin $PROJECTS"
|
||||||
|
export PROJECTS="openstack/python-watcherclient $PROJECTS"
|
||||||
|
export DEVSTACK_PROJECT_FROM_GIT="python-watcherclient $DEVSTACK_PROJECT_FROM_GIT"
|
||||||
|
|
||||||
|
export GRENADE_PLUGINRC="enable_grenade_plugin watcher https://opendev.org/openstack/watcher"
|
||||||
|
export DEVSTACK_LOCAL_CONFIG+=$'\n'"export TEMPEST_PLUGINS='/opt/stack/new/watcher-tempest-plugin'"
|
||||||
|
|
||||||
|
export DEVSTACK_GATE_TEMPEST_NOTESTS=1
|
||||||
|
export DEVSTACK_GATE_GRENADE=pullup
|
||||||
|
|
||||||
|
export BRANCH_OVERRIDE=default
|
||||||
|
if [ "$BRANCH_OVERRIDE" != "default" ] ; then
|
||||||
|
export OVERRIDE_ZUUL_BRANCH=$BRANCH_OVERRIDE
|
||||||
|
fi
|
||||||
|
# Add configuration values for enabling security features in local.conf
|
||||||
|
function pre_test_hook {
|
||||||
|
if [ -f /opt/stack/old/watcher-tempest-plugin/tools/pre_test_hook.sh ] ; then
|
||||||
|
. /opt/stack/old/watcher-tempest-plugin/tools/pre_test_hook.sh
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
export -f pre_test_hook
|
||||||
|
|
||||||
|
cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh
|
||||||
|
./safe-devstack-vm-gate-wrap.sh
|
||||||
|
executable: /bin/bash
|
||||||
|
chdir: '{{ ansible_user_dir }}/workspace'
|
||||||
|
environment: '{{ zuul | zuul_legacy_vars }}'
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
---
|
|
||||||
upgrade:
|
|
||||||
- |
|
|
||||||
The default value of ``[oslo_policy] policy_file`` config option has
|
|
||||||
been changed from ``policy.json`` to ``policy.yaml``.
|
|
||||||
Operators who are utilizing customized or previously generated
|
|
||||||
static policy JSON files (which are not needed by default), should
|
|
||||||
generate new policy files or convert them in YAML format. Use the
|
|
||||||
`oslopolicy-convert-json-to-yaml
|
|
||||||
<https://docs.openstack.org/oslo.policy/latest/cli/oslopolicy-convert-json-to-yaml.html>`_
|
|
||||||
tool to convert a JSON to YAML formatted policy file in
|
|
||||||
backward compatible way.
|
|
||||||
deprecations:
|
|
||||||
- |
|
|
||||||
Use of JSON policy files was deprecated by the ``oslo.policy`` library
|
|
||||||
during the Victoria development cycle. As a result, this deprecation is
|
|
||||||
being noted in the Wallaby cycle with an anticipated future removal of support
|
|
||||||
by ``oslo.policy``. As such operators will need to convert to YAML policy
|
|
||||||
files. Please see the upgrade notes for details on migration of any
|
|
||||||
custom policy files.
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
---
|
|
||||||
upgrade:
|
|
||||||
- |
|
|
||||||
Python 2.7 support has been dropped. Last release of Watcher
|
|
||||||
to support py2.7 is OpenStack Train. The minimum version of Python now
|
|
||||||
supported by Watcher is Python 3.6.
|
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
---
|
|
||||||
features:
|
|
||||||
- |
|
|
||||||
Add a new webhook API and a new audit type EVENT, the microversion is 1.4.
|
|
||||||
Now Watcher user can create audit with EVENT type and the audit will be
|
|
||||||
triggered by webhook API.
|
|
||||||
The user guide is available online:
|
|
||||||
https://docs.openstack.org/watcher/latest/user/event_type_audit.html
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
---
|
|
||||||
prelude: >
|
|
||||||
Many operations in the decision engine will block on I/O. Such I/O
|
|
||||||
operations can stall the execution of a sequential application
|
|
||||||
significantly. To reduce the potential bottleneck of many operations
|
|
||||||
the general purpose decision engine threadpool is introduced.
|
|
||||||
features:
|
|
||||||
- |
|
|
||||||
A new threadpool for the decision engine that contributors can use to
|
|
||||||
improve the performance of many operations, primarily I/O bound onces.
|
|
||||||
The amount of workers used by the decision engine threadpool can be
|
|
||||||
configured to scale according to the available infrastructure using
|
|
||||||
the `watcher_decision_engine.max_general_workers` config option.
|
|
||||||
Documentation for contributors to effectively use this threadpool is
|
|
||||||
available online:
|
|
||||||
https://docs.openstack.org/watcher/latest/contributor/concurrency.html
|
|
||||||
- |
|
|
||||||
The building of the compute (Nova) data model will be done using the
|
|
||||||
decision engine threadpool, thereby, significantly reducing the total
|
|
||||||
time required to build it.
|
|
||||||
@@ -53,6 +53,7 @@ source_suffix = '.rst'
|
|||||||
master_doc = 'index'
|
master_doc = 'index'
|
||||||
|
|
||||||
# General information about the project.
|
# General information about the project.
|
||||||
|
project = u'watcher'
|
||||||
copyright = u'2016, Watcher developers'
|
copyright = u'2016, Watcher developers'
|
||||||
|
|
||||||
# Release notes are version independent
|
# Release notes are version independent
|
||||||
@@ -90,15 +91,11 @@ exclude_patterns = ['_build']
|
|||||||
#show_authors = False
|
#show_authors = False
|
||||||
|
|
||||||
# The name of the Pygments (syntax highlighting) style to use.
|
# The name of the Pygments (syntax highlighting) style to use.
|
||||||
pygments_style = 'native'
|
pygments_style = 'sphinx'
|
||||||
|
|
||||||
# A list of ignored prefixes for module index sorting.
|
# A list of ignored prefixes for module index sorting.
|
||||||
#modindex_common_prefix = []
|
#modindex_common_prefix = []
|
||||||
|
|
||||||
# openstackdocstheme options
|
|
||||||
openstackdocs_repo_name = 'openstack/watcher'
|
|
||||||
openstackdocs_bug_project = 'watcher'
|
|
||||||
openstackdocs_bug_tag = ''
|
|
||||||
|
|
||||||
# -- Options for HTML output --------------------------------------------------
|
# -- Options for HTML output --------------------------------------------------
|
||||||
|
|
||||||
|
|||||||
@@ -21,10 +21,6 @@ Contents:
|
|||||||
:maxdepth: 1
|
:maxdepth: 1
|
||||||
|
|
||||||
unreleased
|
unreleased
|
||||||
wallaby
|
|
||||||
victoria
|
|
||||||
ussuri
|
|
||||||
train
|
|
||||||
stein
|
stein
|
||||||
rocky
|
rocky
|
||||||
queens
|
queens
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -1,33 +0,0 @@
|
|||||||
# Gérald LONLAS <g.lonlas@gmail.com>, 2016. #zanata
|
|
||||||
msgid ""
|
|
||||||
msgstr ""
|
|
||||||
"Project-Id-Version: python-watcher\n"
|
|
||||||
"Report-Msgid-Bugs-To: \n"
|
|
||||||
"POT-Creation-Date: 2019-03-22 02:21+0000\n"
|
|
||||||
"MIME-Version: 1.0\n"
|
|
||||||
"Content-Type: text/plain; charset=UTF-8\n"
|
|
||||||
"Content-Transfer-Encoding: 8bit\n"
|
|
||||||
"PO-Revision-Date: 2016-10-22 06:44+0000\n"
|
|
||||||
"Last-Translator: Gérald LONLAS <g.lonlas@gmail.com>\n"
|
|
||||||
"Language-Team: French\n"
|
|
||||||
"Language: fr\n"
|
|
||||||
"X-Generator: Zanata 4.3.3\n"
|
|
||||||
"Plural-Forms: nplurals=2; plural=(n > 1)\n"
|
|
||||||
|
|
||||||
msgid "0.29.0"
|
|
||||||
msgstr "0.29.0"
|
|
||||||
|
|
||||||
msgid "Contents:"
|
|
||||||
msgstr "Contenu :"
|
|
||||||
|
|
||||||
msgid "Current Series Release Notes"
|
|
||||||
msgstr "Note de la release actuelle"
|
|
||||||
|
|
||||||
msgid "New Features"
|
|
||||||
msgstr "Nouvelles fonctionnalités"
|
|
||||||
|
|
||||||
msgid "Newton Series Release Notes"
|
|
||||||
msgstr "Note de release pour Newton"
|
|
||||||
|
|
||||||
msgid "Welcome to watcher's Release Notes documentation!"
|
|
||||||
msgstr "Bienvenue dans la documentation de la note de Release de Watcher"
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
==========================
|
|
||||||
Train Series Release Notes
|
|
||||||
==========================
|
|
||||||
|
|
||||||
.. release-notes::
|
|
||||||
:branch: stable/train
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
===========================
|
|
||||||
Ussuri Series Release Notes
|
|
||||||
===========================
|
|
||||||
|
|
||||||
.. release-notes::
|
|
||||||
:branch: stable/ussuri
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
=============================
|
|
||||||
Victoria Series Release Notes
|
|
||||||
=============================
|
|
||||||
|
|
||||||
.. release-notes::
|
|
||||||
:branch: stable/victoria
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
============================
|
|
||||||
Wallaby Series Release Notes
|
|
||||||
============================
|
|
||||||
|
|
||||||
.. release-notes::
|
|
||||||
:branch: stable/wallaby
|
|
||||||
@@ -1,34 +1,35 @@
|
|||||||
# The order of packages is significant, because pip processes them in the order
|
# The order of packages is significant, because pip processes them in the order
|
||||||
# of appearance. Changing the order has an impact on the overall integration
|
# of appearance. Changing the order has an impact on the overall integration
|
||||||
# process, which may cause wedges in the gate later.
|
# process, which may cause wedges in the gate later.
|
||||||
|
|
||||||
apscheduler>=3.5.1 # MIT License
|
apscheduler>=3.5.1 # MIT License
|
||||||
|
enum34>=1.1.6;python_version=='2.7' or python_version=='2.6' or python_version=='3.3' # BSD
|
||||||
jsonpatch>=1.21 # BSD
|
jsonpatch>=1.21 # BSD
|
||||||
keystoneauth1>=3.4.0 # Apache-2.0
|
keystoneauth1>=3.4.0 # Apache-2.0
|
||||||
jsonschema>=3.2.0 # MIT
|
jsonschema>=2.6.0 # MIT
|
||||||
keystonemiddleware>=4.21.0 # Apache-2.0
|
keystonemiddleware>=4.21.0 # Apache-2.0
|
||||||
lxml>=4.5.1 # BSD
|
lxml>=4.1.1 # BSD
|
||||||
croniter>=0.3.20 # MIT License
|
croniter>=0.3.20 # MIT License
|
||||||
os-resource-classes>=0.4.0
|
os-resource-classes>=0.4.0
|
||||||
oslo.concurrency>=3.26.0 # Apache-2.0
|
oslo.concurrency>=3.26.0 # Apache-2.0
|
||||||
oslo.cache>=1.29.0 # Apache-2.0
|
oslo.cache>=1.29.0 # Apache-2.0
|
||||||
oslo.config>=6.8.0 # Apache-2.0
|
oslo.config>=5.2.0 # Apache-2.0
|
||||||
oslo.context>=2.21.0 # Apache-2.0
|
oslo.context>=2.21.0 # Apache-2.0
|
||||||
oslo.db>=4.44.0 # Apache-2.0
|
oslo.db>=4.35.0 # Apache-2.0
|
||||||
oslo.i18n>=3.20.0 # Apache-2.0
|
oslo.i18n>=3.20.0 # Apache-2.0
|
||||||
oslo.log>=3.37.0 # Apache-2.0
|
oslo.log>=3.37.0 # Apache-2.0
|
||||||
oslo.messaging>=8.1.2 # Apache-2.0
|
oslo.messaging>=8.1.2 # Apache-2.0
|
||||||
oslo.policy>=3.6.0 # Apache-2.0
|
oslo.policy>=1.34.0 # Apache-2.0
|
||||||
oslo.reports>=1.27.0 # Apache-2.0
|
oslo.reports>=1.27.0 # Apache-2.0
|
||||||
oslo.serialization>=2.25.0 # Apache-2.0
|
oslo.serialization>=2.25.0 # Apache-2.0
|
||||||
oslo.service>=1.30.0 # Apache-2.0
|
oslo.service>=1.30.0 # Apache-2.0
|
||||||
oslo.upgradecheck>=1.3.0 # Apache-2.0
|
oslo.upgradecheck>=0.1.0 # Apache-2.0
|
||||||
oslo.utils>=3.36.0 # Apache-2.0
|
oslo.utils>=3.36.0 # Apache-2.0
|
||||||
oslo.versionedobjects>=1.32.0 # Apache-2.0
|
oslo.versionedobjects>=1.32.0 # Apache-2.0
|
||||||
PasteDeploy>=1.5.2 # MIT
|
PasteDeploy>=1.5.2 # MIT
|
||||||
pbr>=3.1.1 # Apache-2.0
|
pbr>=3.1.1 # Apache-2.0
|
||||||
pecan>=1.3.2 # BSD
|
pecan>=1.3.2 # BSD
|
||||||
PrettyTable>=0.7.2 # BSD
|
PrettyTable<0.8,>=0.7.2 # BSD
|
||||||
gnocchiclient>=7.0.1 # Apache-2.0
|
gnocchiclient>=7.0.1 # Apache-2.0
|
||||||
python-ceilometerclient>=2.9.0 # Apache-2.0
|
python-ceilometerclient>=2.9.0 # Apache-2.0
|
||||||
python-cinderclient>=3.5.0 # Apache-2.0
|
python-cinderclient>=3.5.0 # Apache-2.0
|
||||||
@@ -39,11 +40,14 @@ python-neutronclient>=6.7.0 # Apache-2.0
|
|||||||
python-novaclient>=14.1.0 # Apache-2.0
|
python-novaclient>=14.1.0 # Apache-2.0
|
||||||
python-openstackclient>=3.14.0 # Apache-2.0
|
python-openstackclient>=3.14.0 # Apache-2.0
|
||||||
python-ironicclient>=2.5.0 # Apache-2.0
|
python-ironicclient>=2.5.0 # Apache-2.0
|
||||||
|
six>=1.11.0 # MIT
|
||||||
SQLAlchemy>=1.2.5 # MIT
|
SQLAlchemy>=1.2.5 # MIT
|
||||||
stevedore>=1.28.0 # Apache-2.0
|
stevedore>=1.28.0 # Apache-2.0
|
||||||
taskflow>=3.8.0 # Apache-2.0
|
taskflow>=3.1.0 # Apache-2.0
|
||||||
WebOb>=1.8.5 # MIT
|
WebOb>=1.8.5 # MIT
|
||||||
WSME>=0.9.2 # MIT
|
WSME>=0.9.2 # MIT
|
||||||
networkx>=2.4 # BSD
|
# NOTE(fdegir): NetworkX 2.3 dropped support for Python 2
|
||||||
|
networkx>=1.11,<2.3;python_version<'3.0' # BSD
|
||||||
|
networkx>=1.11;python_version>='3.4' # BSD
|
||||||
microversion_parse>=0.2.1 # Apache-2.0
|
microversion_parse>=0.2.1 # Apache-2.0
|
||||||
futurist>=1.8.0 # Apache-2.0
|
futurist>=1.8.0 # Apache-2.0
|
||||||
|
|||||||
31
setup.cfg
31
setup.cfg
@@ -1,12 +1,11 @@
|
|||||||
[metadata]
|
[metadata]
|
||||||
name = python-watcher
|
name = python-watcher
|
||||||
summary = OpenStack Watcher provides a flexible and scalable resource optimization service for multi-tenant OpenStack-based clouds.
|
summary = OpenStack Watcher provides a flexible and scalable resource optimization service for multi-tenant OpenStack-based clouds.
|
||||||
description_file =
|
description-file =
|
||||||
README.rst
|
README.rst
|
||||||
author = OpenStack
|
author = OpenStack
|
||||||
author_email = openstack-discuss@lists.openstack.org
|
author-email = openstack-discuss@lists.openstack.org
|
||||||
home_page = https://docs.openstack.org/watcher/latest/
|
home-page = https://docs.openstack.org/watcher/latest/
|
||||||
python_requires = >=3.6
|
|
||||||
classifier =
|
classifier =
|
||||||
Environment :: OpenStack
|
Environment :: OpenStack
|
||||||
Intended Audience :: Information Technology
|
Intended Audience :: Information Technology
|
||||||
@@ -14,12 +13,11 @@ classifier =
|
|||||||
License :: OSI Approved :: Apache Software License
|
License :: OSI Approved :: Apache Software License
|
||||||
Operating System :: POSIX :: Linux
|
Operating System :: POSIX :: Linux
|
||||||
Programming Language :: Python
|
Programming Language :: Python
|
||||||
Programming Language :: Python :: Implementation :: CPython
|
Programming Language :: Python :: 2
|
||||||
Programming Language :: Python :: 3 :: Only
|
Programming Language :: Python :: 2.7
|
||||||
Programming Language :: Python :: 3
|
Programming Language :: Python :: 3
|
||||||
Programming Language :: Python :: 3.6
|
Programming Language :: Python :: 3.6
|
||||||
Programming Language :: Python :: 3.7
|
Programming Language :: Python :: 3.7
|
||||||
Programming Language :: Python :: 3.8
|
|
||||||
|
|
||||||
[files]
|
[files]
|
||||||
packages =
|
packages =
|
||||||
@@ -27,6 +25,10 @@ packages =
|
|||||||
data_files =
|
data_files =
|
||||||
etc/ = etc/*
|
etc/ = etc/*
|
||||||
|
|
||||||
|
[global]
|
||||||
|
setup-hooks =
|
||||||
|
pbr.hooks.setup_hook
|
||||||
|
|
||||||
[entry_points]
|
[entry_points]
|
||||||
oslo.config.opts =
|
oslo.config.opts =
|
||||||
watcher = watcher.conf.opts:list_opts
|
watcher = watcher.conf.opts:list_opts
|
||||||
@@ -108,3 +110,18 @@ watcher_cluster_data_model_collectors =
|
|||||||
compute = watcher.decision_engine.model.collector.nova:NovaClusterDataModelCollector
|
compute = watcher.decision_engine.model.collector.nova:NovaClusterDataModelCollector
|
||||||
storage = watcher.decision_engine.model.collector.cinder:CinderClusterDataModelCollector
|
storage = watcher.decision_engine.model.collector.cinder:CinderClusterDataModelCollector
|
||||||
baremetal = watcher.decision_engine.model.collector.ironic:BaremetalClusterDataModelCollector
|
baremetal = watcher.decision_engine.model.collector.ironic:BaremetalClusterDataModelCollector
|
||||||
|
|
||||||
|
|
||||||
|
[compile_catalog]
|
||||||
|
directory = watcher/locale
|
||||||
|
domain = watcher
|
||||||
|
|
||||||
|
[update_catalog]
|
||||||
|
domain = watcher
|
||||||
|
output_dir = watcher/locale
|
||||||
|
input_file = watcher/locale/watcher.pot
|
||||||
|
|
||||||
|
[extract_messages]
|
||||||
|
keywords = _ gettext ngettext l_ lazy_gettext _LI _LW _LE _LC
|
||||||
|
mapping_file = babel.cfg
|
||||||
|
output_file = watcher/locale/watcher.pot
|
||||||
|
|||||||
9
setup.py
9
setup.py
@@ -13,8 +13,17 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
|
||||||
import setuptools
|
import setuptools
|
||||||
|
|
||||||
|
# In python < 2.7.4, a lazy loading of package `pbr` will break
|
||||||
|
# setuptools if some other modules registered functions in `atexit`.
|
||||||
|
# solution from: http://bugs.python.org/issue15881#msg170215
|
||||||
|
try:
|
||||||
|
import multiprocessing # noqa
|
||||||
|
except ImportError:
|
||||||
|
pass
|
||||||
|
|
||||||
setuptools.setup(
|
setuptools.setup(
|
||||||
setup_requires=['pbr>=2.0.0'],
|
setup_requires=['pbr>=2.0.0'],
|
||||||
pbr=True)
|
pbr=True)
|
||||||
|
|||||||
@@ -5,8 +5,10 @@
|
|||||||
coverage>=4.5.1 # Apache-2.0
|
coverage>=4.5.1 # Apache-2.0
|
||||||
doc8>=0.8.0 # Apache-2.0
|
doc8>=0.8.0 # Apache-2.0
|
||||||
freezegun>=0.3.10 # Apache-2.0
|
freezegun>=0.3.10 # Apache-2.0
|
||||||
hacking>=3.0.1,<3.1.0 # Apache-2.0
|
hacking>=1.1.0,<1.2.0 # Apache-2.0
|
||||||
|
mock>=2.0.0 # BSD
|
||||||
oslotest>=3.3.0 # Apache-2.0
|
oslotest>=3.3.0 # Apache-2.0
|
||||||
|
os-testr>=1.0.0 # Apache-2.0
|
||||||
testscenarios>=0.5.0 # Apache-2.0/BSD
|
testscenarios>=0.5.0 # Apache-2.0/BSD
|
||||||
testtools>=2.3.0 # MIT
|
testtools>=2.3.0 # MIT
|
||||||
stestr>=2.0.0 # Apache-2.0
|
stestr>=2.0.0 # Apache-2.0
|
||||||
|
|||||||
108
tox.ini
108
tox.ini
@@ -1,18 +1,17 @@
|
|||||||
[tox]
|
[tox]
|
||||||
minversion = 3.18.0
|
minversion = 2.0
|
||||||
envlist = py3,pep8
|
envlist = py36,py37,py27,pep8
|
||||||
skipsdist = True
|
skipsdist = True
|
||||||
ignore_basepython_conflict = True
|
|
||||||
|
|
||||||
[testenv]
|
[testenv]
|
||||||
basepython = python3
|
|
||||||
usedevelop = True
|
usedevelop = True
|
||||||
allowlist_externals = find
|
whitelist_externals = find
|
||||||
rm
|
rm
|
||||||
install_command = pip install -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/xena} {opts} {packages}
|
install_command = pip install {opts} {packages}
|
||||||
setenv =
|
setenv =
|
||||||
VIRTUAL_ENV={envdir}
|
VIRTUAL_ENV={envdir}
|
||||||
deps =
|
deps =
|
||||||
|
-c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/train}
|
||||||
-r{toxinidir}/test-requirements.txt
|
-r{toxinidir}/test-requirements.txt
|
||||||
-r{toxinidir}/requirements.txt
|
-r{toxinidir}/requirements.txt
|
||||||
commands =
|
commands =
|
||||||
@@ -22,20 +21,24 @@ commands =
|
|||||||
passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
|
passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
|
||||||
|
|
||||||
[testenv:pep8]
|
[testenv:pep8]
|
||||||
|
basepython = python3
|
||||||
commands =
|
commands =
|
||||||
doc8 doc/source/ CONTRIBUTING.rst HACKING.rst README.rst
|
doc8 doc/source/ CONTRIBUTING.rst HACKING.rst README.rst
|
||||||
flake8
|
flake8
|
||||||
bandit -r watcher -x watcher/tests/* -n5 -ll -s B320
|
bandit -r watcher -x watcher/tests/* -n5 -ll -s B320
|
||||||
|
|
||||||
[testenv:venv]
|
[testenv:venv]
|
||||||
|
basepython = python3
|
||||||
setenv = PYTHONHASHSEED=0
|
setenv = PYTHONHASHSEED=0
|
||||||
deps =
|
deps =
|
||||||
|
-c{env:UPPER_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/train}
|
||||||
-r{toxinidir}/doc/requirements.txt
|
-r{toxinidir}/doc/requirements.txt
|
||||||
-r{toxinidir}/test-requirements.txt
|
-r{toxinidir}/test-requirements.txt
|
||||||
-r{toxinidir}/requirements.txt
|
-r{toxinidir}/requirements.txt
|
||||||
commands = {posargs}
|
commands = {posargs}
|
||||||
|
|
||||||
[testenv:cover]
|
[testenv:cover]
|
||||||
|
basepython = python3
|
||||||
setenv =
|
setenv =
|
||||||
PYTHON=coverage run --source watcher --parallel-mode
|
PYTHON=coverage run --source watcher --parallel-mode
|
||||||
commands =
|
commands =
|
||||||
@@ -46,87 +49,82 @@ commands =
|
|||||||
coverage report
|
coverage report
|
||||||
|
|
||||||
[testenv:docs]
|
[testenv:docs]
|
||||||
|
basepython = python3
|
||||||
setenv = PYTHONHASHSEED=0
|
setenv = PYTHONHASHSEED=0
|
||||||
deps =
|
deps = -r{toxinidir}/doc/requirements.txt
|
||||||
-r{toxinidir}/doc/requirements.txt
|
|
||||||
commands =
|
commands =
|
||||||
rm -fr doc/build doc/source/api/ .autogenerated
|
rm -fr doc/build doc/source/api/ .autogenerated
|
||||||
sphinx-build -W --keep-going -b html doc/source doc/build/html
|
sphinx-build -W -b html doc/source doc/build/html
|
||||||
|
|
||||||
[testenv:api-ref]
|
[testenv:api-ref]
|
||||||
|
basepython = python3
|
||||||
deps = -r{toxinidir}/doc/requirements.txt
|
deps = -r{toxinidir}/doc/requirements.txt
|
||||||
allowlist_externals = bash
|
whitelist_externals = bash
|
||||||
commands =
|
commands =
|
||||||
bash -c 'rm -rf api-ref/build'
|
bash -c 'rm -rf api-ref/build'
|
||||||
sphinx-build -W --keep-going -b html -d api-ref/build/doctrees api-ref/source api-ref/build/html
|
sphinx-build -W -b html -d api-ref/build/doctrees api-ref/source api-ref/build/html
|
||||||
|
|
||||||
[testenv:debug]
|
[testenv:debug]
|
||||||
|
basepython = python3
|
||||||
commands = oslo_debug_helper -t watcher/tests {posargs}
|
commands = oslo_debug_helper -t watcher/tests {posargs}
|
||||||
|
|
||||||
[testenv:genconfig]
|
[testenv:genconfig]
|
||||||
|
basepython = python3
|
||||||
sitepackages = False
|
sitepackages = False
|
||||||
commands =
|
commands =
|
||||||
oslo-config-generator --config-file etc/watcher/oslo-config-generator/watcher.conf
|
oslo-config-generator --config-file etc/watcher/oslo-config-generator/watcher.conf
|
||||||
|
|
||||||
[testenv:genpolicy]
|
[testenv:genpolicy]
|
||||||
|
basepython = python3
|
||||||
commands =
|
commands =
|
||||||
oslopolicy-sample-generator --config-file etc/watcher/oslo-policy-generator/watcher-policy-generator.conf
|
oslopolicy-sample-generator --config-file etc/watcher/oslo-policy-generator/watcher-policy-generator.conf
|
||||||
|
|
||||||
[testenv:wheel]
|
|
||||||
commands = python setup.py bdist_wheel
|
|
||||||
|
|
||||||
[testenv:pdf-docs]
|
|
||||||
envdir = {toxworkdir}/docs
|
|
||||||
deps = {[testenv:docs]deps}
|
|
||||||
allowlist_externals =
|
|
||||||
rm
|
|
||||||
make
|
|
||||||
commands =
|
|
||||||
rm -rf doc/build/pdf
|
|
||||||
sphinx-build -W --keep-going -b latex doc/source doc/build/pdf
|
|
||||||
make -C doc/build/pdf
|
|
||||||
|
|
||||||
[testenv:releasenotes]
|
|
||||||
deps = -r{toxinidir}/doc/requirements.txt
|
|
||||||
commands = sphinx-build -a -W -E -d releasenotes/build/doctrees --keep-going -b html releasenotes/source releasenotes/build/html
|
|
||||||
|
|
||||||
[testenv:bandit]
|
|
||||||
deps = -r{toxinidir}/test-requirements.txt
|
|
||||||
commands = bandit -r watcher -x watcher/tests/* -n5 -ll -s B320
|
|
||||||
|
|
||||||
[flake8]
|
[flake8]
|
||||||
filename = *.py,app.wsgi
|
filename = *.py,app.wsgi
|
||||||
show-source=True
|
show-source=True
|
||||||
# W504 line break after binary operator
|
ignore= H105,E123,E226,N320,H202
|
||||||
ignore= H105,E123,E226,N320,H202,W504
|
|
||||||
builtins= _
|
builtins= _
|
||||||
enable-extensions = H106,H203,H904
|
enable-extensions = H106,H203,H904
|
||||||
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,*sqlalchemy/alembic/versions/*,demo/,releasenotes
|
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,*sqlalchemy/alembic/versions/*,demo/,releasenotes
|
||||||
|
|
||||||
|
[testenv:wheel]
|
||||||
|
basepython = python3
|
||||||
|
commands = python setup.py bdist_wheel
|
||||||
|
|
||||||
[hacking]
|
[hacking]
|
||||||
import_exceptions = watcher._i18n
|
import_exceptions = watcher._i18n
|
||||||
|
local-check-factory = watcher.hacking.checks.factory
|
||||||
[flake8:local-plugins]
|
|
||||||
extension =
|
|
||||||
N319 = checks:no_translate_debug_logs
|
|
||||||
N321 = checks:use_jsonutils
|
|
||||||
N322 = checks:check_assert_called_once_with
|
|
||||||
N325 = checks:check_python3_xrange
|
|
||||||
N326 = checks:check_no_basestring
|
|
||||||
N327 = checks:check_python3_no_iteritems
|
|
||||||
N328 = checks:check_asserttrue
|
|
||||||
N329 = checks:check_assertfalse
|
|
||||||
N330 = checks:check_assertempty
|
|
||||||
N331 = checks:check_assertisinstance
|
|
||||||
N332 = checks:check_assertequal_for_httpcode
|
|
||||||
N333 = checks:check_log_warn_deprecated
|
|
||||||
N340 = checks:check_oslo_i18n_wrapper
|
|
||||||
N341 = checks:check_builtins_gettext
|
|
||||||
N342 = checks:no_redundant_import_alias
|
|
||||||
N366 = checks:import_stock_mock
|
|
||||||
paths = ./watcher/hacking
|
|
||||||
|
|
||||||
[doc8]
|
[doc8]
|
||||||
extension=.rst
|
extension=.rst
|
||||||
# todo: stop ignoring doc/source/man when https://bugs.launchpad.net/doc8/+bug/1502391 is fixed
|
# todo: stop ignoring doc/source/man when https://bugs.launchpad.net/doc8/+bug/1502391 is fixed
|
||||||
ignore-path=doc/source/image_src,doc/source/man,doc/source/api
|
ignore-path=doc/source/image_src,doc/source/man,doc/source/api
|
||||||
|
|
||||||
|
[testenv:pdf-docs]
|
||||||
|
basepython = python3
|
||||||
|
envdir = {toxworkdir}/docs
|
||||||
|
deps = {[testenv:docs]deps}
|
||||||
|
whitelist_externals =
|
||||||
|
rm
|
||||||
|
make
|
||||||
|
commands =
|
||||||
|
rm -rf doc/build/pdf
|
||||||
|
sphinx-build -W -b latex doc/source doc/build/pdf
|
||||||
|
make -C doc/build/pdf
|
||||||
|
|
||||||
|
[testenv:releasenotes]
|
||||||
|
basepython = python3
|
||||||
|
deps = -r{toxinidir}/doc/requirements.txt
|
||||||
|
commands = sphinx-build -a -W -E -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html
|
||||||
|
|
||||||
|
[testenv:bandit]
|
||||||
|
basepython = python3
|
||||||
|
deps = -r{toxinidir}/test-requirements.txt
|
||||||
|
commands = bandit -r watcher -x watcher/tests/* -n5 -ll -s B320
|
||||||
|
|
||||||
|
[testenv:lower-constraints]
|
||||||
|
basepython = python3
|
||||||
|
deps =
|
||||||
|
-c{toxinidir}/lower-constraints.txt
|
||||||
|
-r{toxinidir}/test-requirements.txt
|
||||||
|
-r{toxinidir}/requirements.txt
|
||||||
|
|||||||
@@ -37,5 +37,5 @@ def install(app, conf, public_routes):
|
|||||||
if not CONF.get('enable_authentication'):
|
if not CONF.get('enable_authentication'):
|
||||||
return app
|
return app
|
||||||
return auth_token.AuthTokenMiddleware(app,
|
return auth_token.AuthTokenMiddleware(app,
|
||||||
conf=dict(conf.keystone_authtoken),
|
conf=dict(conf),
|
||||||
public_api_routes=public_routes)
|
public_api_routes=public_routes)
|
||||||
|
|||||||
@@ -13,6 +13,8 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
from oslo_config import cfg
|
from oslo_config import cfg
|
||||||
from watcher.api import hooks
|
from watcher.api import hooks
|
||||||
|
|
||||||
@@ -25,10 +27,6 @@ server = {
|
|||||||
|
|
||||||
# Pecan Application Configurations
|
# Pecan Application Configurations
|
||||||
# See https://pecan.readthedocs.org/en/latest/configuration.html#application-configuration # noqa
|
# See https://pecan.readthedocs.org/en/latest/configuration.html#application-configuration # noqa
|
||||||
acl_public_routes = ['/']
|
|
||||||
if not cfg.CONF.api.get("enable_webhooks_auth"):
|
|
||||||
acl_public_routes.append('/v1/webhooks/.*')
|
|
||||||
|
|
||||||
app = {
|
app = {
|
||||||
'root': 'watcher.api.controllers.root.RootController',
|
'root': 'watcher.api.controllers.root.RootController',
|
||||||
'modules': ['watcher.api'],
|
'modules': ['watcher.api'],
|
||||||
@@ -38,7 +36,9 @@ app = {
|
|||||||
],
|
],
|
||||||
'static_root': '%(confdir)s/public',
|
'static_root': '%(confdir)s/public',
|
||||||
'enable_acl': True,
|
'enable_acl': True,
|
||||||
'acl_public_routes': acl_public_routes,
|
'acl_public_routes': [
|
||||||
|
'/',
|
||||||
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
# WSME Configurations
|
# WSME Configurations
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ from watcher.api.controllers import base
|
|||||||
|
|
||||||
def build_url(resource, resource_args, bookmark=False, base_url=None):
|
def build_url(resource, resource_args, bookmark=False, base_url=None):
|
||||||
if base_url is None:
|
if base_url is None:
|
||||||
base_url = pecan.request.application_url
|
base_url = pecan.request.host_url
|
||||||
|
|
||||||
template = '%(url)s/%(res)s' if bookmark else '%(url)s/v1/%(res)s'
|
template = '%(url)s/%(res)s' if bookmark else '%(url)s/v1/%(res)s'
|
||||||
# FIXME(lucasagomes): I'm getting a 404 when doing a GET on
|
# FIXME(lucasagomes): I'm getting a 404 when doing a GET on
|
||||||
|
|||||||
@@ -30,12 +30,3 @@ audits.
|
|||||||
---
|
---
|
||||||
Added ``force`` into create audit request. If ``force`` is true,
|
Added ``force`` into create audit request. If ``force`` is true,
|
||||||
audit will be executed despite of ongoing actionplan.
|
audit will be executed despite of ongoing actionplan.
|
||||||
|
|
||||||
1.3
|
|
||||||
---
|
|
||||||
Added list data model API.
|
|
||||||
|
|
||||||
1.4
|
|
||||||
---
|
|
||||||
Added Watcher webhook API. It can be used to trigger audit
|
|
||||||
with ``event`` type.
|
|
||||||
|
|||||||
@@ -59,8 +59,7 @@ class Version(base.APIBase):
|
|||||||
version.status = status
|
version.status = status
|
||||||
version.max_version = v.max_version_string()
|
version.max_version = v.max_version_string()
|
||||||
version.min_version = v.min_version_string()
|
version.min_version = v.min_version_string()
|
||||||
version.links = [link.Link.make_link('self',
|
version.links = [link.Link.make_link('self', pecan.request.host_url,
|
||||||
pecan.request.application_url,
|
|
||||||
id, '', bookmark=True)]
|
id, '', bookmark=True)]
|
||||||
return version
|
return version
|
||||||
|
|
||||||
|
|||||||
@@ -40,9 +40,7 @@ from watcher.api.controllers.v1 import goal
|
|||||||
from watcher.api.controllers.v1 import scoring_engine
|
from watcher.api.controllers.v1 import scoring_engine
|
||||||
from watcher.api.controllers.v1 import service
|
from watcher.api.controllers.v1 import service
|
||||||
from watcher.api.controllers.v1 import strategy
|
from watcher.api.controllers.v1 import strategy
|
||||||
from watcher.api.controllers.v1 import utils
|
|
||||||
from watcher.api.controllers.v1 import versions
|
from watcher.api.controllers.v1 import versions
|
||||||
from watcher.api.controllers.v1 import webhooks
|
|
||||||
|
|
||||||
|
|
||||||
def min_version():
|
def min_version():
|
||||||
@@ -132,9 +130,6 @@ class V1(APIBase):
|
|||||||
services = [link.Link]
|
services = [link.Link]
|
||||||
"""Links to the services resource"""
|
"""Links to the services resource"""
|
||||||
|
|
||||||
webhooks = [link.Link]
|
|
||||||
"""Links to the webhooks resource"""
|
|
||||||
|
|
||||||
links = [link.Link]
|
links = [link.Link]
|
||||||
"""Links that point to a specific URL for this version and documentation"""
|
"""Links that point to a specific URL for this version and documentation"""
|
||||||
|
|
||||||
@@ -142,8 +137,7 @@ class V1(APIBase):
|
|||||||
def convert():
|
def convert():
|
||||||
v1 = V1()
|
v1 = V1()
|
||||||
v1.id = "v1"
|
v1.id = "v1"
|
||||||
base_url = pecan.request.application_url
|
v1.links = [link.Link.make_link('self', pecan.request.host_url,
|
||||||
v1.links = [link.Link.make_link('self', base_url,
|
|
||||||
'v1', '', bookmark=True),
|
'v1', '', bookmark=True),
|
||||||
link.Link.make_link('describedby',
|
link.Link.make_link('describedby',
|
||||||
'http://docs.openstack.org',
|
'http://docs.openstack.org',
|
||||||
@@ -154,66 +148,57 @@ class V1(APIBase):
|
|||||||
v1.media_types = [MediaType('application/json',
|
v1.media_types = [MediaType('application/json',
|
||||||
'application/vnd.openstack.watcher.v1+json')]
|
'application/vnd.openstack.watcher.v1+json')]
|
||||||
v1.audit_templates = [link.Link.make_link('self',
|
v1.audit_templates = [link.Link.make_link('self',
|
||||||
base_url,
|
pecan.request.host_url,
|
||||||
'audit_templates', ''),
|
'audit_templates', ''),
|
||||||
link.Link.make_link('bookmark',
|
link.Link.make_link('bookmark',
|
||||||
base_url,
|
pecan.request.host_url,
|
||||||
'audit_templates', '',
|
'audit_templates', '',
|
||||||
bookmark=True)
|
bookmark=True)
|
||||||
]
|
]
|
||||||
v1.audits = [link.Link.make_link('self', base_url,
|
v1.audits = [link.Link.make_link('self', pecan.request.host_url,
|
||||||
'audits', ''),
|
'audits', ''),
|
||||||
link.Link.make_link('bookmark',
|
link.Link.make_link('bookmark',
|
||||||
base_url,
|
pecan.request.host_url,
|
||||||
'audits', '',
|
'audits', '',
|
||||||
bookmark=True)
|
bookmark=True)
|
||||||
]
|
]
|
||||||
if utils.allow_list_datamodel():
|
v1.data_model = [link.Link.make_link('self', pecan.request.host_url,
|
||||||
v1.data_model = [link.Link.make_link('self', base_url,
|
'data_model', ''),
|
||||||
'data_model', ''),
|
link.Link.make_link('bookmark',
|
||||||
link.Link.make_link('bookmark',
|
pecan.request.host_url,
|
||||||
base_url,
|
'data_model', '',
|
||||||
'data_model', '',
|
bookmark=True)
|
||||||
bookmark=True)
|
]
|
||||||
]
|
v1.actions = [link.Link.make_link('self', pecan.request.host_url,
|
||||||
v1.actions = [link.Link.make_link('self', base_url,
|
|
||||||
'actions', ''),
|
'actions', ''),
|
||||||
link.Link.make_link('bookmark',
|
link.Link.make_link('bookmark',
|
||||||
base_url,
|
pecan.request.host_url,
|
||||||
'actions', '',
|
'actions', '',
|
||||||
bookmark=True)
|
bookmark=True)
|
||||||
]
|
]
|
||||||
v1.action_plans = [link.Link.make_link(
|
v1.action_plans = [link.Link.make_link(
|
||||||
'self', base_url, 'action_plans', ''),
|
'self', pecan.request.host_url, 'action_plans', ''),
|
||||||
link.Link.make_link('bookmark',
|
link.Link.make_link('bookmark',
|
||||||
base_url,
|
pecan.request.host_url,
|
||||||
'action_plans', '',
|
'action_plans', '',
|
||||||
bookmark=True)
|
bookmark=True)
|
||||||
]
|
]
|
||||||
|
|
||||||
v1.scoring_engines = [link.Link.make_link(
|
v1.scoring_engines = [link.Link.make_link(
|
||||||
'self', base_url, 'scoring_engines', ''),
|
'self', pecan.request.host_url, 'scoring_engines', ''),
|
||||||
link.Link.make_link('bookmark',
|
link.Link.make_link('bookmark',
|
||||||
base_url,
|
pecan.request.host_url,
|
||||||
'scoring_engines', '',
|
'scoring_engines', '',
|
||||||
bookmark=True)
|
bookmark=True)
|
||||||
]
|
]
|
||||||
|
|
||||||
v1.services = [link.Link.make_link(
|
v1.services = [link.Link.make_link(
|
||||||
'self', base_url, 'services', ''),
|
'self', pecan.request.host_url, 'services', ''),
|
||||||
link.Link.make_link('bookmark',
|
link.Link.make_link('bookmark',
|
||||||
base_url,
|
pecan.request.host_url,
|
||||||
'services', '',
|
'services', '',
|
||||||
bookmark=True)
|
bookmark=True)
|
||||||
]
|
]
|
||||||
if utils.allow_webhook_api():
|
|
||||||
v1.webhooks = [link.Link.make_link(
|
|
||||||
'self', base_url, 'webhooks', ''),
|
|
||||||
link.Link.make_link('bookmark',
|
|
||||||
base_url,
|
|
||||||
'webhooks', '',
|
|
||||||
bookmark=True)
|
|
||||||
]
|
|
||||||
return v1
|
return v1
|
||||||
|
|
||||||
|
|
||||||
@@ -229,7 +214,6 @@ class Controller(rest.RestController):
|
|||||||
services = service.ServicesController()
|
services = service.ServicesController()
|
||||||
strategies = strategy.StrategiesController()
|
strategies = strategy.StrategiesController()
|
||||||
data_model = data_model.DataModelController()
|
data_model = data_model.DataModelController()
|
||||||
webhooks = webhooks.WebhookController()
|
|
||||||
|
|
||||||
@wsme_pecan.wsexpose(V1)
|
@wsme_pecan.wsexpose(V1)
|
||||||
def get(self):
|
def get(self):
|
||||||
|
|||||||
@@ -57,7 +57,6 @@ are dynamically loaded by Watcher at launch time.
|
|||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
from http import HTTPStatus
|
|
||||||
import pecan
|
import pecan
|
||||||
from pecan import rest
|
from pecan import rest
|
||||||
import wsme
|
import wsme
|
||||||
@@ -363,7 +362,7 @@ class ActionsController(rest.RestController):
|
|||||||
|
|
||||||
return Action.convert_with_links(action)
|
return Action.convert_with_links(action)
|
||||||
|
|
||||||
@wsme_pecan.wsexpose(Action, body=Action, status_code=HTTPStatus.CREATED)
|
@wsme_pecan.wsexpose(Action, body=Action, status_code=201)
|
||||||
def post(self, action):
|
def post(self, action):
|
||||||
"""Create a new action(forbidden).
|
"""Create a new action(forbidden).
|
||||||
|
|
||||||
@@ -423,7 +422,7 @@ class ActionsController(rest.RestController):
|
|||||||
action_to_update.save()
|
action_to_update.save()
|
||||||
return Action.convert_with_links(action_to_update)
|
return Action.convert_with_links(action_to_update)
|
||||||
|
|
||||||
@wsme_pecan.wsexpose(None, types.uuid, status_code=HTTPStatus.NO_CONTENT)
|
@wsme_pecan.wsexpose(None, types.uuid, status_code=204)
|
||||||
def delete(self, action_uuid):
|
def delete(self, action_uuid):
|
||||||
"""Delete a action(forbidden).
|
"""Delete a action(forbidden).
|
||||||
|
|
||||||
|
|||||||
@@ -56,7 +56,6 @@ state machine <action_plan_state_machine>`.
|
|||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
from http import HTTPStatus
|
|
||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
import pecan
|
import pecan
|
||||||
from pecan import rest
|
from pecan import rest
|
||||||
@@ -166,7 +165,7 @@ class ActionPlan(base.APIBase):
|
|||||||
name=indicator.name,
|
name=indicator.name,
|
||||||
description=indicator.description,
|
description=indicator.description,
|
||||||
unit=indicator.unit,
|
unit=indicator.unit,
|
||||||
value=float(indicator.value),
|
value=indicator.value,
|
||||||
)
|
)
|
||||||
efficacy_indicators.append(efficacy_indicator.as_dict())
|
efficacy_indicators.append(efficacy_indicator.as_dict())
|
||||||
self._efficacy_indicators = efficacy_indicators
|
self._efficacy_indicators = efficacy_indicators
|
||||||
@@ -461,7 +460,7 @@ class ActionPlansController(rest.RestController):
|
|||||||
|
|
||||||
return ActionPlan.convert_with_links(action_plan)
|
return ActionPlan.convert_with_links(action_plan)
|
||||||
|
|
||||||
@wsme_pecan.wsexpose(None, types.uuid, status_code=HTTPStatus.NO_CONTENT)
|
@wsme_pecan.wsexpose(None, types.uuid, status_code=204)
|
||||||
def delete(self, action_plan_uuid):
|
def delete(self, action_plan_uuid):
|
||||||
"""Delete an action plan.
|
"""Delete an action plan.
|
||||||
|
|
||||||
|
|||||||
@@ -32,7 +32,6 @@ states, visit :ref:`the Audit State machine <audit_state_machine>`.
|
|||||||
import datetime
|
import datetime
|
||||||
from dateutil import tz
|
from dateutil import tz
|
||||||
|
|
||||||
from http import HTTPStatus
|
|
||||||
import pecan
|
import pecan
|
||||||
from pecan import rest
|
from pecan import rest
|
||||||
import wsme
|
import wsme
|
||||||
@@ -596,8 +595,7 @@ class AuditsController(rest.RestController):
|
|||||||
|
|
||||||
return Audit.convert_with_links(rpc_audit)
|
return Audit.convert_with_links(rpc_audit)
|
||||||
|
|
||||||
@wsme_pecan.wsexpose(Audit, body=AuditPostType,
|
@wsme_pecan.wsexpose(Audit, body=AuditPostType, status_code=201)
|
||||||
status_code=HTTPStatus.CREATED)
|
|
||||||
def post(self, audit_p):
|
def post(self, audit_p):
|
||||||
"""Create a new audit.
|
"""Create a new audit.
|
||||||
|
|
||||||
@@ -719,7 +717,7 @@ class AuditsController(rest.RestController):
|
|||||||
audit_to_update.save()
|
audit_to_update.save()
|
||||||
return Audit.convert_with_links(audit_to_update)
|
return Audit.convert_with_links(audit_to_update)
|
||||||
|
|
||||||
@wsme_pecan.wsexpose(None, wtypes.text, status_code=HTTPStatus.NO_CONTENT)
|
@wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
|
||||||
def delete(self, audit):
|
def delete(self, audit):
|
||||||
"""Delete an audit.
|
"""Delete an audit.
|
||||||
|
|
||||||
|
|||||||
@@ -45,7 +45,6 @@ will be launched automatically or will need a manual confirmation from the
|
|||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
from http import HTTPStatus
|
|
||||||
import pecan
|
import pecan
|
||||||
from pecan import rest
|
from pecan import rest
|
||||||
import wsme
|
import wsme
|
||||||
@@ -619,7 +618,7 @@ class AuditTemplatesController(rest.RestController):
|
|||||||
|
|
||||||
@wsme.validate(types.uuid, AuditTemplatePostType)
|
@wsme.validate(types.uuid, AuditTemplatePostType)
|
||||||
@wsme_pecan.wsexpose(AuditTemplate, body=AuditTemplatePostType,
|
@wsme_pecan.wsexpose(AuditTemplate, body=AuditTemplatePostType,
|
||||||
status_code=HTTPStatus.CREATED)
|
status_code=201)
|
||||||
def post(self, audit_template_postdata):
|
def post(self, audit_template_postdata):
|
||||||
"""Create a new audit template.
|
"""Create a new audit template.
|
||||||
|
|
||||||
@@ -695,7 +694,7 @@ class AuditTemplatesController(rest.RestController):
|
|||||||
audit_template_to_update.save()
|
audit_template_to_update.save()
|
||||||
return AuditTemplate.convert_with_links(audit_template_to_update)
|
return AuditTemplate.convert_with_links(audit_template_to_update)
|
||||||
|
|
||||||
@wsme_pecan.wsexpose(None, wtypes.text, status_code=HTTPStatus.NO_CONTENT)
|
@wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
|
||||||
def delete(self, audit_template):
|
def delete(self, audit_template):
|
||||||
"""Delete a audit template.
|
"""Delete a audit template.
|
||||||
|
|
||||||
|
|||||||
@@ -24,7 +24,6 @@ from wsme import types as wtypes
|
|||||||
import wsmeext.pecan as wsme_pecan
|
import wsmeext.pecan as wsme_pecan
|
||||||
|
|
||||||
from watcher.api.controllers.v1 import types
|
from watcher.api.controllers.v1 import types
|
||||||
from watcher.api.controllers.v1 import utils
|
|
||||||
from watcher.common import exception
|
from watcher.common import exception
|
||||||
from watcher.common import policy
|
from watcher.common import policy
|
||||||
from watcher.decision_engine import rpcapi
|
from watcher.decision_engine import rpcapi
|
||||||
@@ -50,8 +49,6 @@ class DataModelController(rest.RestController):
|
|||||||
:param audit_uuid: The UUID of the audit, used to filter data model
|
:param audit_uuid: The UUID of the audit, used to filter data model
|
||||||
by the scope in audit.
|
by the scope in audit.
|
||||||
"""
|
"""
|
||||||
if not utils.allow_list_datamodel():
|
|
||||||
raise exception.NotAcceptable
|
|
||||||
if self.from_data_model:
|
if self.from_data_model:
|
||||||
raise exception.OperationNotPermitted
|
raise exception.OperationNotPermitted
|
||||||
allowed_data_model_type = [
|
allowed_data_model_type = [
|
||||||
|
|||||||
@@ -19,6 +19,8 @@ Service mechanism provides ability to monitor Watcher services state.
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
import datetime
|
import datetime
|
||||||
|
import six
|
||||||
|
|
||||||
from oslo_config import cfg
|
from oslo_config import cfg
|
||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
from oslo_utils import timeutils
|
from oslo_utils import timeutils
|
||||||
@@ -68,7 +70,7 @@ class Service(base.APIBase):
|
|||||||
service = objects.Service.get(pecan.request.context, id)
|
service = objects.Service.get(pecan.request.context, id)
|
||||||
last_heartbeat = (service.last_seen_up or service.updated_at or
|
last_heartbeat = (service.last_seen_up or service.updated_at or
|
||||||
service.created_at)
|
service.created_at)
|
||||||
if isinstance(last_heartbeat, str):
|
if isinstance(last_heartbeat, six.string_types):
|
||||||
# NOTE(russellb) If this service came in over rpc via
|
# NOTE(russellb) If this service came in over rpc via
|
||||||
# conductor, then the timestamp will be a string and needs to be
|
# conductor, then the timestamp will be a string and needs to be
|
||||||
# converted back to a datetime.
|
# converted back to a datetime.
|
||||||
|
|||||||
@@ -15,6 +15,7 @@
|
|||||||
|
|
||||||
from oslo_serialization import jsonutils
|
from oslo_serialization import jsonutils
|
||||||
from oslo_utils import strutils
|
from oslo_utils import strutils
|
||||||
|
import six
|
||||||
import wsme
|
import wsme
|
||||||
from wsme import types as wtypes
|
from wsme import types as wtypes
|
||||||
|
|
||||||
@@ -131,7 +132,7 @@ class JsonType(wtypes.UserType):
|
|||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
# These are the json serializable native types
|
# These are the json serializable native types
|
||||||
return ' | '.join(map(str, (wtypes.text, int, float,
|
return ' | '.join(map(str, (wtypes.text, six.integer_types, float,
|
||||||
BooleanType, list, dict, None)))
|
BooleanType, list, dict, None)))
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
@@ -183,7 +184,7 @@ class MultiType(wtypes.UserType):
|
|||||||
class JsonPatchType(wtypes.Base):
|
class JsonPatchType(wtypes.Base):
|
||||||
"""A complex type that represents a single json-patch operation."""
|
"""A complex type that represents a single json-patch operation."""
|
||||||
|
|
||||||
path = wtypes.wsattr(wtypes.StringType(pattern=r'^(/[\w-]+)+$'),
|
path = wtypes.wsattr(wtypes.StringType(pattern='^(/[\w-]+)+$'),
|
||||||
mandatory=True)
|
mandatory=True)
|
||||||
op = wtypes.wsattr(wtypes.Enum(str, 'add', 'replace', 'remove'),
|
op = wtypes.wsattr(wtypes.Enum(str, 'add', 'replace', 'remove'),
|
||||||
mandatory=True)
|
mandatory=True)
|
||||||
|
|||||||
@@ -164,8 +164,7 @@ def allow_start_end_audit_time():
|
|||||||
Version 1.1 of the API added support for start and end time of continuous
|
Version 1.1 of the API added support for start and end time of continuous
|
||||||
audits.
|
audits.
|
||||||
"""
|
"""
|
||||||
return pecan.request.version.minor >= (
|
return pecan.request.version.minor >= versions.MINOR_1_START_END_TIMING
|
||||||
versions.VERSIONS.MINOR_1_START_END_TIMING.value)
|
|
||||||
|
|
||||||
|
|
||||||
def allow_force():
|
def allow_force():
|
||||||
@@ -174,23 +173,4 @@ def allow_force():
|
|||||||
Version 1.2 of the API added support for forced audits that allows to
|
Version 1.2 of the API added support for forced audits that allows to
|
||||||
launch audit when other action plan is ongoing.
|
launch audit when other action plan is ongoing.
|
||||||
"""
|
"""
|
||||||
return pecan.request.version.minor >= (
|
return pecan.request.version.minor >= versions.MINOR_2_FORCE
|
||||||
versions.VERSIONS.MINOR_2_FORCE.value)
|
|
||||||
|
|
||||||
|
|
||||||
def allow_list_datamodel():
|
|
||||||
"""Check if we should support list data model API.
|
|
||||||
|
|
||||||
Version 1.3 of the API added support to list data model.
|
|
||||||
"""
|
|
||||||
return pecan.request.version.minor >= (
|
|
||||||
versions.VERSIONS.MINOR_3_DATAMODEL.value)
|
|
||||||
|
|
||||||
|
|
||||||
def allow_webhook_api():
|
|
||||||
"""Check if we should support webhook API.
|
|
||||||
|
|
||||||
Version 1.4 of the API added support to trigger webhook.
|
|
||||||
"""
|
|
||||||
return pecan.request.version.minor >= (
|
|
||||||
versions.VERSIONS.MINOR_4_WEBHOOK_API.value)
|
|
||||||
|
|||||||
@@ -14,25 +14,25 @@
|
|||||||
# License for the specific language governing permissions and limitations
|
# License for the specific language governing permissions and limitations
|
||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
import enum
|
|
||||||
|
|
||||||
|
|
||||||
class VERSIONS(enum.Enum):
|
|
||||||
MINOR_0_ROCKY = 0 # v1.0: corresponds to Rocky API
|
|
||||||
MINOR_1_START_END_TIMING = 1 # v1.1: Add start/end timei for audit
|
|
||||||
MINOR_2_FORCE = 2 # v1.2: Add force field to audit
|
|
||||||
MINOR_3_DATAMODEL = 3 # v1.3: Add list datamodel API
|
|
||||||
MINOR_4_WEBHOOK_API = 4 # v1.4: Add webhook trigger API
|
|
||||||
MINOR_MAX_VERSION = 4
|
|
||||||
|
|
||||||
|
|
||||||
# This is the version 1 API
|
# This is the version 1 API
|
||||||
BASE_VERSION = 1
|
BASE_VERSION = 1
|
||||||
|
|
||||||
|
# Here goes a short log of changes in every version.
|
||||||
|
#
|
||||||
|
# v1.0: corresponds to Rocky API
|
||||||
|
# v1.1: Add start/end time for continuous audit
|
||||||
|
# v1.2: Add force field to audit
|
||||||
|
|
||||||
|
MINOR_0_ROCKY = 0
|
||||||
|
MINOR_1_START_END_TIMING = 1
|
||||||
|
MINOR_2_FORCE = 2
|
||||||
|
|
||||||
|
MINOR_MAX_VERSION = MINOR_2_FORCE
|
||||||
|
|
||||||
# String representations of the minor and maximum versions
|
# String representations of the minor and maximum versions
|
||||||
_MIN_VERSION_STRING = '{}.{}'.format(BASE_VERSION,
|
_MIN_VERSION_STRING = '{}.{}'.format(BASE_VERSION, MINOR_0_ROCKY)
|
||||||
VERSIONS.MINOR_0_ROCKY.value)
|
_MAX_VERSION_STRING = '{}.{}'.format(BASE_VERSION, MINOR_MAX_VERSION)
|
||||||
_MAX_VERSION_STRING = '{}.{}'.format(BASE_VERSION,
|
|
||||||
VERSIONS.MINOR_MAX_VERSION.value)
|
|
||||||
|
|
||||||
|
|
||||||
def service_type_string():
|
def service_type_string():
|
||||||
|
|||||||
@@ -1,63 +0,0 @@
|
|||||||
# Licensed under the Apache License, Version 2.0 (the "License"); you may
|
|
||||||
# not use this file except in compliance with the License. You may obtain
|
|
||||||
# a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
|
||||||
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
|
|
||||||
# License for the specific language governing permissions and limitations
|
|
||||||
# under the License.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Webhook endpoint for Watcher v1 REST API.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from http import HTTPStatus
|
|
||||||
from oslo_log import log
|
|
||||||
import pecan
|
|
||||||
from pecan import rest
|
|
||||||
from wsme import types as wtypes
|
|
||||||
import wsmeext.pecan as wsme_pecan
|
|
||||||
|
|
||||||
from watcher.api.controllers.v1 import types
|
|
||||||
from watcher.api.controllers.v1 import utils
|
|
||||||
from watcher.common import exception
|
|
||||||
from watcher.decision_engine import rpcapi
|
|
||||||
from watcher import objects
|
|
||||||
|
|
||||||
LOG = log.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class WebhookController(rest.RestController):
|
|
||||||
"""REST controller for webhooks resource."""
|
|
||||||
def __init__(self):
|
|
||||||
super(WebhookController, self).__init__()
|
|
||||||
self.dc_client = rpcapi.DecisionEngineAPI()
|
|
||||||
|
|
||||||
@wsme_pecan.wsexpose(None, wtypes.text, body=types.jsontype,
|
|
||||||
status_code=HTTPStatus.ACCEPTED)
|
|
||||||
def post(self, audit_ident, body):
|
|
||||||
"""Trigger the given audit.
|
|
||||||
|
|
||||||
:param audit_ident: UUID or name of an audit.
|
|
||||||
"""
|
|
||||||
|
|
||||||
LOG.debug("Webhook trigger Audit: %s.", audit_ident)
|
|
||||||
|
|
||||||
context = pecan.request.context
|
|
||||||
audit = utils.get_resource('Audit', audit_ident)
|
|
||||||
if audit is None:
|
|
||||||
raise exception.AuditNotFound(audit=audit_ident)
|
|
||||||
if audit.audit_type != objects.audit.AuditType.EVENT.value:
|
|
||||||
raise exception.AuditTypeNotAllowed(audit_type=audit.audit_type)
|
|
||||||
allowed_state = (
|
|
||||||
objects.audit.State.PENDING,
|
|
||||||
objects.audit.State.SUCCEEDED,
|
|
||||||
)
|
|
||||||
if audit.state not in allowed_state:
|
|
||||||
raise exception.AuditStateNotAllowed(state=audit.state)
|
|
||||||
|
|
||||||
# trigger decision-engine to run the audit
|
|
||||||
self.dc_client.trigger_audit(context, audit.uuid)
|
|
||||||
@@ -15,9 +15,9 @@
|
|||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
|
|
||||||
from http import HTTPStatus
|
|
||||||
from oslo_config import cfg
|
from oslo_config import cfg
|
||||||
from pecan import hooks
|
from pecan import hooks
|
||||||
|
from six.moves import http_client
|
||||||
|
|
||||||
from watcher.common import context
|
from watcher.common import context
|
||||||
|
|
||||||
@@ -91,8 +91,8 @@ class NoExceptionTracebackHook(hooks.PecanHook):
|
|||||||
# Do nothing if there is no error.
|
# Do nothing if there is no error.
|
||||||
# Status codes in the range 200 (OK) to 399 (400 = BAD_REQUEST) are not
|
# Status codes in the range 200 (OK) to 399 (400 = BAD_REQUEST) are not
|
||||||
# an error.
|
# an error.
|
||||||
if (HTTPStatus.OK <= state.response.status_int <
|
if (http_client.OK <= state.response.status_int <
|
||||||
HTTPStatus.BAD_REQUEST):
|
http_client.BAD_REQUEST):
|
||||||
return
|
return
|
||||||
|
|
||||||
json_body = state.response.json
|
json_body = state.response.json
|
||||||
|
|||||||
@@ -34,7 +34,7 @@ class AuthTokenMiddleware(auth_token.AuthProtocol):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
def __init__(self, app, conf, public_api_routes=()):
|
def __init__(self, app, conf, public_api_routes=()):
|
||||||
route_pattern_tpl = r'%s(\.json|\.xml)?$'
|
route_pattern_tpl = '%s(\.json|\.xml)?$'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self.public_api_routes = [re.compile(route_pattern_tpl % route_tpl)
|
self.public_api_routes = [re.compile(route_pattern_tpl % route_tpl)
|
||||||
|
|||||||
@@ -24,6 +24,7 @@ from xml import etree as et
|
|||||||
|
|
||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
from oslo_serialization import jsonutils
|
from oslo_serialization import jsonutils
|
||||||
|
import six
|
||||||
import webob
|
import webob
|
||||||
|
|
||||||
from watcher._i18n import _
|
from watcher._i18n import _
|
||||||
@@ -83,10 +84,12 @@ class ParsableErrorMiddleware(object):
|
|||||||
'</error_message>' % state['status_code']]
|
'</error_message>' % state['status_code']]
|
||||||
state['headers'].append(('Content-Type', 'application/xml'))
|
state['headers'].append(('Content-Type', 'application/xml'))
|
||||||
else:
|
else:
|
||||||
app_iter = [i.decode('utf-8') for i in app_iter]
|
if six.PY3:
|
||||||
|
app_iter = [i.decode('utf-8') for i in app_iter]
|
||||||
body = [jsonutils.dumps(
|
body = [jsonutils.dumps(
|
||||||
{'error_message': '\n'.join(app_iter)})]
|
{'error_message': '\n'.join(app_iter)})]
|
||||||
body = [item.encode('utf-8') for item in body]
|
if six.PY3:
|
||||||
|
body = [item.encode('utf-8') for item in body]
|
||||||
state['headers'].append(('Content-Type', 'application/json'))
|
state['headers'].append(('Content-Type', 'application/json'))
|
||||||
state['headers'].append(('Content-Length', str(len(body[0]))))
|
state['headers'].append(('Content-Length', str(len(body[0]))))
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -20,6 +20,7 @@ import itertools
|
|||||||
from oslo_config import cfg
|
from oslo_config import cfg
|
||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
from oslo_utils import timeutils
|
from oslo_utils import timeutils
|
||||||
|
import six
|
||||||
|
|
||||||
from watcher.common import context as watcher_context
|
from watcher.common import context as watcher_context
|
||||||
from watcher.common import scheduling
|
from watcher.common import scheduling
|
||||||
@@ -82,7 +83,7 @@ class APISchedulingService(scheduling.BackgroundSchedulerService):
|
|||||||
service = objects.Service.get(context, service_id)
|
service = objects.Service.get(context, service_id)
|
||||||
last_heartbeat = (service.last_seen_up or service.updated_at or
|
last_heartbeat = (service.last_seen_up or service.updated_at or
|
||||||
service.created_at)
|
service.created_at)
|
||||||
if isinstance(last_heartbeat, str):
|
if isinstance(last_heartbeat, six.string_types):
|
||||||
# NOTE(russellb) If this service came in over rpc via
|
# NOTE(russellb) If this service came in over rpc via
|
||||||
# conductor, then the timestamp will be a string and needs to be
|
# conductor, then the timestamp will be a string and needs to be
|
||||||
# converted back to a datetime.
|
# converted back to a datetime.
|
||||||
|
|||||||
@@ -18,9 +18,11 @@
|
|||||||
#
|
#
|
||||||
|
|
||||||
import abc
|
import abc
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
class BaseActionPlanHandler(object, metaclass=abc.ABCMeta):
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class BaseActionPlanHandler(object):
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def execute(self):
|
def execute(self):
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|||||||
@@ -19,12 +19,14 @@
|
|||||||
import abc
|
import abc
|
||||||
|
|
||||||
import jsonschema
|
import jsonschema
|
||||||
|
import six
|
||||||
|
|
||||||
from watcher.common import clients
|
from watcher.common import clients
|
||||||
from watcher.common.loader import loadable
|
from watcher.common.loader import loadable
|
||||||
|
|
||||||
|
|
||||||
class BaseAction(loadable.Loadable, metaclass=abc.ABCMeta):
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class BaseAction(loadable.Loadable):
|
||||||
# NOTE(jed): by convention we decided
|
# NOTE(jed): by convention we decided
|
||||||
# that the attribute "resource_id" is the unique id of
|
# that the attribute "resource_id" is the unique id of
|
||||||
# the resource to which the Action applies to allow us to use it in the
|
# the resource to which the Action applies to allow us to use it in the
|
||||||
@@ -138,7 +140,7 @@ class BaseAction(loadable.Loadable, metaclass=abc.ABCMeta):
|
|||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|
||||||
def check_abort(self):
|
def check_abort(self):
|
||||||
if self.__class__.__name__ == 'Migrate':
|
if self.__class__.__name__ is 'Migrate':
|
||||||
if self.migration_type == self.LIVE_MIGRATION:
|
if self.migration_type == self.LIVE_MIGRATION:
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
|
|||||||
@@ -15,6 +15,8 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
#
|
#
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
|
|
||||||
from watcher.applier.loading import default
|
from watcher.applier.loading import default
|
||||||
|
|||||||
@@ -186,7 +186,7 @@ class Migrate(base.BaseAction):
|
|||||||
return self.migrate(destination=self.destination_node)
|
return self.migrate(destination=self.destination_node)
|
||||||
|
|
||||||
def revert(self):
|
def revert(self):
|
||||||
return self.migrate(destination=self.source_node)
|
LOG.info('Migrate action do not revert!')
|
||||||
|
|
||||||
def abort(self):
|
def abort(self):
|
||||||
nova = nova_helper.NovaHelper(osc=self.osc)
|
nova = nova_helper.NovaHelper(osc=self.osc)
|
||||||
|
|||||||
@@ -47,24 +47,24 @@ class Resize(base.BaseAction):
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def schema(self):
|
def schema(self):
|
||||||
return {
|
return {
|
||||||
'type': 'object',
|
'type': 'object',
|
||||||
'properties': {
|
'properties': {
|
||||||
'resource_id': {
|
'resource_id': {
|
||||||
'type': 'string',
|
'type': 'string',
|
||||||
'minlength': 1,
|
'minlength': 1,
|
||||||
'pattern': ('^([a-fA-F0-9]){8}-([a-fA-F0-9]){4}-'
|
'pattern': ('^([a-fA-F0-9]){8}-([a-fA-F0-9]){4}-'
|
||||||
'([a-fA-F0-9]){4}-([a-fA-F0-9]){4}-'
|
'([a-fA-F0-9]){4}-([a-fA-F0-9]){4}-'
|
||||||
'([a-fA-F0-9]){12}$')
|
'([a-fA-F0-9]){12}$')
|
||||||
|
},
|
||||||
|
'flavor': {
|
||||||
|
'type': 'string',
|
||||||
|
'minlength': 1,
|
||||||
|
},
|
||||||
},
|
},
|
||||||
'flavor': {
|
'required': ['resource_id', 'flavor'],
|
||||||
'type': 'string',
|
'additionalProperties': False,
|
||||||
'minlength': 1,
|
}
|
||||||
},
|
|
||||||
},
|
|
||||||
'required': ['resource_id', 'flavor'],
|
|
||||||
'additionalProperties': False,
|
|
||||||
}
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def instance_uuid(self):
|
def instance_uuid(self):
|
||||||
@@ -95,7 +95,7 @@ class Resize(base.BaseAction):
|
|||||||
return self.resize()
|
return self.resize()
|
||||||
|
|
||||||
def revert(self):
|
def revert(self):
|
||||||
LOG.warning("revert not supported")
|
return self.migrate(destination=self.source_node)
|
||||||
|
|
||||||
def pre_condition(self):
|
def pre_condition(self):
|
||||||
# TODO(jed): check if the instance exists / check if the instance is on
|
# TODO(jed): check if the instance exists / check if the instance is on
|
||||||
|
|||||||
@@ -26,9 +26,11 @@ See: :doc:`../architecture` for more details on this component.
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
import abc
|
import abc
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
class BaseApplier(object, metaclass=abc.ABCMeta):
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class BaseApplier(object):
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def execute(self, action_plan_uuid):
|
def execute(self, action_plan_uuid):
|
||||||
raise NotImplementedError()
|
raise NotImplementedError()
|
||||||
|
|||||||
@@ -11,6 +11,9 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
|
|
||||||
from watcher.common.loader import default
|
from watcher.common.loader import default
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -17,6 +17,7 @@
|
|||||||
#
|
#
|
||||||
|
|
||||||
import abc
|
import abc
|
||||||
|
import six
|
||||||
import time
|
import time
|
||||||
|
|
||||||
import eventlet
|
import eventlet
|
||||||
@@ -39,7 +40,8 @@ CANCEL_STATE = [objects.action_plan.State.CANCELLING,
|
|||||||
objects.action_plan.State.CANCELLED]
|
objects.action_plan.State.CANCELLED]
|
||||||
|
|
||||||
|
|
||||||
class BaseWorkFlowEngine(loadable.Loadable, metaclass=abc.ABCMeta):
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class BaseWorkFlowEngine(loadable.Loadable):
|
||||||
|
|
||||||
def __init__(self, config, context=None, applier_manager=None):
|
def __init__(self, config, context=None, applier_manager=None):
|
||||||
"""Constructor
|
"""Constructor
|
||||||
|
|||||||
@@ -25,11 +25,8 @@ from taskflow import task as flow_task
|
|||||||
|
|
||||||
from watcher.applier.workflow_engine import base
|
from watcher.applier.workflow_engine import base
|
||||||
from watcher.common import exception
|
from watcher.common import exception
|
||||||
from watcher import conf
|
|
||||||
from watcher import objects
|
from watcher import objects
|
||||||
|
|
||||||
CONF = conf.CONF
|
|
||||||
|
|
||||||
LOG = log.getLogger(__name__)
|
LOG = log.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@@ -115,7 +112,7 @@ class DefaultWorkFlowEngine(base.BaseWorkFlowEngine):
|
|||||||
|
|
||||||
return flow
|
return flow
|
||||||
|
|
||||||
except exception.ActionPlanCancelled:
|
except exception.ActionPlanCancelled as e:
|
||||||
raise
|
raise
|
||||||
|
|
||||||
except tf_exception.WrappedFailure as e:
|
except tf_exception.WrappedFailure as e:
|
||||||
@@ -130,11 +127,9 @@ class DefaultWorkFlowEngine(base.BaseWorkFlowEngine):
|
|||||||
|
|
||||||
class TaskFlowActionContainer(base.BaseTaskFlowActionContainer):
|
class TaskFlowActionContainer(base.BaseTaskFlowActionContainer):
|
||||||
def __init__(self, db_action, engine):
|
def __init__(self, db_action, engine):
|
||||||
self.name = "action_type:{0} uuid:{1}".format(db_action.action_type,
|
name = "action_type:{0} uuid:{1}".format(db_action.action_type,
|
||||||
db_action.uuid)
|
db_action.uuid)
|
||||||
super(TaskFlowActionContainer, self).__init__(self.name,
|
super(TaskFlowActionContainer, self).__init__(name, db_action, engine)
|
||||||
db_action,
|
|
||||||
engine)
|
|
||||||
|
|
||||||
def do_pre_execute(self):
|
def do_pre_execute(self):
|
||||||
db_action = self.engine.notify(self._db_action,
|
db_action = self.engine.notify(self._db_action,
|
||||||
@@ -163,12 +158,6 @@ class TaskFlowActionContainer(base.BaseTaskFlowActionContainer):
|
|||||||
self.action.post_condition()
|
self.action.post_condition()
|
||||||
|
|
||||||
def do_revert(self, *args, **kwargs):
|
def do_revert(self, *args, **kwargs):
|
||||||
# NOTE: Not rollback action plan
|
|
||||||
if not CONF.watcher_applier.rollback_when_actionplan_failed:
|
|
||||||
LOG.info("Failed actionplan rollback option is turned off, and "
|
|
||||||
"the following action will be skipped: %s", self.name)
|
|
||||||
return
|
|
||||||
|
|
||||||
LOG.warning("Revert action: %s", self.name)
|
LOG.warning("Revert action: %s", self.name)
|
||||||
try:
|
try:
|
||||||
# TODO(jed): do we need to update the states in case of failure?
|
# TODO(jed): do we need to update the states in case of failure?
|
||||||
|
|||||||
@@ -18,10 +18,3 @@
|
|||||||
import eventlet
|
import eventlet
|
||||||
|
|
||||||
eventlet.monkey_patch()
|
eventlet.monkey_patch()
|
||||||
|
|
||||||
# Monkey patch the original current_thread to use the up-to-date _active
|
|
||||||
# global variable. See https://bugs.launchpad.net/bugs/1863021 and
|
|
||||||
# https://github.com/eventlet/eventlet/issues/592
|
|
||||||
import __original_module_threading as orig_threading # noqa
|
|
||||||
import threading # noqa
|
|
||||||
orig_threading.current_thread.__globals__['_active'] = threading._active
|
|
||||||
|
|||||||
@@ -14,8 +14,8 @@
|
|||||||
|
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from oslo_upgradecheck import common_checks
|
|
||||||
from oslo_upgradecheck import upgradecheck
|
from oslo_upgradecheck import upgradecheck
|
||||||
|
import six
|
||||||
|
|
||||||
from watcher._i18n import _
|
from watcher._i18n import _
|
||||||
from watcher.common import clients
|
from watcher.common import clients
|
||||||
@@ -38,16 +38,12 @@ class Checks(upgradecheck.UpgradeCommands):
|
|||||||
clients.check_min_nova_api_version(CONF.nova_client.api_version)
|
clients.check_min_nova_api_version(CONF.nova_client.api_version)
|
||||||
except ValueError as e:
|
except ValueError as e:
|
||||||
return upgradecheck.Result(
|
return upgradecheck.Result(
|
||||||
upgradecheck.Code.FAILURE, str(e))
|
upgradecheck.Code.FAILURE, six.text_type(e))
|
||||||
return upgradecheck.Result(upgradecheck.Code.SUCCESS)
|
return upgradecheck.Result(upgradecheck.Code.SUCCESS)
|
||||||
|
|
||||||
_upgrade_checks = (
|
_upgrade_checks = (
|
||||||
# Added in Train.
|
# Added in Train.
|
||||||
(_('Minimum Nova API Version'), _minimum_nova_api_version),
|
(_('Minimum Nova API Version'), _minimum_nova_api_version),
|
||||||
# Added in Wallaby.
|
|
||||||
(_("Policy File JSON to YAML Migration"),
|
|
||||||
(common_checks.check_policy_json, {'conf': CONF})),
|
|
||||||
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -17,7 +17,7 @@ import time
|
|||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
|
|
||||||
from cinderclient import exceptions as cinder_exception
|
from cinderclient import exceptions as cinder_exception
|
||||||
from cinderclient.v3.volumes import Volume
|
from cinderclient.v2.volumes import Volume
|
||||||
from watcher._i18n import _
|
from watcher._i18n import _
|
||||||
from watcher.common import clients
|
from watcher.common import clients
|
||||||
from watcher.common import exception
|
from watcher.common import exception
|
||||||
|
|||||||
@@ -13,6 +13,7 @@
|
|||||||
from oslo_context import context
|
from oslo_context import context
|
||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
from oslo_utils import timeutils
|
from oslo_utils import timeutils
|
||||||
|
import six
|
||||||
|
|
||||||
LOG = log.getLogger(__name__)
|
LOG = log.getLogger(__name__)
|
||||||
|
|
||||||
@@ -68,7 +69,7 @@ class RequestContext(context.RequestContext):
|
|||||||
self.project_id = project_id
|
self.project_id = project_id
|
||||||
if not timestamp:
|
if not timestamp:
|
||||||
timestamp = timeutils.utcnow()
|
timestamp = timeutils.utcnow()
|
||||||
if isinstance(timestamp, str):
|
if isinstance(timestamp, six.string_types):
|
||||||
timestamp = timeutils.parse_isotime(timestamp)
|
timestamp = timeutils.parse_isotime(timestamp)
|
||||||
self.timestamp = timestamp
|
self.timestamp = timestamp
|
||||||
self.user_name = user_name
|
self.user_name = user_name
|
||||||
|
|||||||
@@ -25,10 +25,10 @@ SHOULD include dedicated exception logging.
|
|||||||
import functools
|
import functools
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from http import HTTPStatus
|
|
||||||
from keystoneclient import exceptions as keystone_exceptions
|
from keystoneclient import exceptions as keystone_exceptions
|
||||||
from oslo_config import cfg
|
from oslo_config import cfg
|
||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
|
import six
|
||||||
|
|
||||||
from watcher._i18n import _
|
from watcher._i18n import _
|
||||||
|
|
||||||
@@ -63,7 +63,7 @@ class WatcherException(Exception):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
msg_fmt = _("An unknown exception occurred")
|
msg_fmt = _("An unknown exception occurred")
|
||||||
code = HTTPStatus.INTERNAL_SERVER_ERROR
|
code = 500
|
||||||
headers = {}
|
headers = {}
|
||||||
safe = False
|
safe = False
|
||||||
|
|
||||||
@@ -97,16 +97,19 @@ class WatcherException(Exception):
|
|||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
"""Encode to utf-8 then wsme api can consume it as well"""
|
"""Encode to utf-8 then wsme api can consume it as well"""
|
||||||
return self.args[0]
|
if not six.PY3:
|
||||||
|
return six.text_type(self.args[0]).encode('utf-8')
|
||||||
|
else:
|
||||||
|
return self.args[0]
|
||||||
|
|
||||||
def __unicode__(self):
|
def __unicode__(self):
|
||||||
return str(self.args[0])
|
return six.text_type(self.args[0])
|
||||||
|
|
||||||
def format_message(self):
|
def format_message(self):
|
||||||
if self.__class__.__name__.endswith('_Remote'):
|
if self.__class__.__name__.endswith('_Remote'):
|
||||||
return self.args[0]
|
return self.args[0]
|
||||||
else:
|
else:
|
||||||
return str(self)
|
return six.text_type(self)
|
||||||
|
|
||||||
|
|
||||||
class UnsupportedError(WatcherException):
|
class UnsupportedError(WatcherException):
|
||||||
@@ -115,12 +118,12 @@ class UnsupportedError(WatcherException):
|
|||||||
|
|
||||||
class NotAuthorized(WatcherException):
|
class NotAuthorized(WatcherException):
|
||||||
msg_fmt = _("Not authorized")
|
msg_fmt = _("Not authorized")
|
||||||
code = HTTPStatus.FORBIDDEN
|
code = 403
|
||||||
|
|
||||||
|
|
||||||
class NotAcceptable(WatcherException):
|
class NotAcceptable(WatcherException):
|
||||||
msg_fmt = _("Request not acceptable.")
|
msg_fmt = _("Request not acceptable.")
|
||||||
code = HTTPStatus.NOT_ACCEPTABLE
|
code = 406
|
||||||
|
|
||||||
|
|
||||||
class PolicyNotAuthorized(NotAuthorized):
|
class PolicyNotAuthorized(NotAuthorized):
|
||||||
@@ -133,7 +136,7 @@ class OperationNotPermitted(NotAuthorized):
|
|||||||
|
|
||||||
class Invalid(WatcherException, ValueError):
|
class Invalid(WatcherException, ValueError):
|
||||||
msg_fmt = _("Unacceptable parameters")
|
msg_fmt = _("Unacceptable parameters")
|
||||||
code = HTTPStatus.BAD_REQUEST
|
code = 400
|
||||||
|
|
||||||
|
|
||||||
class ObjectNotFound(WatcherException):
|
class ObjectNotFound(WatcherException):
|
||||||
@@ -142,12 +145,12 @@ class ObjectNotFound(WatcherException):
|
|||||||
|
|
||||||
class Conflict(WatcherException):
|
class Conflict(WatcherException):
|
||||||
msg_fmt = _('Conflict')
|
msg_fmt = _('Conflict')
|
||||||
code = HTTPStatus.CONFLICT
|
code = 409
|
||||||
|
|
||||||
|
|
||||||
class ResourceNotFound(ObjectNotFound):
|
class ResourceNotFound(ObjectNotFound):
|
||||||
msg_fmt = _("The %(name)s resource %(id)s could not be found")
|
msg_fmt = _("The %(name)s resource %(id)s could not be found")
|
||||||
code = HTTPStatus.NOT_FOUND
|
code = 404
|
||||||
|
|
||||||
|
|
||||||
class InvalidParameter(Invalid):
|
class InvalidParameter(Invalid):
|
||||||
@@ -240,14 +243,6 @@ class AuditTypeNotFound(Invalid):
|
|||||||
msg_fmt = _("Audit type %(audit_type)s could not be found")
|
msg_fmt = _("Audit type %(audit_type)s could not be found")
|
||||||
|
|
||||||
|
|
||||||
class AuditTypeNotAllowed(Invalid):
|
|
||||||
msg_fmt = _("Audit type %(audit_type)s is disallowed.")
|
|
||||||
|
|
||||||
|
|
||||||
class AuditStateNotAllowed(Invalid):
|
|
||||||
msg_fmt = _("Audit state %(state)s is disallowed.")
|
|
||||||
|
|
||||||
|
|
||||||
class AuditParameterNotAllowed(Invalid):
|
class AuditParameterNotAllowed(Invalid):
|
||||||
msg_fmt = _("Audit parameter %(parameter)s are not allowed")
|
msg_fmt = _("Audit parameter %(parameter)s are not allowed")
|
||||||
|
|
||||||
|
|||||||
@@ -14,10 +14,14 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
import abc
|
import abc
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
class BaseLoader(object, metaclass=abc.ABCMeta):
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class BaseLoader(object):
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
def list_available(self):
|
def list_available(self):
|
||||||
|
|||||||
@@ -14,6 +14,8 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import unicode_literals
|
||||||
|
|
||||||
from oslo_config import cfg
|
from oslo_config import cfg
|
||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
from stevedore import driver as drivermanager
|
from stevedore import driver as drivermanager
|
||||||
|
|||||||
@@ -16,10 +16,13 @@
|
|||||||
|
|
||||||
import abc
|
import abc
|
||||||
|
|
||||||
|
import six
|
||||||
|
|
||||||
from watcher.common import service
|
from watcher.common import service
|
||||||
|
|
||||||
|
|
||||||
class Loadable(object, metaclass=abc.ABCMeta):
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class Loadable(object):
|
||||||
"""Generic interface for dynamically loading a driver/entry point.
|
"""Generic interface for dynamically loading a driver/entry point.
|
||||||
|
|
||||||
This defines the contract in order to let the loader manager inject
|
This defines the contract in order to let the loader manager inject
|
||||||
@@ -45,7 +48,8 @@ LoadableSingletonMeta = type(
|
|||||||
"LoadableSingletonMeta", (abc.ABCMeta, service.Singleton), {})
|
"LoadableSingletonMeta", (abc.ABCMeta, service.Singleton), {})
|
||||||
|
|
||||||
|
|
||||||
class LoadableSingleton(object, metaclass=LoadableSingletonMeta):
|
@six.add_metaclass(LoadableSingletonMeta)
|
||||||
|
class LoadableSingleton(object):
|
||||||
"""Generic interface for dynamically loading a driver as a singleton.
|
"""Generic interface for dynamically loading a driver as a singleton.
|
||||||
|
|
||||||
This defines the contract in order to let the loader manager inject
|
This defines the contract in order to let the loader manager inject
|
||||||
|
|||||||
@@ -11,7 +11,6 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
from http import HTTPStatus
|
|
||||||
from oslo_config import cfg
|
from oslo_config import cfg
|
||||||
from oslo_log import log as logging
|
from oslo_log import log as logging
|
||||||
|
|
||||||
@@ -54,7 +53,7 @@ class PlacementHelper(object):
|
|||||||
if rp_name:
|
if rp_name:
|
||||||
url += '?name=%s' % rp_name
|
url += '?name=%s' % rp_name
|
||||||
resp = self.get(url)
|
resp = self.get(url)
|
||||||
if resp.status_code == HTTPStatus.OK:
|
if resp.status_code == 200:
|
||||||
json_resp = resp.json()
|
json_resp = resp.json()
|
||||||
return json_resp['resource_providers']
|
return json_resp['resource_providers']
|
||||||
|
|
||||||
@@ -78,7 +77,7 @@ class PlacementHelper(object):
|
|||||||
"""
|
"""
|
||||||
url = '/resource_providers/%s/inventories' % rp_uuid
|
url = '/resource_providers/%s/inventories' % rp_uuid
|
||||||
resp = self.get(url)
|
resp = self.get(url)
|
||||||
if resp.status_code == HTTPStatus.OK:
|
if resp.status_code == 200:
|
||||||
json = resp.json()
|
json = resp.json()
|
||||||
return json['inventories']
|
return json['inventories']
|
||||||
msg = ("Failed to get resource provider %(rp_uuid)s inventories. "
|
msg = ("Failed to get resource provider %(rp_uuid)s inventories. "
|
||||||
@@ -98,7 +97,7 @@ class PlacementHelper(object):
|
|||||||
"""
|
"""
|
||||||
resp = self.get("/resource_providers/%s/traits" % rp_uuid)
|
resp = self.get("/resource_providers/%s/traits" % rp_uuid)
|
||||||
|
|
||||||
if resp.status_code == HTTPStatus.OK:
|
if resp.status_code == 200:
|
||||||
json = resp.json()
|
json = resp.json()
|
||||||
return json['traits']
|
return json['traits']
|
||||||
msg = ("Failed to get resource provider %(rp_uuid)s traits. "
|
msg = ("Failed to get resource provider %(rp_uuid)s traits. "
|
||||||
@@ -119,7 +118,7 @@ class PlacementHelper(object):
|
|||||||
"""
|
"""
|
||||||
url = '/allocations/%s' % consumer_uuid
|
url = '/allocations/%s' % consumer_uuid
|
||||||
resp = self.get(url)
|
resp = self.get(url)
|
||||||
if resp.status_code == HTTPStatus.OK:
|
if resp.status_code == 200:
|
||||||
json = resp.json()
|
json = resp.json()
|
||||||
return json['allocations']
|
return json['allocations']
|
||||||
msg = ("Failed to get allocations for consumer %(c_uuid). "
|
msg = ("Failed to get allocations for consumer %(c_uuid). "
|
||||||
@@ -140,7 +139,7 @@ class PlacementHelper(object):
|
|||||||
"""
|
"""
|
||||||
url = '/resource_providers/%s/usages' % rp_uuid
|
url = '/resource_providers/%s/usages' % rp_uuid
|
||||||
resp = self.get(url)
|
resp = self.get(url)
|
||||||
if resp.status_code == HTTPStatus.OK:
|
if resp.status_code == 200:
|
||||||
json = resp.json()
|
json = resp.json()
|
||||||
return json['usages']
|
return json['usages']
|
||||||
msg = ("Failed to get resource provider %(rp_uuid)s usages. "
|
msg = ("Failed to get resource provider %(rp_uuid)s usages. "
|
||||||
@@ -165,7 +164,7 @@ class PlacementHelper(object):
|
|||||||
"""
|
"""
|
||||||
url = "/allocation_candidates?%s" % resources
|
url = "/allocation_candidates?%s" % resources
|
||||||
resp = self.get(url)
|
resp = self.get(url)
|
||||||
if resp.status_code == HTTPStatus.OK:
|
if resp.status_code == 200:
|
||||||
data = resp.json()
|
data = resp.json()
|
||||||
return data['provider_summaries']
|
return data['provider_summaries']
|
||||||
|
|
||||||
|
|||||||
@@ -18,7 +18,6 @@
|
|||||||
import sys
|
import sys
|
||||||
|
|
||||||
from oslo_config import cfg
|
from oslo_config import cfg
|
||||||
from oslo_policy import opts
|
|
||||||
from oslo_policy import policy
|
from oslo_policy import policy
|
||||||
|
|
||||||
from watcher.common import exception
|
from watcher.common import exception
|
||||||
@@ -27,12 +26,6 @@ from watcher.common import policies
|
|||||||
_ENFORCER = None
|
_ENFORCER = None
|
||||||
CONF = cfg.CONF
|
CONF = cfg.CONF
|
||||||
|
|
||||||
# TODO(gmann): Remove setting the default value of config policy_file
|
|
||||||
# once oslo_policy change the default value to 'policy.yaml'.
|
|
||||||
# https://github.com/openstack/oslo.policy/blob/a626ad12fe5a3abd49d70e3e5b95589d279ab578/oslo_policy/opts.py#L49
|
|
||||||
DEFAULT_POLICY_FILE = 'policy.yaml'
|
|
||||||
opts.set_defaults(CONF, DEFAULT_POLICY_FILE)
|
|
||||||
|
|
||||||
|
|
||||||
# we can get a policy enforcer by this init.
|
# we can get a policy enforcer by this init.
|
||||||
# oslo policy support change policy rule dynamically.
|
# oslo policy support change policy rule dynamically.
|
||||||
|
|||||||
@@ -121,40 +121,22 @@ class RequestContextSerializer(messaging.Serializer):
|
|||||||
def get_client(target, version_cap=None, serializer=None):
|
def get_client(target, version_cap=None, serializer=None):
|
||||||
assert TRANSPORT is not None
|
assert TRANSPORT is not None
|
||||||
serializer = RequestContextSerializer(serializer)
|
serializer = RequestContextSerializer(serializer)
|
||||||
return messaging.RPCClient(
|
return messaging.RPCClient(TRANSPORT,
|
||||||
TRANSPORT,
|
target,
|
||||||
target,
|
version_cap=version_cap,
|
||||||
version_cap=version_cap,
|
serializer=serializer)
|
||||||
serializer=serializer
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def get_server(target, endpoints, serializer=None):
|
def get_server(target, endpoints, serializer=None):
|
||||||
assert TRANSPORT is not None
|
assert TRANSPORT is not None
|
||||||
access_policy = dispatcher.DefaultRPCAccessPolicy
|
access_policy = dispatcher.DefaultRPCAccessPolicy
|
||||||
serializer = RequestContextSerializer(serializer)
|
serializer = RequestContextSerializer(serializer)
|
||||||
return messaging.get_rpc_server(
|
return messaging.get_rpc_server(TRANSPORT,
|
||||||
TRANSPORT,
|
target,
|
||||||
target,
|
endpoints,
|
||||||
endpoints,
|
executor='eventlet',
|
||||||
executor='eventlet',
|
serializer=serializer,
|
||||||
serializer=serializer,
|
access_policy=access_policy)
|
||||||
access_policy=access_policy
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def get_notification_listener(targets, endpoints, serializer=None, pool=None):
|
|
||||||
assert NOTIFICATION_TRANSPORT is not None
|
|
||||||
serializer = RequestContextSerializer(serializer)
|
|
||||||
return messaging.get_notification_listener(
|
|
||||||
NOTIFICATION_TRANSPORT,
|
|
||||||
targets,
|
|
||||||
endpoints,
|
|
||||||
allow_requeue=False,
|
|
||||||
executor='eventlet',
|
|
||||||
pool=pool,
|
|
||||||
serializer=serializer
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def get_notifier(publisher_id):
|
def get_notifier(publisher_id):
|
||||||
|
|||||||
@@ -37,7 +37,6 @@ class GreenThreadPoolExecutor(BasePoolExecutor):
|
|||||||
pool = futurist.GreenThreadPoolExecutor(int(max_workers))
|
pool = futurist.GreenThreadPoolExecutor(int(max_workers))
|
||||||
super(GreenThreadPoolExecutor, self).__init__(pool)
|
super(GreenThreadPoolExecutor, self).__init__(pool)
|
||||||
|
|
||||||
|
|
||||||
executors = {
|
executors = {
|
||||||
'default': GreenThreadPoolExecutor(),
|
'default': GreenThreadPoolExecutor(),
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -21,12 +21,14 @@ from oslo_concurrency import processutils
|
|||||||
from oslo_config import cfg
|
from oslo_config import cfg
|
||||||
from oslo_log import _options
|
from oslo_log import _options
|
||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
import oslo_messaging as messaging
|
import oslo_messaging as om
|
||||||
from oslo_reports import guru_meditation_report as gmr
|
from oslo_reports import guru_meditation_report as gmr
|
||||||
from oslo_reports import opts as gmr_opts
|
from oslo_reports import opts as gmr_opts
|
||||||
from oslo_service import service
|
from oslo_service import service
|
||||||
from oslo_service import wsgi
|
from oslo_service import wsgi
|
||||||
|
|
||||||
|
from oslo_messaging.rpc import dispatcher
|
||||||
|
|
||||||
from watcher._i18n import _
|
from watcher._i18n import _
|
||||||
from watcher.api import app
|
from watcher.api import app
|
||||||
from watcher.common import config
|
from watcher.common import config
|
||||||
@@ -181,6 +183,11 @@ class Service(service.ServiceBase):
|
|||||||
]
|
]
|
||||||
self.notification_endpoints = self.manager.notification_endpoints
|
self.notification_endpoints = self.manager.notification_endpoints
|
||||||
|
|
||||||
|
self.serializer = rpc.RequestContextSerializer(
|
||||||
|
base.WatcherObjectSerializer())
|
||||||
|
|
||||||
|
self._transport = None
|
||||||
|
self._notification_transport = None
|
||||||
self._conductor_client = None
|
self._conductor_client = None
|
||||||
|
|
||||||
self.conductor_topic_handler = None
|
self.conductor_topic_handler = None
|
||||||
@@ -194,17 +201,27 @@ class Service(service.ServiceBase):
|
|||||||
self.notification_topics, self.notification_endpoints
|
self.notification_topics, self.notification_endpoints
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def transport(self):
|
||||||
|
if self._transport is None:
|
||||||
|
self._transport = om.get_rpc_transport(CONF)
|
||||||
|
return self._transport
|
||||||
|
|
||||||
|
@property
|
||||||
|
def notification_transport(self):
|
||||||
|
if self._notification_transport is None:
|
||||||
|
self._notification_transport = om.get_notification_transport(CONF)
|
||||||
|
return self._notification_transport
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def conductor_client(self):
|
def conductor_client(self):
|
||||||
if self._conductor_client is None:
|
if self._conductor_client is None:
|
||||||
target = messaging.Target(
|
target = om.Target(
|
||||||
topic=self.conductor_topic,
|
topic=self.conductor_topic,
|
||||||
version=self.API_VERSION,
|
version=self.API_VERSION,
|
||||||
)
|
)
|
||||||
self._conductor_client = rpc.get_client(
|
self._conductor_client = om.RPCClient(
|
||||||
target,
|
self.transport, target, serializer=self.serializer)
|
||||||
serializer=base.WatcherObjectSerializer()
|
|
||||||
)
|
|
||||||
return self._conductor_client
|
return self._conductor_client
|
||||||
|
|
||||||
@conductor_client.setter
|
@conductor_client.setter
|
||||||
@@ -212,18 +229,21 @@ class Service(service.ServiceBase):
|
|||||||
self.conductor_client = c
|
self.conductor_client = c
|
||||||
|
|
||||||
def build_topic_handler(self, topic_name, endpoints=()):
|
def build_topic_handler(self, topic_name, endpoints=()):
|
||||||
target = messaging.Target(
|
access_policy = dispatcher.DefaultRPCAccessPolicy
|
||||||
|
serializer = rpc.RequestContextSerializer(rpc.JsonPayloadSerializer())
|
||||||
|
target = om.Target(
|
||||||
topic=topic_name,
|
topic=topic_name,
|
||||||
# For compatibility, we can override it with 'host' opt
|
# For compatibility, we can override it with 'host' opt
|
||||||
server=CONF.host or socket.gethostname(),
|
server=CONF.host or socket.gethostname(),
|
||||||
version=self.api_version,
|
version=self.api_version,
|
||||||
)
|
)
|
||||||
return rpc.get_server(
|
return om.get_rpc_server(
|
||||||
target, endpoints,
|
self.transport, target, endpoints,
|
||||||
serializer=rpc.JsonPayloadSerializer()
|
executor='eventlet', serializer=serializer,
|
||||||
)
|
access_policy=access_policy)
|
||||||
|
|
||||||
def build_notification_handler(self, topic_names, endpoints=()):
|
def build_notification_handler(self, topic_names, endpoints=()):
|
||||||
|
serializer = rpc.RequestContextSerializer(rpc.JsonPayloadSerializer())
|
||||||
targets = []
|
targets = []
|
||||||
for topic in topic_names:
|
for topic in topic_names:
|
||||||
kwargs = {}
|
kwargs = {}
|
||||||
@@ -231,13 +251,11 @@ class Service(service.ServiceBase):
|
|||||||
exchange, topic = topic.split('.')
|
exchange, topic = topic.split('.')
|
||||||
kwargs['exchange'] = exchange
|
kwargs['exchange'] = exchange
|
||||||
kwargs['topic'] = topic
|
kwargs['topic'] = topic
|
||||||
targets.append(messaging.Target(**kwargs))
|
targets.append(om.Target(**kwargs))
|
||||||
|
return om.get_notification_listener(
|
||||||
return rpc.get_notification_listener(
|
self.notification_transport, targets, endpoints,
|
||||||
targets, endpoints,
|
executor='eventlet', serializer=serializer,
|
||||||
serializer=rpc.JsonPayloadSerializer(),
|
allow_requeue=False, pool=CONF.host)
|
||||||
pool=CONF.host
|
|
||||||
)
|
|
||||||
|
|
||||||
def start(self):
|
def start(self):
|
||||||
LOG.debug("Connecting to '%s'", CONF.transport_url)
|
LOG.debug("Connecting to '%s'", CONF.transport_url)
|
||||||
|
|||||||
@@ -15,9 +15,11 @@
|
|||||||
# under the License.
|
# under the License.
|
||||||
|
|
||||||
import abc
|
import abc
|
||||||
|
import six
|
||||||
|
|
||||||
|
|
||||||
class ServiceManager(object, metaclass=abc.ABCMeta):
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class ServiceManager(object):
|
||||||
|
|
||||||
@abc.abstractproperty
|
@abc.abstractproperty
|
||||||
def service_name(self):
|
def service_name(self):
|
||||||
|
|||||||
@@ -28,6 +28,7 @@ from oslo_config import cfg
|
|||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
from oslo_utils import strutils
|
from oslo_utils import strutils
|
||||||
from oslo_utils import uuidutils
|
from oslo_utils import uuidutils
|
||||||
|
import six
|
||||||
|
|
||||||
from watcher.common import exception
|
from watcher.common import exception
|
||||||
|
|
||||||
@@ -81,7 +82,7 @@ def safe_rstrip(value, chars=None):
|
|||||||
:return: Stripped value.
|
:return: Stripped value.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
if not isinstance(value, str):
|
if not isinstance(value, six.string_types):
|
||||||
LOG.warning(
|
LOG.warning(
|
||||||
"Failed to remove trailing character. Returning original object."
|
"Failed to remove trailing character. Returning original object."
|
||||||
"Supplied object is not a string: %s,", value)
|
"Supplied object is not a string: %s,", value)
|
||||||
@@ -103,7 +104,7 @@ def is_hostname_safe(hostname):
|
|||||||
|
|
||||||
"""
|
"""
|
||||||
m = r'^[a-z0-9]([a-z0-9\-]{0,61}[a-z0-9])?$'
|
m = r'^[a-z0-9]([a-z0-9\-]{0,61}[a-z0-9])?$'
|
||||||
return (isinstance(hostname, str) and
|
return (isinstance(hostname, six.string_types) and
|
||||||
(re.match(m, hostname) is not None))
|
(re.match(m, hostname) is not None))
|
||||||
|
|
||||||
|
|
||||||
@@ -152,7 +153,6 @@ def extend_with_strict_schema(validator_class):
|
|||||||
|
|
||||||
return validators.extend(validator_class, {"properties": strict_schema})
|
return validators.extend(validator_class, {"properties": strict_schema})
|
||||||
|
|
||||||
|
|
||||||
StrictDefaultValidatingDraft4Validator = extend_with_default(
|
StrictDefaultValidatingDraft4Validator = extend_with_default(
|
||||||
extend_with_strict_schema(validators.Draft4Validator))
|
extend_with_strict_schema(validators.Draft4Validator))
|
||||||
|
|
||||||
|
|||||||
@@ -55,11 +55,6 @@ API_SERVICE_OPTS = [
|
|||||||
"the service, this option should be False; note, you "
|
"the service, this option should be False; note, you "
|
||||||
"will want to change public API endpoint to represent "
|
"will want to change public API endpoint to represent "
|
||||||
"SSL termination URL with 'public_endpoint' option."),
|
"SSL termination URL with 'public_endpoint' option."),
|
||||||
|
|
||||||
cfg.BoolOpt('enable_webhooks_auth',
|
|
||||||
default=True,
|
|
||||||
help='This option enables or disables webhook request '
|
|
||||||
'authentication via keystone. Default value is True.'),
|
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -43,20 +43,11 @@ APPLIER_MANAGER_OPTS = [
|
|||||||
help='Select the engine to use to execute the workflow'),
|
help='Select the engine to use to execute the workflow'),
|
||||||
]
|
]
|
||||||
|
|
||||||
APPLIER_OPTS = [
|
|
||||||
cfg.BoolOpt('rollback_when_actionplan_failed',
|
|
||||||
default=False,
|
|
||||||
help='If set True, the failed actionplan will rollback '
|
|
||||||
'when executing. Defaule value is False.'),
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def register_opts(conf):
|
def register_opts(conf):
|
||||||
conf.register_group(watcher_applier)
|
conf.register_group(watcher_applier)
|
||||||
conf.register_opts(APPLIER_MANAGER_OPTS, group=watcher_applier)
|
conf.register_opts(APPLIER_MANAGER_OPTS, group=watcher_applier)
|
||||||
conf.register_opts(APPLIER_OPTS, group=watcher_applier)
|
|
||||||
|
|
||||||
|
|
||||||
def list_opts():
|
def list_opts():
|
||||||
return [(watcher_applier, APPLIER_MANAGER_OPTS),
|
return [(watcher_applier, APPLIER_MANAGER_OPTS)]
|
||||||
(watcher_applier, APPLIER_OPTS)]
|
|
||||||
|
|||||||
@@ -40,18 +40,11 @@ WATCHER_DECISION_ENGINE_OPTS = [
|
|||||||
default='watcher.decision.api',
|
default='watcher.decision.api',
|
||||||
help='The identifier used by the Watcher '
|
help='The identifier used by the Watcher '
|
||||||
'module on the message broker'),
|
'module on the message broker'),
|
||||||
cfg.IntOpt('max_audit_workers',
|
cfg.IntOpt('max_workers',
|
||||||
default=2,
|
default=2,
|
||||||
required=True,
|
required=True,
|
||||||
help='The maximum number of threads that can be used to '
|
help='The maximum number of threads that can be used to '
|
||||||
'execute audits in parallel.'),
|
'execute strategies'),
|
||||||
cfg.IntOpt('max_general_workers',
|
|
||||||
default=4,
|
|
||||||
required=True,
|
|
||||||
help='The maximum number of threads that can be used to '
|
|
||||||
'execute general tasks in parallel. The number of general '
|
|
||||||
'workers will not increase depending on the number of '
|
|
||||||
'audit workers!'),
|
|
||||||
cfg.IntOpt('action_plan_expiry',
|
cfg.IntOpt('action_plan_expiry',
|
||||||
default=24,
|
default=24,
|
||||||
mutable=True,
|
mutable=True,
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ Base classes for storage engines
|
|||||||
import abc
|
import abc
|
||||||
from oslo_config import cfg
|
from oslo_config import cfg
|
||||||
from oslo_db import api as db_api
|
from oslo_db import api as db_api
|
||||||
|
import six
|
||||||
|
|
||||||
_BACKEND_MAPPING = {'sqlalchemy': 'watcher.db.sqlalchemy.api'}
|
_BACKEND_MAPPING = {'sqlalchemy': 'watcher.db.sqlalchemy.api'}
|
||||||
IMPL = db_api.DBAPI.from_config(cfg.CONF, backend_mapping=_BACKEND_MAPPING,
|
IMPL = db_api.DBAPI.from_config(cfg.CONF, backend_mapping=_BACKEND_MAPPING,
|
||||||
@@ -29,7 +30,8 @@ def get_instance():
|
|||||||
return IMPL
|
return IMPL
|
||||||
|
|
||||||
|
|
||||||
class BaseConnection(object, metaclass=abc.ABCMeta):
|
@six.add_metaclass(abc.ABCMeta)
|
||||||
|
class BaseConnection(object):
|
||||||
"""Base class for storage system connections."""
|
"""Base class for storage system connections."""
|
||||||
|
|
||||||
@abc.abstractmethod
|
@abc.abstractmethod
|
||||||
|
|||||||
@@ -15,6 +15,8 @@
|
|||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
#
|
#
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
import collections
|
import collections
|
||||||
import datetime
|
import datetime
|
||||||
import itertools
|
import itertools
|
||||||
@@ -23,6 +25,7 @@ import sys
|
|||||||
from oslo_log import log
|
from oslo_log import log
|
||||||
from oslo_utils import strutils
|
from oslo_utils import strutils
|
||||||
import prettytable as ptable
|
import prettytable as ptable
|
||||||
|
from six.moves import input
|
||||||
|
|
||||||
from watcher._i18n import _
|
from watcher._i18n import _
|
||||||
from watcher._i18n import lazy_translation_enabled
|
from watcher._i18n import lazy_translation_enabled
|
||||||
|
|||||||
@@ -278,7 +278,7 @@ class Connection(api.BaseConnection):
|
|||||||
query = model_query(model, session=session)
|
query = model_query(model, session=session)
|
||||||
query = add_identity_filter(query, id_)
|
query = add_identity_filter(query, id_)
|
||||||
try:
|
try:
|
||||||
ref = query.with_for_update().one()
|
ref = query.with_lockmode('update').one()
|
||||||
except exc.NoResultFound:
|
except exc.NoResultFound:
|
||||||
raise exception.ResourceNotFound(name=model.__name__, id=id_)
|
raise exception.ResourceNotFound(name=model.__name__, id=id_)
|
||||||
|
|
||||||
@@ -815,7 +815,7 @@ class Connection(api.BaseConnection):
|
|||||||
query = model_query(models.Action, session=session)
|
query = model_query(models.Action, session=session)
|
||||||
query = add_identity_filter(query, action_id)
|
query = add_identity_filter(query, action_id)
|
||||||
try:
|
try:
|
||||||
ref = query.with_for_update().one()
|
ref = query.with_lockmode('update').one()
|
||||||
except exc.NoResultFound:
|
except exc.NoResultFound:
|
||||||
raise exception.ActionNotFound(action=action_id)
|
raise exception.ActionNotFound(action=action_id)
|
||||||
|
|
||||||
@@ -900,7 +900,7 @@ class Connection(api.BaseConnection):
|
|||||||
query = model_query(models.ActionPlan, session=session)
|
query = model_query(models.ActionPlan, session=session)
|
||||||
query = add_identity_filter(query, action_plan_id)
|
query = add_identity_filter(query, action_plan_id)
|
||||||
try:
|
try:
|
||||||
ref = query.with_for_update().one()
|
ref = query.with_lockmode('update').one()
|
||||||
except exc.NoResultFound:
|
except exc.NoResultFound:
|
||||||
raise exception.ActionPlanNotFound(action_plan=action_plan_id)
|
raise exception.ActionPlanNotFound(action_plan=action_plan_id)
|
||||||
|
|
||||||
@@ -1125,8 +1125,8 @@ class Connection(api.BaseConnection):
|
|||||||
|
|
||||||
def get_action_description_by_id(self, context,
|
def get_action_description_by_id(self, context,
|
||||||
action_id, eager=False):
|
action_id, eager=False):
|
||||||
return self._get_action_description(
|
return self._get_action_description(
|
||||||
context, fieldname="id", value=action_id, eager=eager)
|
context, fieldname="id", value=action_id, eager=eager)
|
||||||
|
|
||||||
def get_action_description_by_type(self, context,
|
def get_action_description_by_type(self, context,
|
||||||
action_type, eager=False):
|
action_type, eager=False):
|
||||||
|
|||||||
@@ -18,6 +18,7 @@ SQLAlchemy models for watcher service
|
|||||||
|
|
||||||
from oslo_db.sqlalchemy import models
|
from oslo_db.sqlalchemy import models
|
||||||
from oslo_serialization import jsonutils
|
from oslo_serialization import jsonutils
|
||||||
|
import six.moves.urllib.parse as urlparse
|
||||||
from sqlalchemy import Boolean
|
from sqlalchemy import Boolean
|
||||||
from sqlalchemy import Column
|
from sqlalchemy import Column
|
||||||
from sqlalchemy import DateTime
|
from sqlalchemy import DateTime
|
||||||
@@ -32,7 +33,7 @@ from sqlalchemy import String
|
|||||||
from sqlalchemy import Text
|
from sqlalchemy import Text
|
||||||
from sqlalchemy.types import TypeDecorator, TEXT
|
from sqlalchemy.types import TypeDecorator, TEXT
|
||||||
from sqlalchemy import UniqueConstraint
|
from sqlalchemy import UniqueConstraint
|
||||||
import urllib.parse as urlparse
|
|
||||||
from watcher import conf
|
from watcher import conf
|
||||||
|
|
||||||
CONF = conf.CONF
|
CONF = conf.CONF
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user