Compare commits

..

3 Commits

Author SHA1 Message Date
OpenStack Proposal Bot
870e6d75e0 Imported Translations from Zanata
For more information about this automatic import see:
https://docs.openstack.org/i18n/latest/reviewing-translation-import.html

Change-Id: Ie708859051162cc7a68cfdf289398f6df1abe6c8
2020-04-26 09:05:18 +00:00
OpenStack Release Bot
3069f83731 Update TOX_CONSTRAINTS_FILE for stable/ussuri
Update the URL to the upper-constraints file to point to the redirect
rule on releases.openstack.org so that anyone working on this branch
will switch to the correct upper-constraints list automatically when
the requirements repository branches.

Until the requirements repository has a stable/ussuri branch, tests will
continue to use the upper-constraints list on master.

Change-Id: I80ec47827b91977dde874246fb94dbbeeeb7ef14
2020-04-23 09:48:50 +00:00
OpenStack Release Bot
d7d534f5d1 Update .gitreview for stable/ussuri
Change-Id: I72fffb815f040dd4f1b13a73455276f70bd10aaf
2020-04-23 09:48:48 +00:00
233 changed files with 1265 additions and 2352 deletions

View File

@@ -2,3 +2,4 @@
host=review.opendev.org host=review.opendev.org
port=29418 port=29418
project=openstack/watcher.git project=openstack/watcher.git
defaultbranch=stable/ussuri

View File

@@ -1,9 +1,9 @@
- project: - project:
queue: watcher
templates: templates:
- check-requirements - check-requirements
- openstack-cover-jobs - openstack-cover-jobs
- openstack-python3-jobs - openstack-lower-constraints-jobs
- openstack-python3-ussuri-jobs
- publish-openstack-docs-pti - publish-openstack-docs-pti
- release-notes-jobs-python3 - release-notes-jobs-python3
check: check:
@@ -13,8 +13,10 @@
- watcher-tempest-strategies - watcher-tempest-strategies
- watcher-tempest-actuator - watcher-tempest-actuator
- watcherclient-tempest-functional - watcherclient-tempest-functional
- watcher-tls-test
- watcher-tempest-functional-ipv6-only - watcher-tempest-functional-ipv6-only
gate: gate:
queue: watcher
jobs: jobs:
- watcher-tempest-functional - watcher-tempest-functional
- watcher-tempest-functional-ipv6-only - watcher-tempest-functional-ipv6-only
@@ -86,10 +88,21 @@
tempest_concurrency: 1 tempest_concurrency: 1
tempest_test_regex: watcher_tempest_plugin.tests.scenario.test_execute_strategies tempest_test_regex: watcher_tempest_plugin.tests.scenario.test_execute_strategies
- job:
name: watcher-tls-test
parent: watcher-tempest-multinode
group-vars:
subnode:
devstack_services:
tls-proxy: true
vars:
devstack_services:
tls-proxy: true
- job: - job:
name: watcher-tempest-multinode name: watcher-tempest-multinode
parent: watcher-tempest-functional parent: watcher-tempest-functional
nodeset: openstack-two-node-focal nodeset: openstack-two-node-bionic
roles: roles:
- zuul: openstack/tempest - zuul: openstack/tempest
group-vars: group-vars:
@@ -107,6 +120,8 @@
watcher-api: false watcher-api: false
watcher-decision-engine: true watcher-decision-engine: true
watcher-applier: false watcher-applier: false
# We need to add TLS support for watcher plugin
tls-proxy: false
ceilometer: false ceilometer: false
ceilometer-acompute: false ceilometer-acompute: false
ceilometer-acentral: false ceilometer-acentral: false
@@ -145,6 +160,7 @@
timeout: 7200 timeout: 7200
required-projects: &base_required_projects required-projects: &base_required_projects
- openstack/ceilometer - openstack/ceilometer
- openstack/devstack-gate
- openstack/python-openstackclient - openstack/python-openstackclient
- openstack/python-watcherclient - openstack/python-watcherclient
- openstack/watcher - openstack/watcher
@@ -154,6 +170,7 @@
devstack_plugins: devstack_plugins:
watcher: https://opendev.org/openstack/watcher watcher: https://opendev.org/openstack/watcher
devstack_services: devstack_services:
tls-proxy: false
watcher-api: true watcher-api: true
watcher-decision-engine: true watcher-decision-engine: true
watcher-applier: true watcher-applier: true
@@ -162,10 +179,14 @@
s-container: false s-container: false
s-object: false s-object: false
s-proxy: false s-proxy: false
tempest_plugins: devstack_localrc:
- watcher-tempest-plugin TEMPEST_PLUGINS: /opt/stack/watcher-tempest-plugin
USE_PYTHON3: true
tempest_test_regex: watcher_tempest_plugin.tests.api tempest_test_regex: watcher_tempest_plugin.tests.api
tox_envlist: all tox_envlist: all
tox_environment:
# Do we really need to set this? It's cargo culted
PYTHONUNBUFFERED: 'true'
zuul_copy_output: zuul_copy_output:
/etc/hosts: logs /etc/hosts: logs
@@ -179,12 +200,10 @@
- job: - job:
name: watcher-grenade name: watcher-grenade
parent: grenade parent: legacy-dsvm-base
required-projects: timeout: 10800
- openstack/watcher run: playbooks/legacy/grenade-devstack-watcher/run.yaml
- openstack/python-watcherclient post-run: playbooks/legacy/grenade-devstack-watcher/post.yaml
- openstack/watcher-tempest-plugin
vars: *base_vars
irrelevant-files: irrelevant-files:
- ^(test-|)requirements.txt$ - ^(test-|)requirements.txt$
- ^.*\.rst$ - ^.*\.rst$
@@ -196,6 +215,12 @@
- ^setup.cfg$ - ^setup.cfg$
- ^tools/.*$ - ^tools/.*$
- ^tox.ini$ - ^tox.ini$
required-projects:
- openstack/grenade
- openstack/devstack-gate
- openstack/watcher
- openstack/python-watcherclient
- openstack/watcher-tempest-plugin
- job: - job:
# This job is used in python-watcherclient repo # This job is used in python-watcherclient repo

View File

@@ -22,6 +22,9 @@
# All configuration values have a default; values that are commented out # All configuration values have a default; values that are commented out
# serve to show the default. # serve to show the default.
from watcher import version as watcher_version
extensions = [ extensions = [
'openstackdocstheme', 'openstackdocstheme',
'os_api_ref', 'os_api_ref',
@@ -43,13 +46,21 @@ project = u'Infrastructure Optimization API Reference'
copyright = u'2010-present, OpenStack Foundation' copyright = u'2010-present, OpenStack Foundation'
# openstackdocstheme options # openstackdocstheme options
openstackdocs_repo_name = 'openstack/watcher' repository_name = 'openstack/watcher'
openstackdocs_auto_name = False bug_project = 'watcher'
openstackdocs_bug_project = 'watcher' bug_tag = ''
openstackdocs_bug_tag = ''
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The full version, including alpha/beta/rc tags.
release = watcher_version.version_info.release_string()
# The short X.Y version.
version = watcher_version.version_string
# The name of the Pygments (syntax highlighting) style to use. # The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native' pygments_style = 'sphinx'
# -- Options for HTML output -------------------------------------------------- # -- Options for HTML output --------------------------------------------------
@@ -64,6 +75,10 @@ html_theme_options = {
"sidebar_mode": "toc", "sidebar_mode": "toc",
} }
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
html_last_updated_fmt = '%Y-%m-%d %H:%M'
# -- Options for LaTeX output ------------------------------------------------- # -- Options for LaTeX output -------------------------------------------------
# Grouping the document tree into LaTeX files. List of tuples # Grouping the document tree into LaTeX files. List of tuples

2
babel.cfg Normal file
View File

@@ -0,0 +1,2 @@
[python: **.py]

View File

@@ -298,7 +298,7 @@ function start_watcher_api {
service_protocol="http" service_protocol="http"
fi fi
if [[ "$WATCHER_USE_WSGI_MODE" == "uwsgi" ]]; then if [[ "$WATCHER_USE_WSGI_MODE" == "uwsgi" ]]; then
run_process "watcher-api" "$(which uwsgi) --procname-prefix watcher-api --ini $WATCHER_UWSGI_CONF" run_process "watcher-api" "$WATCHER_BIN_DIR/uwsgi --procname-prefix watcher-api --ini $WATCHER_UWSGI_CONF"
watcher_url=$service_protocol://$SERVICE_HOST/infra-optim watcher_url=$service_protocol://$SERVICE_HOST/infra-optim
else else
watcher_url=$service_protocol://$SERVICE_HOST:$service_port watcher_url=$service_protocol://$SERVICE_HOST:$service_port

View File

@@ -13,6 +13,8 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from __future__ import unicode_literals
import importlib import importlib
import inspect import inspect

View File

@@ -1,10 +1,10 @@
# The order of packages is significant, because pip processes them in the order # The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration # of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later. # process, which may cause wedges in the gate later.
openstackdocstheme>=2.2.1 # Apache-2.0 openstackdocstheme>=1.20.0 # Apache-2.0
sphinx>=2.0.0,!=2.1.0 # BSD sphinx>=1.8.0,!=2.1.0,!=3.0.0 # BSD
sphinxcontrib-pecanwsme>=0.8.0 # Apache-2.0 sphinxcontrib-pecanwsme>=0.8.0 # Apache-2.0
sphinxcontrib-svg2pdfconverter>=0.1.0 # BSD sphinxcontrib-svg2pdfconverter>=0.1.0 # BSD
reno>=3.1.0 # Apache-2.0 reno>=2.7.0 # Apache-2.0
sphinxcontrib-apidoc>=0.2.0 # BSD sphinxcontrib-apidoc>=0.2.0 # BSD
os-api-ref>=1.4.0 # Apache-2.0 os-api-ref>=1.4.0 # Apache-2.0

View File

@@ -17,14 +17,6 @@
Policies Policies
======== ========
.. warning::
JSON formatted policy file is deprecated since Watcher 6.0.0 (Wallaby).
This `oslopolicy-convert-json-to-yaml`__ tool will migrate your existing
JSON-formatted policy file to YAML in a backward-compatible way.
.. __: https://docs.openstack.org/oslo.policy/latest/cli/oslopolicy-convert-json-to-yaml.html
Watcher's public API calls may be restricted to certain sets of users using a Watcher's public API calls may be restricted to certain sets of users using a
policy configuration file. This document explains exactly how policies are policy configuration file. This document explains exactly how policies are
configured and what they apply to. configured and what they apply to.

View File

@@ -14,6 +14,7 @@
import os import os
import sys import sys
from watcher import version as watcher_version
from watcher import objects from watcher import objects
objects.register_all() objects.register_all()
@@ -56,8 +57,18 @@ source_suffix = '.rst'
master_doc = 'index' master_doc = 'index'
# General information about the project. # General information about the project.
project = 'Watcher' project = u'Watcher'
copyright = 'OpenStack Foundation' copyright = u'OpenStack Foundation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
# The full version, including alpha/beta/rc tags.
release = watcher_version.version_info.release_string()
# The short X.Y version.
version = watcher_version.version_string
# A list of ignored prefixes for module index sorting. # A list of ignored prefixes for module index sorting.
modindex_common_prefix = ['watcher.'] modindex_common_prefix = ['watcher.']
@@ -83,7 +94,7 @@ add_module_names = True
suppress_warnings = ['app.add_directive'] suppress_warnings = ['app.add_directive']
# The name of the Pygments (syntax highlighting) style to use. # The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native' pygments_style = 'sphinx'
# -- Options for man page output -------------------------------------------- # -- Options for man page output --------------------------------------------
@@ -91,14 +102,14 @@ pygments_style = 'native'
# List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual' # List of tuples 'sourcefile', 'target', u'title', u'Authors name', 'manual'
man_pages = [ man_pages = [
('man/watcher-api', 'watcher-api', 'Watcher API Server', ('man/watcher-api', 'watcher-api', u'Watcher API Server',
['OpenStack'], 1), [u'OpenStack'], 1),
('man/watcher-applier', 'watcher-applier', 'Watcher Applier', ('man/watcher-applier', 'watcher-applier', u'Watcher Applier',
['OpenStack'], 1), [u'OpenStack'], 1),
('man/watcher-db-manage', 'watcher-db-manage', ('man/watcher-db-manage', 'watcher-db-manage',
'Watcher Db Management Utility', ['OpenStack'], 1), u'Watcher Db Management Utility', [u'OpenStack'], 1),
('man/watcher-decision-engine', 'watcher-decision-engine', ('man/watcher-decision-engine', 'watcher-decision-engine',
'Watcher Decision Engine', ['OpenStack'], 1), u'Watcher Decision Engine', [u'OpenStack'], 1),
] ]
# -- Options for HTML output -------------------------------------------------- # -- Options for HTML output --------------------------------------------------
@@ -114,13 +125,12 @@ html_theme = 'openstackdocs'
# Output file base name for HTML help builder. # Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project htmlhelp_basename = '%sdoc' % project
html_last_updated_fmt = '%Y-%m-%d %H:%M'
#openstackdocstheme options #openstackdocstheme options
openstackdocs_repo_name = 'openstack/watcher' repository_name = 'openstack/watcher'
openstackdocs_pdf_link = True bug_project = 'watcher'
openstackdocs_auto_name = False bug_tag = ''
openstackdocs_bug_project = 'watcher'
openstackdocs_bug_tag = ''
# Grouping the document tree into LaTeX files. List of tuples # Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass # (source start file, target name, title, author, documentclass
@@ -128,8 +138,8 @@ openstackdocs_bug_tag = ''
latex_documents = [ latex_documents = [
('index', ('index',
'doc-watcher.tex', 'doc-watcher.tex',
'Watcher Documentation', u'%s Documentation' % project,
'OpenStack Foundation', 'manual'), u'OpenStack Foundation', 'manual'),
] ]
# If false, no module index is generated. # If false, no module index is generated.

View File

@@ -47,8 +47,6 @@ unavailable as well as `instance_l3_cpu_cache`::
[[local|localrc]] [[local|localrc]]
enable_plugin watcher https://opendev.org/openstack/watcher enable_plugin watcher https://opendev.org/openstack/watcher
enable_plugin watcher-dashboard https://opendev.org/openstack/watcher-dashboard
enable_plugin ceilometer https://opendev.org/openstack/ceilometer.git enable_plugin ceilometer https://opendev.org/openstack/ceilometer.git
CEILOMETER_BACKEND=gnocchi CEILOMETER_BACKEND=gnocchi

View File

@@ -56,6 +56,9 @@ Here is an example showing how you can write a plugin called ``NewStrategy``:
# filepath: thirdparty/new.py # filepath: thirdparty/new.py
# import path: thirdparty.new # import path: thirdparty.new
import abc import abc
import six
from watcher._i18n import _ from watcher._i18n import _
from watcher.decision_engine.strategy.strategies import base from watcher.decision_engine.strategy.strategies import base

View File

@@ -89,9 +89,9 @@ step 2: Create audit to do optimization
.. code-block:: shell .. code-block:: shell
$ openstack optimize audittemplate create \ $ openstack optimize audittemplate create \
saving_energy_template1 saving_energy --strategy saving_energy at1 saving_energy --strategy saving_energy
$ openstack optimize audit create -a saving_energy_audit1 \ $ openstack optimize audit create -a at1 \
-p free_used_percent=20.0 -p free_used_percent=20.0
External Links External Links

161
lower-constraints.txt Normal file
View File

@@ -0,0 +1,161 @@
alabaster==0.7.10
alembic==0.9.8
amqp==2.2.2
appdirs==1.4.3
APScheduler==3.5.1
asn1crypto==0.24.0
automaton==1.14.0
Babel==2.5.3
beautifulsoup4==4.6.0
cachetools==2.0.1
certifi==2018.1.18
cffi==1.11.5
chardet==3.0.4
cliff==2.11.0
cmd2==0.8.1
contextlib2==0.5.5
coverage==4.5.1
croniter==0.3.20
cryptography==2.1.4
debtcollector==1.19.0
decorator==4.2.1
deprecation==2.0
doc8==0.8.0
docutils==0.14
dogpile.cache==0.6.5
dulwich==0.19.0
enum34==1.1.6
enum-compat==0.0.2
eventlet==0.20.0
extras==1.0.0
fasteners==0.14.1
fixtures==3.0.0
freezegun==0.3.10
future==0.16.0
futurist==1.8.0
gitdb2==2.0.3
GitPython==2.1.8
gnocchiclient==7.0.1
greenlet==0.4.13
idna==2.6
imagesize==1.0.0
iso8601==0.1.12
Jinja2==2.10
jmespath==0.9.3
jsonpatch==1.21
jsonpointer==2.0
jsonschema==2.6.0
keystoneauth1==3.4.0
keystonemiddleware==4.21.0
kombu==4.1.0
linecache2==1.0.0
logutils==0.3.5
lxml==4.1.1
Mako==1.0.7
MarkupSafe==1.0
mccabe==0.2.1
microversion_parse==0.2.1
mock==2.0.0
monotonic==1.4
mox3==0.25.0
msgpack==0.5.6
munch==2.2.0
netaddr==0.7.19
netifaces==0.10.6
networkx==2.2
openstackdocstheme==1.20.0
openstacksdk==0.12.0
os-api-ref===1.4.0
os-client-config==1.29.0
os-service-types==1.2.0
os-testr==1.0.0
osc-lib==1.10.0
os-resource-classes==0.4.0
oslo.cache==1.29.0
oslo.concurrency==3.26.0
oslo.config==5.2.0
oslo.context==2.21.0
oslo.db==4.35.0
oslo.i18n==3.20.0
oslo.log==3.37.0
oslo.messaging==8.1.2
oslo.middleware==3.35.0
oslo.policy==1.34.0
oslo.reports==1.27.0
oslo.serialization==2.25.0
oslo.service==1.30.0
oslo.upgradecheck==0.1.0
oslo.utils==3.36.0
oslo.versionedobjects==1.32.0
oslotest==3.3.0
packaging==17.1
Paste==2.0.3
PasteDeploy==1.5.2
pbr==3.1.1
pecan==1.3.2
pika==0.10.0
pika-pool==0.1.3
prettytable==0.7.2
psutil==5.4.3
pycadf==2.7.0
pycparser==2.18
Pygments==2.2.0
pyinotify==0.9.6
pyOpenSSL==17.5.0
pyparsing==2.2.0
pyperclip==1.6.0
python-ceilometerclient==2.9.0
python-cinderclient==3.5.0
python-dateutil==2.7.0
python-editor==1.0.3
python-glanceclient==2.9.1
python-ironicclient==2.5.0
python-keystoneclient==3.15.0
python-mimeparse==1.6.0
python-monascaclient==1.12.0
python-neutronclient==6.7.0
python-novaclient==14.1.0
python-openstackclient==3.14.0
python-subunit==1.2.0
pytz==2018.3
PyYAML==3.12
reno==2.7.0
repoze.lru==0.7
requests==2.18.4
requestsexceptions==1.4.0
restructuredtext-lint==1.1.3
rfc3986==1.1.0
Routes==2.4.1
simplegeneric==0.8.1
simplejson==3.13.2
six==1.11.0
smmap2==2.0.3
snowballstemmer==1.2.1
Sphinx==1.6.5
sphinxcontrib-httpdomain==1.6.1
sphinxcontrib-pecanwsme==0.8.0
sphinxcontrib-websupport==1.0.1
SQLAlchemy==1.2.5
sqlalchemy-migrate==0.11.0
sqlparse==0.2.4
statsd==3.2.2
stestr==2.0.0
stevedore==1.28.0
taskflow==3.7.1
Tempita==0.5.2
tenacity==4.9.0
testresources==2.0.1
testscenarios==0.5.0
testtools==2.3.0
traceback2==1.4.0
tzlocal==1.5.1
ujson==1.35
unittest2==1.1.0
urllib3==1.22
vine==1.1.4
waitress==1.1.0
warlock==1.3.0
WebOb==1.8.5
WebTest==2.0.29
wrapt==1.10.11
WSME==0.9.2

View File

@@ -0,0 +1,15 @@
- hosts: primary
tasks:
- name: Copy files from {{ ansible_user_dir }}/workspace/ on node
synchronize:
src: '{{ ansible_user_dir }}/workspace/'
dest: '{{ zuul.executor.log_root }}'
mode: pull
copy_links: true
verify_host: true
rsync_opts:
- --include=/logs/**
- --include=*/
- --exclude=*
- --prune-empty-dirs

View File

@@ -0,0 +1,60 @@
- hosts: all
name: legacy-grenade-dsvm-watcher
tasks:
- name: Ensure legacy workspace directory
file:
path: '{{ ansible_user_dir }}/workspace'
state: directory
- shell:
cmd: |
set -e
set -x
cat > clonemap.yaml << EOF
clonemap:
- name: openstack/devstack-gate
dest: devstack-gate
EOF
/usr/zuul-env/bin/zuul-cloner -m clonemap.yaml --cache-dir /opt/git \
https://opendev.org \
openstack/devstack-gate
executable: /bin/bash
chdir: '{{ ansible_user_dir }}/workspace'
environment: '{{ zuul | zuul_legacy_vars }}'
- shell:
cmd: |
set -e
set -x
export PYTHONUNBUFFERED=true
export PROJECTS="openstack/grenade $PROJECTS"
export PROJECTS="openstack/watcher $PROJECTS"
export PROJECTS="openstack/watcher-tempest-plugin $PROJECTS"
export PROJECTS="openstack/python-watcherclient $PROJECTS"
export DEVSTACK_PROJECT_FROM_GIT="python-watcherclient $DEVSTACK_PROJECT_FROM_GIT"
export GRENADE_PLUGINRC="enable_grenade_plugin watcher https://opendev.org/openstack/watcher"
export DEVSTACK_LOCAL_CONFIG+=$'\n'"export TEMPEST_PLUGINS='/opt/stack/new/watcher-tempest-plugin'"
export DEVSTACK_GATE_TEMPEST_NOTESTS=1
export DEVSTACK_GATE_GRENADE=pullup
export DEVSTACK_GATE_USE_PYTHON3=True
export BRANCH_OVERRIDE=default
if [ "$BRANCH_OVERRIDE" != "default" ] ; then
export OVERRIDE_ZUUL_BRANCH=$BRANCH_OVERRIDE
fi
# Add configuration values for enabling security features in local.conf
function pre_test_hook {
if [ -f /opt/stack/old/watcher-tempest-plugin/tools/pre_test_hook.sh ] ; then
. /opt/stack/old/watcher-tempest-plugin/tools/pre_test_hook.sh
fi
}
export -f pre_test_hook
cp devstack-gate/devstack-vm-gate-wrap.sh ./safe-devstack-vm-gate-wrap.sh
./safe-devstack-vm-gate-wrap.sh
executable: /bin/bash
chdir: '{{ ansible_user_dir }}/workspace'
environment: '{{ zuul | zuul_legacy_vars }}'

View File

@@ -1,20 +0,0 @@
---
upgrade:
- |
The default value of ``[oslo_policy] policy_file`` config option has
been changed from ``policy.json`` to ``policy.yaml``.
Operators who are utilizing customized or previously generated
static policy JSON files (which are not needed by default), should
generate new policy files or convert them in YAML format. Use the
`oslopolicy-convert-json-to-yaml
<https://docs.openstack.org/oslo.policy/latest/cli/oslopolicy-convert-json-to-yaml.html>`_
tool to convert a JSON to YAML formatted policy file in
backward compatible way.
deprecations:
- |
Use of JSON policy files was deprecated by the ``oslo.policy`` library
during the Victoria development cycle. As a result, this deprecation is
being noted in the Wallaby cycle with an anticipated future removal of support
by ``oslo.policy``. As such operators will need to convert to YAML policy
files. Please see the upgrade notes for details on migration of any
custom policy files.

View File

@@ -1,6 +0,0 @@
===========================
2023.1 Series Release Notes
===========================
.. release-notes::
:branch: stable/2023.1

View File

@@ -53,7 +53,8 @@ source_suffix = '.rst'
master_doc = 'index' master_doc = 'index'
# General information about the project. # General information about the project.
copyright = '2016, Watcher developers' project = u'watcher'
copyright = u'2016, Watcher developers'
# Release notes are version independent # Release notes are version independent
# The short X.Y version. # The short X.Y version.
@@ -90,15 +91,11 @@ exclude_patterns = ['_build']
#show_authors = False #show_authors = False
# The name of the Pygments (syntax highlighting) style to use. # The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native' pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting. # A list of ignored prefixes for module index sorting.
#modindex_common_prefix = [] #modindex_common_prefix = []
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/watcher'
openstackdocs_bug_project = 'watcher'
openstackdocs_bug_tag = ''
# -- Options for HTML output -------------------------------------------------- # -- Options for HTML output --------------------------------------------------
@@ -196,8 +193,8 @@ latex_elements = {
# Grouping the document tree into LaTeX files. List of tuples # Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]) # (source start file, target name, title, author, documentclass [howto/manual])
latex_documents = [ latex_documents = [
('index', 'watcher.tex', 'Watcher Documentation', ('index', 'watcher.tex', u'Watcher Documentation',
'Watcher developers', 'manual'), u'Watcher developers', 'manual'),
] ]
# The name of an image file (relative to this directory) to place at the top of # The name of an image file (relative to this directory) to place at the top of
@@ -226,8 +223,8 @@ latex_documents = [
# One entry per manual page. List of tuples # One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section). # (source start file, name, description, authors, manual section).
man_pages = [ man_pages = [
('index', 'watcher', 'Watcher Documentation', ('index', 'watcher', u'Watcher Documentation',
['Watcher developers'], 1) [u'Watcher developers'], 1)
] ]
# If true, show URL addresses after external links. # If true, show URL addresses after external links.
@@ -240,8 +237,8 @@ man_pages = [
# (source start file, target name, title, author, # (source start file, target name, title, author,
# dir menu entry, description, category) # dir menu entry, description, category)
texinfo_documents = [ texinfo_documents = [
('index', 'watcher', 'Watcher Documentation', ('index', 'watcher', u'Watcher Documentation',
'Watcher developers', 'watcher', 'One line description of project.', u'Watcher developers', 'watcher', 'One line description of project.',
'Miscellaneous'), 'Miscellaneous'),
] ]

View File

@@ -21,13 +21,6 @@ Contents:
:maxdepth: 1 :maxdepth: 1
unreleased unreleased
2023.1
zed
yoga
xena
wallaby
victoria
ussuri
train train
stein stein
rocky rocky

File diff suppressed because it is too large Load Diff

View File

@@ -1,33 +0,0 @@
# Gérald LONLAS <g.lonlas@gmail.com>, 2016. #zanata
msgid ""
msgstr ""
"Project-Id-Version: python-watcher\n"
"Report-Msgid-Bugs-To: \n"
"POT-Creation-Date: 2019-03-22 02:21+0000\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"PO-Revision-Date: 2016-10-22 06:44+0000\n"
"Last-Translator: Gérald LONLAS <g.lonlas@gmail.com>\n"
"Language-Team: French\n"
"Language: fr\n"
"X-Generator: Zanata 4.3.3\n"
"Plural-Forms: nplurals=2; plural=(n > 1)\n"
msgid "0.29.0"
msgstr "0.29.0"
msgid "Contents:"
msgstr "Contenu :"
msgid "Current Series Release Notes"
msgstr "Note de la release actuelle"
msgid "New Features"
msgstr "Nouvelles fonctionnalités"
msgid "Newton Series Release Notes"
msgstr "Note de release pour Newton"
msgid "Welcome to watcher's Release Notes documentation!"
msgstr "Bienvenue dans la documentation de la note de Release de Watcher"

View File

@@ -1,6 +0,0 @@
===========================
Ussuri Series Release Notes
===========================
.. release-notes::
:branch: stable/ussuri

View File

@@ -1,6 +0,0 @@
=============================
Victoria Series Release Notes
=============================
.. release-notes::
:branch: stable/victoria

View File

@@ -1,6 +0,0 @@
============================
Wallaby Series Release Notes
============================
.. release-notes::
:branch: stable/wallaby

View File

@@ -1,6 +0,0 @@
=========================
Xena Series Release Notes
=========================
.. release-notes::
:branch: stable/xena

View File

@@ -1,6 +0,0 @@
=========================
Yoga Series Release Notes
=========================
.. release-notes::
:branch: stable/yoga

View File

@@ -1,6 +0,0 @@
========================
Zed Series Release Notes
========================
.. release-notes::
:branch: stable/zed

View File

@@ -1,35 +1,36 @@
# The order of packages is significant, because pip processes them in the order # The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration # of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later. # process, which may cause wedges in the gate later.
apscheduler>=3.5.1 # MIT License apscheduler>=3.5.1 # MIT License
jsonpatch>=1.21 # BSD jsonpatch>=1.21 # BSD
keystoneauth1>=3.4.0 # Apache-2.0 keystoneauth1>=3.4.0 # Apache-2.0
jsonschema>=3.2.0 # MIT jsonschema>=2.6.0 # MIT
keystonemiddleware>=4.21.0 # Apache-2.0 keystonemiddleware>=4.21.0 # Apache-2.0
lxml>=4.5.1 # BSD lxml>=4.1.1 # BSD
croniter>=0.3.20 # MIT License croniter>=0.3.20 # MIT License
os-resource-classes>=0.4.0 os-resource-classes>=0.4.0
oslo.concurrency>=3.26.0 # Apache-2.0 oslo.concurrency>=3.26.0 # Apache-2.0
oslo.cache>=1.29.0 # Apache-2.0 oslo.cache>=1.29.0 # Apache-2.0
oslo.config>=6.8.0 # Apache-2.0 oslo.config>=5.2.0 # Apache-2.0
oslo.context>=2.21.0 # Apache-2.0 oslo.context>=2.21.0 # Apache-2.0
oslo.db>=4.44.0 # Apache-2.0 oslo.db>=4.35.0 # Apache-2.0
oslo.i18n>=3.20.0 # Apache-2.0 oslo.i18n>=3.20.0 # Apache-2.0
oslo.log>=3.37.0 # Apache-2.0 oslo.log>=3.37.0 # Apache-2.0
oslo.messaging>=14.1.0 # Apache-2.0 oslo.messaging>=8.1.2 # Apache-2.0
oslo.policy>=3.6.0 # Apache-2.0 oslo.policy>=1.34.0 # Apache-2.0
oslo.reports>=1.27.0 # Apache-2.0 oslo.reports>=1.27.0 # Apache-2.0
oslo.serialization>=2.25.0 # Apache-2.0 oslo.serialization>=2.25.0 # Apache-2.0
oslo.service>=1.30.0 # Apache-2.0 oslo.service>=1.30.0 # Apache-2.0
oslo.upgradecheck>=1.3.0 # Apache-2.0 oslo.upgradecheck>=0.1.0 # Apache-2.0
oslo.utils>=3.36.0 # Apache-2.0 oslo.utils>=3.36.0 # Apache-2.0
oslo.versionedobjects>=1.32.0 # Apache-2.0 oslo.versionedobjects>=1.32.0 # Apache-2.0
PasteDeploy>=1.5.2 # MIT PasteDeploy>=1.5.2 # MIT
pbr>=3.1.1 # Apache-2.0 pbr>=3.1.1 # Apache-2.0
pecan>=1.3.2 # BSD pecan>=1.3.2 # BSD
PrettyTable>=0.7.2 # BSD PrettyTable<0.8,>=0.7.2 # BSD
gnocchiclient>=7.0.1 # Apache-2.0 gnocchiclient>=7.0.1 # Apache-2.0
python-ceilometerclient>=2.9.0 # Apache-2.0
python-cinderclient>=3.5.0 # Apache-2.0 python-cinderclient>=3.5.0 # Apache-2.0
python-glanceclient>=2.9.1 # Apache-2.0 python-glanceclient>=2.9.1 # Apache-2.0
python-keystoneclient>=3.15.0 # Apache-2.0 python-keystoneclient>=3.15.0 # Apache-2.0
@@ -38,11 +39,12 @@ python-neutronclient>=6.7.0 # Apache-2.0
python-novaclient>=14.1.0 # Apache-2.0 python-novaclient>=14.1.0 # Apache-2.0
python-openstackclient>=3.14.0 # Apache-2.0 python-openstackclient>=3.14.0 # Apache-2.0
python-ironicclient>=2.5.0 # Apache-2.0 python-ironicclient>=2.5.0 # Apache-2.0
six>=1.11.0 # MIT
SQLAlchemy>=1.2.5 # MIT SQLAlchemy>=1.2.5 # MIT
stevedore>=1.28.0 # Apache-2.0 stevedore>=1.28.0 # Apache-2.0
taskflow>=3.8.0 # Apache-2.0 taskflow>=3.7.1 # Apache-2.0
WebOb>=1.8.5 # MIT WebOb>=1.8.5 # MIT
WSME>=0.9.2 # MIT WSME>=0.9.2 # MIT
networkx>=2.4 # BSD networkx>=2.2;python_version>='3.4' # BSD
microversion_parse>=0.2.1 # Apache-2.0 microversion_parse>=0.2.1 # Apache-2.0
futurist>=1.8.0 # Apache-2.0 futurist>=1.8.0 # Apache-2.0

View File

@@ -1,12 +1,12 @@
[metadata] [metadata]
name = python-watcher name = python-watcher
summary = OpenStack Watcher provides a flexible and scalable resource optimization service for multi-tenant OpenStack-based clouds. summary = OpenStack Watcher provides a flexible and scalable resource optimization service for multi-tenant OpenStack-based clouds.
description_file = description-file =
README.rst README.rst
author = OpenStack author = OpenStack
author_email = openstack-discuss@lists.openstack.org author-email = openstack-discuss@lists.openstack.org
home_page = https://docs.openstack.org/watcher/latest/ home-page = https://docs.openstack.org/watcher/latest/
python_requires = >=3.6 python-requires = >=3.6
classifier = classifier =
Environment :: OpenStack Environment :: OpenStack
Intended Audience :: Information Technology Intended Audience :: Information Technology
@@ -19,7 +19,6 @@ classifier =
Programming Language :: Python :: 3 Programming Language :: Python :: 3
Programming Language :: Python :: 3.6 Programming Language :: Python :: 3.6
Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
[files] [files]
packages = packages =
@@ -108,3 +107,18 @@ watcher_cluster_data_model_collectors =
compute = watcher.decision_engine.model.collector.nova:NovaClusterDataModelCollector compute = watcher.decision_engine.model.collector.nova:NovaClusterDataModelCollector
storage = watcher.decision_engine.model.collector.cinder:CinderClusterDataModelCollector storage = watcher.decision_engine.model.collector.cinder:CinderClusterDataModelCollector
baremetal = watcher.decision_engine.model.collector.ironic:BaremetalClusterDataModelCollector baremetal = watcher.decision_engine.model.collector.ironic:BaremetalClusterDataModelCollector
[compile_catalog]
directory = watcher/locale
domain = watcher
[update_catalog]
domain = watcher
output_dir = watcher/locale
input_file = watcher/locale/watcher.pot
[extract_messages]
keywords = _ gettext ngettext l_ lazy_gettext _LI _LW _LE _LC
mapping_file = babel.cfg
output_file = watcher/locale/watcher.pot

View File

@@ -5,11 +5,12 @@
coverage>=4.5.1 # Apache-2.0 coverage>=4.5.1 # Apache-2.0
doc8>=0.8.0 # Apache-2.0 doc8>=0.8.0 # Apache-2.0
freezegun>=0.3.10 # Apache-2.0 freezegun>=0.3.10 # Apache-2.0
hacking>=3.0.1,<3.1.0 # Apache-2.0 hacking>=3.0,<3.1.0 # Apache-2.0
mock>=2.0.0 # BSD
oslotest>=3.3.0 # Apache-2.0 oslotest>=3.3.0 # Apache-2.0
os-testr>=1.0.0 # Apache-2.0
testscenarios>=0.5.0 # Apache-2.0/BSD testscenarios>=0.5.0 # Apache-2.0/BSD
testtools>=2.3.0 # MIT testtools>=2.3.0 # MIT
stestr>=2.0.0 # Apache-2.0 stestr>=2.0.0 # Apache-2.0
os-api-ref>=1.4.0 # Apache-2.0 os-api-ref>=1.4.0 # Apache-2.0
bandit>=1.6.0 # Apache-2.0 bandit>=1.6.0 # Apache-2.0
WebTest>=2.0.27 # MIT

78
tox.ini
View File

@@ -1,40 +1,37 @@
[tox] [tox]
minversion = 3.18.0 minversion = 2.0
envlist = py3,pep8 envlist = py36,py37,pep8
skipsdist = True
ignore_basepython_conflict = True ignore_basepython_conflict = True
[testenv] [testenv]
basepython = python3 basepython = python3
usedevelop = True usedevelop = True
allowlist_externals = find whitelist_externals = find
rm rm
install_command = pip install -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} {opts} {packages} install_command = pip install {opts} {packages}
setenv = setenv =
VIRTUAL_ENV={envdir} VIRTUAL_ENV={envdir}
deps = deps =
-c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/ussuri}
-r{toxinidir}/test-requirements.txt -r{toxinidir}/test-requirements.txt
-r{toxinidir}/requirements.txt -r{toxinidir}/requirements.txt
commands = commands =
rm -f .testrepository/times.dbm rm -f .testrepository/times.dbm
find . -type f -name "*.py[c|o]" -delete find . -type f -name "*.py[c|o]" -delete
stestr run {posargs} stestr run {posargs}
passenv = passenv = http_proxy HTTP_PROXY https_proxy HTTPS_PROXY no_proxy NO_PROXY
http_proxy
HTTP_PROXY
https_proxy
HTTPS_PROXY
no_proxy
NO_PROXY
[testenv:pep8] [testenv:pep8]
commands = commands =
doc8 doc/source/ CONTRIBUTING.rst HACKING.rst README.rst doc8 doc/source/ CONTRIBUTING.rst HACKING.rst README.rst
flake8 flake8
#bandit -r watcher -x watcher/tests/* -n5 -ll -s B320 bandit -r watcher -x watcher/tests/* -n5 -ll -s B320
[testenv:venv] [testenv:venv]
setenv = PYTHONHASHSEED=0 setenv = PYTHONHASHSEED=0
deps = deps =
-c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/ussuri}
-r{toxinidir}/doc/requirements.txt -r{toxinidir}/doc/requirements.txt
-r{toxinidir}/test-requirements.txt -r{toxinidir}/test-requirements.txt
-r{toxinidir}/requirements.txt -r{toxinidir}/requirements.txt
@@ -52,15 +49,14 @@ commands =
[testenv:docs] [testenv:docs]
setenv = PYTHONHASHSEED=0 setenv = PYTHONHASHSEED=0
deps = deps = -r{toxinidir}/doc/requirements.txt
-r{toxinidir}/doc/requirements.txt
commands = commands =
rm -fr doc/build doc/source/api/ .autogenerated rm -fr doc/build doc/source/api/ .autogenerated
sphinx-build -W --keep-going -b html doc/source doc/build/html sphinx-build -W --keep-going -b html doc/source doc/build/html
[testenv:api-ref] [testenv:api-ref]
deps = -r{toxinidir}/doc/requirements.txt deps = -r{toxinidir}/doc/requirements.txt
allowlist_externals = bash whitelist_externals = bash
commands = commands =
bash -c 'rm -rf api-ref/build' bash -c 'rm -rf api-ref/build'
sphinx-build -W --keep-going -b html -d api-ref/build/doctrees api-ref/source api-ref/build/html sphinx-build -W --keep-going -b html -d api-ref/build/doctrees api-ref/source api-ref/build/html
@@ -77,28 +73,6 @@ commands =
commands = commands =
oslopolicy-sample-generator --config-file etc/watcher/oslo-policy-generator/watcher-policy-generator.conf oslopolicy-sample-generator --config-file etc/watcher/oslo-policy-generator/watcher-policy-generator.conf
[testenv:wheel]
commands = python setup.py bdist_wheel
[testenv:pdf-docs]
envdir = {toxworkdir}/docs
deps = {[testenv:docs]deps}
allowlist_externals =
rm
make
commands =
rm -rf doc/build/pdf
sphinx-build -W --keep-going -b latex doc/source doc/build/pdf
make -C doc/build/pdf
[testenv:releasenotes]
deps = -r{toxinidir}/doc/requirements.txt
commands = sphinx-build -a -W -E -d releasenotes/build/doctrees --keep-going -b html releasenotes/source releasenotes/build/html
[testenv:bandit]
deps = -r{toxinidir}/test-requirements.txt
commands = bandit -r watcher -x watcher/tests/* -n5 -ll -s B320
[flake8] [flake8]
filename = *.py,app.wsgi filename = *.py,app.wsgi
show-source=True show-source=True
@@ -108,6 +82,9 @@ builtins= _
enable-extensions = H106,H203,H904 enable-extensions = H106,H203,H904
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,*sqlalchemy/alembic/versions/*,demo/,releasenotes exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,*sqlalchemy/alembic/versions/*,demo/,releasenotes
[testenv:wheel]
commands = python setup.py bdist_wheel
[hacking] [hacking]
import_exceptions = watcher._i18n import_exceptions = watcher._i18n
@@ -128,10 +105,35 @@ extension =
N340 = checks:check_oslo_i18n_wrapper N340 = checks:check_oslo_i18n_wrapper
N341 = checks:check_builtins_gettext N341 = checks:check_builtins_gettext
N342 = checks:no_redundant_import_alias N342 = checks:no_redundant_import_alias
N366 = checks:import_stock_mock
paths = ./watcher/hacking paths = ./watcher/hacking
[doc8] [doc8]
extension=.rst extension=.rst
# todo: stop ignoring doc/source/man when https://bugs.launchpad.net/doc8/+bug/1502391 is fixed # todo: stop ignoring doc/source/man when https://bugs.launchpad.net/doc8/+bug/1502391 is fixed
ignore-path=doc/source/image_src,doc/source/man,doc/source/api ignore-path=doc/source/image_src,doc/source/man,doc/source/api
[testenv:pdf-docs]
envdir = {toxworkdir}/docs
deps = {[testenv:docs]deps}
whitelist_externals =
rm
make
commands =
rm -rf doc/build/pdf
sphinx-build -W --keep-going -b latex doc/source doc/build/pdf
make -C doc/build/pdf
[testenv:releasenotes]
deps = -r{toxinidir}/doc/requirements.txt
commands = sphinx-build -a -W -E -d releasenotes/build/doctrees --keep-going -b html releasenotes/source releasenotes/build/html
[testenv:bandit]
deps = -r{toxinidir}/test-requirements.txt
commands = bandit -r watcher -x watcher/tests/* -n5 -ll -s B320
[testenv:lower-constraints]
deps =
-c{toxinidir}/lower-constraints.txt
-r{toxinidir}/test-requirements.txt
-r{toxinidir}/requirements.txt

View File

@@ -13,6 +13,8 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from __future__ import unicode_literals
from oslo_config import cfg from oslo_config import cfg
from watcher.api import hooks from watcher.api import hooks

View File

@@ -57,7 +57,6 @@ are dynamically loaded by Watcher at launch time.
import datetime import datetime
from http import HTTPStatus
import pecan import pecan
from pecan import rest from pecan import rest
import wsme import wsme
@@ -363,7 +362,7 @@ class ActionsController(rest.RestController):
return Action.convert_with_links(action) return Action.convert_with_links(action)
@wsme_pecan.wsexpose(Action, body=Action, status_code=HTTPStatus.CREATED) @wsme_pecan.wsexpose(Action, body=Action, status_code=201)
def post(self, action): def post(self, action):
"""Create a new action(forbidden). """Create a new action(forbidden).
@@ -423,7 +422,7 @@ class ActionsController(rest.RestController):
action_to_update.save() action_to_update.save()
return Action.convert_with_links(action_to_update) return Action.convert_with_links(action_to_update)
@wsme_pecan.wsexpose(None, types.uuid, status_code=HTTPStatus.NO_CONTENT) @wsme_pecan.wsexpose(None, types.uuid, status_code=204)
def delete(self, action_uuid): def delete(self, action_uuid):
"""Delete a action(forbidden). """Delete a action(forbidden).

View File

@@ -56,7 +56,6 @@ state machine <action_plan_state_machine>`.
import datetime import datetime
from http import HTTPStatus
from oslo_log import log from oslo_log import log
import pecan import pecan
from pecan import rest from pecan import rest
@@ -461,7 +460,7 @@ class ActionPlansController(rest.RestController):
return ActionPlan.convert_with_links(action_plan) return ActionPlan.convert_with_links(action_plan)
@wsme_pecan.wsexpose(None, types.uuid, status_code=HTTPStatus.NO_CONTENT) @wsme_pecan.wsexpose(None, types.uuid, status_code=204)
def delete(self, action_plan_uuid): def delete(self, action_plan_uuid):
"""Delete an action plan. """Delete an action plan.

View File

@@ -32,7 +32,6 @@ states, visit :ref:`the Audit State machine <audit_state_machine>`.
import datetime import datetime
from dateutil import tz from dateutil import tz
from http import HTTPStatus
import pecan import pecan
from pecan import rest from pecan import rest
import wsme import wsme
@@ -596,8 +595,7 @@ class AuditsController(rest.RestController):
return Audit.convert_with_links(rpc_audit) return Audit.convert_with_links(rpc_audit)
@wsme_pecan.wsexpose(Audit, body=AuditPostType, @wsme_pecan.wsexpose(Audit, body=AuditPostType, status_code=201)
status_code=HTTPStatus.CREATED)
def post(self, audit_p): def post(self, audit_p):
"""Create a new audit. """Create a new audit.
@@ -719,7 +717,7 @@ class AuditsController(rest.RestController):
audit_to_update.save() audit_to_update.save()
return Audit.convert_with_links(audit_to_update) return Audit.convert_with_links(audit_to_update)
@wsme_pecan.wsexpose(None, wtypes.text, status_code=HTTPStatus.NO_CONTENT) @wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
def delete(self, audit): def delete(self, audit):
"""Delete an audit. """Delete an audit.

View File

@@ -45,7 +45,6 @@ will be launched automatically or will need a manual confirmation from the
import datetime import datetime
from http import HTTPStatus
import pecan import pecan
from pecan import rest from pecan import rest
import wsme import wsme
@@ -139,9 +138,6 @@ class AuditTemplatePostType(wtypes.Base):
raise exception.InvalidGoal(goal=audit_template.goal) raise exception.InvalidGoal(goal=audit_template.goal)
if audit_template.scope: if audit_template.scope:
keys = [list(s)[0] for s in audit_template.scope]
if keys[0] not in ('compute', 'storage'):
audit_template.scope = [dict(compute=audit_template.scope)]
common_utils.Draft4Validator( common_utils.Draft4Validator(
AuditTemplatePostType._build_schema() AuditTemplatePostType._build_schema()
).validate(audit_template.scope) ).validate(audit_template.scope)
@@ -162,23 +158,18 @@ class AuditTemplatePostType(wtypes.Base):
"included and excluded together")) "included and excluded together"))
if audit_template.strategy: if audit_template.strategy:
try: available_strategies = objects.Strategy.list(
if (common_utils.is_uuid_like(audit_template.strategy) or AuditTemplatePostType._ctx)
common_utils.is_int_like(audit_template.strategy)): available_strategies_map = {
strategy = objects.Strategy.get( s.uuid: s for s in available_strategies}
AuditTemplatePostType._ctx, audit_template.strategy) if audit_template.strategy not in available_strategies_map:
else:
strategy = objects.Strategy.get_by_name(
AuditTemplatePostType._ctx, audit_template.strategy)
except Exception:
raise exception.InvalidStrategy( raise exception.InvalidStrategy(
strategy=audit_template.strategy) strategy=audit_template.strategy)
strategy = available_strategies_map[audit_template.strategy]
# Check that the strategy we indicate is actually related to the # Check that the strategy we indicate is actually related to the
# specified goal # specified goal
if strategy.goal_id != goal.id: if strategy.goal_id != goal.id:
available_strategies = objects.Strategy.list(
AuditTemplatePostType._ctx)
choices = ["'%s' (%s)" % (s.uuid, s.name) choices = ["'%s' (%s)" % (s.uuid, s.name)
for s in available_strategies] for s in available_strategies]
raise exception.InvalidStrategy( raise exception.InvalidStrategy(
@@ -619,7 +610,7 @@ class AuditTemplatesController(rest.RestController):
@wsme.validate(types.uuid, AuditTemplatePostType) @wsme.validate(types.uuid, AuditTemplatePostType)
@wsme_pecan.wsexpose(AuditTemplate, body=AuditTemplatePostType, @wsme_pecan.wsexpose(AuditTemplate, body=AuditTemplatePostType,
status_code=HTTPStatus.CREATED) status_code=201)
def post(self, audit_template_postdata): def post(self, audit_template_postdata):
"""Create a new audit template. """Create a new audit template.
@@ -695,7 +686,7 @@ class AuditTemplatesController(rest.RestController):
audit_template_to_update.save() audit_template_to_update.save()
return AuditTemplate.convert_with_links(audit_template_to_update) return AuditTemplate.convert_with_links(audit_template_to_update)
@wsme_pecan.wsexpose(None, wtypes.text, status_code=HTTPStatus.NO_CONTENT) @wsme_pecan.wsexpose(None, wtypes.text, status_code=204)
def delete(self, audit_template): def delete(self, audit_template):
"""Delete a audit template. """Delete a audit template.

View File

@@ -19,6 +19,8 @@ Service mechanism provides ability to monitor Watcher services state.
""" """
import datetime import datetime
import six
from oslo_config import cfg from oslo_config import cfg
from oslo_log import log from oslo_log import log
from oslo_utils import timeutils from oslo_utils import timeutils
@@ -68,7 +70,7 @@ class Service(base.APIBase):
service = objects.Service.get(pecan.request.context, id) service = objects.Service.get(pecan.request.context, id)
last_heartbeat = (service.last_seen_up or service.updated_at or last_heartbeat = (service.last_seen_up or service.updated_at or
service.created_at) service.created_at)
if isinstance(last_heartbeat, str): if isinstance(last_heartbeat, six.string_types):
# NOTE(russellb) If this service came in over rpc via # NOTE(russellb) If this service came in over rpc via
# conductor, then the timestamp will be a string and needs to be # conductor, then the timestamp will be a string and needs to be
# converted back to a datetime. # converted back to a datetime.

View File

@@ -15,6 +15,7 @@
from oslo_serialization import jsonutils from oslo_serialization import jsonutils
from oslo_utils import strutils from oslo_utils import strutils
import six
import wsme import wsme
from wsme import types as wtypes from wsme import types as wtypes
@@ -131,7 +132,7 @@ class JsonType(wtypes.UserType):
def __str__(self): def __str__(self):
# These are the json serializable native types # These are the json serializable native types
return ' | '.join(map(str, (wtypes.text, int, float, return ' | '.join(map(str, (wtypes.text, six.integer_types, float,
BooleanType, list, dict, None))) BooleanType, list, dict, None)))
@staticmethod @staticmethod

View File

@@ -14,7 +14,6 @@
Webhook endpoint for Watcher v1 REST API. Webhook endpoint for Watcher v1 REST API.
""" """
from http import HTTPStatus
from oslo_log import log from oslo_log import log
import pecan import pecan
from pecan import rest from pecan import rest
@@ -37,7 +36,7 @@ class WebhookController(rest.RestController):
self.dc_client = rpcapi.DecisionEngineAPI() self.dc_client = rpcapi.DecisionEngineAPI()
@wsme_pecan.wsexpose(None, wtypes.text, body=types.jsontype, @wsme_pecan.wsexpose(None, wtypes.text, body=types.jsontype,
status_code=HTTPStatus.ACCEPTED) status_code=202)
def post(self, audit_ident, body): def post(self, audit_ident, body):
"""Trigger the given audit. """Trigger the given audit.

View File

@@ -15,9 +15,9 @@
# under the License. # under the License.
from http import HTTPStatus
from oslo_config import cfg from oslo_config import cfg
from pecan import hooks from pecan import hooks
from six.moves import http_client
from watcher.common import context from watcher.common import context
@@ -91,8 +91,8 @@ class NoExceptionTracebackHook(hooks.PecanHook):
# Do nothing if there is no error. # Do nothing if there is no error.
# Status codes in the range 200 (OK) to 399 (400 = BAD_REQUEST) are not # Status codes in the range 200 (OK) to 399 (400 = BAD_REQUEST) are not
# an error. # an error.
if (HTTPStatus.OK <= state.response.status_int < if (http_client.OK <= state.response.status_int <
HTTPStatus.BAD_REQUEST): http_client.BAD_REQUEST):
return return
json_body = state.response.json json_body = state.response.json

View File

@@ -24,6 +24,7 @@ from xml import etree as et
from oslo_log import log from oslo_log import log
from oslo_serialization import jsonutils from oslo_serialization import jsonutils
import six
import webob import webob
from watcher._i18n import _ from watcher._i18n import _
@@ -83,10 +84,12 @@ class ParsableErrorMiddleware(object):
'</error_message>' % state['status_code']] '</error_message>' % state['status_code']]
state['headers'].append(('Content-Type', 'application/xml')) state['headers'].append(('Content-Type', 'application/xml'))
else: else:
app_iter = [i.decode('utf-8') for i in app_iter] if six.PY3:
app_iter = [i.decode('utf-8') for i in app_iter]
body = [jsonutils.dumps( body = [jsonutils.dumps(
{'error_message': '\n'.join(app_iter)})] {'error_message': '\n'.join(app_iter)})]
body = [item.encode('utf-8') for item in body] if six.PY3:
body = [item.encode('utf-8') for item in body]
state['headers'].append(('Content-Type', 'application/json')) state['headers'].append(('Content-Type', 'application/json'))
state['headers'].append(('Content-Length', str(len(body[0])))) state['headers'].append(('Content-Length', str(len(body[0]))))
else: else:

View File

@@ -20,6 +20,7 @@ import itertools
from oslo_config import cfg from oslo_config import cfg
from oslo_log import log from oslo_log import log
from oslo_utils import timeutils from oslo_utils import timeutils
import six
from watcher.common import context as watcher_context from watcher.common import context as watcher_context
from watcher.common import scheduling from watcher.common import scheduling
@@ -82,7 +83,7 @@ class APISchedulingService(scheduling.BackgroundSchedulerService):
service = objects.Service.get(context, service_id) service = objects.Service.get(context, service_id)
last_heartbeat = (service.last_seen_up or service.updated_at or last_heartbeat = (service.last_seen_up or service.updated_at or
service.created_at) service.created_at)
if isinstance(last_heartbeat, str): if isinstance(last_heartbeat, six.string_types):
# NOTE(russellb) If this service came in over rpc via # NOTE(russellb) If this service came in over rpc via
# conductor, then the timestamp will be a string and needs to be # conductor, then the timestamp will be a string and needs to be
# converted back to a datetime. # converted back to a datetime.

View File

@@ -18,9 +18,11 @@
# #
import abc import abc
import six
class BaseActionPlanHandler(object, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class BaseActionPlanHandler(object):
@abc.abstractmethod @abc.abstractmethod
def execute(self): def execute(self):
raise NotImplementedError() raise NotImplementedError()

View File

@@ -19,12 +19,14 @@
import abc import abc
import jsonschema import jsonschema
import six
from watcher.common import clients from watcher.common import clients
from watcher.common.loader import loadable from watcher.common.loader import loadable
class BaseAction(loadable.Loadable, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class BaseAction(loadable.Loadable):
# NOTE(jed): by convention we decided # NOTE(jed): by convention we decided
# that the attribute "resource_id" is the unique id of # that the attribute "resource_id" is the unique id of
# the resource to which the Action applies to allow us to use it in the # the resource to which the Action applies to allow us to use it in the

View File

@@ -15,6 +15,8 @@
# limitations under the License. # limitations under the License.
# #
from __future__ import unicode_literals
from oslo_log import log from oslo_log import log
from watcher.applier.loading import default from watcher.applier.loading import default

View File

@@ -186,7 +186,7 @@ class Migrate(base.BaseAction):
return self.migrate(destination=self.destination_node) return self.migrate(destination=self.destination_node)
def revert(self): def revert(self):
return self.migrate(destination=self.source_node) LOG.info('Migrate action do not revert!')
def abort(self): def abort(self):
nova = nova_helper.NovaHelper(osc=self.osc) nova = nova_helper.NovaHelper(osc=self.osc)

View File

@@ -95,7 +95,7 @@ class Resize(base.BaseAction):
return self.resize() return self.resize()
def revert(self): def revert(self):
LOG.warning("revert not supported") return self.migrate(destination=self.source_node)
def pre_condition(self): def pre_condition(self):
# TODO(jed): check if the instance exists / check if the instance is on # TODO(jed): check if the instance exists / check if the instance is on

View File

@@ -26,9 +26,11 @@ See: :doc:`../architecture` for more details on this component.
""" """
import abc import abc
import six
class BaseApplier(object, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class BaseApplier(object):
@abc.abstractmethod @abc.abstractmethod
def execute(self, action_plan_uuid): def execute(self, action_plan_uuid):
raise NotImplementedError() raise NotImplementedError()

View File

@@ -11,6 +11,9 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from __future__ import unicode_literals
from watcher.common.loader import default from watcher.common.loader import default

View File

@@ -17,6 +17,7 @@
# #
import abc import abc
import six
import time import time
import eventlet import eventlet
@@ -39,7 +40,8 @@ CANCEL_STATE = [objects.action_plan.State.CANCELLING,
objects.action_plan.State.CANCELLED] objects.action_plan.State.CANCELLED]
class BaseWorkFlowEngine(loadable.Loadable, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class BaseWorkFlowEngine(loadable.Loadable):
def __init__(self, config, context=None, applier_manager=None): def __init__(self, config, context=None, applier_manager=None):
"""Constructor """Constructor

View File

@@ -25,11 +25,8 @@ from taskflow import task as flow_task
from watcher.applier.workflow_engine import base from watcher.applier.workflow_engine import base
from watcher.common import exception from watcher.common import exception
from watcher import conf
from watcher import objects from watcher import objects
CONF = conf.CONF
LOG = log.getLogger(__name__) LOG = log.getLogger(__name__)
@@ -130,11 +127,9 @@ class DefaultWorkFlowEngine(base.BaseWorkFlowEngine):
class TaskFlowActionContainer(base.BaseTaskFlowActionContainer): class TaskFlowActionContainer(base.BaseTaskFlowActionContainer):
def __init__(self, db_action, engine): def __init__(self, db_action, engine):
self.name = "action_type:{0} uuid:{1}".format(db_action.action_type, name = "action_type:{0} uuid:{1}".format(db_action.action_type,
db_action.uuid) db_action.uuid)
super(TaskFlowActionContainer, self).__init__(self.name, super(TaskFlowActionContainer, self).__init__(name, db_action, engine)
db_action,
engine)
def do_pre_execute(self): def do_pre_execute(self):
db_action = self.engine.notify(self._db_action, db_action = self.engine.notify(self._db_action,
@@ -163,12 +158,6 @@ class TaskFlowActionContainer(base.BaseTaskFlowActionContainer):
self.action.post_condition() self.action.post_condition()
def do_revert(self, *args, **kwargs): def do_revert(self, *args, **kwargs):
# NOTE: Not rollback action plan
if not CONF.watcher_applier.rollback_when_actionplan_failed:
LOG.info("Failed actionplan rollback option is turned off, and "
"the following action will be skipped: %s", self.name)
return
LOG.warning("Revert action: %s", self.name) LOG.warning("Revert action: %s", self.name)
try: try:
# TODO(jed): do we need to update the states in case of failure? # TODO(jed): do we need to update the states in case of failure?

View File

@@ -18,10 +18,3 @@
import eventlet import eventlet
eventlet.monkey_patch() eventlet.monkey_patch()
# Monkey patch the original current_thread to use the up-to-date _active
# global variable. See https://bugs.launchpad.net/bugs/1863021 and
# https://github.com/eventlet/eventlet/issues/592
import __original_module_threading as orig_threading # noqa
import threading # noqa
orig_threading.current_thread.__globals__['_active'] = threading._active

View File

@@ -14,7 +14,6 @@
import sys import sys
from oslo_upgradecheck import common_checks
from oslo_upgradecheck import upgradecheck from oslo_upgradecheck import upgradecheck
from watcher._i18n import _ from watcher._i18n import _
@@ -44,10 +43,6 @@ class Checks(upgradecheck.UpgradeCommands):
_upgrade_checks = ( _upgrade_checks = (
# Added in Train. # Added in Train.
(_('Minimum Nova API Version'), _minimum_nova_api_version), (_('Minimum Nova API Version'), _minimum_nova_api_version),
# Added in Wallaby.
(_("Policy File JSON to YAML Migration"),
(common_checks.check_policy_json, {'conf': CONF})),
) )

View File

@@ -17,7 +17,7 @@ import time
from oslo_log import log from oslo_log import log
from cinderclient import exceptions as cinder_exception from cinderclient import exceptions as cinder_exception
from cinderclient.v3.volumes import Volume from cinderclient.v2.volumes import Volume
from watcher._i18n import _ from watcher._i18n import _
from watcher.common import clients from watcher.common import clients
from watcher.common import exception from watcher.common import exception

View File

@@ -13,6 +13,7 @@
from oslo_context import context from oslo_context import context
from oslo_log import log from oslo_log import log
from oslo_utils import timeutils from oslo_utils import timeutils
import six
LOG = log.getLogger(__name__) LOG = log.getLogger(__name__)
@@ -68,7 +69,7 @@ class RequestContext(context.RequestContext):
self.project_id = project_id self.project_id = project_id
if not timestamp: if not timestamp:
timestamp = timeutils.utcnow() timestamp = timeutils.utcnow()
if isinstance(timestamp, str): if isinstance(timestamp, six.string_types):
timestamp = timeutils.parse_isotime(timestamp) timestamp = timeutils.parse_isotime(timestamp)
self.timestamp = timestamp self.timestamp = timestamp
self.user_name = user_name self.user_name = user_name

View File

@@ -25,7 +25,6 @@ SHOULD include dedicated exception logging.
import functools import functools
import sys import sys
from http import HTTPStatus
from keystoneclient import exceptions as keystone_exceptions from keystoneclient import exceptions as keystone_exceptions
from oslo_config import cfg from oslo_config import cfg
from oslo_log import log from oslo_log import log
@@ -63,7 +62,7 @@ class WatcherException(Exception):
""" """
msg_fmt = _("An unknown exception occurred") msg_fmt = _("An unknown exception occurred")
code = HTTPStatus.INTERNAL_SERVER_ERROR code = 500
headers = {} headers = {}
safe = False safe = False
@@ -115,12 +114,12 @@ class UnsupportedError(WatcherException):
class NotAuthorized(WatcherException): class NotAuthorized(WatcherException):
msg_fmt = _("Not authorized") msg_fmt = _("Not authorized")
code = HTTPStatus.FORBIDDEN code = 403
class NotAcceptable(WatcherException): class NotAcceptable(WatcherException):
msg_fmt = _("Request not acceptable.") msg_fmt = _("Request not acceptable.")
code = HTTPStatus.NOT_ACCEPTABLE code = 406
class PolicyNotAuthorized(NotAuthorized): class PolicyNotAuthorized(NotAuthorized):
@@ -133,7 +132,7 @@ class OperationNotPermitted(NotAuthorized):
class Invalid(WatcherException, ValueError): class Invalid(WatcherException, ValueError):
msg_fmt = _("Unacceptable parameters") msg_fmt = _("Unacceptable parameters")
code = HTTPStatus.BAD_REQUEST code = 400
class ObjectNotFound(WatcherException): class ObjectNotFound(WatcherException):
@@ -142,12 +141,12 @@ class ObjectNotFound(WatcherException):
class Conflict(WatcherException): class Conflict(WatcherException):
msg_fmt = _('Conflict') msg_fmt = _('Conflict')
code = HTTPStatus.CONFLICT code = 409
class ResourceNotFound(ObjectNotFound): class ResourceNotFound(ObjectNotFound):
msg_fmt = _("The %(name)s resource %(id)s could not be found") msg_fmt = _("The %(name)s resource %(id)s could not be found")
code = HTTPStatus.NOT_FOUND code = 404
class InvalidParameter(Invalid): class InvalidParameter(Invalid):

View File

@@ -14,10 +14,14 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from __future__ import unicode_literals
import abc import abc
import six
class BaseLoader(object, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class BaseLoader(object):
@abc.abstractmethod @abc.abstractmethod
def list_available(self): def list_available(self):

View File

@@ -14,6 +14,8 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from __future__ import unicode_literals
from oslo_config import cfg from oslo_config import cfg
from oslo_log import log from oslo_log import log
from stevedore import driver as drivermanager from stevedore import driver as drivermanager

View File

@@ -16,10 +16,13 @@
import abc import abc
import six
from watcher.common import service from watcher.common import service
class Loadable(object, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class Loadable(object):
"""Generic interface for dynamically loading a driver/entry point. """Generic interface for dynamically loading a driver/entry point.
This defines the contract in order to let the loader manager inject This defines the contract in order to let the loader manager inject
@@ -45,7 +48,8 @@ LoadableSingletonMeta = type(
"LoadableSingletonMeta", (abc.ABCMeta, service.Singleton), {}) "LoadableSingletonMeta", (abc.ABCMeta, service.Singleton), {})
class LoadableSingleton(object, metaclass=LoadableSingletonMeta): @six.add_metaclass(LoadableSingletonMeta)
class LoadableSingleton(object):
"""Generic interface for dynamically loading a driver as a singleton. """Generic interface for dynamically loading a driver as a singleton.
This defines the contract in order to let the loader manager inject This defines the contract in order to let the loader manager inject

View File

@@ -11,7 +11,6 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from http import HTTPStatus
from oslo_config import cfg from oslo_config import cfg
from oslo_log import log as logging from oslo_log import log as logging
@@ -54,7 +53,7 @@ class PlacementHelper(object):
if rp_name: if rp_name:
url += '?name=%s' % rp_name url += '?name=%s' % rp_name
resp = self.get(url) resp = self.get(url)
if resp.status_code == HTTPStatus.OK: if resp.status_code == 200:
json_resp = resp.json() json_resp = resp.json()
return json_resp['resource_providers'] return json_resp['resource_providers']
@@ -78,7 +77,7 @@ class PlacementHelper(object):
""" """
url = '/resource_providers/%s/inventories' % rp_uuid url = '/resource_providers/%s/inventories' % rp_uuid
resp = self.get(url) resp = self.get(url)
if resp.status_code == HTTPStatus.OK: if resp.status_code == 200:
json = resp.json() json = resp.json()
return json['inventories'] return json['inventories']
msg = ("Failed to get resource provider %(rp_uuid)s inventories. " msg = ("Failed to get resource provider %(rp_uuid)s inventories. "
@@ -98,7 +97,7 @@ class PlacementHelper(object):
""" """
resp = self.get("/resource_providers/%s/traits" % rp_uuid) resp = self.get("/resource_providers/%s/traits" % rp_uuid)
if resp.status_code == HTTPStatus.OK: if resp.status_code == 200:
json = resp.json() json = resp.json()
return json['traits'] return json['traits']
msg = ("Failed to get resource provider %(rp_uuid)s traits. " msg = ("Failed to get resource provider %(rp_uuid)s traits. "
@@ -119,7 +118,7 @@ class PlacementHelper(object):
""" """
url = '/allocations/%s' % consumer_uuid url = '/allocations/%s' % consumer_uuid
resp = self.get(url) resp = self.get(url)
if resp.status_code == HTTPStatus.OK: if resp.status_code == 200:
json = resp.json() json = resp.json()
return json['allocations'] return json['allocations']
msg = ("Failed to get allocations for consumer %(c_uuid). " msg = ("Failed to get allocations for consumer %(c_uuid). "
@@ -140,7 +139,7 @@ class PlacementHelper(object):
""" """
url = '/resource_providers/%s/usages' % rp_uuid url = '/resource_providers/%s/usages' % rp_uuid
resp = self.get(url) resp = self.get(url)
if resp.status_code == HTTPStatus.OK: if resp.status_code == 200:
json = resp.json() json = resp.json()
return json['usages'] return json['usages']
msg = ("Failed to get resource provider %(rp_uuid)s usages. " msg = ("Failed to get resource provider %(rp_uuid)s usages. "
@@ -165,7 +164,7 @@ class PlacementHelper(object):
""" """
url = "/allocation_candidates?%s" % resources url = "/allocation_candidates?%s" % resources
resp = self.get(url) resp = self.get(url)
if resp.status_code == HTTPStatus.OK: if resp.status_code == 200:
data = resp.json() data = resp.json()
return data['provider_summaries'] return data['provider_summaries']

View File

@@ -18,7 +18,6 @@
import sys import sys
from oslo_config import cfg from oslo_config import cfg
from oslo_policy import opts
from oslo_policy import policy from oslo_policy import policy
from watcher.common import exception from watcher.common import exception
@@ -27,12 +26,6 @@ from watcher.common import policies
_ENFORCER = None _ENFORCER = None
CONF = cfg.CONF CONF = cfg.CONF
# TODO(gmann): Remove setting the default value of config policy_file
# once oslo_policy change the default value to 'policy.yaml'.
# https://github.com/openstack/oslo.policy/blob/a626ad12fe5a3abd49d70e3e5b95589d279ab578/oslo_policy/opts.py#L49
DEFAULT_POLICY_FILE = 'policy.yaml'
opts.set_defaults(CONF, DEFAULT_POLICY_FILE)
# we can get a policy enforcer by this init. # we can get a policy enforcer by this init.
# oslo policy support change policy rule dynamically. # oslo policy support change policy rule dynamically.

View File

@@ -121,40 +121,22 @@ class RequestContextSerializer(messaging.Serializer):
def get_client(target, version_cap=None, serializer=None): def get_client(target, version_cap=None, serializer=None):
assert TRANSPORT is not None assert TRANSPORT is not None
serializer = RequestContextSerializer(serializer) serializer = RequestContextSerializer(serializer)
return messaging.get_rpc_client( return messaging.RPCClient(TRANSPORT,
TRANSPORT, target,
target, version_cap=version_cap,
version_cap=version_cap, serializer=serializer)
serializer=serializer
)
def get_server(target, endpoints, serializer=None): def get_server(target, endpoints, serializer=None):
assert TRANSPORT is not None assert TRANSPORT is not None
access_policy = dispatcher.DefaultRPCAccessPolicy access_policy = dispatcher.DefaultRPCAccessPolicy
serializer = RequestContextSerializer(serializer) serializer = RequestContextSerializer(serializer)
return messaging.get_rpc_server( return messaging.get_rpc_server(TRANSPORT,
TRANSPORT, target,
target, endpoints,
endpoints, executor='eventlet',
executor='eventlet', serializer=serializer,
serializer=serializer, access_policy=access_policy)
access_policy=access_policy
)
def get_notification_listener(targets, endpoints, serializer=None, pool=None):
assert NOTIFICATION_TRANSPORT is not None
serializer = RequestContextSerializer(serializer)
return messaging.get_notification_listener(
NOTIFICATION_TRANSPORT,
targets,
endpoints,
allow_requeue=False,
executor='eventlet',
pool=pool,
serializer=serializer
)
def get_notifier(publisher_id): def get_notifier(publisher_id):

View File

@@ -21,12 +21,14 @@ from oslo_concurrency import processutils
from oslo_config import cfg from oslo_config import cfg
from oslo_log import _options from oslo_log import _options
from oslo_log import log from oslo_log import log
import oslo_messaging as messaging import oslo_messaging as om
from oslo_reports import guru_meditation_report as gmr from oslo_reports import guru_meditation_report as gmr
from oslo_reports import opts as gmr_opts from oslo_reports import opts as gmr_opts
from oslo_service import service from oslo_service import service
from oslo_service import wsgi from oslo_service import wsgi
from oslo_messaging.rpc import dispatcher
from watcher._i18n import _ from watcher._i18n import _
from watcher.api import app from watcher.api import app
from watcher.common import config from watcher.common import config
@@ -181,6 +183,11 @@ class Service(service.ServiceBase):
] ]
self.notification_endpoints = self.manager.notification_endpoints self.notification_endpoints = self.manager.notification_endpoints
self.serializer = rpc.RequestContextSerializer(
base.WatcherObjectSerializer())
self._transport = None
self._notification_transport = None
self._conductor_client = None self._conductor_client = None
self.conductor_topic_handler = None self.conductor_topic_handler = None
@@ -194,17 +201,27 @@ class Service(service.ServiceBase):
self.notification_topics, self.notification_endpoints self.notification_topics, self.notification_endpoints
) )
@property
def transport(self):
if self._transport is None:
self._transport = om.get_rpc_transport(CONF)
return self._transport
@property
def notification_transport(self):
if self._notification_transport is None:
self._notification_transport = om.get_notification_transport(CONF)
return self._notification_transport
@property @property
def conductor_client(self): def conductor_client(self):
if self._conductor_client is None: if self._conductor_client is None:
target = messaging.Target( target = om.Target(
topic=self.conductor_topic, topic=self.conductor_topic,
version=self.API_VERSION, version=self.API_VERSION,
) )
self._conductor_client = rpc.get_client( self._conductor_client = om.RPCClient(
target, self.transport, target, serializer=self.serializer)
serializer=base.WatcherObjectSerializer()
)
return self._conductor_client return self._conductor_client
@conductor_client.setter @conductor_client.setter
@@ -212,18 +229,21 @@ class Service(service.ServiceBase):
self.conductor_client = c self.conductor_client = c
def build_topic_handler(self, topic_name, endpoints=()): def build_topic_handler(self, topic_name, endpoints=()):
target = messaging.Target( access_policy = dispatcher.DefaultRPCAccessPolicy
serializer = rpc.RequestContextSerializer(rpc.JsonPayloadSerializer())
target = om.Target(
topic=topic_name, topic=topic_name,
# For compatibility, we can override it with 'host' opt # For compatibility, we can override it with 'host' opt
server=CONF.host or socket.gethostname(), server=CONF.host or socket.gethostname(),
version=self.api_version, version=self.api_version,
) )
return rpc.get_server( return om.get_rpc_server(
target, endpoints, self.transport, target, endpoints,
serializer=rpc.JsonPayloadSerializer() executor='eventlet', serializer=serializer,
) access_policy=access_policy)
def build_notification_handler(self, topic_names, endpoints=()): def build_notification_handler(self, topic_names, endpoints=()):
serializer = rpc.RequestContextSerializer(rpc.JsonPayloadSerializer())
targets = [] targets = []
for topic in topic_names: for topic in topic_names:
kwargs = {} kwargs = {}
@@ -231,13 +251,11 @@ class Service(service.ServiceBase):
exchange, topic = topic.split('.') exchange, topic = topic.split('.')
kwargs['exchange'] = exchange kwargs['exchange'] = exchange
kwargs['topic'] = topic kwargs['topic'] = topic
targets.append(messaging.Target(**kwargs)) targets.append(om.Target(**kwargs))
return om.get_notification_listener(
return rpc.get_notification_listener( self.notification_transport, targets, endpoints,
targets, endpoints, executor='eventlet', serializer=serializer,
serializer=rpc.JsonPayloadSerializer(), allow_requeue=False, pool=CONF.host)
pool=CONF.host
)
def start(self): def start(self):
LOG.debug("Connecting to '%s'", CONF.transport_url) LOG.debug("Connecting to '%s'", CONF.transport_url)

View File

@@ -15,9 +15,11 @@
# under the License. # under the License.
import abc import abc
import six
class ServiceManager(object, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class ServiceManager(object):
@abc.abstractproperty @abc.abstractproperty
def service_name(self): def service_name(self):

View File

@@ -28,6 +28,7 @@ from oslo_config import cfg
from oslo_log import log from oslo_log import log
from oslo_utils import strutils from oslo_utils import strutils
from oslo_utils import uuidutils from oslo_utils import uuidutils
import six
from watcher.common import exception from watcher.common import exception
@@ -81,7 +82,7 @@ def safe_rstrip(value, chars=None):
:return: Stripped value. :return: Stripped value.
""" """
if not isinstance(value, str): if not isinstance(value, six.string_types):
LOG.warning( LOG.warning(
"Failed to remove trailing character. Returning original object." "Failed to remove trailing character. Returning original object."
"Supplied object is not a string: %s,", value) "Supplied object is not a string: %s,", value)
@@ -103,7 +104,7 @@ def is_hostname_safe(hostname):
""" """
m = r'^[a-z0-9]([a-z0-9\-]{0,61}[a-z0-9])?$' m = r'^[a-z0-9]([a-z0-9\-]{0,61}[a-z0-9])?$'
return (isinstance(hostname, str) and return (isinstance(hostname, six.string_types) and
(re.match(m, hostname) is not None)) (re.match(m, hostname) is not None))

View File

@@ -43,20 +43,11 @@ APPLIER_MANAGER_OPTS = [
help='Select the engine to use to execute the workflow'), help='Select the engine to use to execute the workflow'),
] ]
APPLIER_OPTS = [
cfg.BoolOpt('rollback_when_actionplan_failed',
default=False,
help='If set True, the failed actionplan will rollback '
'when executing. Defaule value is False.'),
]
def register_opts(conf): def register_opts(conf):
conf.register_group(watcher_applier) conf.register_group(watcher_applier)
conf.register_opts(APPLIER_MANAGER_OPTS, group=watcher_applier) conf.register_opts(APPLIER_MANAGER_OPTS, group=watcher_applier)
conf.register_opts(APPLIER_OPTS, group=watcher_applier)
def list_opts(): def list_opts():
return [(watcher_applier, APPLIER_MANAGER_OPTS), return [(watcher_applier, APPLIER_MANAGER_OPTS)]
(watcher_applier, APPLIER_OPTS)]

View File

@@ -134,13 +134,7 @@ GRAFANA_CLIENT_OPTS = [
"InfluxDB this will be the retention period. " "InfluxDB this will be the retention period. "
"These queries will need to be constructed using tools " "These queries will need to be constructed using tools "
"such as Postman. Example: SELECT cpu FROM {4}." "such as Postman. Example: SELECT cpu FROM {4}."
"cpu_percent WHERE host == '{1}' AND time > now()-{2}s"), "cpu_percent WHERE host == '{1}' AND time > now()-{2}s")]
cfg.IntOpt('http_timeout',
min=0,
default=60,
mutable=True,
help='Timeout for Grafana request')
]
def register_opts(conf): def register_opts(conf):

View File

@@ -18,6 +18,7 @@ Base classes for storage engines
import abc import abc
from oslo_config import cfg from oslo_config import cfg
from oslo_db import api as db_api from oslo_db import api as db_api
import six
_BACKEND_MAPPING = {'sqlalchemy': 'watcher.db.sqlalchemy.api'} _BACKEND_MAPPING = {'sqlalchemy': 'watcher.db.sqlalchemy.api'}
IMPL = db_api.DBAPI.from_config(cfg.CONF, backend_mapping=_BACKEND_MAPPING, IMPL = db_api.DBAPI.from_config(cfg.CONF, backend_mapping=_BACKEND_MAPPING,
@@ -29,7 +30,8 @@ def get_instance():
return IMPL return IMPL
class BaseConnection(object, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class BaseConnection(object):
"""Base class for storage system connections.""" """Base class for storage system connections."""
@abc.abstractmethod @abc.abstractmethod

View File

@@ -15,6 +15,8 @@
# limitations under the License. # limitations under the License.
# #
from __future__ import print_function
import collections import collections
import datetime import datetime
import itertools import itertools
@@ -23,6 +25,7 @@ import sys
from oslo_log import log from oslo_log import log
from oslo_utils import strutils from oslo_utils import strutils
import prettytable as ptable import prettytable as ptable
from six.moves import input
from watcher._i18n import _ from watcher._i18n import _
from watcher._i18n import lazy_translation_enabled from watcher._i18n import lazy_translation_enabled

View File

@@ -6,7 +6,6 @@ Create Date: 2017-03-24 11:21:29.036532
""" """
from alembic import op from alembic import op
from sqlalchemy import inspect
import sqlalchemy as sa import sqlalchemy as sa
from watcher.db.sqlalchemy import models from watcher.db.sqlalchemy import models
@@ -15,17 +14,8 @@ from watcher.db.sqlalchemy import models
revision = '0f6042416884' revision = '0f6042416884'
down_revision = '001' down_revision = '001'
def _table_exists(table_name):
bind = op.get_context().bind
insp = inspect(bind)
names = insp.get_table_names()
return any(t == table_name for t in names)
def upgrade(): def upgrade():
if _table_exists('apscheduler_jobs'):
return
op.create_table( op.create_table(
'apscheduler_jobs', 'apscheduler_jobs',
sa.Column('id', sa.Unicode(191, _warn_on_bytestring=False), sa.Column('id', sa.Unicode(191, _warn_on_bytestring=False),

View File

@@ -44,11 +44,7 @@ _FACADE = None
def _create_facade_lazily(): def _create_facade_lazily():
global _FACADE global _FACADE
if _FACADE is None: if _FACADE is None:
# FIXME(amoralej): Remove autocommit=True (and ideally use of _FACADE = db_session.EngineFacade.from_config(CONF)
# LegacyEngineFacade) asap since it's not compatible with SQLAlchemy
# 2.0.
_FACADE = db_session.EngineFacade.from_config(CONF,
autocommit=True)
return _FACADE return _FACADE
@@ -282,7 +278,7 @@ class Connection(api.BaseConnection):
query = model_query(model, session=session) query = model_query(model, session=session)
query = add_identity_filter(query, id_) query = add_identity_filter(query, id_)
try: try:
ref = query.with_for_update().one() ref = query.with_lockmode('update').one()
except exc.NoResultFound: except exc.NoResultFound:
raise exception.ResourceNotFound(name=model.__name__, id=id_) raise exception.ResourceNotFound(name=model.__name__, id=id_)
@@ -819,7 +815,7 @@ class Connection(api.BaseConnection):
query = model_query(models.Action, session=session) query = model_query(models.Action, session=session)
query = add_identity_filter(query, action_id) query = add_identity_filter(query, action_id)
try: try:
ref = query.with_for_update().one() ref = query.with_lockmode('update').one()
except exc.NoResultFound: except exc.NoResultFound:
raise exception.ActionNotFound(action=action_id) raise exception.ActionNotFound(action=action_id)
@@ -904,7 +900,7 @@ class Connection(api.BaseConnection):
query = model_query(models.ActionPlan, session=session) query = model_query(models.ActionPlan, session=session)
query = add_identity_filter(query, action_plan_id) query = add_identity_filter(query, action_plan_id)
try: try:
ref = query.with_for_update().one() ref = query.with_lockmode('update').one()
except exc.NoResultFound: except exc.NoResultFound:
raise exception.ActionPlanNotFound(action_plan=action_plan_id) raise exception.ActionPlanNotFound(action_plan=action_plan_id)

View File

@@ -18,6 +18,7 @@ SQLAlchemy models for watcher service
from oslo_db.sqlalchemy import models from oslo_db.sqlalchemy import models
from oslo_serialization import jsonutils from oslo_serialization import jsonutils
import six.moves.urllib.parse as urlparse
from sqlalchemy import Boolean from sqlalchemy import Boolean
from sqlalchemy import Column from sqlalchemy import Column
from sqlalchemy import DateTime from sqlalchemy import DateTime
@@ -32,7 +33,7 @@ from sqlalchemy import String
from sqlalchemy import Text from sqlalchemy import Text
from sqlalchemy.types import TypeDecorator, TEXT from sqlalchemy.types import TypeDecorator, TEXT
from sqlalchemy import UniqueConstraint from sqlalchemy import UniqueConstraint
import urllib.parse as urlparse
from watcher import conf from watcher import conf
CONF = conf.CONF CONF = conf.CONF

View File

@@ -18,6 +18,7 @@
# limitations under the License. # limitations under the License.
# #
import abc import abc
import six
from oslo_config import cfg from oslo_config import cfg
from oslo_log import log from oslo_log import log
@@ -35,11 +36,9 @@ CONF = cfg.CONF
LOG = log.getLogger(__name__) LOG = log.getLogger(__name__)
class BaseMetaClass(service.Singleton, abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
pass @six.add_metaclass(service.Singleton)
class BaseAuditHandler(object):
class BaseAuditHandler(object, metaclass=BaseMetaClass):
@abc.abstractmethod @abc.abstractmethod
def execute(self, audit, request_context): def execute(self, audit, request_context):
@@ -58,7 +57,8 @@ class BaseAuditHandler(object, metaclass=BaseMetaClass):
raise NotImplementedError() raise NotImplementedError()
class AuditHandler(BaseAuditHandler, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class AuditHandler(BaseAuditHandler):
def __init__(self): def __init__(self):
super(AuditHandler, self).__init__() super(AuditHandler, self).__init__()

View File

@@ -19,8 +19,6 @@ import time
from oslo_config import cfg from oslo_config import cfg
from oslo_log import log from oslo_log import log
from watcher.common import exception
CONF = cfg.CONF CONF = cfg.CONF
LOG = log.getLogger(__name__) LOG = log.getLogger(__name__)
@@ -56,13 +54,6 @@ class DataSourceBase(object):
instance_root_disk_size=None, instance_root_disk_size=None,
) )
def _get_meter(self, meter_name):
"""Retrieve the meter from the metric map or raise error"""
meter = self.METRIC_MAP.get(meter_name)
if meter is None:
raise exception.MetricNotAvailable(metric=meter_name)
return meter
def query_retry(self, f, *args, **kwargs): def query_retry(self, f, *args, **kwargs):
"""Attempts to retrieve metrics from the external service """Attempts to retrieve metrics from the external service
@@ -131,30 +122,6 @@ class DataSourceBase(object):
pass pass
@abc.abstractmethod
def statistic_series(self, resource=None, resource_type=None,
meter_name=None, start_time=None, end_time=None,
granularity=300):
"""Retrieves metrics based on the specified parameters over a period
:param resource: Resource object as defined in watcher models such as
ComputeNode and Instance
:param resource_type: Indicates which type of object is supplied
to the resource parameter
:param meter_name: The desired metric to retrieve as key from
METRIC_MAP
:param start_time: The datetime to start retrieving metrics for
:type start_time: datetime.datetime
:param end_time: The datetime to limit the retrieval of metrics to
:type end_time: datetime.datetime
:param granularity: Interval between samples in measurements in
seconds
:return: Dictionary of key value pairs with timestamps and metric
values
"""
pass
@abc.abstractmethod @abc.abstractmethod
def get_host_cpu_usage(self, resource, period, aggregate, def get_host_cpu_usage(self, resource, period, aggregate,
granularity=None): granularity=None):

View File

@@ -161,7 +161,9 @@ class CeilometerHelper(base.DataSourceBase):
end_time = datetime.datetime.utcnow() end_time = datetime.datetime.utcnow()
start_time = end_time - datetime.timedelta(seconds=int(period)) start_time = end_time - datetime.timedelta(seconds=int(period))
meter = self._get_meter(meter_name) meter = self.METRIC_MAP.get(meter_name)
if meter is None:
raise exception.MetricNotAvailable(metric=meter_name)
if aggregate == 'mean': if aggregate == 'mean':
aggregate = 'avg' aggregate = 'avg'
@@ -192,12 +194,6 @@ class CeilometerHelper(base.DataSourceBase):
item_value *= 10 item_value *= 10
return item_value return item_value
def statistic_series(self, resource=None, resource_type=None,
meter_name=None, start_time=None, end_time=None,
granularity=300):
raise NotImplementedError(
_('Ceilometer helper does not support statistic series method'))
def get_host_cpu_usage(self, resource, period, def get_host_cpu_usage(self, resource, period,
aggregate, granularity=None): aggregate, granularity=None):

View File

@@ -23,6 +23,7 @@ from oslo_config import cfg
from oslo_log import log from oslo_log import log
from watcher.common import clients from watcher.common import clients
from watcher.common import exception
from watcher.decision_engine.datasources import base from watcher.decision_engine.datasources import base
CONF = cfg.CONF CONF = cfg.CONF
@@ -71,7 +72,9 @@ class GnocchiHelper(base.DataSourceBase):
stop_time = datetime.utcnow() stop_time = datetime.utcnow()
start_time = stop_time - timedelta(seconds=(int(period))) start_time = stop_time - timedelta(seconds=(int(period)))
meter = self._get_meter(meter_name) meter = self.METRIC_MAP.get(meter_name)
if meter is None:
raise exception.MetricNotAvailable(metric=meter_name)
if aggregate == 'count': if aggregate == 'count':
aggregate = 'mean' aggregate = 'mean'
@@ -120,52 +123,6 @@ class GnocchiHelper(base.DataSourceBase):
return return_value return return_value
def statistic_series(self, resource=None, resource_type=None,
meter_name=None, start_time=None, end_time=None,
granularity=300):
meter = self._get_meter(meter_name)
resource_id = resource.uuid
if resource_type == 'compute_node':
resource_id = "%s_%s" % (resource.hostname, resource.hostname)
kwargs = dict(query={"=": {"original_resource_id": resource_id}},
limit=1)
resources = self.query_retry(
f=self.gnocchi.resource.search, **kwargs)
if not resources:
LOG.warning("The {0} resource {1} could not be "
"found".format(self.NAME, resource_id))
return
resource_id = resources[0]['id']
raw_kwargs = dict(
metric=meter,
start=start_time,
stop=end_time,
resource_id=resource_id,
granularity=granularity,
)
kwargs = {k: v for k, v in raw_kwargs.items() if k and v}
statistics = self.query_retry(
f=self.gnocchi.metric.get_measures, **kwargs)
return_value = None
if statistics:
# measure has structure [time, granularity, value]
if meter_name == 'host_airflow':
# Airflow from hardware.ipmi.node.airflow is reported as
# 1/10 th of actual CFM
return_value = {s[0]: s[2]*10 for s in statistics}
else:
return_value = {s[0]: s[2] for s in statistics}
return return_value
def get_host_cpu_usage(self, resource, period, aggregate, def get_host_cpu_usage(self, resource, period, aggregate,
granularity=300): granularity=300):

View File

@@ -16,13 +16,10 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
from urllib import parse as urlparse
from http import HTTPStatus
from oslo_config import cfg from oslo_config import cfg
from oslo_log import log from oslo_log import log
import six.moves.urllib.parse as urlparse
from watcher._i18n import _
from watcher.common import clients from watcher.common import clients
from watcher.common import exception from watcher.common import exception
from watcher.decision_engine.datasources import base from watcher.decision_engine.datasources import base
@@ -138,13 +135,12 @@ class GrafanaHelper(base.DataSourceBase):
raise exception.DataSourceNotAvailable(self.NAME) raise exception.DataSourceNotAvailable(self.NAME)
resp = requests.get(self._base_url + str(project_id) + '/query', resp = requests.get(self._base_url + str(project_id) + '/query',
params=params, headers=self._headers, params=params, headers=self._headers)
timeout=CONF.grafana_client.http_timeout) if resp.status_code == 200:
if resp.status_code == HTTPStatus.OK:
return resp return resp
elif resp.status_code == HTTPStatus.BAD_REQUEST: elif resp.status_code == 400:
LOG.error("Query for metric is invalid") LOG.error("Query for metric is invalid")
elif resp.status_code == HTTPStatus.UNAUTHORIZED: elif resp.status_code == 401:
LOG.error("Authorization token is invalid") LOG.error("Authorization token is invalid")
raise exception.DataSourceNotAvailable(self.NAME) raise exception.DataSourceNotAvailable(self.NAME)
@@ -191,12 +187,6 @@ class GrafanaHelper(base.DataSourceBase):
return result return result
def statistic_series(self, resource=None, resource_type=None,
meter_name=None, start_time=None, end_time=None,
granularity=300):
raise NotImplementedError(
_('Grafana helper does not support statistic series method'))
def get_host_cpu_usage(self, resource, period=300, def get_host_cpu_usage(self, resource, period=300,
aggregate="mean", granularity=None): aggregate="mean", granularity=None):
return self.statistic_aggregation( return self.statistic_aggregation(

View File

@@ -21,6 +21,7 @@ import datetime
from monascaclient import exc from monascaclient import exc
from watcher.common import clients from watcher.common import clients
from watcher.common import exception
from watcher.decision_engine.datasources import base from watcher.decision_engine.datasources import base
@@ -89,7 +90,9 @@ class MonascaHelper(base.DataSourceBase):
stop_time = datetime.datetime.utcnow() stop_time = datetime.datetime.utcnow()
start_time = stop_time - datetime.timedelta(seconds=(int(period))) start_time = stop_time - datetime.timedelta(seconds=(int(period)))
meter = self._get_meter(meter_name) meter = self.METRIC_MAP.get(meter_name)
if meter is None:
raise exception.MetricNotAvailable(metric=meter_name)
if aggregate == 'mean': if aggregate == 'mean':
aggregate = 'avg' aggregate = 'avg'
@@ -118,34 +121,6 @@ class MonascaHelper(base.DataSourceBase):
return cpu_usage return cpu_usage
def statistic_series(self, resource=None, resource_type=None,
meter_name=None, start_time=None, end_time=None,
granularity=300):
meter = self._get_meter(meter_name)
raw_kwargs = dict(
name=meter,
start_time=start_time.isoformat(),
end_time=end_time.isoformat(),
dimensions={'hostname': resource.uuid},
statistics='avg',
group_by='*',
)
kwargs = {k: v for k, v in raw_kwargs.items() if k and v}
statistics = self.query_retry(
f=self.monasca.metrics.list_statistics, **kwargs)
result = {}
for stat in statistics:
v_index = stat['columns'].index('avg')
t_index = stat['columns'].index('timestamp')
result.update({r[t_index]: r[v_index] for r in stat['statistics']})
return result
def get_host_cpu_usage(self, resource, period, def get_host_cpu_usage(self, resource, period,
aggregate, granularity=None): aggregate, granularity=None):
return self.statistic_aggregation( return self.statistic_aggregation(

View File

@@ -15,11 +15,13 @@
# limitations under the License. # limitations under the License.
import abc import abc
import six
from watcher.common.loader import loadable from watcher.common.loader import loadable
class Goal(loadable.Loadable, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class Goal(loadable.Loadable):
def __init__(self, config): def __init__(self, config):
super(Goal, self).__init__(config) super(Goal, self).__init__(config)

View File

@@ -27,8 +27,11 @@ import abc
import jsonschema import jsonschema
from oslo_serialization import jsonutils from oslo_serialization import jsonutils
import six
class EfficacySpecification(object, metaclass=abc.ABCMeta):
@six.add_metaclass(abc.ABCMeta)
class EfficacySpecification(object):
def __init__(self): def __init__(self):
self._indicators_specs = self.get_indicators_specifications() self._indicators_specs = self.get_indicators_specifications()

View File

@@ -18,6 +18,7 @@ import abc
import jsonschema import jsonschema
from jsonschema import SchemaError from jsonschema import SchemaError
from jsonschema import ValidationError from jsonschema import ValidationError
import six
from oslo_log import log from oslo_log import log
from oslo_serialization import jsonutils from oslo_serialization import jsonutils
@@ -28,7 +29,8 @@ from watcher.common import exception
LOG = log.getLogger(__name__) LOG = log.getLogger(__name__)
class IndicatorSpecification(object, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class IndicatorSpecification(object):
def __init__(self, name=None, description=None, unit=None, required=True): def __init__(self, name=None, description=None, unit=None, required=True):
self.name = name self.name = name

View File

@@ -19,6 +19,9 @@
# limitations under the License. # limitations under the License.
# #
from __future__ import unicode_literals
from watcher.common.loader import default from watcher.common.loader import default

View File

@@ -25,9 +25,11 @@ See: :doc:`../architecture` for more details on this component.
""" """
import abc import abc
import six
class Model(object, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class Model(object):
@abc.abstractmethod @abc.abstractmethod
def to_string(self): def to_string(self):

View File

@@ -110,6 +110,7 @@ import time
from oslo_config import cfg from oslo_config import cfg
from oslo_log import log from oslo_log import log
import six
from watcher.common import clients from watcher.common import clients
from watcher.common.loader import loadable from watcher.common.loader import loadable
@@ -119,8 +120,8 @@ LOG = log.getLogger(__name__)
CONF = cfg.CONF CONF = cfg.CONF
class BaseClusterDataModelCollector(loadable.LoadableSingleton, @six.add_metaclass(abc.ABCMeta)
metaclass=abc.ABCMeta): class BaseClusterDataModelCollector(loadable.LoadableSingleton):
STALE_MODEL = model_root.ModelRoot(stale=True) STALE_MODEL = model_root.ModelRoot(stale=True)

View File

@@ -13,6 +13,8 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import six
from oslo_log import log from oslo_log import log
from watcher.common import cinder_helper from watcher.common import cinder_helper
@@ -150,9 +152,6 @@ class CinderClusterDataModelCollector(base.BaseClusterDataModelCollector):
if self._audit_scope_handler is None: if self._audit_scope_handler is None:
LOG.debug("No audit, Don't Build storage data model") LOG.debug("No audit, Don't Build storage data model")
return return
if self._data_model_scope is None:
LOG.debug("No audit scope, Don't Build storage data model")
return
builder = CinderModelBuilder(self.osc) builder = CinderModelBuilder(self.osc)
return builder.execute(self._data_model_scope) return builder.execute(self._data_model_scope)
@@ -205,7 +204,7 @@ class CinderModelBuilder(base.BaseModelBuilder):
"""Build a storage node from a Cinder storage node """Build a storage node from a Cinder storage node
:param node: A storage node :param node: A storage node
:type node: :py:class:`~cinderclient.v3.services.Service` :type node: :py:class:`~cinderclient.v2.services.Service`
""" """
# node.host is formatted as host@backendname since ocata, # node.host is formatted as host@backendname since ocata,
# or may be only host as of ocata # or may be only host as of ocata
@@ -233,7 +232,7 @@ class CinderModelBuilder(base.BaseModelBuilder):
"""Build a storage pool from a Cinder storage pool """Build a storage pool from a Cinder storage pool
:param pool: A storage pool :param pool: A storage pool
:type pool: :py:class:`~cinderclient.v3.pools.Pool` :type pool: :py:class:`~cinderclient.v2.pools.Pool`
:raises: exception.InvalidPoolAttributeValue :raises: exception.InvalidPoolAttributeValue
""" """
# build up the storage pool. # build up the storage pool.
@@ -287,7 +286,7 @@ class CinderModelBuilder(base.BaseModelBuilder):
:param instance: Cinder Volume object. :param instance: Cinder Volume object.
:return: A volume node for the graph. :return: A volume node for the graph.
""" """
attachments = [{k: v for k, v in iter(d.items()) if k in ( attachments = [{k: v for k, v in six.iteritems(d) if k in (
'server_id', 'attachment_id')} for d in volume.attachments] 'server_id', 'attachment_id')} for d in volume.attachments]
volume_attributes = { volume_attributes = {

View File

@@ -63,9 +63,6 @@ class BaremetalClusterDataModelCollector(base.BaseClusterDataModelCollector):
if self._audit_scope_handler is None: if self._audit_scope_handler is None:
LOG.debug("No audit, Don't Build Baremetal data model") LOG.debug("No audit, Don't Build Baremetal data model")
return return
if self._data_model_scope is None:
LOG.debug("No audit scope, Don't Build Baremetal data model")
return
builder = BareMetalModelBuilder(self.osc) builder = BareMetalModelBuilder(self.osc)
return builder.execute(self._data_model_scope) return builder.execute(self._data_model_scope)

View File

@@ -184,9 +184,6 @@ class NovaClusterDataModelCollector(base.BaseClusterDataModelCollector):
if self._audit_scope_handler is None: if self._audit_scope_handler is None:
LOG.debug("No audit, Don't Build compute data model") LOG.debug("No audit, Don't Build compute data model")
return return
if self._data_model_scope is None:
LOG.debug("No audit scope, Don't Build compute data model")
return
builder = NovaModelBuilder(self.osc) builder = NovaModelBuilder(self.osc)
return builder.execute(self._data_model_scope) return builder.execute(self._data_model_scope)

View File

@@ -16,12 +16,14 @@
import abc import abc
import six
from watcher.decision_engine.model.element import base from watcher.decision_engine.model.element import base
from watcher.objects import fields as wfields from watcher.objects import fields as wfields
class BaremetalResource(base.Element, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class BaremetalResource(base.Element):
VERSION = '1.0' VERSION = '1.0'

View File

@@ -21,6 +21,7 @@ import collections
from lxml import etree from lxml import etree
from oslo_log import log from oslo_log import log
import six
from watcher.objects import base from watcher.objects import base
from watcher.objects import fields as wfields from watcher.objects import fields as wfields
@@ -28,8 +29,9 @@ from watcher.objects import fields as wfields
LOG = log.getLogger(__name__) LOG = log.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class Element(base.WatcherObject, base.WatcherObjectDictCompat, class Element(base.WatcherObject, base.WatcherObjectDictCompat,
base.WatcherComparableObject, metaclass=abc.ABCMeta): base.WatcherComparableObject):
# Initial version # Initial version
VERSION = '1.0' VERSION = '1.0'

View File

@@ -16,12 +16,14 @@
import abc import abc
import six
from watcher.decision_engine.model.element import base from watcher.decision_engine.model.element import base
from watcher.objects import fields as wfields from watcher.objects import fields as wfields
class ComputeResource(base.Element, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class ComputeResource(base.Element):
VERSION = '1.0' VERSION = '1.0'

View File

@@ -16,12 +16,14 @@
import abc import abc
import six
from watcher.decision_engine.model.element import base from watcher.decision_engine.model.element import base
from watcher.objects import fields as wfields from watcher.objects import fields as wfields
class StorageResource(base.Element, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class StorageResource(base.Element):
VERSION = '1.0' VERSION = '1.0'

View File

@@ -21,6 +21,7 @@ from lxml import etree
import networkx as nx import networkx as nx
from oslo_concurrency import lockutils from oslo_concurrency import lockutils
from oslo_log import log from oslo_log import log
import six
from watcher._i18n import _ from watcher._i18n import _
from watcher.common import exception from watcher.common import exception
@@ -92,9 +93,9 @@ class ModelRoot(nx.DiGraph, base.Model):
:param node: :py:class:`~.node.ComputeNode` object or node UUID :param node: :py:class:`~.node.ComputeNode` object or node UUID
:type node: str or :py:class:`~.instance.Instance` :type node: str or :py:class:`~.instance.Instance`
""" """
if isinstance(instance, str): if isinstance(instance, six.string_types):
instance = self.get_instance_by_uuid(instance) instance = self.get_instance_by_uuid(instance)
if isinstance(node, str): if isinstance(node, six.string_types):
node = self.get_node_by_uuid(node) node = self.get_node_by_uuid(node)
self.assert_node(node) self.assert_node(node)
self.assert_instance(instance) self.assert_instance(instance)
@@ -103,9 +104,9 @@ class ModelRoot(nx.DiGraph, base.Model):
@lockutils.synchronized("model_root") @lockutils.synchronized("model_root")
def unmap_instance(self, instance, node): def unmap_instance(self, instance, node):
if isinstance(instance, str): if isinstance(instance, six.string_types):
instance = self.get_instance_by_uuid(instance) instance = self.get_instance_by_uuid(instance)
if isinstance(node, str): if isinstance(node, six.string_types):
node = self.get_node_by_uuid(node) node = self.get_node_by_uuid(node)
self.remove_edge(instance.uuid, node.uuid) self.remove_edge(instance.uuid, node.uuid)
@@ -366,9 +367,9 @@ class StorageModelRoot(nx.DiGraph, base.Model):
:param pool: :py:class:`~.node.Pool` object or pool name :param pool: :py:class:`~.node.Pool` object or pool name
:param node: :py:class:`~.node.StorageNode` object or node host :param node: :py:class:`~.node.StorageNode` object or node host
""" """
if isinstance(pool, str): if isinstance(pool, six.string_types):
pool = self.get_pool_by_pool_name(pool) pool = self.get_pool_by_pool_name(pool)
if isinstance(node, str): if isinstance(node, six.string_types):
node = self.get_node_by_name(node) node = self.get_node_by_name(node)
self.assert_node(node) self.assert_node(node)
self.assert_pool(pool) self.assert_pool(pool)
@@ -382,9 +383,9 @@ class StorageModelRoot(nx.DiGraph, base.Model):
:param pool: :py:class:`~.node.Pool` object or pool name :param pool: :py:class:`~.node.Pool` object or pool name
:param node: :py:class:`~.node.StorageNode` object or node name :param node: :py:class:`~.node.StorageNode` object or node name
""" """
if isinstance(pool, str): if isinstance(pool, six.string_types):
pool = self.get_pool_by_pool_name(pool) pool = self.get_pool_by_pool_name(pool)
if isinstance(node, str): if isinstance(node, six.string_types):
node = self.get_node_by_name(node) node = self.get_node_by_name(node)
self.remove_edge(pool.name, node.host) self.remove_edge(pool.name, node.host)
@@ -410,9 +411,9 @@ class StorageModelRoot(nx.DiGraph, base.Model):
:param volume: :py:class:`~.volume.Volume` object or volume UUID :param volume: :py:class:`~.volume.Volume` object or volume UUID
:param pool: :py:class:`~.node.Pool` object or pool name :param pool: :py:class:`~.node.Pool` object or pool name
""" """
if isinstance(volume, str): if isinstance(volume, six.string_types):
volume = self.get_volume_by_uuid(volume) volume = self.get_volume_by_uuid(volume)
if isinstance(pool, str): if isinstance(pool, six.string_types):
pool = self.get_pool_by_pool_name(pool) pool = self.get_pool_by_pool_name(pool)
self.assert_pool(pool) self.assert_pool(pool)
self.assert_volume(volume) self.assert_volume(volume)
@@ -426,9 +427,9 @@ class StorageModelRoot(nx.DiGraph, base.Model):
:param volume: :py:class:`~.volume.Volume` object or volume UUID :param volume: :py:class:`~.volume.Volume` object or volume UUID
:param pool: :py:class:`~.node.Pool` object or pool name :param pool: :py:class:`~.node.Pool` object or pool name
""" """
if isinstance(volume, str): if isinstance(volume, six.string_types):
volume = self.get_volume_by_uuid(volume) volume = self.get_volume_by_uuid(volume)
if isinstance(pool, str): if isinstance(pool, six.string_types):
pool = self.get_pool_by_pool_name(pool) pool = self.get_pool_by_pool_name(pool)
self.remove_edge(volume.uuid, pool.name) self.remove_edge(volume.uuid, pool.name)
@@ -631,7 +632,7 @@ class BaremetalModelRoot(nx.DiGraph, base.Model):
super(BaremetalModelRoot, self).remove_node(node.uuid) super(BaremetalModelRoot, self).remove_node(node.uuid)
except nx.NetworkXError as exc: except nx.NetworkXError as exc:
LOG.exception(exc) LOG.exception(exc)
raise exception.IronicNodeNotFound(uuid=node.uuid) raise exception.IronicNodeNotFound(name=node.uuid)
@lockutils.synchronized("baremetal_model") @lockutils.synchronized("baremetal_model")
def get_all_ironic_nodes(self): def get_all_ironic_nodes(self):
@@ -643,7 +644,7 @@ class BaremetalModelRoot(nx.DiGraph, base.Model):
try: try:
return self._get_by_uuid(uuid) return self._get_by_uuid(uuid)
except exception.BaremetalResourceNotFound: except exception.BaremetalResourceNotFound:
raise exception.IronicNodeNotFound(uuid=uuid) raise exception.IronicNodeNotFound(name=uuid)
def _get_by_uuid(self, uuid): def _get_by_uuid(self, uuid):
try: try:

View File

@@ -17,9 +17,11 @@
# limitations under the License. # limitations under the License.
import abc import abc
import six
class NotificationEndpoint(object, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class NotificationEndpoint(object):
def __init__(self, collector): def __init__(self, collector):
super(NotificationEndpoint, self).__init__() super(NotificationEndpoint, self).__init__()

View File

@@ -14,6 +14,8 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import six
from oslo_log import log from oslo_log import log
from watcher.common import cinder_helper from watcher.common import cinder_helper
from watcher.common import exception from watcher.common import exception
@@ -159,7 +161,7 @@ class CinderNotification(base.NotificationEndpoint):
return 'attachment_id' return 'attachment_id'
attachments = [ attachments = [
{_keyReplace(k): v for k, v in iter(d.items()) {_keyReplace(k): v for k, v in six.iteritems(d)
if k in ('instance_uuid', 'id')} if k in ('instance_uuid', 'id')}
for d in data['volume_attachment'] for d in data['volume_attachment']
] ]

View File

@@ -19,6 +19,7 @@
import re import re
import oslo_messaging as om import oslo_messaging as om
import six
class NotificationFilter(om.NotificationFilter): class NotificationFilter(om.NotificationFilter):
@@ -80,7 +81,7 @@ class NotificationFilter(om.NotificationFilter):
elif regex is not None and data is None: elif regex is not None and data is None:
return True return True
elif (regex is not None and elif (regex is not None and
isinstance(data, str) and isinstance(data, six.string_types) and
not regex.match(data)): not regex.match(data)):
return True return True

View File

@@ -45,11 +45,13 @@ See :doc:`../architecture` for more details on this component.
""" """
import abc import abc
import six
from watcher.common.loader import loadable from watcher.common.loader import loadable
class BasePlanner(loadable.Loadable, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class BasePlanner(loadable.Loadable):
@classmethod @classmethod
def get_config_opts(cls): def get_config_opts(cls):

View File

@@ -16,11 +16,13 @@
# #
import abc import abc
import six
from watcher.common import context from watcher.common import context
class BaseScope(object, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class BaseScope(object):
"""A base class for Scope mechanism """A base class for Scope mechanism
Child of this class is called when audit launches strategy. This strategy Child of this class is called when audit launches strategy. This strategy

View File

@@ -17,11 +17,13 @@
# limitations under the License. # limitations under the License.
import abc import abc
import six
from watcher.common.loader import loadable from watcher.common.loader import loadable
class ScoringEngine(loadable.Loadable, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class ScoringEngine(loadable.Loadable):
"""A base class for all the Scoring Engines. """A base class for all the Scoring Engines.
A Scoring Engine is an instance of a data model, to which the learning A Scoring Engine is an instance of a data model, to which the learning
@@ -95,7 +97,8 @@ class ScoringEngine(loadable.Loadable, metaclass=abc.ABCMeta):
return [] return []
class ScoringEngineContainer(loadable.Loadable, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class ScoringEngineContainer(loadable.Loadable):
"""A base class for all the Scoring Engines Containers. """A base class for all the Scoring Engines Containers.
A Scoring Engine Container is an abstraction which allows to plugin A Scoring Engine Container is an abstraction which allows to plugin

View File

@@ -56,11 +56,13 @@ Two approaches to dealing with this can be envisaged:
""" """
import abc import abc
import six
from watcher.decision_engine.solution import efficacy from watcher.decision_engine.solution import efficacy
class BaseSolution(object, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class BaseSolution(object):
def __init__(self, goal, strategy): def __init__(self, goal, strategy):
"""Base Solution constructor """Base Solution constructor

View File

@@ -17,9 +17,11 @@
# limitations under the License. # limitations under the License.
# #
import abc import abc
import six
class BaseSolutionComparator(object, metaclass=abc.ABCMeta): @six.add_metaclass(abc.ABCMeta)
class BaseSolutionComparator(object):
@abc.abstractmethod @abc.abstractmethod
def compare(self, sol1, sol2): def compare(self, sol1, sol2):
raise NotImplementedError() raise NotImplementedError()

Some files were not shown because too many files have changed in this diff Show More