Update hacking for Python3

The repo is Python 3 now, so update hacking to version 3.0 which
supports Python 3.

Fix problems found.

Update local hacking checks for new flake8.

Remove hacking and friends from lower-constraints; they do not need
to be installed at run-time.

Change-Id: Ia6af344ec8441dc98a0820176373dcff3a8c80d5
This commit is contained in:
Andreas Jaeger
2020-04-02 07:36:46 +02:00
parent 60a3f1f072
commit 1bb2aefec3
19 changed files with 65 additions and 62 deletions

View File

@@ -30,7 +30,6 @@ eventlet==0.20.0
extras==1.0.0 extras==1.0.0
fasteners==0.14.1 fasteners==0.14.1
fixtures==3.0.0 fixtures==3.0.0
flake8==2.5.5
freezegun==0.3.10 freezegun==0.3.10
future==0.16.0 future==0.16.0
futurist==1.8.0 futurist==1.8.0
@@ -38,7 +37,6 @@ gitdb2==2.0.3
GitPython==2.1.8 GitPython==2.1.8
gnocchiclient==7.0.1 gnocchiclient==7.0.1
greenlet==0.4.13 greenlet==0.4.13
hacking==0.12.0
idna==2.6 idna==2.6
imagesize==1.0.0 imagesize==1.0.0
iso8601==0.1.12 iso8601==0.1.12
@@ -95,14 +93,12 @@ Paste==2.0.3
PasteDeploy==1.5.2 PasteDeploy==1.5.2
pbr==3.1.1 pbr==3.1.1
pecan==1.3.2 pecan==1.3.2
pep8==1.5.7
pika==0.10.0 pika==0.10.0
pika-pool==0.1.3 pika-pool==0.1.3
prettytable==0.7.2 prettytable==0.7.2
psutil==5.4.3 psutil==5.4.3
pycadf==2.7.0 pycadf==2.7.0
pycparser==2.18 pycparser==2.18
pyflakes==0.8.1
Pygments==2.2.0 Pygments==2.2.0
pyinotify==0.9.6 pyinotify==0.9.6
pyOpenSSL==17.5.0 pyOpenSSL==17.5.0

View File

@@ -5,7 +5,7 @@
coverage>=4.5.1 # Apache-2.0 coverage>=4.5.1 # Apache-2.0
doc8>=0.8.0 # Apache-2.0 doc8>=0.8.0 # Apache-2.0
freezegun>=0.3.10 # Apache-2.0 freezegun>=0.3.10 # Apache-2.0
hacking>=1.1.0,<1.2.0 # Apache-2.0 hacking>=3.0,<3.1.0 # Apache-2.0
mock>=2.0.0 # BSD mock>=2.0.0 # BSD
oslotest>=3.3.0 # Apache-2.0 oslotest>=3.3.0 # Apache-2.0
os-testr>=1.0.0 # Apache-2.0 os-testr>=1.0.0 # Apache-2.0

24
tox.ini
View File

@@ -75,7 +75,8 @@ commands =
[flake8] [flake8]
filename = *.py,app.wsgi filename = *.py,app.wsgi
show-source=True show-source=True
ignore= H105,E123,E226,N320,H202 # W504 line break after binary operator
ignore= H105,E123,E226,N320,H202,W504
builtins= _ builtins= _
enable-extensions = H106,H203,H904 enable-extensions = H106,H203,H904
exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,*sqlalchemy/alembic/versions/*,demo/,releasenotes exclude=.venv,.git,.tox,dist,doc,*lib/python*,*egg,build,*sqlalchemy/alembic/versions/*,demo/,releasenotes
@@ -85,7 +86,26 @@ commands = python setup.py bdist_wheel
[hacking] [hacking]
import_exceptions = watcher._i18n import_exceptions = watcher._i18n
local-check-factory = watcher.hacking.checks.factory
[flake8:local-plugins]
extension =
N319 = checks:no_translate_debug_logs
N321 = checks:use_jsonutils
N322 = checks:check_assert_called_once_with
N325 = checks:check_python3_xrange
N326 = checks:check_no_basestring
N327 = checks:check_python3_no_iteritems
N328 = checks:check_asserttrue
N329 = checks:check_assertfalse
N330 = checks:check_assertempty
N331 = checks:check_assertisinstance
N332 = checks:check_assertequal_for_httpcode
N333 = checks:check_log_warn_deprecated
N340 = checks:check_oslo_i18n_wrapper
N341 = checks:check_builtins_gettext
N342 = checks:no_redundant_import_alias
paths = ./watcher/hacking
[doc8] [doc8]
extension=.rst extension=.rst

View File

@@ -184,7 +184,7 @@ class MultiType(wtypes.UserType):
class JsonPatchType(wtypes.Base): class JsonPatchType(wtypes.Base):
"""A complex type that represents a single json-patch operation.""" """A complex type that represents a single json-patch operation."""
path = wtypes.wsattr(wtypes.StringType(pattern='^(/[\w-]+)+$'), path = wtypes.wsattr(wtypes.StringType(pattern=r'^(/[\w-]+)+$'),
mandatory=True) mandatory=True)
op = wtypes.wsattr(wtypes.Enum(str, 'add', 'replace', 'remove'), op = wtypes.wsattr(wtypes.Enum(str, 'add', 'replace', 'remove'),
mandatory=True) mandatory=True)

View File

@@ -25,6 +25,7 @@ class VERSIONS(enum.Enum):
MINOR_4_WEBHOOK_API = 4 # v1.4: Add webhook trigger API MINOR_4_WEBHOOK_API = 4 # v1.4: Add webhook trigger API
MINOR_MAX_VERSION = 4 MINOR_MAX_VERSION = 4
# This is the version 1 API # This is the version 1 API
BASE_VERSION = 1 BASE_VERSION = 1
# String representations of the minor and maximum versions # String representations of the minor and maximum versions

View File

@@ -34,7 +34,7 @@ class AuthTokenMiddleware(auth_token.AuthProtocol):
""" """
def __init__(self, app, conf, public_api_routes=()): def __init__(self, app, conf, public_api_routes=()):
route_pattern_tpl = '%s(\.json|\.xml)?$' route_pattern_tpl = r'%s(\.json|\.xml)?$'
try: try:
self.public_api_routes = [re.compile(route_pattern_tpl % route_tpl) self.public_api_routes = [re.compile(route_pattern_tpl % route_tpl)

View File

@@ -140,7 +140,7 @@ class BaseAction(loadable.Loadable):
raise NotImplementedError() raise NotImplementedError()
def check_abort(self): def check_abort(self):
if self.__class__.__name__ is 'Migrate': if self.__class__.__name__ == 'Migrate':
if self.migration_type == self.LIVE_MIGRATION: if self.migration_type == self.LIVE_MIGRATION:
return True return True
else: else:

View File

@@ -47,24 +47,24 @@ class Resize(base.BaseAction):
@property @property
def schema(self): def schema(self):
return { return {
'type': 'object', 'type': 'object',
'properties': { 'properties': {
'resource_id': { 'resource_id': {
'type': 'string', 'type': 'string',
'minlength': 1, 'minlength': 1,
'pattern': ('^([a-fA-F0-9]){8}-([a-fA-F0-9]){4}-' 'pattern': ('^([a-fA-F0-9]){8}-([a-fA-F0-9]){4}-'
'([a-fA-F0-9]){4}-([a-fA-F0-9]){4}-' '([a-fA-F0-9]){4}-([a-fA-F0-9]){4}-'
'([a-fA-F0-9]){12}$') '([a-fA-F0-9]){12}$')
},
'flavor': {
'type': 'string',
'minlength': 1,
},
}, },
'required': ['resource_id', 'flavor'], 'flavor': {
'additionalProperties': False, 'type': 'string',
} 'minlength': 1,
},
},
'required': ['resource_id', 'flavor'],
'additionalProperties': False,
}
@property @property
def instance_uuid(self): def instance_uuid(self):

View File

@@ -112,7 +112,7 @@ class DefaultWorkFlowEngine(base.BaseWorkFlowEngine):
return flow return flow
except exception.ActionPlanCancelled as e: except exception.ActionPlanCancelled:
raise raise
except tf_exception.WrappedFailure as e: except tf_exception.WrappedFailure as e:

View File

@@ -37,6 +37,7 @@ class GreenThreadPoolExecutor(BasePoolExecutor):
pool = futurist.GreenThreadPoolExecutor(int(max_workers)) pool = futurist.GreenThreadPoolExecutor(int(max_workers))
super(GreenThreadPoolExecutor, self).__init__(pool) super(GreenThreadPoolExecutor, self).__init__(pool)
executors = { executors = {
'default': GreenThreadPoolExecutor(), 'default': GreenThreadPoolExecutor(),
} }

View File

@@ -153,6 +153,7 @@ def extend_with_strict_schema(validator_class):
return validators.extend(validator_class, {"properties": strict_schema}) return validators.extend(validator_class, {"properties": strict_schema})
StrictDefaultValidatingDraft4Validator = extend_with_default( StrictDefaultValidatingDraft4Validator = extend_with_default(
extend_with_strict_schema(validators.Draft4Validator)) extend_with_strict_schema(validators.Draft4Validator))

View File

@@ -1125,8 +1125,8 @@ class Connection(api.BaseConnection):
def get_action_description_by_id(self, context, def get_action_description_by_id(self, context,
action_id, eager=False): action_id, eager=False):
return self._get_action_description( return self._get_action_description(
context, fieldname="id", value=action_id, eager=eager) context, fieldname="id", value=action_id, eager=eager)
def get_action_description_by_type(self, context, def get_action_description_by_type(self, context,
action_type, eager=False): action_type, eager=False):

View File

@@ -188,7 +188,7 @@ class CeilometerHelper(base.DataSourceBase):
item_value = None item_value = None
if statistic: if statistic:
item_value = statistic[-1]._info.get('aggregate').get(aggregate) item_value = statistic[-1]._info.get('aggregate').get(aggregate)
if meter_name is 'host_airflow': if meter_name == 'host_airflow':
# Airflow from hardware.ipmi.node.airflow is reported as # Airflow from hardware.ipmi.node.airflow is reported as
# 1/10 th of actual CFM # 1/10 th of actual CFM
item_value *= 10 item_value *= 10

View File

@@ -116,7 +116,7 @@ class GnocchiHelper(base.DataSourceBase):
# measure has structure [time, granularity, value] # measure has structure [time, granularity, value]
return_value = statistics[-1][2] return_value = statistics[-1][2]
if meter_name is 'host_airflow': if meter_name == 'host_airflow':
# Airflow from hardware.ipmi.node.airflow is reported as # Airflow from hardware.ipmi.node.airflow is reported as
# 1/10 th of actual CFM # 1/10 th of actual CFM
return_value *= 10 return_value *= 10

View File

@@ -72,7 +72,7 @@ class GrafanaHelper(base.DataSourceBase):
# Very basic url parsing # Very basic url parsing
parse = urlparse.urlparse(self._base_url) parse = urlparse.urlparse(self._base_url)
if parse.scheme is '' or parse.netloc is '' or parse.path is '': if parse.scheme == '' or parse.netloc == '' or parse.path == '':
LOG.critical("GrafanaHelper url not properly configured, " LOG.critical("GrafanaHelper url not properly configured, "
"check base_url and project_id") "check base_url and project_id")
return return

View File

@@ -112,10 +112,10 @@ class DataSourceManager(object):
datasource is attempted. datasource is attempted.
""" """
if not self.datasources or len(self.datasources) is 0: if not self.datasources or len(self.datasources) == 0:
raise exception.NoDatasourceAvailable raise exception.NoDatasourceAvailable
if not metrics or len(metrics) is 0: if not metrics or len(metrics) == 0:
LOG.critical("Can not retrieve datasource without specifying " LOG.critical("Can not retrieve datasource without specifying "
"list of required metrics.") "list of required metrics.")
raise exception.InvalidParameter(parameter='metrics', raise exception.InvalidParameter(parameter='metrics',
@@ -125,11 +125,11 @@ class DataSourceManager(object):
no_metric = False no_metric = False
for metric in metrics: for metric in metrics:
if (metric not in self.metric_map[datasource] or if (metric not in self.metric_map[datasource] or
self.metric_map[datasource].get(metric) is None): self.metric_map[datasource].get(metric) is None):
no_metric = True no_metric = True
LOG.warning("Datasource: {0} could not be used due to " LOG.warning("Datasource: {0} could not be used due to "
"metric: {1}".format(datasource, metric)) "metric: {1}".format(datasource, metric))
break break
if not no_metric: if not no_metric:
# Try to use a specific datasource but attempt additional # Try to use a specific datasource but attempt additional
# datasources upon exceptions (if config has more datasources) # datasources upon exceptions (if config has more datasources)

View File

@@ -401,7 +401,7 @@ class BasicConsolidation(base.ServerConsolidationBaseStrategy):
self._pre_execute() self._pre_execute()
# backwards compatibility for node parameter. # backwards compatibility for node parameter.
if self.aggregation_method['node'] is not '': if self.aggregation_method['node'] != '':
LOG.warning('Parameter node has been renamed to compute_node and ' LOG.warning('Parameter node has been renamed to compute_node and '
'will be removed in next release.') 'will be removed in next release.')
self.aggregation_method['compute_node'] = \ self.aggregation_method['compute_node'] = \

View File

@@ -514,7 +514,7 @@ class WorkloadStabilization(base.WorkloadStabilizationBaseStrategy):
self.aggregation_method['node'] self.aggregation_method['node']
# backwards compatibility for node parameter with period. # backwards compatibility for node parameter with period.
if self.periods['node'] is not 0: if self.periods['node'] != 0:
LOG.warning('Parameter node has been renamed to compute_node and ' LOG.warning('Parameter node has been renamed to compute_node and '
'will be removed in next release.') 'will be removed in next release.')
self.periods['compute_node'] = self.periods['node'] self.periods['compute_node'] = self.periods['node']

View File

@@ -23,6 +23,8 @@ def flake8ext(f):
only for unit tests to know which are watcher flake8 extensions. only for unit tests to know which are watcher flake8 extensions.
""" """
f.name = __name__ f.name = __name__
f.version = '0.0.1'
f.skip_on_py3 = False
return f return f
@@ -162,11 +164,11 @@ def check_asserttrue(logical_line, filename):
def check_assertfalse(logical_line, filename): def check_assertfalse(logical_line, filename):
if 'watcher/tests/' in filename: if 'watcher/tests/' in filename:
if re.search(r"assertEqual\(\s*False,[^,]*(,[^,]*)?\)", logical_line): if re.search(r"assertEqual\(\s*False,[^,]*(,[^,]*)?\)", logical_line):
msg = ("N328: Use assertFalse(observed) instead of " msg = ("N329: Use assertFalse(observed) instead of "
"assertEqual(False, observed)") "assertEqual(False, observed)")
yield (0, msg) yield (0, msg)
if re.search(r"assertEqual\([^,]*,\s*False(,[^,]*)?\)", logical_line): if re.search(r"assertEqual\([^,]*,\s*False(,[^,]*)?\)", logical_line):
msg = ("N328: Use assertFalse(observed) instead of " msg = ("N329: Use assertFalse(observed) instead of "
"assertEqual(False, observed)") "assertEqual(False, observed)")
yield (0, msg) yield (0, msg)
@@ -283,21 +285,3 @@ def no_redundant_import_alias(logical_line):
""" """
if re.match(re_redundant_import_alias, logical_line): if re.match(re_redundant_import_alias, logical_line):
yield(0, "N342: No redundant import alias.") yield(0, "N342: No redundant import alias.")
def factory(register):
register(use_jsonutils)
register(check_assert_called_once_with)
register(no_translate_debug_logs)
register(check_python3_xrange)
register(check_no_basestring)
register(check_python3_no_iteritems)
register(check_asserttrue)
register(check_assertfalse)
register(check_assertempty)
register(check_assertisinstance)
register(check_assertequal_for_httpcode)
register(check_log_warn_deprecated)
register(check_oslo_i18n_wrapper)
register(check_builtins_gettext)
register(no_redundant_import_alias)