Skip to content

Commit dfe3faf

Browse files
ofek
authored and therve committed
Adhere to code style (#3500)
1 parent c33c201 commit dfe3faf

10 files changed

Lines changed: 56 additions & 162 deletions

File tree

disk/datadog_checks/disk/__init__.py

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,4 @@
44
from .__about__ import __version__
55
from .disk import Disk
66

7-
__all__ = [
8-
'__version__',
9-
'Disk'
10-
]
7+
__all__ = ['__version__', 'Disk']

disk/datadog_checks/disk/disk.py

Lines changed: 22 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -9,27 +9,30 @@
99

1010
from six import iteritems, string_types
1111

12+
from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative
13+
from datadog_checks.base.utils.platform import Platform
14+
from datadog_checks.base.utils.subprocess_output import get_subprocess_output
15+
from datadog_checks.base.utils.timeout import TimeoutException, timeout
16+
1217
try:
1318
import psutil
1419
except ImportError:
1520
psutil = None
1621

1722
try:
1823
import datadog_agent # noqa: F401
24+
1925
is_agent_6 = True
2026
except ImportError:
2127
is_agent_6 = False
2228

23-
from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative
24-
from datadog_checks.base.utils.platform import Platform
25-
from datadog_checks.base.utils.subprocess_output import get_subprocess_output
26-
from datadog_checks.base.utils.timeout import timeout, TimeoutException
2729

2830
IGNORE_CASE = re.I if platform.system() == 'Windows' else 0
2931

3032

3133
class Disk(AgentCheck):
3234
""" Collects metrics about the machine's disks. """
35+
3336
# -T for filesystem info
3437
DF_COMMAND = ['df', '-T']
3538
METRIC_DISK = 'system.disk.{}'
@@ -106,8 +109,7 @@ def collect_metrics_psutil(self):
106109
disk_usage = timeout(5)(psutil.disk_usage)(part.mountpoint)
107110
except TimeoutException:
108111
self.log.warning(
109-
u'Timeout while retrieving the disk usage of `%s` mountpoint. Skipping...',
110-
part.mountpoint
112+
u'Timeout while retrieving the disk usage of `%s` mountpoint. Skipping...', part.mountpoint
111113
)
112114
continue
113115
except Exception as e:
@@ -145,15 +147,10 @@ def collect_metrics_psutil(self):
145147
rwro = {'rw', 'ro'} & set(part.opts.split(','))
146148
if len(rwro) == 1:
147149
self.service_check(
148-
'disk.read_write',
149-
AgentCheck.OK if rwro.pop() == 'rw' else AgentCheck.CRITICAL,
150-
tags=tags
150+
'disk.read_write', AgentCheck.OK if rwro.pop() == 'rw' else AgentCheck.CRITICAL, tags=tags
151151
)
152152
else:
153-
self.service_check(
154-
'disk.read_write', AgentCheck.UNKNOWN,
155-
tags=tags
156-
)
153+
self.service_check('disk.read_write', AgentCheck.UNKNOWN, tags=tags)
157154

158155
self.collect_latency_metrics()
159156

@@ -184,9 +181,8 @@ def _exclude_disk(self, device, file_system, mount_point):
184181
# account a space might be in the mount point.
185182
mount_point = mount_point.rsplit(' ', 1)[0]
186183

187-
return (
188-
self._partition_blacklisted(device, file_system, mount_point)
189-
or not self._partition_whitelisted(device, file_system, mount_point)
184+
return self._partition_blacklisted(device, file_system, mount_point) or not self._partition_whitelisted(
185+
device, file_system, mount_point
190186
)
191187

192188
def _partition_whitelisted(self, device, file_system, mount_point):
@@ -260,10 +256,7 @@ def _collect_inodes_metrics(self, mountpoint):
260256
try:
261257
inodes = timeout(5)(os.statvfs)(mountpoint)
262258
except TimeoutException:
263-
self.log.warning(
264-
u'Timeout while retrieving the disk usage of `%s` mountpoint. Skipping...',
265-
mountpoint
266-
)
259+
self.log.warning(u'Timeout while retrieving the disk usage of `%s` mountpoint. Skipping...', mountpoint)
267260
return metrics
268261
except Exception as e:
269262
self.log.warning('Unable to get disk metrics for %s: %s', mountpoint, e)
@@ -291,10 +284,8 @@ def collect_latency_metrics(self):
291284
write_time_pct = disk.write_time * 100 / 1000
292285
metric_tags = [] if self._custom_tags is None else self._custom_tags[:]
293286
metric_tags.append('device:{}'.format(disk_name))
294-
self.rate(self.METRIC_DISK.format('read_time_pct'),
295-
read_time_pct, tags=metric_tags)
296-
self.rate(self.METRIC_DISK.format('write_time_pct'),
297-
write_time_pct, tags=metric_tags)
287+
self.rate(self.METRIC_DISK.format('read_time_pct'), read_time_pct, tags=metric_tags)
288+
self.rate(self.METRIC_DISK.format('write_time_pct'), write_time_pct, tags=metric_tags)
298289
except AttributeError as e:
299290
# Some OS don't return read_time/write_time fields
300291
# http://psutil.readthedocs.io/en/latest/#psutil.disk_io_counters
@@ -348,7 +339,8 @@ def _keep_device(self, device):
348339
# map -hosts tmpfs 0 0 0 100% /net
349340
# and finally filter out fake devices
350341
return (
351-
device and len(device) > 1
342+
device
343+
and len(device) > 1
352344
and device[2].isdigit()
353345
and not self._exclude_disk(device[0], device[1], device[6])
354346
)
@@ -399,9 +391,7 @@ def _compile_pattern_filters(self, instance):
399391
self.warning(deprecation_message.format(old='excluded_filesystems', new='file_system_blacklist'))
400392

401393
if 'excluded_disks' in instance:
402-
device_blacklist_extras.extend(
403-
'{}$'.format(pattern) for pattern in instance['excluded_disks'] if pattern
404-
)
394+
device_blacklist_extras.extend('{}$'.format(pattern) for pattern in instance['excluded_disks'] if pattern)
405395
self.warning(deprecation_message.format(old='excluded_disks', new='device_blacklist'))
406396

407397
if 'excluded_disk_re' in instance:
@@ -415,19 +405,15 @@ def _compile_pattern_filters(self, instance):
415405
# Any without valid patterns will become None
416406
self._file_system_whitelist = self._compile_valid_patterns(self._file_system_whitelist, casing=re.I)
417407
self._file_system_blacklist = self._compile_valid_patterns(
418-
self._file_system_blacklist,
419-
casing=re.I,
420-
extra_patterns=file_system_blacklist_extras
408+
self._file_system_blacklist, casing=re.I, extra_patterns=file_system_blacklist_extras
421409
)
422410
self._device_whitelist = self._compile_valid_patterns(self._device_whitelist)
423411
self._device_blacklist = self._compile_valid_patterns(
424-
self._device_blacklist,
425-
extra_patterns=device_blacklist_extras
412+
self._device_blacklist, extra_patterns=device_blacklist_extras
426413
)
427414
self._mount_point_whitelist = self._compile_valid_patterns(self._mount_point_whitelist)
428415
self._mount_point_blacklist = self._compile_valid_patterns(
429-
self._mount_point_blacklist,
430-
extra_patterns=mount_point_blacklist_extras
416+
self._mount_point_blacklist, extra_patterns=mount_point_blacklist_extras
431417
)
432418

433419
def _compile_valid_patterns(self, patterns, casing=IGNORE_CASE, extra_patterns=None):
@@ -465,10 +451,7 @@ def _compile_tag_re(self):
465451
device_tag_list = []
466452
for regex_str, tags in iteritems(self._device_tag_re):
467453
try:
468-
device_tag_list.append([
469-
re.compile(regex_str, IGNORE_CASE),
470-
[t.strip() for t in tags.split(',')]
471-
])
454+
device_tag_list.append([re.compile(regex_str, IGNORE_CASE), [t.strip() for t in tags.split(',')]])
472455
except TypeError:
473456
self.log.warning('{} is not a valid regular expression and will be ignored'.format(regex_str))
474457
self._device_tag_re = device_tag_list

disk/setup.py

Lines changed: 2 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,11 @@
11
# (C) Datadog, Inc. 2018
22
# All rights reserved
33
# Licensed under a 3-clause BSD style license (see LICENSE)
4-
from setuptools import setup
54
from codecs import open # To use a consistent encoding
65
from os import path
76

7+
from setuptools import setup
8+
89
HERE = path.abspath(path.dirname(__file__))
910

1011
# Get version info
@@ -33,17 +34,13 @@ def get_requirements(fpath):
3334
long_description=long_description,
3435
long_description_content_type='text/markdown',
3536
keywords='datadog agent disk check',
36-
3737
# The project's main homepage.
3838
url='https://github.com/DataDog/integrations-core',
39-
4039
# Author details
4140
author='Datadog',
4241
author_email='packages@datadoghq.com',
43-
4442
# License
4543
license='MIT',
46-
4744
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
4845
classifiers=[
4946
'Development Status :: 5 - Production/Stable',
@@ -54,13 +51,10 @@ def get_requirements(fpath):
5451
'Programming Language :: Python :: 2',
5552
'Programming Language :: Python :: 2.7',
5653
],
57-
5854
# The package we're going to ship
5955
packages=['datadog_checks.disk'],
60-
6156
# Run-time dependencies
6257
install_requires=[CHECKS_BASE_REQ],
63-
6458
# Extra files to ship with the wheel package
6559
include_package_data=True,
6660
)

disk/tests/conftest.py

Lines changed: 6 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,7 @@
55
import pytest
66

77
from datadog_checks.dev.utils import ON_WINDOWS, mock_context_manager
8+
89
from .metrics import CORE_GAUGES, CORE_RATES, UNIX_GAUGES
910
from .mocks import MockDiskIOMetrics, MockDiskMetrics, MockInodesMetrics, MockPart
1011

@@ -16,11 +17,9 @@ def psutil_mocks():
1617
else:
1718
mock_statvfs = mock.patch('os.statvfs', return_value=MockInodesMetrics(), __name__='statvfs')
1819

19-
with \
20-
mock.patch('psutil.disk_partitions', return_value=[MockPart()], __name__='disk_partitions'), \
21-
mock.patch('psutil.disk_usage', return_value=MockDiskMetrics(), __name__='disk_usage'), \
22-
mock.patch('psutil.disk_io_counters', return_value=MockDiskIOMetrics()), \
23-
mock_statvfs:
20+
with mock.patch('psutil.disk_partitions', return_value=[MockPart()], __name__='disk_partitions'), mock.patch(
21+
'psutil.disk_usage', return_value=MockDiskMetrics(), __name__='disk_usage'
22+
), mock.patch('psutil.disk_io_counters', return_value=MockDiskIOMetrics()), mock_statvfs:
2423
yield
2524

2625

@@ -31,16 +30,12 @@ def dd_environment(instance_basic_volume):
3130

3231
@pytest.fixture(scope='session')
3332
def instance_basic_volume():
34-
return {
35-
'use_mount': 'false',
36-
}
33+
return {'use_mount': 'false'}
3734

3835

3936
@pytest.fixture(scope='session')
4037
def instance_basic_mount():
41-
return {
42-
'use_mount': 'true',
43-
}
38+
return {'use_mount': 'true'}
4439

4540

4641
@pytest.fixture(scope='session')

disk/tests/metrics.py

Lines changed: 3 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,12 @@
11
# (C) Datadog, Inc. 2018
22
# All rights reserved
33
# Licensed under a 3-clause BSD style license (see LICENSE)
4-
CORE_GAUGES = {
5-
'system.disk.total': 5,
6-
'system.disk.used': 4,
7-
'system.disk.free': 1,
8-
'system.disk.in_use': .80,
9-
}
10-
CORE_RATES = {
11-
'system.disk.write_time_pct': 9.0,
12-
'system.disk.read_time_pct': 5.0,
13-
}
4+
CORE_GAUGES = {'system.disk.total': 5, 'system.disk.used': 4, 'system.disk.free': 1, 'system.disk.in_use': 0.80}
5+
CORE_RATES = {'system.disk.write_time_pct': 9.0, 'system.disk.read_time_pct': 5.0}
146
UNIX_GAUGES = {
157
'system.fs.inodes.total': 10,
168
'system.fs.inodes.used': 1,
179
'system.fs.inodes.free': 9,
18-
'system.fs.inodes.in_use': .10
10+
'system.fs.inodes.in_use': 0.10,
1911
}
2012
UNIX_GAUGES.update(CORE_GAUGES)

disk/tests/mocks.py

Lines changed: 1 addition & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -17,11 +17,7 @@ def mock_df_output(fname):
1717

1818
class MockPart(object):
1919
def __init__(
20-
self,
21-
device=DEFAULT_DEVICE_NAME,
22-
fstype=DEFAULT_FILE_SYSTEM,
23-
mountpoint=DEFAULT_MOUNT_POINT,
24-
opts='ro'
20+
self, device=DEFAULT_DEVICE_NAME, fstype=DEFAULT_FILE_SYSTEM, mountpoint=DEFAULT_MOUNT_POINT, opts='ro'
2521
):
2622
self.device = device
2723
self.fstype = fstype

0 commit comments

Comments
 (0)