Commit 00366b58 authored by Klaus Zimmermann

Add pylama config and improve code style (closes #120)

parent 596ae2d7
# globs
*~
\#*\#
*.egg-info
*_flymake.py
*.pyc
......
# -*- coding: utf-8 -*-
import dask.array as da
from iris.analysis import Aggregator
import numpy as np
from .util import change_units
......
......@@ -6,9 +6,11 @@ import dask
from dask.distributed import Client, LocalCluster
# from dask_jobqueue import SLURMCluster
class DistributedLocalClusterScheduler:
    def __init__(self):
-        self.cluster = LocalCluster(local_directory='/dev/shm/dask-worker-space')
+        self.cluster = LocalCluster(
+            local_directory='/dev/shm/dask-worker-space')
        self.client = Client(self.cluster)
    def __enter__(self):
......
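Note on the scheduler touched above: it wraps dask's `LocalCluster`/`Client` in a context manager, keeping worker spill space in shared memory. A minimal sketch of the full class, assuming a conventional `__exit__` that closes client and cluster (only `__init__` and `__enter__` appear in this hunk):

```python
from dask.distributed import Client, LocalCluster


class DistributedLocalClusterScheduler:
    """Sketch of the scheduler as a context manager (assumed shape)."""

    def __init__(self):
        # keep worker spill space in shared memory, as in the diff
        self.cluster = LocalCluster(
            local_directory='/dev/shm/dask-worker-space')
        self.client = Client(self.cluster)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # assumed cleanup order: close the client before the cluster
        self.client.close()
        self.cluster.close()
```

Usage would then be `with DistributedLocalClusterScheduler() as scheduler: ...`, keeping the cluster alive for the duration of the run.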
......@@ -2,8 +2,6 @@
import logging
import iris
from .aggregators import PointLocalAggregator
from .period import build_period
......@@ -16,7 +14,6 @@ class Index:
        self.aggregator = PointLocalAggregator(index_function,
                                               output_metadata)
    def __call__(self, cube):
        logging.info('Adding coord categorisation.')
        coord_name = self.period.add_coord_categorisation(cube)
......
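`period.add_coord_categorisation` itself is not part of this diff; for orientation, the call in `__call__` attaches an auxiliary coordinate that the aggregator can group over, analogous to iris's coord categorisation helpers. A hypothetical annual example (not climix's actual period code):

```python
import iris.coord_categorisation


def add_annual_categorisation(cube):
    """Attach a 'year' auxiliary coordinate derived from 'time' and
    return its name, so the caller can aggregate with it."""
    iris.coord_categorisation.add_year(cube, 'time', name='year')
    return 'year'
```

The returned coordinate name would then feed something like `cube.aggregated_by(coord_name, aggregator)`.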
......@@ -69,7 +69,9 @@ class CountOccurrences:
        self.units = Unit('days')
    def prepare(self, input_cube):
-        change_units(self.threshold, input_cube.units, input_cube.standard_name)
+        change_units(self.threshold,
+                     input_cube.units,
+                     input_cube.standard_name)
    def call_func(self, data, axis, **kwargs):
        axis = normalize_axis(axis, data.ndim)
......@@ -94,7 +96,9 @@ class FirstOccurrence:
        self.NO_OCCURRENCE = np.inf
    def prepare(self, input_cube):
-        change_units(self.threshold, input_cube.units, input_cube.standard_name)
+        change_units(self.threshold,
+                     input_cube.units,
+                     input_cube.standard_name)
    def call_func(self, data, axis, **kwargs):
        axis = normalize_axis(axis, data.ndim)
......@@ -139,7 +143,9 @@ class LastOccurrence:
        self.NO_OCCURRENCE = -np.inf
    def prepare(self, input_cube):
-        change_units(self.threshold, input_cube.units, input_cube.standard_name)
+        change_units(self.threshold,
+                     input_cube.units,
+                     input_cube.standard_name)
    def call_func(self, data, axis, **kwargs):
        axis = normalize_axis(axis, data.ndim)
......@@ -185,7 +191,9 @@ class SpellLength:
        self.units = Unit('days')
    def prepare(self, input_cube):
-        change_units(self.threshold, input_cube.units, input_cube.standard_name)
+        change_units(self.threshold,
+                     input_cube.units,
+                     input_cube.standard_name)
    def call_func(self, data, axis, **kwargs):
        axis = normalize_axis(axis, data.ndim)
......@@ -238,7 +246,9 @@ class ThresholdedStatistics:
        self.lazy_reducer = DASK_REDUCERS[reducer]
    def prepare(self, input_cube):
-        change_units(self.threshold, input_cube.units, input_cube.standard_name)
+        change_units(self.threshold,
+                     input_cube.units,
+                     input_cube.standard_name)
        self.standard_name = input_cube.standard_name
        self.units = input_cube.units
......
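All of the `prepare` methods above funnel through `change_units` from `.util` (imported in the first hunk) to express the index threshold in the units of the incoming data before `call_func` runs. The helper itself is outside this diff; a simplified, illustrative stand-in using `cf_units` (the real function also receives `standard_name` so it can special-case precipitation quantities, see the `PRECIPITATION_INFO` table further down):

```python
from cf_units import Unit


def convert_threshold(value, old_units, new_units):
    """Return `value` expressed in `new_units` (illustrative only)."""
    return Unit(old_units).convert(value, Unit(new_units))


# e.g. a 25 degC threshold applied to data stored in kelvin:
# convert_threshold(25.0, 'degC', 'K') -> 298.15
```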
......@@ -100,7 +100,8 @@ def build_index_function(spec):
    candidates = list(pkg_resources.iter_entry_points('climix.index_functions',
                                                      name=name))
    if len(candidates) == 0:
-        raise ValueError(f'No implementation found for index_function <{name}>')
+        raise ValueError(f'No implementation found for '
+                         f'index_function <{name}>')
    elif len(candidates) > 1:
        distributions = [candidate.dist for candidate in candidates]
        raise ValueError(
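For context, this lookup resolves index functions through the `climix.index_functions` entry-point group. The second `ValueError` is cut off in this hunk, so its wording below is an assumption; the rest sketches how the surrounding function plausibly completes:

```python
import pkg_resources


def find_index_function(name):
    """Load the single entry point registered under `name` (sketch)."""
    candidates = list(pkg_resources.iter_entry_points(
        'climix.index_functions', name=name))
    if len(candidates) == 0:
        raise ValueError(f'No implementation found for '
                         f'index_function <{name}>')
    elif len(candidates) > 1:
        distributions = [candidate.dist for candidate in candidates]
        # assumed wording; the original message is truncated in the diff
        raise ValueError(f'Multiple implementations for index_function '
                         f'<{name}> found in {distributions}')
    return candidates[0].load()
```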
......@@ -186,6 +187,7 @@ def prepare_input_data(datafiles):
def guess_output_template(datafiles):
    output_template = '{var_name}_{frequency}.nc'
    def filename_stripper(path):
        # remove directory part...
        basename = os.path.basename(path)
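The inner `filename_stripper` is truncated here. A hedged completion, assuming it reduces a path to the pieces needed by the `'{var_name}_{frequency}.nc'` template (the exact parsing in climix may differ):

```python
import os


def filename_stripper(path):
    # remove directory part and the extension
    basename = os.path.basename(path)
    stem, _ = os.path.splitext(basename)
    # assumption: CMOR-style names such as 'tas_day_<model>_....nc'
    # carry the variable name and frequency in the first two fields
    parts = stem.split('_')
    return parts[0], parts[1]
```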
......@@ -238,13 +240,14 @@ def save(result, output_filename, sliced_mode=False):
        var = ds[result.var_name]
        time_dim = result.coord_dims('time')[0]
        no_slices = result.shape[time_dim]
        def store(i, data):
            var[i, ...] = data
        thread = threading.Thread()
        thread.start()
        start = time.time()
        for i, result_cube in enumerate(result.slices_over(time_dim)):
-            logging.info('Starting with {}'.format(result_cube.coord('time')))
+            logging.info(f'Starting with {result_cube.coord("time")}')
            result_cube.data
            logging.info('Waiting for previous save to finish')
            thread.join()
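The sliced save loop above overlaps computation with I/O: while one time slice is being realised, the previous slice is written to the netCDF variable on a background thread (the initial empty `threading.Thread()` just makes the first `join()` a no-op). A self-contained sketch of the same pattern with illustrative names (`lazy_slices`, `write_slice`):

```python
import threading


def overlapped_store(lazy_slices, write_slice):
    """lazy_slices yields (index, dask_array); write_slice(i, data)
    stores one realised slice synchronously."""
    thread = threading.Thread()  # dummy so the first join() returns at once
    thread.start()
    for i, lazy in lazy_slices:
        data = lazy.compute()    # realise this slice while the previous one writes
        thread.join()            # wait for the previous write to finish
        thread = threading.Thread(target=write_slice, args=(i, data))
        thread.start()
    thread.join()                # flush the final write
```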
......@@ -271,7 +274,9 @@ def do_main(requested_indices, datafiles, output_template, sliced_mode):
    for index in indices:
        logging.info(f'Starting calculations for index {index}')
        logging.info('Building output filename')
-        output_filename = build_output_filename(index, datafiles, output_template)
+        output_filename = build_output_filename(index,
+                                                datafiles,
+                                                output_template)
        logging.info('Preparing input data')
        input_data = prepare_input_data(datafiles)
        logging.info('Calculating index')
......
......@@ -9,15 +9,21 @@ import six
DENSITY_WATER = Unit('1000 kg m-3')
-PrecipQty = namedtuple('PrecipQty', ['units', 'is_rate_based', 'is_mass_based'])
+PrecipQty = namedtuple('PrecipQty',
+                       ['units', 'is_rate_based', 'is_mass_based'])
PRECIPITATION_INFO = OrderedDict([
-    ('lwe_precipitation_rate', PrecipQty(Unit('m s-1'), True, False)),
-    ('lwe_thickness_of_precipitation_amount', PrecipQty(Unit('m'), False, False)),
-    ('thickness_of_rainfall_amount', PrecipQty(Unit('m'), False, False)),
-    ('precipitation_amount', PrecipQty(Unit('kg m-2'), False, True)),
-    ('precipitation_flux', PrecipQty(Unit('kg m-2 s-1'), True, True)),
+    ('lwe_precipitation_rate',
+     PrecipQty(Unit('m s-1'), True, False)),
+    ('lwe_thickness_of_precipitation_amount',
+     PrecipQty(Unit('m'), False, False)),
+    ('thickness_of_rainfall_amount',
+     PrecipQty(Unit('m'), False, False)),
+    ('precipitation_amount',
+     PrecipQty(Unit('kg m-2'), False, True)),
+    ('precipitation_flux',
+     PrecipQty(Unit('kg m-2 s-1'), True, True)),
])
......
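The `PRECIPITATION_INFO` table pairs each standard name with its canonical units and whether the quantity is rate- and/or mass-based; together with `DENSITY_WATER` this is what lets thresholds be moved between mass and liquid-water-equivalent forms. A small illustration of the underlying unit arithmetic (not climix's conversion code):

```python
from cf_units import Unit

DENSITY_WATER = Unit('1000 kg m-3')   # as defined in the diff

# precipitation_flux (kg m-2 s-1) is rate- and mass-based; dividing by the
# density of water gives the volume-based lwe_precipitation_rate (m s-1)
lwe_rate_unit = Unit('kg m-2 s-1') / DENSITY_WATER

# 1 kg m-2 s-1 of flux corresponds to 0.001 m s-1 of liquid water
print(lwe_rate_unit.convert(1.0, Unit('m s-1')))   # -> 0.001
```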
......@@ -90,9 +90,9 @@ def cube_diffs(cube1, cube2):
    Returns
    -------
    (dict, dict)
-        Returns two dictionaries; the first for differences in global attributes,
-        the second for differences in cube coordinates. If no differences are
-        found the dictionary is empty.
+        Returns two dictionaries; the first for differences in global
+        attributes, the second for differences in cube coordinates. If no
+        differences are found the dictionary is empty.
    """
    gdiff = dict_diffs(cube1.attributes, cube2.attributes, "global_attributes")
......
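`dict_diffs`, used at the end of this hunk, is not shown in the diff; a minimal sketch consistent with how `cube_diffs` uses it (the exact key/value layout of the returned dict is an assumption):

```python
def dict_diffs(d1, d2, label):
    """Return a dict of entries that differ between d1 and d2,
    keyed as '<label>/<key>' with the two differing values."""
    diffs = {}
    for key in set(d1) | set(d2):
        if d1.get(key) != d2.get(key):
            diffs[f'{label}/{key}'] = (d1.get(key), d2.get(key))
    return diffs
```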
[pylama]
skip = climix/_version.py,*/*_todo.py