Issue when running climix API - dask issue?
When I run my small example script, I get the following error, which repeats many times until I cancel the run (Ctrl+C).
Example script: /home/sm_renwi/Scripts/heatwavefuture/summerseason/seasonlength_paket/seasonlength/example_error_memoryview.py /home/sm_renwi/Scripts/heatwavefuture/summerseason/seasonlength_paket/control_SLENS_seasonlength.yml
Error message in IPython when accessing "indexcube.data" after computing indexcube:
---------------------------------------------------------------------------
IndexError Traceback (most recent call last)
Cell In[21], line 1
----> 1 indexcube.data
File ~/.conda/envs/climix_testconda/lib/python3.10/site-packages/iris/cube.py:2462, in Cube.data(self)
2429 @property
2430 def data(self):
2431 """
2432 The :class:`numpy.ndarray` representing the multi-dimensional data of
2433 the cube.
(...)
2460
2461 """
-> 2462 return self._data_manager.data
File ~/.conda/envs/climix_testconda/lib/python3.10/site-packages/iris/_data_manager.py:206, in DataManager.data(self)
203 if self.has_lazy_data():
204 try:
205 # Realise the lazy data.
--> 206 result = as_concrete_data(self._lazy_array)
207 # Assign the realised result.
208 self._real_array = result
File ~/.conda/envs/climix_testconda/lib/python3.10/site-packages/iris/_lazy_data.py:279, in as_concrete_data(data)
262 """
263 Return the actual content of a lazy array, as a numpy array.
264 If the input data is a NumPy `ndarray` or masked array, return it
(...)
276
277 """
278 if is_lazy_data(data):
--> 279 (data,) = _co_realise_lazy_arrays([data])
281 return data
File ~/.conda/envs/climix_testconda/lib/python3.10/site-packages/iris/_lazy_data.py:242, in _co_realise_lazy_arrays(arrays)
227 def _co_realise_lazy_arrays(arrays):
228 """
229 Compute multiple lazy arrays and return a list of real values.
230
(...)
240
241 """
--> 242 computed_arrays = da.compute(*arrays)
243 results = []
244 for lazy_in, real_out in zip(arrays, computed_arrays):
245 # Ensure we always have arrays.
246 # Note : in some cases dask (and numpy) will return a scalar
247 # numpy.int/numpy.float object rather than an ndarray.
248 # Recorded in https://github.com/dask/dask/issues/2111.
File ~/.conda/envs/climix_testconda/lib/python3.10/site-packages/dask/base.py:600, in compute(traverse, optimize_graph, scheduler, get, *args, **kwargs)
597 postcomputes.append(x.__dask_postcompute__())
599 results = schedule(dsk, keys, **kwargs)
--> 600 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
File ~/.conda/envs/climix_testconda/lib/python3.10/site-packages/dask/base.py:600, in <listcomp>(.0)
597 postcomputes.append(x.__dask_postcompute__())
599 results = schedule(dsk, keys, **kwargs)
--> 600 return repack([f(r, *a) for r, (f, a) in zip(results, postcomputes)])
File ~/.conda/envs/climix_testconda/lib/python3.10/site-packages/dask/array/core.py:1283, in finalize(results)
1281 while isinstance(results2, (tuple, list)):
1282 if len(results2) > 1:
-> 1283 return concatenate3(results)
1284 else:
1285 results2 = results2[0]
File ~/.conda/envs/climix_testconda/lib/python3.10/site-packages/dask/array/core.py:5300, in concatenate3(arrays)
5298 if not ndim:
5299 return arrays
-> 5300 chunks = chunks_from_arrays(arrays)
5301 shape = tuple(map(sum, chunks))
5303 def dtype(x):
File ~/.conda/envs/climix_testconda/lib/python3.10/site-packages/dask/array/core.py:5087, in chunks_from_arrays(arrays)
5084 return (1,)
5086 while isinstance(arrays, (list, tuple)):
-> 5087 result.append(tuple(shape(deepfirst(a))[dim] for a in arrays))
5088 arrays = arrays[0]
5089 dim += 1
File ~/.conda/envs/climix_testconda/lib/python3.10/site-packages/dask/array/core.py:5087, in <genexpr>(.0)
5084 return (1,)
5086 while isinstance(arrays, (list, tuple)):
-> 5087 result.append(tuple(shape(deepfirst(a))[dim] for a in arrays))
5088 arrays = arrays[0]
5089 dim += 1
IndexError: tuple index out of range
Error message in the terminal:
/home/sm_renwi/.conda/envs/climix_testconda/lib/python3.10/site-packages/distributed/node.py:182: UserWarning: Port 8787 is already in use.
Perhaps you already have a cluster running?
Hosting the HTTP server on port 43663 instead
warnings.warn(
/home/sm_renwi/.conda/envs/climix_testconda/lib/python3.10/site-packages/distributed/node.py:182: UserWarning: Port 8787 is already in use.
Perhaps you already have a cluster running?
Hosting the HTTP server on port 43577 instead
warnings.warn(
2023-06-15 10:53:40,552 - distributed.nanny - ERROR - Failed to start process
Traceback (most recent call last):
File "/home/sm_renwi/.conda/envs/climix_testconda/lib/python3.10/site-packages/distributed/nanny.py", line 443, in instantiate
result = await self.process.start()
File "/home/sm_renwi/.conda/envs/climix_testconda/lib/python3.10/site-packages/distributed/nanny.py", line 713, in start
await self.process.start()
File "/home/sm_renwi/.conda/envs/climix_testconda/lib/python3.10/site-packages/distributed/process.py", line 55, in _call_and_set_future
res = func(*args, **kwargs)
File "/home/sm_renwi/.conda/envs/climix_testconda/lib/python3.10/site-packages/distributed/process.py", line 215, in _start
process.start()
File "/home/sm_renwi/.conda/envs/climix_testconda/lib/python3.10/multiprocessing/process.py", line 121, in start
self._popen = self._Popen(self)
File "/home/sm_renwi/.conda/envs/climix_testconda/lib/python3.10/multiprocessing/context.py", line 288, in _Popen
return Popen(process_obj)
File "/home/sm_renwi/.conda/envs/climix_testconda/lib/python3.10/multiprocessing/popen_spawn_posix.py", line 32, in __init__
super().__init__(process_obj)
File "/home/sm_renwi/.conda/envs/climix_testconda/lib/python3.10/multiprocessing/popen_fork.py", line 19, in __init__
self._launch(process_obj)
File "/home/sm_renwi/.conda/envs/climix_testconda/lib/python3.10/multiprocessing/popen_spawn_posix.py", line 42, in _launch
prep_data = spawn.get_preparation_data(process_obj._name)
File "/home/sm_renwi/.conda/envs/climix_testconda/lib/python3.10/multiprocessing/spawn.py", line 154, in get_preparation_data
_check_not_importing_main()
File "/home/sm_renwi/.conda/envs/climix_testconda/lib/python3.10/multiprocessing/spawn.py", line 134, in _check_not_importing_main
raise RuntimeError('''
RuntimeError:
An attempt has been made to start a new process before the
current process has finished its bootstrapping phase.
This probably means that you are not using fork to start your
child processes and you have forgotten to use the proper idiom
in the main module:
if __name__ == '__main__':
freeze_support()
...
The "freeze_support()" line can be omitted if the program
is not going to be frozen to produce an executable.