ARTS 2.5.11 (git: 725533f0)
m_microphysics.cc
Go to the documentation of this file.
1/*===========================================================================
2 === File description
3 ===========================================================================*/
4
16/*===========================================================================
17 === External declarations
18 ===========================================================================*/
19#include <cmath>
20#include <cstdlib>
21#include <stdexcept>
22
23#include "array.h"
24#include "arts.h"
25#include "arts_constants.h"
26#include "auto_md.h"
27#include "check_input.h"
28#include "cloudbox.h"
29#include "disort.h"
30#include "file.h"
31#include "interpolation.h"
32#include "lin_alg.h"
33#include "logic.h"
34#include "math_funcs.h"
35#include "messages.h"
36#include "microphysics.h"
37#include "optproperties.h"
38#include "parameters.h"
39#include "physics_funcs.h"
40#include "psd.h"
41#include "rte.h"
42#include "sorting.h"
43#include "special_interp.h"
44#include "xml_io.h"
45
46inline constexpr Numeric PI=Constant::pi;
47
48/*===========================================================================
49 === The functions (in alphabetical order)
50 ===========================================================================*/
51
52/* Workspace method: Doxygen documentation will be auto-generated */
54 GriddedField4& hydrotable,
55 const ArrayOfAgenda& pnd_agenda_array,
56 const ArrayOfArrayOfString& pnd_agenda_array_input_names,
58 const Index& scat_data_checked,
59 const Vector& f_grid,
60 const Index& iss,
61 const Vector& T_grid,
62 const Vector& wc_grid,
63 const Verbosity&)
64{
65 // Sizes
66 const Index nss = scat_data.nelem();
67 const Index nf = f_grid.nelem();
68 const Index nt = T_grid.nelem();
69 const Index nw = wc_grid.nelem();
70
71 ARTS_USER_ERROR_IF (pnd_agenda_array.nelem() != nss,
72 "*scat_data* and *pnd_agenda_array* are inconsistent "
73 "in size.");
74 ARTS_USER_ERROR_IF (pnd_agenda_array_input_names.nelem() != nss,
75 "*scat_data* and *pnd_agenda_array_input_names* are "
76 "inconsistent in size.");
77 ARTS_USER_ERROR_IF (pnd_agenda_array_input_names[iss].nelem() != 1,
78 "This method requires one-moment PSDs, but *pnd_agenda_array_input_names* "
79 "for the selected scattering species does not have length one.");
80 ARTS_USER_ERROR_IF (!scat_data_checked,
81 "The scat_data must be flagged to have passed a "
82 "consistency check (scat_data_checked=1).");
83
84 // Allocate *hydrotable*
85 hydrotable.set_name("Table of particle optical properties");
86 hydrotable.data.resize(4, nf, nt, nw);
87 //
88 hydrotable.set_grid_name(0, "Quantity");
89 hydrotable.set_grid(0, {"Extinction [m-1]",
90 "Single scattering albedo [-]",
91 "Asymmetry parameter [-]",
92 "Radar reflectivity [m2]"});
93 hydrotable.set_grid_name(1, "Frequency [Hz]");
94 hydrotable.set_grid(1, f_grid);
95 hydrotable.set_grid_name(2, "Temperature [K]");
96 hydrotable.set_grid(2, T_grid);
97 hydrotable.set_grid_name(3, "Particle content [kg/m3]");
98 hydrotable.set_grid(3, wc_grid);
99
100 // Scattering angle grid
101 const Index nsa = 361;
102 Vector sa_grid;
103 nlinspace(sa_grid, 0, 180, nsa);
104
105 // Local variables
106 Matrix pnd_data;
107 Tensor3 dpnd_data_dx;
108 Matrix pnd_agenda_input(nt, 1);
109 ArrayOfString dpnd_data_dx_names(0);
110 Matrix ext(nf, nt);
111 Matrix abs(nf, nt);
112 Tensor3 pfun(nf, nt, nsa);
113 const Numeric fourpi = 4.0 * PI;
114 ArrayOfIndex cbox_limits = {0, nt-1};
115
116 // Loop and fill table
117 for (Index iw = 0; iw < nw; iw++) {
118 // Call pnd_agenda
119 pnd_agenda_input = wc_grid[iw];
121 pnd_data,
122 dpnd_data_dx,
123 iss,
124 T_grid,
125 pnd_agenda_input,
126 pnd_agenda_array_input_names[iss],
127 dpnd_data_dx_names,
128 pnd_agenda_array);
129
130 // Calculate extinsion, absorbtion and phase function
131 ext = 0.0;
132 abs = 0.0;
133 pfun = 0.0;
135 abs,
136 pfun,
137 scat_data[iss],
138 iss,
139 transpose(pnd_data),
140 cbox_limits,
141 T_grid,
142 sa_grid);
143
144 // Fill the hydrotable for present particle content
145 for (Index iv = 0; iv < nf; iv++) {
146 for (Index it = 0; it < nt; it++) {
147 hydrotable.data(0,iv,it,iw) = ext(iv,it);
148 hydrotable.data(1,iv,it,iw) = 1.0 - (abs(iv,it) / ext(iv,it));
149 hydrotable.data(2,iv,it,iw) = asymmetry_parameter(sa_grid,
150 pfun(iv,it,joker));
151 hydrotable.data(3,iv,it,iw) = fourpi * pfun(iv,it,nsa-1);
152 }
153 }
154 }
155}
156
157
158/* Workspace method: Doxygen documentation will be auto-generated */
160 Matrix& particle_masses,
161 const ArrayOfArrayOfScatteringMetaData& scat_meta,
162 const Verbosity&) {
163 const Index np_total = TotalNumberOfElements(scat_meta);
164
165 particle_masses.resize(np_total, 1);
166
167 Index i_se_flat = 0;
168 for (Index i_ss = 0; i_ss < scat_meta.nelem(); i_ss++) {
169 for (Index i_se = 0; i_se < scat_meta[i_ss].nelem(); i_se++) {
170 ARTS_USER_ERROR_IF (std::isnan(scat_meta[i_ss][i_se].mass) ||
171 scat_meta[i_ss][i_se].mass <= 0 || scat_meta[i_ss][i_se].mass > 1.,
172 "A presumably incorrect value found for "
173 "scat_meta[", i_ss, "][", i_se, "].mass.\n"
174 "The value is ", scat_meta[i_ss][i_se].mass)
175
176 particle_masses(i_se_flat, 0) = scat_meta[i_ss][i_se].mass;
177
178 i_se_flat++;
179 }
180 }
181}
182
183/* Workspace method: Doxygen documentation will be auto-generated */
185 Matrix& particle_masses,
186 // WS Input:
187 const ArrayOfArrayOfScatteringMetaData& scat_meta,
188 const Verbosity&) {
189 // resize particle_masses to required diemsions and properly initialize values
190 particle_masses.resize(TotalNumberOfElements(scat_meta), scat_meta.nelem());
191 particle_masses = 0.;
192
193 // calculate and set particle_masses
194 Index i_se_flat = 0;
195 for (Index i_ss = 0; i_ss < scat_meta.nelem(); i_ss++) {
196 for (Index i_se = 0; i_se < scat_meta[i_ss].nelem(); i_se++) {
197 ARTS_USER_ERROR_IF (std::isnan(scat_meta[i_ss][i_se].mass) ||
198 scat_meta[i_ss][i_se].mass <= 0 || scat_meta[i_ss][i_se].mass > 1.,
199 "A presumably incorrect value found for "
200 "scat_meta[", i_ss, "][", i_se, "].mass.\n"
201 "The value is ", scat_meta[i_ss][i_se].mass)
202
203 particle_masses(i_se_flat, i_ss) = scat_meta[i_ss][i_se].mass;
204 i_se_flat++;
205 }
206 }
207}
208
209/* Workspace method: Doxygen documentation will be auto-generated */
210void pndFromPsdBasic(Matrix& pnd_data,
211 Tensor3& dpnd_data_dx,
212 const Vector& pnd_size_grid,
213 const Matrix& psd_data,
214 const Vector& psd_size_grid,
215 const Tensor3& dpsd_data_dx,
216 const Index& quad_order,
217 const Verbosity&) {
218 // Some sizes
219 const Index np = psd_data.nrows();
220 const Index ng = psd_size_grid.nelem();
221 Index ndx = 0;
222 const bool do_dx = !dpsd_data_dx.empty();
223
224 // Checks
225 ARTS_USER_ERROR_IF (ng < 2,
226 "The method requires that length of *psd_size_grid* is >= 2.");
227 ARTS_USER_ERROR_IF (ng != pnd_size_grid.nelem(),
228 "So far, the method requires that *psd_size_grid* and "
229 "*pnd_size_grid* have same length.");
230 for (Index i = 0; i < ng; i++) {
231 ARTS_USER_ERROR_IF (psd_size_grid[i] != pnd_size_grid[i],
232 "So far, the method requires that *psd_size_grid* and "
233 "*pnd_size_grid* are identical.");
234 }
235 ARTS_USER_ERROR_IF (psd_data.ncols() != ng,
236 "Number of columns in *psd_data* and length of "
237 "*psd_size_grid* must match.");
238
239 pnd_data.resize(np, ng);
240 if (do_dx) {
241 ARTS_USER_ERROR_IF (dpsd_data_dx.ncols() != ng,
242 "Number of columns in *dpsd_data_dx* and length of "
243 "*psd_size_grid* must match.");
244 ndx = dpsd_data_dx.npages();
245 dpnd_data_dx.resize(ndx, np, ng);
246 } else {
247 dpnd_data_dx.resize(0, 0, 0);
248 }
249
250 // Get sorted version of psd_size_grid (and, since pnd_size_grid so far is
251 // identical, of this as well implicitly)
252 ArrayOfIndex intarr;
253 Vector psd_size_grid_sorted(ng);
254 get_sorted_indexes(intarr, psd_size_grid);
255 for (Index i = 0; i < ng; i++)
256 psd_size_grid_sorted[i] = psd_size_grid[intarr[i]];
257
258 ARTS_USER_ERROR_IF (!is_increasing(psd_size_grid_sorted),
259 "*psd_size_grid* is not allowed to contain "
260 "duplicate values.");
261
262 // Calculate pnd by intrgation of psd for given nodes/bins
263 Vector quadweights(ng);
264 bin_quadweights(quadweights, psd_size_grid_sorted, quad_order);
265
266 for (Index i = 0; i < ng; i++) {
267 for (Index ip = 0; ip < np; ip++) {
268 pnd_data(ip, intarr[i]) = quadweights[i] * psd_data(ip, intarr[i]);
269 }
270
271 if (do_dx) {
272 for (Index ip = 0; ip < np; ip++) {
273 for (Index ix = 0; ix < ndx; ix++) {
274 dpnd_data_dx(ix, ip, intarr[i]) =
275 quadweights[i] * dpsd_data_dx(ix, ip, intarr[i]);
276 }
277 }
278 }
279 }
280}
281
282/* Workspace method: Doxygen documentation will be auto-generated */
283void pndFromPsd(Matrix& pnd_data,
284 Tensor3& dpnd_data_dx,
285 const Vector& pnd_size_grid,
286 const Matrix& psd_data,
287 const Vector& psd_size_grid,
288 const Tensor3& dpsd_data_dx,
289 const ArrayOfArrayOfSingleScatteringData& scat_data,
290 const Vector& f_grid,
291 const Index& scat_data_checked,
292 const Index& quad_order,
293 const Index& scat_index,
294 const Numeric& threshold_rsec,
295 const Numeric& threshold_bext,
296 const Numeric& threshold_rpnd,
297 const Verbosity&) {
298 // Some sizes
299 const Index np = psd_data.nrows();
300 const Index ng = psd_size_grid.nelem();
301 Index ndx = 0;
302 const bool do_dx = !dpsd_data_dx.empty();
303
304 // Checks
305 ARTS_USER_ERROR_IF (ng < 3,
306 "The method requires that length of *psd_size_grid* is >= 3.");
307 ARTS_USER_ERROR_IF (ng != pnd_size_grid.nelem(),
308 "So far, the method requires that *psd_size_grid* and"
309 " *pnd_size_grid* have the same length.");
310 for (Index i = 0; i < ng; i++) {
311 ARTS_USER_ERROR_IF (psd_size_grid[i] != pnd_size_grid[i],
312 "So far, the method requires that *psd_size_grid* and"
313 " *pnd_size_grid* are identical.");
314 }
315 ARTS_USER_ERROR_IF (psd_data.ncols() != ng,
316 "Number of columns in *psd_data* and length of"
317 " *psd_size_grid* must match.");
318
319 pnd_data.resize(np, ng);
320 if (do_dx) {
321 ARTS_USER_ERROR_IF (dpsd_data_dx.ncols() != ng,
322 "Number of columns in *dpsd_data_dx* and length of"
323 " *psd_size_grid* must match.");
324 ndx = dpsd_data_dx.npages();
325 dpnd_data_dx.resize(ndx, np, ng);
326 } else {
327 dpnd_data_dx.resize(0, 0, 0);
328 }
329
330 ARTS_USER_ERROR_IF (!scat_data_checked,
331 "*scat_data* must have passed a consistency check"
332 " (scat_data_checked=1).\n"
333 "Alternatively, use *pndFromPsdBasic*.");
334 ARTS_USER_ERROR_IF (scat_index >= scat_data.nelem(),
335 "*scat_index* exceeds the number of available"
336 " scattering species.");
337 ARTS_USER_ERROR_IF (scat_data[scat_index].nelem() != ng,
338 "Number of scattering elements in this scattering"
339 " species (*scat_index*) inconsistent with length of"
340 " *pnd_size_grid*.");
341
342 // Get sorted version of psd_size_grid (and, since pnd_size_grid so far is
343 // identical, of this as well implicitly)
344 ArrayOfIndex intarr;
345 Vector psd_size_grid_sorted(ng);
346 get_sorted_indexes(intarr, psd_size_grid);
347 for (Index i = 0; i < ng; i++)
348 psd_size_grid_sorted[i] = psd_size_grid[intarr[i]];
349
350 ARTS_USER_ERROR_IF (!is_increasing(psd_size_grid_sorted),
351 "*psd_size_grid* is not allowed to contain "
352 "duplicate values.");
353
354 // Calculate pnd by integration of psd for given nodes/bins
355 Vector quadweights(ng);
356 bin_quadweights(quadweights, psd_size_grid_sorted, quad_order);
357
358 for (Index i = 0; i < ng;
359 i++) //loop over pnd_size_grid aka scattering elements
360 {
361 for (Index ip = 0; ip < np; ip++) //loop over pressure levels
362 {
363 pnd_data(ip, intarr[i]) = quadweights[i] * psd_data(ip, intarr[i]);
364 }
365
366 if (do_dx) {
367 for (Index ip = 0; ip < np; ip++) {
368 for (Index ix = 0; ix < ndx; ix++) {
369 dpnd_data_dx(ix, ip, intarr[i]) =
370 quadweights[i] * dpsd_data_dx(ix, ip, intarr[i]);
371 }
372 }
373 }
374 }
375
376 ArrayOfSingleScatteringData sds = scat_data[scat_index];
377 Index fstart = 0;
378 Index nf = f_grid.nelem();
379 Matrix bulkext(np, nf, 0.);
380 Vector ext(nf), ext_s0(nf), ext_s1(nf), ext_l0(nf), ext_l1(nf);
381
382 // check that pnd_size_grid and scat_data cover (scalar) bulk extinction properly.
383 // (formerly we used mass. but heavy particles might not necessarily
384 // contribute much to optprops. same is valid for total particle number.)
385 //
386 // We do that by
387 // (a) checking that psd*ext decreases at the edges, i.e. to increase the
388 // probability that the main extinction peak is covered and in order to
389 // avoid passing check (b) through making the edge bins tiny
390 // (b) checking that the extinction contributions of the edge bins is small
391 // compared to the total bulk bin.
392 // The tests are somewhat over-sensitive on the small particle side in the
393 // sense that the checks are triggered easily by low mass density cases - which
394 // have their main extinction contributed by small particles, but for which
395 // the total extinction is quite low. Hence, there is also a total extinction
396 // threshold, below which the checks are not applied. Furthermore, such low
397 // densities do typically not occur throughout the whole atmospheric profile,
398 // but higher density layers might exists, meaning the low density layers
399 // contribute very little to the total optical properties, hence should not
400 // dominate the check criteria (no need to be very strict with them if they
401 // hardly contribute anyways). Therefore, checks are also not applied if the
402 // edge bin number density at a given atmospheric level is low compared to the
403 // maximum number of this scattering element in the profile.
404 //
405 // Technically, we need to cover all freqs separately (as optprops vary
406 // strongly with freq). We indeed do that here.
407 // FIXME: Shall a single freq or reduced freq-number option be implemented?
408 // If so, the don't-apply-test criteria need to be checked again though (so it
409 // does not happen that effectively all of the reduced-freq testing is skipped
410 // due to them).
411 //
412 // Technically, we also need to use identical temperatures (and preferably the
413 // ones used for deriving psd). But all scat elements can be on different
414 // T-grids, ie we would need to interpolate. Instead we just use the first in
415 // line.
416 // FIXME: Maybe refine in future allowing to select between low, high, median
417 // grid point or even specify one temp to use.
418 //
419 // And, technically ext might be directional dependent (for oriented
420 // particles). Also this might be different for each scat element. However, at
421 // least for now, we only check the 0-direction.
422 // FIXME: Check further directions?
423
424 for (Index ise = 0; ise < ng;
425 ise++) //loop over pnd_size_grid aka scattering elements
426 {
427 if (sds[intarr[ise]].ext_mat_data.nshelves() > 1)
428 ext = sds[intarr[ise]].ext_mat_data(joker, 0, 0, 0, 0);
429 else
430 ext = sds[intarr[ise]].ext_mat_data(0, 0, 0, 0, 0);
431
432 // keep the ext data of the edge bins
433 if (ise == 0)
434 ext_s0 = ext;
435 else if (ise == 1)
436 ext_s1 = ext;
437 else if (ise == ng - 2)
438 ext_l1 = ext;
439 else if (ise == ng - 1)
440 ext_l0 = ext;
441
442 for (Index ip = 0; ip < np; ip++) //loop over pressure levels
443 if (abs(sum(pnd_data(ip, joker))) > 0.)
444 for (Index f = fstart; f < (fstart + nf); f++)
445 bulkext(ip, f) += pnd_data(ip, intarr[ise]) * ext[f];
446 }
447
448 Numeric max0 = 0, max1 = 0;
449 for (Index ip = 0; ip < np; ip++) //loop over pressure levels
450 {
451 max0 = max(abs(pnd_data(ip, intarr[0])), max0);
452 max1 = max(abs(pnd_data(ip, intarr[ng - 1])), max1);
453 }
454
455 Numeric contrib;
456 for (Index ip = 0; ip < np; ip++) //loop over pressure levels
457 {
458 if (abs(sum(pnd_data(ip, joker))) > 0.) {
459 for (Index f = fstart; f < (fstart + nf); f++) {
460 /* for( Index ise=0; ise<ng; ise++ )
461 {
462 if( sds[ise].ext_mat_data.nshelves()>1 )
463 ext = sds[ise].ext_mat_data(joker,0,0,0,0);
464 else
465 ext = sds[ise].ext_mat_data(0,0,0,0,0);
466 cout << " se #" << ise << ": contrib = pnd*ext/bext = "
467 << abs(pnd_data(ip,ise)) << "*" << ext[f] << "/"
468 << abs(bulkext(ip,f)) << " = "
469 << 1e2*abs(pnd_data(ip,ise))*ext[f]/abs(bulkext(ip,f))
470 << "%\n";
471 }*/
472
473 // check that bin-width normalized extinction (or ext*psd) is
474 // decreasing.
475 if (abs(bulkext(ip, f)) > 1e-2 * threshold_bext) {
476 ARTS_USER_ERROR_IF (abs(psd_data(ip, intarr[0])) > 0. and
477 ext_s0[f] * abs(psd_data(ip, intarr[0])) >=
478 ext_s1[f] * abs(psd_data(ip, intarr[1])),
479 " Bin-width normalized extinction (ext*psd) not decreasing"
480 " at small size edge\n"
481 " at atm level #", ip, " and freq point #", f, ".\n"
482 " ext_s0=", ext_s0[f],
483 ", psd_s0=", abs(psd_data(ip, intarr[0])),
484 ", ext_s0*psd_s0=", ext_s0[f] * abs(psd_data(ip, intarr[0])),
485 "\n LARGER EQUAL\n"
486 " ext_s1=", ext_s1[f],
487 ", psd_s1=", abs(psd_data(ip, intarr[1])),
488 ", ext_s1*psd_s1=", ext_s1[f] * abs(psd_data(ip, intarr[1])),
489 "\n"
490 " Total bulk ext = ", abs(bulkext(ip, f)), "\n"
491 " Need to add smaller sized particles!\n")
492
493 ARTS_USER_ERROR_IF (abs(psd_data(ip, intarr[ng - 1])) > 0. and
494 ext_l0[f] * abs(psd_data(ip, intarr[ng - 1])) >=
495 ext_l1[f] * abs(psd_data(ip, intarr[ng - 2])),
496 "Bin-width normalized extinction (ext*psd) not decreasing"
497 " at large size edge\n"
498 "at atm level #", ip, " and freq point #", f, ".\n"
499 " ext_l0=", ext_l0[f],
500 ", psd_l0=", abs(psd_data(ip, intarr[ng - 1])),
501 ", ext_l0*psd_l0=",
502 ext_l0[f] * abs(psd_data(ip, intarr[ng - 1])),
503 "\n LARGER EQUAL\n"
504 " ext_l1=", ext_l1[f],
505 ", psd_l1=", abs(psd_data(ip, intarr[ng - 2])),
506 ", ext_l1*psd_l1=",
507 ext_l1[f] * abs(psd_data(ip, intarr[ng - 2])), "\n"
508 " Total bulk ext = ", abs(bulkext(ip, f)), "\n"
509 " Need to add larger sized particles!\n")
510 }
511
512 // check that contribution of edge bins to total extinction is
513 // sufficiently small
514 if (abs(bulkext(ip, f)) > threshold_bext) {
515 if (abs(pnd_data(ip, intarr[0])) > threshold_rpnd * max0) {
516 contrib =
517 abs(pnd_data(ip, intarr[0])) * ext_s0[f] / abs(bulkext(ip, f));
518 //cout << " small edge contrib = pnd*ext/bext = "
519 // << abs(pnd_data(ip,intarr[0])) << "*" << ext_s0[f] << "/"
520 // << abs(bulkext(ip,f)) << " = " << contrib << "\n";
521 ARTS_USER_ERROR_IF (abs(pnd_data(ip, intarr[0])) * ext_s0[f] >
522 threshold_rsec * abs(bulkext(ip, f)),
523 "Contribution of edge bin to total extinction too high"
524 " (", contrib * 1e2, "% of ", abs(bulkext(ip, f)),
525 ") at small size edge\n"
526 "at atm level #", ip, " and freq point #", f, ".\n"
527 " Need to add smaller sized particles or refine the size"
528 " grid on the small size edge!\n")
529 }
530 if (abs(pnd_data(ip, intarr[ng - 1])) > threshold_rpnd * max1) {
531 contrib = abs(pnd_data(ip, intarr[ng - 1])) * ext_l0[f] /
532 abs(bulkext(ip, f));
533 //cout << " large edge contrib = pnd*ext/bext = "
534 // << abs(pnd_data(ip,ng-1)) << "*" << ext_l0[f] << "/"
535 // << abs(bulkext(ip,f)) << " = " << contrib << "\n";
536 ARTS_USER_ERROR_IF (abs(pnd_data(ip, intarr[ng - 1])) * ext_l0[f] >
537 threshold_rsec * abs(bulkext(ip, f)),
538 "Contribution of edge bin to total extinction too high"
539 " (", contrib * 1e2, "% of ", abs(bulkext(ip, f)),
540 ") at large size edge\n"
541 "at atm level #", ip, " and freq point #", f, ".\n"
542 " Need to add larger sized particles or refine the size"
543 " grid on the large size edge!\n")
544 }
545 }
546 }
547 }
548 }
549}
550
551
552
553/* Workspace method: Doxygen documentation will be auto-generated */
555 Workspace& ws,
556 Tensor4& pnd_field,
557 ArrayOfTensor4& dpnd_field_dx,
558 const Index& atmosphere_dim,
559 const Vector& p_grid,
560 const Vector& lat_grid,
561 const Vector& lon_grid,
562 const Tensor3& t_field,
563 const Index& cloudbox_on,
564 const ArrayOfIndex& cloudbox_limits,
565 const ArrayOfString& scat_species,
566 const ArrayOfArrayOfSingleScatteringData& scat_data,
567 const ArrayOfArrayOfScatteringMetaData& scat_meta,
568 const Tensor4& particle_bulkprop_field,
569 const ArrayOfString& particle_bulkprop_names,
570 const ArrayOfAgenda& pnd_agenda_array,
571 const ArrayOfArrayOfString& pnd_agenda_array_input_names,
572 const Index& jacobian_do,
573 const ArrayOfRetrievalQuantity& jacobian_quantities,
574 const Verbosity&) {
575
576 // Do nothing if cloudbox is inactive
577 if (!cloudbox_on) {
578 return;
579 }
580
581 ARTS_USER_ERROR_IF (particle_bulkprop_field.empty(),
582 "*particle_bulkprop_field* is empty.");
583
584 // Checks (not totally complete, but should cover most mistakes)
585 chk_if_in_range("atmosphere_dim", atmosphere_dim, 1, 3);
586 chk_atm_grids(atmosphere_dim, p_grid, lat_grid, lon_grid);
587 chk_atm_field("t_field", t_field, atmosphere_dim, p_grid, lat_grid, lon_grid);
588 chk_atm_field("particle_bulkprop_field",
589 particle_bulkprop_field,
590 atmosphere_dim,
591 particle_bulkprop_names.nelem(),
592 p_grid,
593 lat_grid,
594 lon_grid);
595
596 // Number of scattering species
597 const Index nss = scat_data.nelem();
598
599 ARTS_USER_ERROR_IF (particle_bulkprop_names.nelem() == 0 ||
600 particle_bulkprop_names.nelem() != particle_bulkprop_field.nbooks(),
601 "Number of fields in *particle_bulkprop_field*"
602 " inconsistent with number of names in"
603 " *particle_bulkprop_names*.");
604 ARTS_USER_ERROR_IF (particle_bulkprop_field.nbooks() < nss,
605 "At least one field per scattering species required"
606 " in *particle_bulkprop_field*.");
607
608 ARTS_USER_ERROR_IF (cloudbox_limits.nelem() != 2 * atmosphere_dim,
609 "Length of *cloudbox_limits* incorrect with respect "
610 "to *atmosphere_dim*.");
611 ARTS_USER_ERROR_IF (cloudbox_limits[1] <= cloudbox_limits[0] || cloudbox_limits[0] < 0 ||
612 cloudbox_limits[1] >= p_grid.nelem(),
613 "Invalid data in pressure part of *cloudbox_limits*.");
614 if (atmosphere_dim > 1) {
615 ARTS_USER_ERROR_IF (cloudbox_limits[3] <= cloudbox_limits[2] || cloudbox_limits[2] < 0 ||
616 cloudbox_limits[3] >= lat_grid.nelem(),
617 "Invalid data in latitude part of *cloudbox_limits*.");
618 if (atmosphere_dim > 2) {
619 ARTS_USER_ERROR_IF (cloudbox_limits[5] <= cloudbox_limits[4] || cloudbox_limits[4] < 0 ||
620 cloudbox_limits[5] >= lon_grid.nelem(),
621 "Invalid data in longitude part of *cloudbox_limits*.");
622 }
623 }
624
625 ARTS_USER_ERROR_IF (nss < 1, "*scat_data* is empty!.");
626 ARTS_USER_ERROR_IF (scat_species.nelem() != nss,
627 "*scat_data* and *scat_species* are inconsistent in size.");
628 ARTS_USER_ERROR_IF (scat_meta.nelem() != nss,
629 "*scat_data* and *scat_meta* are inconsistent in size.");
630 ARTS_USER_ERROR_IF (pnd_agenda_array.nelem() != nss,
631 "*scat_data* and *pnd_agenda_array* are inconsistent "
632 "in size.");
633 ARTS_USER_ERROR_IF (pnd_agenda_array_input_names.nelem() != nss,
634 "*scat_data* and *pnd_agenda_array_input_names* are "
635 "inconsistent in size.");
636 // Further checks of scat_data vs. scat_meta below
637
638 // Effective lengths of cloudbox
639 const Index np = cloudbox_limits[1] - cloudbox_limits[0] + 1;
640 Index np_nonzero = np; // Can be changed below
641 Index ip_offset = cloudbox_limits[0]; // Can be changed below
642 Index pf_offset = 0; // Can be changed below
643 Index nlat = 1;
644 Index ilat_offset = 0;
645 if (atmosphere_dim > 1) {
646 nlat = cloudbox_limits[3] - cloudbox_limits[2] + 1;
647 ilat_offset = cloudbox_limits[2];
648 }
649 Index nlon = 1;
650 Index ilon_offset = 0;
651 if (atmosphere_dim > 2) {
652 nlon = cloudbox_limits[5] - cloudbox_limits[4] + 1;
653 ilon_offset = cloudbox_limits[4];
654 }
655
656 // Check that *particle_bulkprop_field* contains zeros outside and at
657 // cloudbox boundaries
658 constexpr std::string_view estring =
659 "*particle_bulkprop_field* allowed to contain"
660 " non-zero values only inside the cloudbox.";
661 // Pressure end ranges
662 if (cloudbox_limits[0] != 0) {
663 ARTS_USER_ERROR_IF (max(particle_bulkprop_field(
664 joker, Range(0, ip_offset + 1), joker, joker)) > 0 ||
665 min(particle_bulkprop_field(
666 joker, Range(0, ip_offset + 1), joker, joker)) < 0,
667 estring);
668 np_nonzero--;
669 ip_offset++;
670 pf_offset = 1;
671 }
672 if (cloudbox_limits[1] != p_grid.nelem() - 1) {
673 const Index np_above = p_grid.nelem() + 1 - (np + ip_offset);
674 ARTS_USER_ERROR_IF (max(particle_bulkprop_field(
675 joker, Range(cloudbox_limits[1], np_above), joker, joker)) > 0 ||
676 min(particle_bulkprop_field(
677 joker, Range(cloudbox_limits[1], np_above), joker, joker)) < 0,
678 estring);
679 np_nonzero--;
680 }
681
682 // Cumulative number of scattering elements
683 ArrayOfIndex ncumse(nss + 1);
684 ncumse[0] = 0;
685 for (Index i = 0; i < nss; i++) {
686 ARTS_USER_ERROR_IF (scat_data[i].nelem() != scat_meta[i].nelem(),
687 "*scat_data* and *scat_meta* have inconsistent sizes.");
688 ncumse[i + 1] = ncumse[i] + scat_data[i].nelem();
689 }
690
691 // Allocate output variables
692 //
693 pnd_field.resize(ncumse[nss], np, nlat, nlon);
694 pnd_field = 0.0;
695 //
696 // Help variables for partial derivatives
697 Index nq = 0;
698 ArrayOfArrayOfIndex scatspecies_to_jq;
699 //
700 if (!jacobian_do) {
701 dpnd_field_dx.resize(0);
702 } else {
703 nq = jacobian_quantities.nelem();
704 dpnd_field_dx.resize(nq);
705 scatspecies_to_jq.resize(nss);
706 //
707 for (Index iq = 0; iq < nq; iq++) {
708 if (jacobian_quantities[iq] == Jacobian::Special::ScatteringString) {
709 const Index ihit =
710 find_first(scat_species, jacobian_quantities[iq].Subtag());
711 ARTS_USER_ERROR_IF (ihit < 0,
712 "Jacobian quantity with index ", iq, " refers to\n"
713 " ", jacobian_quantities[iq].Subtag(),
714 "\nbut this species could not be found in *scat_species*.")
715 scatspecies_to_jq[ihit].push_back(iq);
716 dpnd_field_dx[iq].resize(ncumse[nss], np, nlat, nlon);
717 dpnd_field_dx[iq] = 0.0;
718 }
719 }
720 }
721
722 // Extract data from pnd-agenda array
723 for (Index is = 0; is < nss; is++) {
724 // Index range with respect to pnd_field
725 Range se_range(ncumse[is], ncumse[is + 1] - ncumse[is]);
726
727 // Determine how pnd_agenda_array_input_names are related to input fields
728 //
729 const Index nin = pnd_agenda_array_input_names[is].nelem();
730 ArrayOfIndex i_pbulkprop(nin);
731 //
732 for (Index i = 0; i < nin; i++) {
733 i_pbulkprop[i] = find_first(particle_bulkprop_names,
734 pnd_agenda_array_input_names[is][i]);
735 ARTS_USER_ERROR_IF (i_pbulkprop[i] < 0,
736 "Pnd-agenda with index ", is, " is set to require \"",
737 pnd_agenda_array_input_names[is][i], "\",\nbut this quantity "
738 "could not found in *particle_bulkprop_names*.\n"
739 "(Note that temperature must be written as \"Temperature\")")
740 }
741
742 // Set *dpnd_data_dx_names*
743 //
744 Index ndx = 0;
745 ArrayOfString dpnd_data_dx_names(0);
746 //
747 if (jacobian_do) {
748 ndx = scatspecies_to_jq[is].nelem();
749 dpnd_data_dx_names.resize(ndx);
750 for (Index ix = 0; ix < ndx; ix++) {
751 dpnd_data_dx_names[ix] =
752 jacobian_quantities[scatspecies_to_jq[is][ix]].SubSubtag();
753 }
754 }
755
756 // Loop lat/lon positions and call *pnd_agenda*
757 for (Index ilon = 0; ilon < nlon; ilon++) {
758 for (Index ilat = 0; ilat < nlat; ilat++) {
759
760 // Nothing to do for lat/lon end points, if not 1D
761 if ((nlat > 1 && (ilat == 0 || ilat == nlat - 1)) ||
762 (nlon > 1 && (ilon == 0 || ilon == nlon - 1))) {
763 continue;
764 }
765
766 Matrix pnd_agenda_input(np_nonzero, nin);
767 //
768 for (Index i = 0; i < nin; i++) {
769 for (Index ip = 0; ip < np_nonzero; ip++) {
770 pnd_agenda_input(ip, i) =
771 particle_bulkprop_field(i_pbulkprop[i],
772 ip_offset + ip,
773 ilat_offset + ilat,
774 ilon_offset + ilon);
775 }
776 }
777
778 Vector pnd_agenda_input_t(np);
779 //
780 for (Index ip = 0; ip < np_nonzero; ip++) {
781 pnd_agenda_input_t[ip] =
782 t_field(ip_offset + ip, ilat_offset + ilat, ilon_offset + ilon);
783 }
784
785 // Call pnd-agenda array
786 Matrix pnd_data;
787 Tensor3 dpnd_data_dx;
788 //
790 pnd_data,
791 dpnd_data_dx,
792 is,
793 pnd_agenda_input_t,
794 pnd_agenda_input,
795 pnd_agenda_array_input_names[is],
796 dpnd_data_dx_names,
797 pnd_agenda_array);
798
799 // Copy to output variables
800 for (Index ip = 0; ip < np_nonzero; ip++) {
801 pnd_field(se_range, pf_offset+ip, ilat, ilon) = pnd_data(ip, joker);
802 }
803 for (Index ix = 0; ix < ndx; ix++) {
804 for (Index ip = 0; ip < np_nonzero; ip++) {
805 dpnd_field_dx[scatspecies_to_jq[is][ix]](se_range, pf_offset+ip, ilat, ilon) =
806 dpnd_data_dx(ix, ip, joker);
807 }
808 }
809 }
810 }
811 }
812}
813
814
815
816/* Workspace method: Doxygen documentation will be auto-generated */
817void ScatSpeciesSizeMassInfo(Vector& scat_species_x,
818 Numeric& scat_species_a,
819 Numeric& scat_species_b,
820 const ArrayOfArrayOfScatteringMetaData& scat_meta,
821 const Index& species_index,
822 const String& x_unit,
823 const Numeric& x_fit_start,
824 const Numeric& x_fit_end,
825 const Index& do_only_x,
826 const Verbosity&) {
827 // Checks
828 const Index nss = scat_meta.nelem();
829 ARTS_USER_ERROR_IF (nss == 0, "*scat_meta* is empty!");
830 ARTS_USER_ERROR_IF (nss < species_index + 1,
831 "Selected scattering species index is ", species_index,
832 " but this "
833 "is not allowed since *scat_meta* has only ", nss, " elements.")
834 //
835 const Index nse = scat_meta[species_index].nelem();
836 ARTS_USER_ERROR_IF (nse < 2,
837 "The scattering species must have at least two "
838 "elements to use this method.");
839
840 // Extract particle masses
841 //
842 Vector mass(nse);
843 //
844 for (Index i = 0; i < nse; i++) {
845 mass[i] = scat_meta[species_index][i].mass;
846 }
847
848 // Create size grid
849 //
850 scat_species_x.resize(nse);
851 //
852 if (x_unit == "dveq") {
853 for (Index i = 0; i < nse; i++) {
854 scat_species_x[i] = scat_meta[species_index][i].diameter_volume_equ;
855 }
856 //
857 if (do_only_x) {
858 scat_species_a = -1;
859 scat_species_b = -1;
860 } else
861 derive_scat_species_a_and_b(scat_species_a,
862 scat_species_b,
863 scat_species_x,
864 mass,
865 x_fit_start,
866 x_fit_end);
867 }
868
869 else if (x_unit == "dmax") {
870 for (Index i = 0; i < nse; i++) {
871 scat_species_x[i] = scat_meta[species_index][i].diameter_max;
872 }
873 //
874 if (do_only_x) {
875 scat_species_a = -1;
876 scat_species_b = -1;
877 } else
878 derive_scat_species_a_and_b(scat_species_a,
879 scat_species_b,
880 scat_species_x,
881 mass,
882 x_fit_start,
883 x_fit_end);
884 }
885
886 else if (x_unit == "area") {
887 for (Index i = 0; i < nse; i++) {
888 scat_species_x[i] =
889 scat_meta[species_index][i].diameter_area_equ_aerodynamical;
890 }
891 //
892 if (do_only_x) {
893 scat_species_a = -1;
894 scat_species_b = -1;
895 } else
896 derive_scat_species_a_and_b(scat_species_a,
897 scat_species_b,
898 scat_species_x,
899 mass,
900 x_fit_start,
901 x_fit_end);
902 }
903
904 else if (x_unit == "mass") {
905 scat_species_x = mass;
906 //
907 scat_species_a = 1;
908 scat_species_b = 1;
909 }
910
911 else {
912 ARTS_USER_ERROR ("You have selected the x_unit: ", x_unit, "\nwhile accepted "
913 "choices are: \"dveq\", \"dmax\", \"mass\" and \"area\"")
914 }
915}
This file contains the definition of Array.
Index TotalNumberOfElements(const Array< Array< base > > &aa)
Determine total number of elements in an ArrayOfArray.
Definition array.h:224
Index find_first(const Array< base > &x, const base &w)
Find first occurrence.
Definition array.h:173
base max(const Array< base > &x)
Max function.
Definition array.h:128
base min(const Array< base > &x)
Min function.
Definition array.h:144
The global header file for ARTS.
Constants of physical expressions as constexpr.
void pnd_agenda_arrayExecute(Workspace &ws, Matrix &pnd_data, Tensor3 &dpnd_data_dx, const Index agenda_array_index, const Vector &pnd_agenda_input_t, const Matrix &pnd_agenda_input, const ArrayOfString &pnd_agenda_input_names, const ArrayOfString &dpnd_data_dx_names, const ArrayOfAgenda &input_agenda_array)
Definition auto_md.cc:27445
void chk_atm_grids(const Index &dim, ConstVectorView p_grid, ConstVectorView lat_grid, ConstVectorView lon_grid)
chk_atm_grids
void chk_if_in_range(const String &x_name, const Index &x, const Index &x_low, const Index &x_high)
chk_if_in_range
void chk_atm_field(const String &x_name, ConstTensor3View x, const Index &dim, ConstVectorView p_grid, ConstVectorView lat_grid, ConstVectorView lon_grid, const bool &chk_lat90)
chk_atm_field (simple fields)
This can be used to make arrays out of anything.
Definition array.h:31
Index nelem() const ARTS_NOEXCEPT
Definition array.h:75
void set_name(const String &s)
Set name of this gridded field.
void set_grid_name(Index i, const String &s)
Set grid name.
void set_grid(Index i, const Vector &g)
Set a numeric grid.
Workspace class.
void bin_quadweights(Vector &w, const Vector &x, const Index &order)
Definition cloudbox.cc:555
Internal cloudbox functions.
#define ARTS_USER_ERROR(...)
Definition debug.h:153
#define ARTS_USER_ERROR_IF(condition,...)
Definition debug.h:137
Functions for disort interface.
This file contains basic functions to handle ASCII files.
Header file for interpolation.cc.
void pndFromPsdBasic(Matrix &pnd_data, Tensor3 &dpnd_data_dx, const Vector &pnd_size_grid, const Matrix &psd_data, const Vector &psd_size_grid, const Tensor3 &dpsd_data_dx, const Index &quad_order, const Verbosity &)
WORKSPACE METHOD: pndFromPsdBasic.
void particle_massesFromMetaDataSingleCategory(Matrix &particle_masses, const ArrayOfArrayOfScatteringMetaData &scat_meta, const Verbosity &)
WORKSPACE METHOD: particle_massesFromMetaDataSingleCategory.
void pndFromPsd(Matrix &pnd_data, Tensor3 &dpnd_data_dx, const Vector &pnd_size_grid, const Matrix &psd_data, const Vector &psd_size_grid, const Tensor3 &dpsd_data_dx, const ArrayOfArrayOfSingleScatteringData &scat_data, const Vector &f_grid, const Index &scat_data_checked, const Index &quad_order, const Index &scat_index, const Numeric &threshold_rsec, const Numeric &threshold_bext, const Numeric &threshold_rpnd, const Verbosity &)
WORKSPACE METHOD: pndFromPsd.
void ScatSpeciesSizeMassInfo(Vector &scat_species_x, Numeric &scat_species_a, Numeric &scat_species_b, const ArrayOfArrayOfScatteringMetaData &scat_meta, const Index &species_index, const String &x_unit, const Numeric &x_fit_start, const Numeric &x_fit_end, const Index &do_only_x, const Verbosity &)
WORKSPACE METHOD: ScatSpeciesSizeMassInfo.
constexpr Numeric PI
void pnd_fieldCalcFromParticleBulkProps(Workspace &ws, Tensor4 &pnd_field, ArrayOfTensor4 &dpnd_field_dx, const Index &atmosphere_dim, const Vector &p_grid, const Vector &lat_grid, const Vector &lon_grid, const Tensor3 &t_field, const Index &cloudbox_on, const ArrayOfIndex &cloudbox_limits, const ArrayOfString &scat_species, const ArrayOfArrayOfSingleScatteringData &scat_data, const ArrayOfArrayOfScatteringMetaData &scat_meta, const Tensor4 &particle_bulkprop_field, const ArrayOfString &particle_bulkprop_names, const ArrayOfAgenda &pnd_agenda_array, const ArrayOfArrayOfString &pnd_agenda_array_input_names, const Index &jacobian_do, const ArrayOfRetrievalQuantity &jacobian_quantities, const Verbosity &)
WORKSPACE METHOD: pnd_fieldCalcFromParticleBulkProps.
void HydrotableCalc(Workspace &ws, GriddedField4 &hydrotable, const ArrayOfAgenda &pnd_agenda_array, const ArrayOfArrayOfString &pnd_agenda_array_input_names, const ArrayOfArrayOfSingleScatteringData &scat_data, const Index &scat_data_checked, const Vector &f_grid, const Index &iss, const Vector &T_grid, const Vector &wc_grid, const Verbosity &)
WORKSPACE METHOD: HydrotableCalc.
void particle_massesFromMetaData(Matrix &particle_masses, const ArrayOfArrayOfScatteringMetaData &scat_meta, const Verbosity &)
WORKSPACE METHOD: particle_massesFromMetaData.
void nlinspace(Vector &x, const Numeric start, const Numeric stop, const Index n)
nlinspace
Declarations having to do with the four output streams.
Numeric asymmetry_parameter(ConstVectorView sa_grid, ConstVectorView pfun)
asymmetry_parameter
void derive_scat_species_a_and_b(Numeric &a, Numeric &b, const Vector &x, const Vector &mass, const Numeric &x_fit_start, const Numeric &x_fit_end)
Internal functions for microphysics calculations (size distributions etc.)
constexpr Numeric pi
The following mathematical constants are generated using the Python Decimal package by the code:
void ext_abs_pfun_from_tro(MatrixView ext_data, MatrixView abs_data, Tensor3View pfun_data, const ArrayOfSingleScatteringData &scat_data, const Index &iss, ConstMatrixView pnd_data, ArrayOfIndex &cloudbox_limits, ConstVectorView T_grid, ConstVectorView sa_grid, const Index f_index)
Extinction, absorption and phase function for one scattering species, 1D and TRO.
Scattering database structure and functions.
This file contains header information for the dealing with command line parameters.
This file contains declarations of functions of physical character.
Internal functions associated with size distributions.
Declaration of functions in rte.cc.
Contains sorting routines.
void get_sorted_indexes(ArrayOfIndex &sorted, const T &data)
get_sorted_indexes
Definition sorting.h:39
Header file for special_interp.cc.
This file contains basic functions to handle XML data files.