ARTS 2.5.10 (git: 2f1c442c)
m_microphysics.cc
1/* Copyright (C) 2011-2017
2 Jana Mendrok <jana.mendrok@gmail.com>
3 Daniel Kreyling <daniel.kreyling@nict.go.jp>
4 Manfred Brath <manfred.brath@uni-hamburg.de>
5 Patrick Eriksson <patrick.eriksson@chalmers.se>
6
7 This program is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 2, or (at your option) any
10 later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307,
20 USA. */
21
22/*===========================================================================
23 === File description
24 ===========================================================================*/
25
37/*===========================================================================
38 === External declarations
39 ===========================================================================*/
40#include <cmath>
41#include <cstdlib>
42#include <stdexcept>
43
44#include "array.h"
45#include "arts.h"
46#include "arts_constants.h"
47#include "auto_md.h"
48#include "check_input.h"
49#include "cloudbox.h"
50#include "disort.h"
51#include "file.h"
52#include "interpolation.h"
53#include "lin_alg.h"
54#include "logic.h"
55#include "math_funcs.h"
56#include "messages.h"
57#include "microphysics.h"
58#include "optproperties.h"
59#include "parameters.h"
60#include "physics_funcs.h"
61#include "psd.h"
62#include "rte.h"
63#include "sorting.h"
64#include "special_interp.h"
65#include "xml_io.h"
66
67inline constexpr Numeric PI=Constant::pi;
68
69/*===========================================================================
70 === The functions (in alphabetical order)
71 ===========================================================================*/
72
73/* Workspace method: Doxygen documentation will be auto-generated */
74void HydrotableCalc(Workspace& ws,
75                    GriddedField4& hydrotable,
76 const ArrayOfAgenda& pnd_agenda_array,
77 const ArrayOfArrayOfString& pnd_agenda_array_input_names,
78                    const ArrayOfArrayOfSingleScatteringData& scat_data,
79                    const Index& scat_data_checked,
80 const Vector& f_grid,
81 const Index& iss,
82 const Vector& T_grid,
83 const Vector& wc_grid,
84 const Verbosity&)
85{
86 // Sizes
87 const Index nss = scat_data.nelem();
88 const Index nf = f_grid.nelem();
89 const Index nt = T_grid.nelem();
90 const Index nw = wc_grid.nelem();
91
92 ARTS_USER_ERROR_IF (pnd_agenda_array.nelem() != nss,
93 "*scat_data* and *pnd_agenda_array* are inconsistent "
94 "in size.");
95 ARTS_USER_ERROR_IF (pnd_agenda_array_input_names.nelem() != nss,
96 "*scat_data* and *pnd_agenda_array_input_names* are "
97 "inconsistent in size.");
98 ARTS_USER_ERROR_IF (pnd_agenda_array_input_names[iss].nelem() != 1,
99 "This method requires one-moment PSDs, but *pnd_agenda_array_input_names* "
100 "for the selected scattering species does not have length one.");
101 ARTS_USER_ERROR_IF (!scat_data_checked,
102 "The scat_data must be flagged to have passed a "
103 "consistency check (scat_data_checked=1).");
104
105 // Allocate *hydrotable*
106 hydrotable.set_name("Table of particle optical properties");
107 hydrotable.data.resize(4, nf, nt, nw);
108 //
109 hydrotable.set_grid_name(0, "Quantity");
110 hydrotable.set_grid(0, {"Extinction [m-1]",
111 "Single scattering albedo [-]",
112 "Asymmetry parameter [-]",
113 "Radar reflectivity [m2]"});
114 hydrotable.set_grid_name(1, "Frequency [Hz]");
115 hydrotable.set_grid(1, f_grid);
116 hydrotable.set_grid_name(2, "Temperature [K]");
117 hydrotable.set_grid(2, T_grid);
118 hydrotable.set_grid_name(3, "Particle content [kg/m3]");
119 hydrotable.set_grid(3, wc_grid);
120
121 // Scattering angle grid
122 const Index nsa = 361;
123 Vector sa_grid;
124 nlinspace(sa_grid, 0, 180, nsa);
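  // (361 points over 0-180 deg give 0.5 deg angular resolution; the last
  // node, 180 deg, provides the backscattering value used for the radar
  // reflectivity entry of the table below.)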
125
126 // Local variables
127 Matrix pnd_data;
128 Tensor3 dpnd_data_dx;
129 Matrix pnd_agenda_input(nt, 1);
130 ArrayOfString dpnd_data_dx_names(0);
131 Matrix ext(nf, nt);
132 Matrix abs(nf, nt);
133 Tensor3 pfun(nf, nt, nsa);
134 const Numeric fourpi = 4.0 * PI;
135 ArrayOfIndex cbox_limits = {0, nt-1};
136
137 // Loop and fill table
138 for (Index iw = 0; iw < nw; iw++) {
139 // Call pnd_agenda
140 pnd_agenda_input = wc_grid[iw];
141    pnd_agenda_arrayExecute(ws,
142                            pnd_data,
143 dpnd_data_dx,
144 iss,
145 T_grid,
146 pnd_agenda_input,
147 pnd_agenda_array_input_names[iss],
148 dpnd_data_dx_names,
149 pnd_agenda_array);
150
151    // Calculate extinction, absorption and phase function
152 ext = 0.0;
153 abs = 0.0;
154 pfun = 0.0;
155    ext_abs_pfun_from_tro(ext,
156                          abs,
157 pfun,
158 scat_data[iss],
159 iss,
160 transpose(pnd_data),
161 cbox_limits,
162 T_grid,
163 sa_grid);
164
165 // Fill the hydrotable for present particle content
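    // (Row 0: extinction; row 1: single scattering albedo, 1 - abs/ext;
    //  row 2: asymmetry parameter from the phase function; row 3: radar
    //  reflectivity, taken as 4*pi times the phase function at 180 deg.)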
166 for (Index iv = 0; iv < nf; iv++) {
167 for (Index it = 0; it < nt; it++) {
168 hydrotable.data(0,iv,it,iw) = ext(iv,it);
169 hydrotable.data(1,iv,it,iw) = 1.0 - (abs(iv,it) / ext(iv,it));
170 hydrotable.data(2,iv,it,iw) = asymmetry_parameter(sa_grid,
171 pfun(iv,it,joker));
172 hydrotable.data(3,iv,it,iw) = fourpi * pfun(iv,it,nsa-1);
173 }
174 }
175 }
176}
177
178
179/* Workspace method: Doxygen documentation will be auto-generated */
180void particle_massesFromMetaDataSingleCategory(
181    Matrix& particle_masses,
182 const ArrayOfArrayOfScatteringMetaData& scat_meta,
183 const Verbosity&) {
184 const Index np_total = TotalNumberOfElements(scat_meta);
185
186 particle_masses.resize(np_total, 1);
187
188 Index i_se_flat = 0;
189 for (Index i_ss = 0; i_ss < scat_meta.nelem(); i_ss++) {
190 for (Index i_se = 0; i_se < scat_meta[i_ss].nelem(); i_se++) {
191 ARTS_USER_ERROR_IF (std::isnan(scat_meta[i_ss][i_se].mass) ||
192 scat_meta[i_ss][i_se].mass <= 0 || scat_meta[i_ss][i_se].mass > 1.,
193 "A presumably incorrect value found for "
194 "scat_meta[", i_ss, "][", i_se, "].mass.\n"
195 "The value is ", scat_meta[i_ss][i_se].mass)
196
197 particle_masses(i_se_flat, 0) = scat_meta[i_ss][i_se].mass;
198
199 i_se_flat++;
200 }
201 }
202}
203
204/* Workspace method: Doxygen documentation will be auto-generated */
205void particle_massesFromMetaData(  // WS Output:
206    Matrix& particle_masses,
207 // WS Input:
208 const ArrayOfArrayOfScatteringMetaData& scat_meta,
209 const Verbosity&) {
210  // resize particle_masses to required dimensions and properly initialize values
211 particle_masses.resize(TotalNumberOfElements(scat_meta), scat_meta.nelem());
212 particle_masses = 0.;
213
214 // calculate and set particle_masses
215 Index i_se_flat = 0;
216 for (Index i_ss = 0; i_ss < scat_meta.nelem(); i_ss++) {
217 for (Index i_se = 0; i_se < scat_meta[i_ss].nelem(); i_se++) {
218 ARTS_USER_ERROR_IF (std::isnan(scat_meta[i_ss][i_se].mass) ||
219 scat_meta[i_ss][i_se].mass <= 0 || scat_meta[i_ss][i_se].mass > 1.,
220 "A presumably incorrect value found for "
221 "scat_meta[", i_ss, "][", i_se, "].mass.\n"
222 "The value is ", scat_meta[i_ss][i_se].mass)
223
224 particle_masses(i_se_flat, i_ss) = scat_meta[i_ss][i_se].mass;
225 i_se_flat++;
226 }
227 }
228}
229
230/* Workspace method: Doxygen documentation will be auto-generated */
231void pndFromPsdBasic(Matrix& pnd_data,
232 Tensor3& dpnd_data_dx,
233 const Vector& pnd_size_grid,
234 const Matrix& psd_data,
235 const Vector& psd_size_grid,
236 const Tensor3& dpsd_data_dx,
237 const Index& quad_order,
238 const Verbosity&) {
239 // Some sizes
240 const Index np = psd_data.nrows();
241 const Index ng = psd_size_grid.nelem();
242 Index ndx = 0;
243 const bool do_dx = !dpsd_data_dx.empty();
244
245 // Checks
246 ARTS_USER_ERROR_IF (ng < 2,
247 "The method requires that length of *psd_size_grid* is >= 2.");
248 ARTS_USER_ERROR_IF (ng != pnd_size_grid.nelem(),
249 "So far, the method requires that *psd_size_grid* and "
250                      "*pnd_size_grid* have the same length.");
251 for (Index i = 0; i < ng; i++) {
252 ARTS_USER_ERROR_IF (psd_size_grid[i] != pnd_size_grid[i],
253 "So far, the method requires that *psd_size_grid* and "
254 "*pnd_size_grid* are identical.");
255 }
256 ARTS_USER_ERROR_IF (psd_data.ncols() != ng,
257 "Number of columns in *psd_data* and length of "
258 "*psd_size_grid* must match.");
259
260 pnd_data.resize(np, ng);
261 if (do_dx) {
262 ARTS_USER_ERROR_IF (dpsd_data_dx.ncols() != ng,
263 "Number of columns in *dpsd_data_dx* and length of "
264 "*psd_size_grid* must match.");
265 ndx = dpsd_data_dx.npages();
266 dpnd_data_dx.resize(ndx, np, ng);
267 } else {
268 dpnd_data_dx.resize(0, 0, 0);
269 }
270
271  // Get a sorted version of psd_size_grid (and, since pnd_size_grid is so far
272  // required to be identical, implicitly of that grid as well)
273 ArrayOfIndex intarr;
274 Vector psd_size_grid_sorted(ng);
275 get_sorted_indexes(intarr, psd_size_grid);
276 for (Index i = 0; i < ng; i++)
277 psd_size_grid_sorted[i] = psd_size_grid[intarr[i]];
278
279 ARTS_USER_ERROR_IF (!is_increasing(psd_size_grid_sorted),
280 "*psd_size_grid* is not allowed to contain "
281 "duplicate values.");
282
283  // Calculate pnd by integration of psd for given nodes/bins
284 Vector quadweights(ng);
285 bin_quadweights(quadweights, psd_size_grid_sorted, quad_order);
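  // (The weights act as effective bin widths around each size node, so the
  // integral of the PSD over each bin is approximated as psd * weight; see
  // bin_quadweights in cloudbox.cc for the exact definition per quad_order.)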
286
287 for (Index i = 0; i < ng; i++) {
288 for (Index ip = 0; ip < np; ip++) {
289 pnd_data(ip, intarr[i]) = quadweights[i] * psd_data(ip, intarr[i]);
290 }
291
292 if (do_dx) {
293 for (Index ip = 0; ip < np; ip++) {
294 for (Index ix = 0; ix < ndx; ix++) {
295 dpnd_data_dx(ix, ip, intarr[i]) =
296 quadweights[i] * dpsd_data_dx(ix, ip, intarr[i]);
297 }
298 }
299 }
300 }
301}
302
303/* Workspace method: Doxygen documentation will be auto-generated */
304void pndFromPsd(Matrix& pnd_data,
305 Tensor3& dpnd_data_dx,
306 const Vector& pnd_size_grid,
307 const Matrix& psd_data,
308 const Vector& psd_size_grid,
309 const Tensor3& dpsd_data_dx,
310 const ArrayOfArrayOfSingleScatteringData& scat_data,
311 const Vector& f_grid,
312 const Index& scat_data_checked,
313 const Index& quad_order,
314 const Index& scat_index,
315 const Numeric& threshold_rsec,
316 const Numeric& threshold_bext,
317 const Numeric& threshold_rpnd,
318 const Verbosity&) {
319 // Some sizes
320 const Index np = psd_data.nrows();
321 const Index ng = psd_size_grid.nelem();
322 Index ndx = 0;
323 const bool do_dx = !dpsd_data_dx.empty();
324
325 // Checks
326 ARTS_USER_ERROR_IF (ng < 3,
327 "The method requires that length of *psd_size_grid* is >= 3.");
328 ARTS_USER_ERROR_IF (ng != pnd_size_grid.nelem(),
329 "So far, the method requires that *psd_size_grid* and"
330 " *pnd_size_grid* have the same length.");
331 for (Index i = 0; i < ng; i++) {
332 ARTS_USER_ERROR_IF (psd_size_grid[i] != pnd_size_grid[i],
333 "So far, the method requires that *psd_size_grid* and"
334 " *pnd_size_grid* are identical.");
335 }
336 ARTS_USER_ERROR_IF (psd_data.ncols() != ng,
337 "Number of columns in *psd_data* and length of"
338 " *psd_size_grid* must match.");
339
340 pnd_data.resize(np, ng);
341 if (do_dx) {
342 ARTS_USER_ERROR_IF (dpsd_data_dx.ncols() != ng,
343 "Number of columns in *dpsd_data_dx* and length of"
344 " *psd_size_grid* must match.");
345 ndx = dpsd_data_dx.npages();
346 dpnd_data_dx.resize(ndx, np, ng);
347 } else {
348 dpnd_data_dx.resize(0, 0, 0);
349 }
350
351 ARTS_USER_ERROR_IF (!scat_data_checked,
352 "*scat_data* must have passed a consistency check"
353 " (scat_data_checked=1).\n"
354 "Alternatively, use *pndFromPsdBasic*.");
355 ARTS_USER_ERROR_IF (scat_index >= scat_data.nelem(),
356 "*scat_index* exceeds the number of available"
357 " scattering species.");
358 ARTS_USER_ERROR_IF (scat_data[scat_index].nelem() != ng,
359 "Number of scattering elements in this scattering"
360 " species (*scat_index*) inconsistent with length of"
361 " *pnd_size_grid*.");
362
363  // Get a sorted version of psd_size_grid (and, since pnd_size_grid is so far
364  // required to be identical, implicitly of that grid as well)
365 ArrayOfIndex intarr;
366 Vector psd_size_grid_sorted(ng);
367 get_sorted_indexes(intarr, psd_size_grid);
368 for (Index i = 0; i < ng; i++)
369 psd_size_grid_sorted[i] = psd_size_grid[intarr[i]];
370
371 ARTS_USER_ERROR_IF (!is_increasing(psd_size_grid_sorted),
372 "*psd_size_grid* is not allowed to contain "
373 "duplicate values.");
374
375 // Calculate pnd by integration of psd for given nodes/bins
376 Vector quadweights(ng);
377 bin_quadweights(quadweights, psd_size_grid_sorted, quad_order);
378
379 for (Index i = 0; i < ng;
380 i++) //loop over pnd_size_grid aka scattering elements
381 {
382 for (Index ip = 0; ip < np; ip++) //loop over pressure levels
383 {
384 pnd_data(ip, intarr[i]) = quadweights[i] * psd_data(ip, intarr[i]);
385 }
386
387 if (do_dx) {
388 for (Index ip = 0; ip < np; ip++) {
389 for (Index ix = 0; ix < ndx; ix++) {
390 dpnd_data_dx(ix, ip, intarr[i]) =
391 quadweights[i] * dpsd_data_dx(ix, ip, intarr[i]);
392 }
393 }
394 }
395 }
396
397 ArrayOfSingleScatteringData sds = scat_data[scat_index];
398 Index fstart = 0;
399 Index nf = f_grid.nelem();
400 Matrix bulkext(np, nf, 0.);
401 Vector ext(nf), ext_s0(nf), ext_s1(nf), ext_l0(nf), ext_l1(nf);
402
403 // check that pnd_size_grid and scat_data cover (scalar) bulk extinction properly.
404  // (Formerly we used mass, but heavy particles might not necessarily
405  // contribute much to the optical properties. The same holds for the total particle number.)
406 //
407 // We do that by
408 // (a) checking that psd*ext decreases at the edges, i.e. to increase the
409 // probability that the main extinction peak is covered and in order to
410  // avoid passing check (b) merely by making the edge bins tiny
411  // (b) checking that the extinction contributions of the edge bins are small
412  // compared to the total bulk extinction.
413 // The tests are somewhat over-sensitive on the small particle side in the
414 // sense that the checks are triggered easily by low mass density cases - which
415 // have their main extinction contributed by small particles, but for which
416 // the total extinction is quite low. Hence, there is also a total extinction
417  // threshold, below which the checks are not applied. Furthermore, such low
418  // densities typically do not occur throughout the whole atmospheric profile,
419  // but higher-density layers might exist, meaning the low-density layers
420  // contribute very little to the total optical properties and hence should not
421  // dominate the check criteria (no need to be very strict with them if they
422  // hardly contribute anyway). Therefore, the checks are also not applied if the
423 // edge bin number density at a given atmospheric level is low compared to the
424 // maximum number of this scattering element in the profile.
425 //
426 // Technically, we need to cover all freqs separately (as optprops vary
427 // strongly with freq). We indeed do that here.
428 // FIXME: Shall a single freq or reduced freq-number option be implemented?
429 // If so, the don't-apply-test criteria need to be checked again though (so it
430 // does not happen that effectively all of the reduced-freq testing is skipped
431 // due to them).
432 //
433 // Technically, we also need to use identical temperatures (and preferably the
434 // ones used for deriving psd). But all scat elements can be on different
435  // T-grids, i.e., we would need to interpolate. Instead we just use the first in
436 // line.
437 // FIXME: Maybe refine in future allowing to select between low, high, median
438 // grid point or even specify one temp to use.
439 //
440  // And, technically, ext might be directionally dependent (for oriented
441 // particles). Also this might be different for each scat element. However, at
442 // least for now, we only check the 0-direction.
443 // FIXME: Check further directions?
444
445 for (Index ise = 0; ise < ng;
446 ise++) //loop over pnd_size_grid aka scattering elements
447 {
448 if (sds[intarr[ise]].ext_mat_data.nshelves() > 1)
449 ext = sds[intarr[ise]].ext_mat_data(joker, 0, 0, 0, 0);
450 else
451 ext = sds[intarr[ise]].ext_mat_data(0, 0, 0, 0, 0);
452
453 // keep the ext data of the edge bins
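    // (s0/s1 denote the smallest and second smallest size bins, l0/l1 the
    //  largest and second largest; these are used for the edge checks below.)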
454 if (ise == 0)
455 ext_s0 = ext;
456 else if (ise == 1)
457 ext_s1 = ext;
458 else if (ise == ng - 2)
459 ext_l1 = ext;
460 else if (ise == ng - 1)
461 ext_l0 = ext;
462
463 for (Index ip = 0; ip < np; ip++) //loop over pressure levels
464 if (abs(pnd_data(ip, joker).sum()) > 0.)
465 for (Index f = fstart; f < (fstart + nf); f++)
466 bulkext(ip, f) += pnd_data(ip, intarr[ise]) * ext[f];
467 }
468
469 Numeric max0 = 0, max1 = 0;
470 for (Index ip = 0; ip < np; ip++) //loop over pressure levels
471 {
472 max0 = max(abs(pnd_data(ip, intarr[0])), max0);
473 max1 = max(abs(pnd_data(ip, intarr[ng - 1])), max1);
474 }
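  // max0 and max1 are the profile maxima of the edge-bin pnd values; levels
  // where the edge-bin pnd is small compared to these (relative to
  // threshold_rpnd) are excluded from the edge-contribution checks below.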
475
476 Numeric contrib;
477 for (Index ip = 0; ip < np; ip++) //loop over pressure levels
478 {
479 if (abs(pnd_data(ip, joker).sum()) > 0.) {
480 for (Index f = fstart; f < (fstart + nf); f++) {
481 /* for( Index ise=0; ise<ng; ise++ )
482 {
483 if( sds[ise].ext_mat_data.nshelves()>1 )
484 ext = sds[ise].ext_mat_data(joker,0,0,0,0);
485 else
486 ext = sds[ise].ext_mat_data(0,0,0,0,0);
487 cout << " se #" << ise << ": contrib = pnd*ext/bext = "
488 << abs(pnd_data(ip,ise)) << "*" << ext[f] << "/"
489 << abs(bulkext(ip,f)) << " = "
490 << 1e2*abs(pnd_data(ip,ise))*ext[f]/abs(bulkext(ip,f))
491 << "%\n";
492 }*/
493
494 // check that bin-width normalized extinction (or ext*psd) is
495 // decreasing.
496 if (abs(bulkext(ip, f)) > 1e-2 * threshold_bext) {
497 ARTS_USER_ERROR_IF (abs(psd_data(ip, intarr[0])) > 0. and
498 ext_s0[f] * abs(psd_data(ip, intarr[0])) >=
499 ext_s1[f] * abs(psd_data(ip, intarr[1])),
500 " Bin-width normalized extinction (ext*psd) not decreasing"
501 " at small size edge\n"
502 " at atm level #", ip, " and freq point #", f, ".\n"
503 " ext_s0=", ext_s0[f],
504 ", psd_s0=", abs(psd_data(ip, intarr[0])),
505 ", ext_s0*psd_s0=", ext_s0[f] * abs(psd_data(ip, intarr[0])),
506 "\n LARGER EQUAL\n"
507 " ext_s1=", ext_s1[f],
508 ", psd_s1=", abs(psd_data(ip, intarr[1])),
509 ", ext_s1*psd_s1=", ext_s1[f] * abs(psd_data(ip, intarr[1])),
510 "\n"
511 " Total bulk ext = ", abs(bulkext(ip, f)), "\n"
512 " Need to add smaller sized particles!\n")
513
514 ARTS_USER_ERROR_IF (abs(psd_data(ip, intarr[ng - 1])) > 0. and
515 ext_l0[f] * abs(psd_data(ip, intarr[ng - 1])) >=
516 ext_l1[f] * abs(psd_data(ip, intarr[ng - 2])),
517 "Bin-width normalized extinction (ext*psd) not decreasing"
518 " at large size edge\n"
519 "at atm level #", ip, " and freq point #", f, ".\n"
520 " ext_l0=", ext_l0[f],
521 ", psd_l0=", abs(psd_data(ip, intarr[ng - 1])),
522 ", ext_l0*psd_l0=",
523 ext_l0[f] * abs(psd_data(ip, intarr[ng - 1])),
524 "\n LARGER EQUAL\n"
525 " ext_l1=", ext_l1[f],
526 ", psd_l1=", abs(psd_data(ip, intarr[ng - 2])),
527 ", ext_l1*psd_l1=",
528 ext_l1[f] * abs(psd_data(ip, intarr[ng - 2])), "\n"
529 " Total bulk ext = ", abs(bulkext(ip, f)), "\n"
530 " Need to add larger sized particles!\n")
531 }
532
533 // check that contribution of edge bins to total extinction is
534 // sufficiently small
535 if (abs(bulkext(ip, f)) > threshold_bext) {
536 if (abs(pnd_data(ip, intarr[0])) > threshold_rpnd * max0) {
537 contrib =
538 abs(pnd_data(ip, intarr[0])) * ext_s0[f] / abs(bulkext(ip, f));
539 //cout << " small edge contrib = pnd*ext/bext = "
540 // << abs(pnd_data(ip,intarr[0])) << "*" << ext_s0[f] << "/"
541 // << abs(bulkext(ip,f)) << " = " << contrib << "\n";
542 ARTS_USER_ERROR_IF (abs(pnd_data(ip, intarr[0])) * ext_s0[f] >
543 threshold_rsec * abs(bulkext(ip, f)),
544 "Contribution of edge bin to total extinction too high"
545 " (", contrib * 1e2, "% of ", abs(bulkext(ip, f)),
546 ") at small size edge\n"
547 "at atm level #", ip, " and freq point #", f, ".\n"
548 " Need to add smaller sized particles or refine the size"
549 " grid on the small size edge!\n")
550 }
551 if (abs(pnd_data(ip, intarr[ng - 1])) > threshold_rpnd * max1) {
552 contrib = abs(pnd_data(ip, intarr[ng - 1])) * ext_l0[f] /
553 abs(bulkext(ip, f));
554 //cout << " large edge contrib = pnd*ext/bext = "
555 // << abs(pnd_data(ip,ng-1)) << "*" << ext_l0[f] << "/"
556 // << abs(bulkext(ip,f)) << " = " << contrib << "\n";
557 ARTS_USER_ERROR_IF (abs(pnd_data(ip, intarr[ng - 1])) * ext_l0[f] >
558 threshold_rsec * abs(bulkext(ip, f)),
559 "Contribution of edge bin to total extinction too high"
560 " (", contrib * 1e2, "% of ", abs(bulkext(ip, f)),
561 ") at large size edge\n"
562 "at atm level #", ip, " and freq point #", f, ".\n"
563 " Need to add larger sized particles or refine the size"
564 " grid on the large size edge!\n")
565 }
566 }
567 }
568 }
569 }
570}
571
572
573
574/* Workspace method: Doxygen documentation will be auto-generated */
575void pnd_fieldCalcFromParticleBulkProps(
576    Workspace& ws,
577 Tensor4& pnd_field,
578 ArrayOfTensor4& dpnd_field_dx,
579 const Index& atmosphere_dim,
580 const Vector& p_grid,
581 const Vector& lat_grid,
582 const Vector& lon_grid,
583 const Tensor3& t_field,
584 const Index& cloudbox_on,
585 const ArrayOfIndex& cloudbox_limits,
586 const ArrayOfString& scat_species,
587 const ArrayOfArrayOfSingleScatteringData& scat_data,
588 const ArrayOfArrayOfScatteringMetaData& scat_meta,
589 const Tensor4& particle_bulkprop_field,
590 const ArrayOfString& particle_bulkprop_names,
591 const ArrayOfAgenda& pnd_agenda_array,
592 const ArrayOfArrayOfString& pnd_agenda_array_input_names,
593 const Index& jacobian_do,
594 const ArrayOfRetrievalQuantity& jacobian_quantities,
595 const Verbosity&) {
596
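  // Overview: for each scattering species, the corresponding agenda in
  // *pnd_agenda_array* is evaluated column by column inside the cloudbox,
  // taking the matching fields of *particle_bulkprop_field* and the local
  // temperature profile as input; the returned pnd values (and their
  // derivatives, if scattering-species Jacobians are active) are copied into
  // *pnd_field* and *dpnd_field_dx*.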
597 // Do nothing if cloudbox is inactive
598 if (!cloudbox_on) {
599 return;
600 }
601
602 ARTS_USER_ERROR_IF (particle_bulkprop_field.empty(),
603 "*particle_bulkprop_field* is empty.");
604
605 // Checks (not totally complete, but should cover most mistakes)
606 chk_if_in_range("atmosphere_dim", atmosphere_dim, 1, 3);
607 chk_atm_grids(atmosphere_dim, p_grid, lat_grid, lon_grid);
608 chk_atm_field("t_field", t_field, atmosphere_dim, p_grid, lat_grid, lon_grid);
609 chk_atm_field("particle_bulkprop_field",
610 particle_bulkprop_field,
611 atmosphere_dim,
612 particle_bulkprop_names.nelem(),
613 p_grid,
614 lat_grid,
615 lon_grid);
616
617 // Number of scattering species
618 const Index nss = scat_data.nelem();
619
620 ARTS_USER_ERROR_IF (particle_bulkprop_names.nelem() == 0 ||
621 particle_bulkprop_names.nelem() != particle_bulkprop_field.nbooks(),
622 "Number of fields in *particle_bulkprop_field*"
623 " inconsistent with number of names in"
624 " *particle_bulkprop_names*.");
625 ARTS_USER_ERROR_IF (particle_bulkprop_field.nbooks() < nss,
626 "At least one field per scattering species required"
627 " in *particle_bulkprop_field*.");
628
629 ARTS_USER_ERROR_IF (cloudbox_limits.nelem() != 2 * atmosphere_dim,
630 "Length of *cloudbox_limits* incorrect with respect "
631 "to *atmosphere_dim*.");
632 ARTS_USER_ERROR_IF (cloudbox_limits[1] <= cloudbox_limits[0] || cloudbox_limits[0] < 0 ||
633 cloudbox_limits[1] >= p_grid.nelem(),
634 "Invalid data in pressure part of *cloudbox_limits*.");
635 if (atmosphere_dim > 1) {
636 ARTS_USER_ERROR_IF (cloudbox_limits[3] <= cloudbox_limits[2] || cloudbox_limits[2] < 0 ||
637 cloudbox_limits[3] >= lat_grid.nelem(),
638 "Invalid data in latitude part of *cloudbox_limits*.");
639 if (atmosphere_dim > 2) {
640 ARTS_USER_ERROR_IF (cloudbox_limits[5] <= cloudbox_limits[4] || cloudbox_limits[4] < 0 ||
641 cloudbox_limits[5] >= lon_grid.nelem(),
642 "Invalid data in longitude part of *cloudbox_limits*.");
643 }
644 }
645
646  ARTS_USER_ERROR_IF (nss < 1, "*scat_data* is empty!");
647 ARTS_USER_ERROR_IF (scat_species.nelem() != nss,
648 "*scat_data* and *scat_species* are inconsistent in size.");
649 ARTS_USER_ERROR_IF (scat_meta.nelem() != nss,
650 "*scat_data* and *scat_meta* are inconsistent in size.");
651 ARTS_USER_ERROR_IF (pnd_agenda_array.nelem() != nss,
652 "*scat_data* and *pnd_agenda_array* are inconsistent "
653 "in size.");
654 ARTS_USER_ERROR_IF (pnd_agenda_array_input_names.nelem() != nss,
655 "*scat_data* and *pnd_agenda_array_input_names* are "
656 "inconsistent in size.");
657 // Further checks of scat_data vs. scat_meta below
658
659 // Effective lengths of cloudbox
660 const Index np = cloudbox_limits[1] - cloudbox_limits[0] + 1;
661 Index np_nonzero = np; // Can be changed below
662 Index ip_offset = cloudbox_limits[0]; // Can be changed below
663 Index pf_offset = 0; // Can be changed below
664 Index nlat = 1;
665 Index ilat_offset = 0;
666 if (atmosphere_dim > 1) {
667 nlat = cloudbox_limits[3] - cloudbox_limits[2] + 1;
668 ilat_offset = cloudbox_limits[2];
669 }
670 Index nlon = 1;
671 Index ilon_offset = 0;
672 if (atmosphere_dim > 2) {
673 nlon = cloudbox_limits[5] - cloudbox_limits[4] + 1;
674 ilon_offset = cloudbox_limits[4];
675 }
676
677 // Check that *particle_bulkprop_field* contains zeros outside and at
678 // cloudbox boundaries
679 constexpr std::string_view estring =
680      "*particle_bulkprop_field* is allowed to contain"
681 " non-zero values only inside the cloudbox.";
682 // Pressure end ranges
683 if (cloudbox_limits[0] != 0) {
684 ARTS_USER_ERROR_IF (max(particle_bulkprop_field(
685 joker, Range(0, ip_offset + 1), joker, joker)) > 0 ||
686 min(particle_bulkprop_field(
687 joker, Range(0, ip_offset + 1), joker, joker)) < 0,
688 estring);
689 np_nonzero--;
690 ip_offset++;
691 pf_offset = 1;
692 }
693 if (cloudbox_limits[1] != p_grid.nelem() - 1) {
694 const Index np_above = p_grid.nelem() + 1 - (np + ip_offset);
695 ARTS_USER_ERROR_IF (max(particle_bulkprop_field(
696 joker, Range(cloudbox_limits[1], np_above), joker, joker)) > 0 ||
697 min(particle_bulkprop_field(
698 joker, Range(cloudbox_limits[1], np_above), joker, joker)) < 0,
699 estring);
700 np_nonzero--;
701 }
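  // (When the cloudbox does not reach an end of p_grid, the boundary levels
  // must hold zero bulk properties; the pnd agenda is then only evaluated for
  // the interior levels, which is what np_nonzero, ip_offset and pf_offset
  // keep track of.)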
702
703 // Cumulative number of scattering elements
704 ArrayOfIndex ncumse(nss + 1);
705 ncumse[0] = 0;
706 for (Index i = 0; i < nss; i++) {
707 ARTS_USER_ERROR_IF (scat_data[i].nelem() != scat_meta[i].nelem(),
708 "*scat_data* and *scat_meta* have inconsistent sizes.");
709 ncumse[i + 1] = ncumse[i] + scat_data[i].nelem();
710 }
711
712 // Allocate output variables
713 //
714 pnd_field.resize(ncumse[nss], np, nlat, nlon);
715 pnd_field = 0.0;
716 //
717 // Help variables for partial derivatives
718 Index nq = 0;
719 ArrayOfArrayOfIndex scatspecies_to_jq;
720 //
721 if (!jacobian_do) {
722 dpnd_field_dx.resize(0);
723 } else {
724 nq = jacobian_quantities.nelem();
725 dpnd_field_dx.resize(nq);
726 scatspecies_to_jq.resize(nss);
727 //
728 for (Index iq = 0; iq < nq; iq++) {
729 if (jacobian_quantities[iq] == Jacobian::Special::ScatteringString) {
730 const Index ihit =
731 find_first(scat_species, jacobian_quantities[iq].Subtag());
732 ARTS_USER_ERROR_IF (ihit < 0,
733 "Jacobian quantity with index ", iq, " refers to\n"
734 " ", jacobian_quantities[iq].Subtag(),
735 "\nbut this species could not be found in *scat_species*.")
736 scatspecies_to_jq[ihit].push_back(iq);
737 dpnd_field_dx[iq].resize(ncumse[nss], np, nlat, nlon);
738 dpnd_field_dx[iq] = 0.0;
739 }
740 }
741 }
742
743 // Extract data from pnd-agenda array
744 for (Index is = 0; is < nss; is++) {
745 // Index range with respect to pnd_field
746 Range se_range(ncumse[is], ncumse[is + 1] - ncumse[is]);
747
748 // Determine how pnd_agenda_array_input_names are related to input fields
749 //
750 const Index nin = pnd_agenda_array_input_names[is].nelem();
751 ArrayOfIndex i_pbulkprop(nin);
752 //
753 for (Index i = 0; i < nin; i++) {
754 i_pbulkprop[i] = find_first(particle_bulkprop_names,
755 pnd_agenda_array_input_names[is][i]);
756 ARTS_USER_ERROR_IF (i_pbulkprop[i] < 0,
757 "Pnd-agenda with index ", is, " is set to require \"",
758 pnd_agenda_array_input_names[is][i], "\",\nbut this quantity "
759                    "could not be found in *particle_bulkprop_names*.\n"
760 "(Note that temperature must be written as \"Temperature\")")
761 }
762
763 // Set *dpnd_data_dx_names*
764 //
765 Index ndx = 0;
766 ArrayOfString dpnd_data_dx_names(0);
767 //
768 if (jacobian_do) {
769 ndx = scatspecies_to_jq[is].nelem();
770 dpnd_data_dx_names.resize(ndx);
771 for (Index ix = 0; ix < ndx; ix++) {
772 dpnd_data_dx_names[ix] =
773 jacobian_quantities[scatspecies_to_jq[is][ix]].SubSubtag();
774 }
775 }
776
777 // Loop lat/lon positions and call *pnd_agenda*
778 for (Index ilon = 0; ilon < nlon; ilon++) {
779 for (Index ilat = 0; ilat < nlat; ilat++) {
780
781 // Nothing to do for lat/lon end points, if not 1D
782 if ((nlat > 1 && (ilat == 0 || ilat == nlat - 1)) ||
783 (nlon > 1 && (ilon == 0 || ilon == nlon - 1))) {
784 continue;
785 }
786
787 Matrix pnd_agenda_input(np_nonzero, nin);
788 //
789 for (Index i = 0; i < nin; i++) {
790 for (Index ip = 0; ip < np_nonzero; ip++) {
791 pnd_agenda_input(ip, i) =
792 particle_bulkprop_field(i_pbulkprop[i],
793 ip_offset + ip,
794 ilat_offset + ilat,
795 ilon_offset + ilon);
796 }
797 }
798
799 Vector pnd_agenda_input_t(np);
800 //
801 for (Index ip = 0; ip < np_nonzero; ip++) {
802 pnd_agenda_input_t[ip] =
803 t_field(ip_offset + ip, ilat_offset + ilat, ilon_offset + ilon);
804 }
805
806 // Call pnd-agenda array
807 Matrix pnd_data;
808 Tensor3 dpnd_data_dx;
809 //
810      pnd_agenda_arrayExecute(ws,
811                              pnd_data,
812 dpnd_data_dx,
813 is,
814 pnd_agenda_input_t,
815 pnd_agenda_input,
816 pnd_agenda_array_input_names[is],
817 dpnd_data_dx_names,
818 pnd_agenda_array);
819
820 // Copy to output variables
821 for (Index ip = 0; ip < np_nonzero; ip++) {
822 pnd_field(se_range, pf_offset+ip, ilat, ilon) = pnd_data(ip, joker);
823 }
824 for (Index ix = 0; ix < ndx; ix++) {
825 for (Index ip = 0; ip < np_nonzero; ip++) {
826 dpnd_field_dx[scatspecies_to_jq[is][ix]](se_range, pf_offset+ip, ilat, ilon) =
827 dpnd_data_dx(ix, ip, joker);
828 }
829 }
830 }
831 }
832 }
833}
834
835
836
837/* Workspace method: Doxygen documentation will be auto-generated */
838void ScatSpeciesSizeMassInfo(Vector& scat_species_x,
839 Numeric& scat_species_a,
840 Numeric& scat_species_b,
841 const ArrayOfArrayOfScatteringMetaData& scat_meta,
842 const Index& species_index,
843 const String& x_unit,
844 const Numeric& x_fit_start,
845 const Numeric& x_fit_end,
846 const Index& do_only_x,
847 const Verbosity&) {
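  // The returned scat_species_a and scat_species_b are the parameters of the
  // assumed power-law mass-size relation, mass = a * x^b, fitted to the
  // scattering elements with sizes inside [x_fit_start, x_fit_end]
  // (except for x_unit "mass", where a = b = 1 holds trivially).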
848 // Checks
849 const Index nss = scat_meta.nelem();
850 ARTS_USER_ERROR_IF (nss == 0, "*scat_meta* is empty!");
851 ARTS_USER_ERROR_IF (nss < species_index + 1,
852 "Selected scattering species index is ", species_index,
853 " but this "
854 "is not allowed since *scat_meta* has only ", nss, " elements.")
855 //
856 const Index nse = scat_meta[species_index].nelem();
857 ARTS_USER_ERROR_IF (nse < 2,
858 "The scattering species must have at least two "
859 "elements to use this method.");
860
861 // Extract particle masses
862 //
863 Vector mass(nse);
864 //
865 for (Index i = 0; i < nse; i++) {
866 mass[i] = scat_meta[species_index][i].mass;
867 }
868
869 // Create size grid
870 //
871 scat_species_x.resize(nse);
872 //
873 if (x_unit == "dveq") {
874 for (Index i = 0; i < nse; i++) {
875 scat_species_x[i] = scat_meta[species_index][i].diameter_volume_equ;
876 }
877 //
878 if (do_only_x) {
879 scat_species_a = -1;
880 scat_species_b = -1;
881 } else
882 derive_scat_species_a_and_b(scat_species_a,
883 scat_species_b,
884 scat_species_x,
885 mass,
886 x_fit_start,
887 x_fit_end);
888 }
889
890 else if (x_unit == "dmax") {
891 for (Index i = 0; i < nse; i++) {
892 scat_species_x[i] = scat_meta[species_index][i].diameter_max;
893 }
894 //
895 if (do_only_x) {
896 scat_species_a = -1;
897 scat_species_b = -1;
898 } else
899 derive_scat_species_a_and_b(scat_species_a,
900 scat_species_b,
901 scat_species_x,
902 mass,
903 x_fit_start,
904 x_fit_end);
905 }
906
907 else if (x_unit == "area") {
908 for (Index i = 0; i < nse; i++) {
909 scat_species_x[i] =
910 scat_meta[species_index][i].diameter_area_equ_aerodynamical;
911 }
912 //
913 if (do_only_x) {
914 scat_species_a = -1;
915 scat_species_b = -1;
916 } else
917 derive_scat_species_a_and_b(scat_species_a,
918 scat_species_b,
919 scat_species_x,
920 mass,
921 x_fit_start,
922 x_fit_end);
923 }
924
925 else if (x_unit == "mass") {
926 scat_species_x = mass;
927 //
928 scat_species_a = 1;
929 scat_species_b = 1;
930 }
931
932 else {
933 ARTS_USER_ERROR ("You have selected the x_unit: ", x_unit, "\nwhile accepted "
934 "choices are: \"dveq\", \"dmax\", \"mass\" and \"area\"")
935 }
936}