ARTS 2.5.9 (git: 825fa5f2)
m_microphysics.cc
/* Copyright (C) 2011-2017
   Jana Mendrok <jana.mendrok@gmail.com>
   Daniel Kreyling <daniel.kreyling@nict.go.jp>
   Manfred Brath <manfred.brath@uni-hamburg.de>
   Patrick Eriksson <patrick.eriksson@chalmers.se>

   This program is free software; you can redistribute it and/or modify it
   under the terms of the GNU General Public License as published by the
   Free Software Foundation; either version 2, or (at your option) any
   later version.

   This program is distributed in the hope that it will be useful,
   but WITHOUT ANY WARRANTY; without even the implied warranty of
   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
   GNU General Public License for more details.

   You should have received a copy of the GNU General Public License
   along with this program; if not, write to the Free Software
   Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307,
   USA. */

/*===========================================================================
  === File description
  ===========================================================================*/

/*===========================================================================
  === External declarations
  ===========================================================================*/
#include <cmath>
#include <cstdlib>
#include <stdexcept>

#include "array.h"
#include "arts.h"
#include "arts_constants.h"
#include "auto_md.h"
#include "check_input.h"
#include "cloudbox.h"
#include "disort.h"
#include "file.h"
#include "interpolation.h"
#include "lin_alg.h"
#include "logic.h"
#include "math_funcs.h"
#include "messages.h"
#include "microphysics.h"
#include "optproperties.h"
#include "parameters.h"
#include "physics_funcs.h"
#include "psd.h"
#include "rte.h"
#include "sorting.h"
#include "special_interp.h"
#include "xml_io.h"

inline constexpr Numeric PI=Constant::pi;

/*===========================================================================
  === The functions (in alphabetical order)
  ===========================================================================*/

/* Workspace method: Doxygen documentation will be auto-generated */
void HydrotableCalc(Workspace& ws,
                    GriddedField4& hydrotable,
                    const ArrayOfAgenda& pnd_agenda_array,
                    const ArrayOfArrayOfString& pnd_agenda_array_input_names,
                    const ArrayOfArrayOfSingleScatteringData& scat_data,
                    const Index& scat_data_checked,
                    const Vector& f_grid,
                    const Index& iss,
                    const Vector& T_grid,
                    const Vector& wc_grid,
                    const Verbosity&)
{
  // Sizes
  const Index nss = scat_data.nelem();
  const Index nf = f_grid.nelem();
  const Index nt = T_grid.nelem();
  const Index nw = wc_grid.nelem();

  ARTS_USER_ERROR_IF (pnd_agenda_array.nelem() != nss,
      "*scat_data* and *pnd_agenda_array* are inconsistent "
      "in size.");
  ARTS_USER_ERROR_IF (pnd_agenda_array_input_names.nelem() != nss,
      "*scat_data* and *pnd_agenda_array_input_names* are "
      "inconsistent in size.");
  ARTS_USER_ERROR_IF (pnd_agenda_array_input_names[iss].nelem() != 1,
      "This method requires one-moment PSDs, but *pnd_agenda_array_input_names* "
      "for the selected scattering species does not have length one.");
  ARTS_USER_ERROR_IF (!scat_data_checked,
      "The scat_data must be flagged to have passed a "
      "consistency check (scat_data_checked=1).");

  // Allocate *hydrotable*
  hydrotable.set_name("Table of particle optical properties");
  hydrotable.data.resize(5, nf, nt, nw);
  //
  hydrotable.set_grid_name(0, "Quantity");
  hydrotable.set_grid(0, {"Extinction [m-1]",
                          "Single scattering albedo [-]",
                          "Asymmetry parameter [-]",
                          "Radar reflectivity [m2]",
                          "Test [-]"});
  hydrotable.set_grid_name(1, "Frequency [Hz]");
  hydrotable.set_grid(1, f_grid);
  hydrotable.set_grid_name(2, "Temperature [K]");
  hydrotable.set_grid(2, T_grid);
  hydrotable.set_grid_name(3, "Particle content [kg/m3]");
  hydrotable.set_grid(3, wc_grid);

  // Scattering angle grid
  const Index nsa = 361;
  Vector sa_grid;
  nlinspace(sa_grid, 0, 180, nsa);

  // Local variables
  Matrix pnd_data;
  Tensor3 dpnd_data_dx;
  Matrix pnd_agenda_input(nt, 1);
  ArrayOfString dpnd_data_dx_names(0);
  Matrix ext(nf, nt);
  Matrix abs(nf, nt);
  Tensor3 pfun(nf, nt, nsa);
  const Numeric fourpi = 4.0 * PI;
  ArrayOfIndex cbox_limits = {0, nt-1};

  // Loop and fill table
  for (Index iw = 0; iw < nw; iw++) {
    // Call pnd_agenda
    pnd_agenda_input = wc_grid[iw];
    pnd_agenda_arrayExecute(ws,
                            pnd_data,
                            dpnd_data_dx,
                            iss,
                            T_grid,
                            pnd_agenda_input,
                            pnd_agenda_array_input_names[iss],
                            dpnd_data_dx_names,
                            pnd_agenda_array);

    // Calculate extinction, absorption and phase function
    ext = 0.0;
    abs = 0.0;
    pfun = 0.0;
    ext_abs_pfun_from_tro(ext,
                          abs,
                          pfun,
                          scat_data[iss],
                          iss,
                          transpose(pnd_data),
                          cbox_limits,
                          T_grid,
                          sa_grid);

    // Fill the hydrotable for present particle content
    for (Index iv = 0; iv < nf; iv++) {
      for (Index it = 0; it < nt; it++) {
        hydrotable.data(0,iv,it,iw) = ext(iv,it);
        hydrotable.data(1,iv,it,iw) = 1.0 - (abs(iv,it) / ext(iv,it));
        hydrotable.data(2,iv,it,iw) = asymmetry_parameter(sa_grid,
                                                          pfun(iv,it,joker));
        hydrotable.data(3,iv,it,iw) = fourpi * pfun(iv,it,nsa-1);
      }
    }
  }
}
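/* Illustrative sketch (not part of ARTS): the "Asymmetry parameter" entry of
   the hydrotable is the cos(theta)-weighted mean of the phase function over
   the sphere, and the "Radar reflectivity" entry is 4*pi times the phase
   function at the backscattering angle (the last point of sa_grid, 180 deg).
   The helper below shows the kind of angular integration involved, assuming a
   plain trapezoidal rule on a scattering-angle grid given in degrees; the
   actual ARTS routine is asymmetry_parameter() in microphysics.cc and may
   differ in detail. Uses std::vector, so add #include <vector> if compiling
   this sketch on its own. */
static double asymmetry_parameter_sketch(const std::vector<double>& sa_deg,
                                         const std::vector<double>& pfun) {
  const double deg2rad = PI / 180.0;
  double num = 0.0;  // integral of p(theta) * cos(theta) * sin(theta)
  double den = 0.0;  // integral of p(theta) * sin(theta)
  for (size_t i = 0; i + 1 < sa_deg.size(); ++i) {
    const double t0 = sa_deg[i] * deg2rad;
    const double t1 = sa_deg[i + 1] * deg2rad;
    const double w = 0.5 * (t1 - t0);  // trapezoidal weight
    num += w * (pfun[i] * std::cos(t0) * std::sin(t0) +
                pfun[i + 1] * std::cos(t1) * std::sin(t1));
    den += w * (pfun[i] * std::sin(t0) + pfun[i + 1] * std::sin(t1));
  }
  return num / den;  // g, in [-1, 1]
}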

/* Workspace method: Doxygen documentation will be auto-generated */
void particle_massesFromMetaDataSingleCategory(
    Matrix& particle_masses,
    const ArrayOfArrayOfScatteringMetaData& scat_meta,
    const Verbosity&) {
  const Index np_total = TotalNumberOfElements(scat_meta);

  particle_masses.resize(np_total, 1);

  Index i_se_flat = 0;
  for (Index i_ss = 0; i_ss < scat_meta.nelem(); i_ss++) {
    for (Index i_se = 0; i_se < scat_meta[i_ss].nelem(); i_se++) {
      ARTS_USER_ERROR_IF (std::isnan(scat_meta[i_ss][i_se].mass) ||
          scat_meta[i_ss][i_se].mass <= 0 || scat_meta[i_ss][i_se].mass > 1.,
          "A presumably incorrect value was found for "
          "scat_meta[", i_ss, "][", i_se, "].mass.\n"
          "The value is ", scat_meta[i_ss][i_se].mass)

      particle_masses(i_se_flat, 0) = scat_meta[i_ss][i_se].mass;

      i_se_flat++;
    }
  }
}

/* Workspace method: Doxygen documentation will be auto-generated */
void particle_massesFromMetaData(
    Matrix& particle_masses,
    // WS Input:
    const ArrayOfArrayOfScatteringMetaData& scat_meta,
    const Verbosity&) {
  // Resize particle_masses to the required dimensions and properly initialize values
  particle_masses.resize(TotalNumberOfElements(scat_meta), scat_meta.nelem());
  particle_masses = 0.;

  // Calculate and set particle_masses
  Index i_se_flat = 0;
  for (Index i_ss = 0; i_ss < scat_meta.nelem(); i_ss++) {
    for (Index i_se = 0; i_se < scat_meta[i_ss].nelem(); i_se++) {
      ARTS_USER_ERROR_IF (std::isnan(scat_meta[i_ss][i_se].mass) ||
          scat_meta[i_ss][i_se].mass <= 0 || scat_meta[i_ss][i_se].mass > 1.,
          "A presumably incorrect value was found for "
          "scat_meta[", i_ss, "][", i_se, "].mass.\n"
          "The value is ", scat_meta[i_ss][i_se].mass)

      particle_masses(i_se_flat, i_ss) = scat_meta[i_ss][i_se].mass;
      i_se_flat++;
    }
  }
}

/* Workspace method: Doxygen documentation will be auto-generated */
void pndFromPsdBasic(Matrix& pnd_data,
                     Tensor3& dpnd_data_dx,
                     const Vector& pnd_size_grid,
                     const Matrix& psd_data,
                     const Vector& psd_size_grid,
                     const Tensor3& dpsd_data_dx,
                     const Index& quad_order,
                     const Verbosity&) {
  // Some sizes
  const Index np = psd_data.nrows();
  const Index ng = psd_size_grid.nelem();
  Index ndx = 0;
  const bool do_dx = !dpsd_data_dx.empty();

  // Checks
  ARTS_USER_ERROR_IF (ng < 2,
      "The method requires that the length of *psd_size_grid* is >= 2.");
  ARTS_USER_ERROR_IF (ng != pnd_size_grid.nelem(),
      "So far, the method requires that *psd_size_grid* and "
      "*pnd_size_grid* have the same length.");
  for (Index i = 0; i < ng; i++) {
    ARTS_USER_ERROR_IF (psd_size_grid[i] != pnd_size_grid[i],
        "So far, the method requires that *psd_size_grid* and "
        "*pnd_size_grid* are identical.");
  }
  ARTS_USER_ERROR_IF (psd_data.ncols() != ng,
      "Number of columns in *psd_data* and length of "
      "*psd_size_grid* must match.");

  pnd_data.resize(np, ng);
  if (do_dx) {
    ARTS_USER_ERROR_IF (dpsd_data_dx.ncols() != ng,
        "Number of columns in *dpsd_data_dx* and length of "
        "*psd_size_grid* must match.");
    ndx = dpsd_data_dx.npages();
    dpnd_data_dx.resize(ndx, np, ng);
  } else {
    dpnd_data_dx.resize(0, 0, 0);
  }

  // Get a sorted version of psd_size_grid (and, since pnd_size_grid so far is
  // identical, implicitly of that grid as well)
  ArrayOfIndex intarr;
  Vector psd_size_grid_sorted(ng);
  get_sorted_indexes(intarr, psd_size_grid);
  for (Index i = 0; i < ng; i++)
    psd_size_grid_sorted[i] = psd_size_grid[intarr[i]];

  ARTS_USER_ERROR_IF (!is_increasing(psd_size_grid_sorted),
      "*psd_size_grid* is not allowed to contain "
      "duplicate values.");

  // Calculate pnd by integration of psd over the given nodes/bins
  // (a sketch of the bin quadrature weights is given after this function)
  Vector quadweights(ng);
  bin_quadweights(quadweights, psd_size_grid_sorted, quad_order);

  for (Index i = 0; i < ng; i++) {
    for (Index ip = 0; ip < np; ip++) {
      pnd_data(ip, intarr[i]) = quadweights[i] * psd_data(ip, intarr[i]);
    }

    if (do_dx) {
      for (Index ip = 0; ip < np; ip++) {
        for (Index ix = 0; ix < ndx; ix++) {
          dpnd_data_dx(ix, ip, intarr[i]) =
              quadweights[i] * dpsd_data_dx(ix, ip, intarr[i]);
        }
      }
    }
  }
}
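/* Illustrative sketch (not part of ARTS): pndFromPsdBasic converts a particle
   size distribution psd(D), evaluated at the nodes of psd_size_grid, into
   particle number densities per scattering element by assigning each node a
   bin weight, so that sum_i w_i * psd(D_i) approximates the integral of psd
   over size. The helper below computes trapezoidal-style bin weights for a
   strictly increasing size grid; the weights actually produced by
   bin_quadweights() in cloudbox.cc depend on quad_order and may differ in
   detail. Uses std::vector. */
static std::vector<double> bin_weights_sketch(const std::vector<double>& x) {
  const size_t n = x.size();
  std::vector<double> w(n, 0.0);
  if (n < 2) return w;
  // End bins only extend half-way towards their single neighbour
  w[0] = 0.5 * (x[1] - x[0]);
  w[n - 1] = 0.5 * (x[n - 1] - x[n - 2]);
  // Interior bins span half the distance to each neighbour
  for (size_t i = 1; i + 1 < n; ++i) w[i] = 0.5 * (x[i + 1] - x[i - 1]);
  return w;
}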

/* Workspace method: Doxygen documentation will be auto-generated */
void pndFromPsd(Matrix& pnd_data,
                Tensor3& dpnd_data_dx,
                const Vector& pnd_size_grid,
                const Matrix& psd_data,
                const Vector& psd_size_grid,
                const Tensor3& dpsd_data_dx,
                const ArrayOfArrayOfSingleScatteringData& scat_data,
                const Vector& f_grid,
                const Index& scat_data_checked,
                const Index& quad_order,
                const Index& scat_index,
                const Numeric& threshold_rsec,
                const Numeric& threshold_bext,
                const Numeric& threshold_rpnd,
                const Verbosity&) {
  // Some sizes
  const Index np = psd_data.nrows();
  const Index ng = psd_size_grid.nelem();
  Index ndx = 0;
  const bool do_dx = !dpsd_data_dx.empty();

  // Checks
  ARTS_USER_ERROR_IF (ng < 3,
      "The method requires that the length of *psd_size_grid* is >= 3.");
  ARTS_USER_ERROR_IF (ng != pnd_size_grid.nelem(),
      "So far, the method requires that *psd_size_grid* and"
      " *pnd_size_grid* have the same length.");
  for (Index i = 0; i < ng; i++) {
    ARTS_USER_ERROR_IF (psd_size_grid[i] != pnd_size_grid[i],
        "So far, the method requires that *psd_size_grid* and"
        " *pnd_size_grid* are identical.");
  }
  ARTS_USER_ERROR_IF (psd_data.ncols() != ng,
      "Number of columns in *psd_data* and length of"
      " *psd_size_grid* must match.");

  pnd_data.resize(np, ng);
  if (do_dx) {
    ARTS_USER_ERROR_IF (dpsd_data_dx.ncols() != ng,
        "Number of columns in *dpsd_data_dx* and length of"
        " *psd_size_grid* must match.");
    ndx = dpsd_data_dx.npages();
    dpnd_data_dx.resize(ndx, np, ng);
  } else {
    dpnd_data_dx.resize(0, 0, 0);
  }

  ARTS_USER_ERROR_IF (!scat_data_checked,
      "*scat_data* must have passed a consistency check"
      " (scat_data_checked=1).\n"
      "Alternatively, use *pndFromPsdBasic*.");
  ARTS_USER_ERROR_IF (scat_index >= scat_data.nelem(),
      "*scat_index* exceeds the number of available"
      " scattering species.");
  ARTS_USER_ERROR_IF (scat_data[scat_index].nelem() != ng,
      "Number of scattering elements in this scattering"
      " species (*scat_index*) inconsistent with length of"
      " *pnd_size_grid*.");

  // Get a sorted version of psd_size_grid (and, since pnd_size_grid so far is
  // identical, implicitly of that grid as well)
  ArrayOfIndex intarr;
  Vector psd_size_grid_sorted(ng);
  get_sorted_indexes(intarr, psd_size_grid);
  for (Index i = 0; i < ng; i++)
    psd_size_grid_sorted[i] = psd_size_grid[intarr[i]];

  ARTS_USER_ERROR_IF (!is_increasing(psd_size_grid_sorted),
      "*psd_size_grid* is not allowed to contain "
      "duplicate values.");

  // Calculate pnd by integration of psd over the given nodes/bins
  Vector quadweights(ng);
  bin_quadweights(quadweights, psd_size_grid_sorted, quad_order);

  for (Index i = 0; i < ng; i++)  // loop over pnd_size_grid aka scattering elements
  {
    for (Index ip = 0; ip < np; ip++)  // loop over pressure levels
    {
      pnd_data(ip, intarr[i]) = quadweights[i] * psd_data(ip, intarr[i]);
    }

    if (do_dx) {
      for (Index ip = 0; ip < np; ip++) {
        for (Index ix = 0; ix < ndx; ix++) {
          dpnd_data_dx(ix, ip, intarr[i]) =
              quadweights[i] * dpsd_data_dx(ix, ip, intarr[i]);
        }
      }
    }
  }

  ArrayOfSingleScatteringData sds = scat_data[scat_index];
  Index fstart = 0;
  Index nf = f_grid.nelem();
  Matrix bulkext(np, nf, 0.);
  Vector ext(nf), ext_s0(nf), ext_s1(nf), ext_l0(nf), ext_l1(nf);

  // Check that pnd_size_grid and scat_data cover the (scalar) bulk extinction
  // properly. (Formerly we used mass, but heavy particles do not necessarily
  // contribute much to the optical properties. The same holds for the total
  // particle number.)
  //
  // We do that by
  // (a) checking that psd*ext decreases at the edges, i.e. to increase the
  //     probability that the main extinction peak is covered, and to avoid
  //     passing check (b) by making the edge bins tiny,
  // (b) checking that the extinction contributions of the edge bins are small
  //     compared to the total bulk extinction.
  // The tests are somewhat over-sensitive on the small-particle side, in the
  // sense that the checks are triggered easily by low mass density cases -
  // which have their main extinction contributed by small particles, but for
  // which the total extinction is quite low. Hence, there is also a total
  // extinction threshold below which the checks are not applied. Furthermore,
  // such low densities typically do not occur throughout the whole atmospheric
  // profile, but higher density layers might exist, meaning that the low
  // density layers contribute very little to the total optical properties and
  // hence should not dominate the check criteria (no need to be very strict
  // with them if they hardly contribute anyway). Therefore, the checks are
  // also not applied if the edge bin number density at a given atmospheric
  // level is low compared to the maximum number of this scattering element in
  // the profile.
  //
  // Technically, we need to cover all frequencies separately (as optical
  // properties vary strongly with frequency). We indeed do that here.
  // FIXME: Shall a single-freq or reduced-freq-number option be implemented?
  // If so, the don't-apply-test criteria need to be checked again, so that it
  // does not happen that effectively all of the reduced-freq testing is
  // skipped because of them.
  //
  // Technically, we also need to use identical temperatures (and preferably
  // the ones used for deriving the psd). But all scattering elements can be on
  // different T-grids, i.e. we would need to interpolate. Instead we just use
  // the first one in line.
  // FIXME: Maybe refine in the future, allowing to select between the low,
  // high or median grid point, or even to specify one temperature to use.
  //
  // And, technically, ext might be direction dependent (for oriented
  // particles). This might also differ between scattering elements. However,
  // at least for now, we only check the 0-direction.
  // FIXME: Check further directions?
  // (A compact standalone illustration of criteria (a) and (b) is sketched
  // after this function.)

  for (Index ise = 0; ise < ng; ise++)  // loop over pnd_size_grid aka scattering elements
  {
    if (sds[intarr[ise]].ext_mat_data.nshelves() > 1)
      ext = sds[intarr[ise]].ext_mat_data(joker, 0, 0, 0, 0);
    else
      ext = sds[intarr[ise]].ext_mat_data(0, 0, 0, 0, 0);

    // keep the ext data of the edge bins
    if (ise == 0)
      ext_s0 = ext;
    else if (ise == 1)
      ext_s1 = ext;
    else if (ise == ng - 2)
      ext_l1 = ext;
    else if (ise == ng - 1)
      ext_l0 = ext;

    for (Index ip = 0; ip < np; ip++)  // loop over pressure levels
      if (abs(pnd_data(ip, joker).sum()) > 0.)
        for (Index f = fstart; f < (fstart + nf); f++)
          bulkext(ip, f) += pnd_data(ip, intarr[ise]) * ext[f];
  }

  Numeric max0 = 0, max1 = 0;
  for (Index ip = 0; ip < np; ip++)  // loop over pressure levels
  {
    max0 = max(abs(pnd_data(ip, intarr[0])), max0);
    max1 = max(abs(pnd_data(ip, intarr[ng - 1])), max1);
  }

  Numeric contrib;
  for (Index ip = 0; ip < np; ip++)  // loop over pressure levels
  {
    if (abs(pnd_data(ip, joker).sum()) > 0.) {
      for (Index f = fstart; f < (fstart + nf); f++) {
        /* for( Index ise=0; ise<ng; ise++ )
          {
            if( sds[ise].ext_mat_data.nshelves()>1 )
              ext = sds[ise].ext_mat_data(joker,0,0,0,0);
            else
              ext = sds[ise].ext_mat_data(0,0,0,0,0);
            cout << "  se #" << ise << ": contrib = pnd*ext/bext = "
                 << abs(pnd_data(ip,ise)) << "*" << ext[f] << "/"
                 << abs(bulkext(ip,f)) << " = "
                 << 1e2*abs(pnd_data(ip,ise))*ext[f]/abs(bulkext(ip,f))
                 << "%\n";
          }*/

        // check that bin-width normalized extinction (or ext*psd) is
        // decreasing.
        if (abs(bulkext(ip, f)) > 1e-2 * threshold_bext) {
          ARTS_USER_ERROR_IF (abs(psd_data(ip, intarr[0])) > 0. and
              ext_s0[f] * abs(psd_data(ip, intarr[0])) >=
                  ext_s1[f] * abs(psd_data(ip, intarr[1])),
              "  Bin-width normalized extinction (ext*psd) not decreasing"
              " at small size edge\n"
              "  at atm level #", ip, " and freq point #", f, ".\n"
              "  ext_s0=", ext_s0[f],
              ", psd_s0=", abs(psd_data(ip, intarr[0])),
              ", ext_s0*psd_s0=", ext_s0[f] * abs(psd_data(ip, intarr[0])),
              "\n  LARGER EQUAL\n"
              "  ext_s1=", ext_s1[f],
              ", psd_s1=", abs(psd_data(ip, intarr[1])),
              ", ext_s1*psd_s1=", ext_s1[f] * abs(psd_data(ip, intarr[1])),
              "\n"
              "  Total bulk ext = ", abs(bulkext(ip, f)), "\n"
              "  Need to add smaller sized particles!\n")

          ARTS_USER_ERROR_IF (abs(psd_data(ip, intarr[ng - 1])) > 0. and
              ext_l0[f] * abs(psd_data(ip, intarr[ng - 1])) >=
                  ext_l1[f] * abs(psd_data(ip, intarr[ng - 2])),
              "Bin-width normalized extinction (ext*psd) not decreasing"
              " at large size edge\n"
              "at atm level #", ip, " and freq point #", f, ".\n"
              "  ext_l0=", ext_l0[f],
              ", psd_l0=", abs(psd_data(ip, intarr[ng - 1])),
              ", ext_l0*psd_l0=",
              ext_l0[f] * abs(psd_data(ip, intarr[ng - 1])),
              "\n  LARGER EQUAL\n"
              "  ext_l1=", ext_l1[f],
              ", psd_l1=", abs(psd_data(ip, intarr[ng - 2])),
              ", ext_l1*psd_l1=",
              ext_l1[f] * abs(psd_data(ip, intarr[ng - 2])), "\n"
              "  Total bulk ext = ", abs(bulkext(ip, f)), "\n"
              "  Need to add larger sized particles!\n")
        }

        // check that contribution of edge bins to total extinction is
        // sufficiently small
        if (abs(bulkext(ip, f)) > threshold_bext) {
          if (abs(pnd_data(ip, intarr[0])) > threshold_rpnd * max0) {
            contrib =
                abs(pnd_data(ip, intarr[0])) * ext_s0[f] / abs(bulkext(ip, f));
            //cout << "  small edge contrib = pnd*ext/bext = "
            //     << abs(pnd_data(ip,intarr[0])) << "*" << ext_s0[f] << "/"
            //     << abs(bulkext(ip,f)) << " = " << contrib << "\n";
            ARTS_USER_ERROR_IF (abs(pnd_data(ip, intarr[0])) * ext_s0[f] >
                threshold_rsec * abs(bulkext(ip, f)),
                "Contribution of edge bin to total extinction too high"
                " (", contrib * 1e2, "% of ", abs(bulkext(ip, f)),
                ") at small size edge\n"
                "at atm level #", ip, " and freq point #", f, ".\n"
                "  Need to add smaller sized particles or refine the size"
                " grid on the small size edge!\n")
          }
          if (abs(pnd_data(ip, intarr[ng - 1])) > threshold_rpnd * max1) {
            contrib = abs(pnd_data(ip, intarr[ng - 1])) * ext_l0[f] /
                      abs(bulkext(ip, f));
            //cout << "  large edge contrib = pnd*ext/bext = "
            //     << abs(pnd_data(ip,ng-1)) << "*" << ext_l0[f] << "/"
            //     << abs(bulkext(ip,f)) << " = " << contrib << "\n";
            ARTS_USER_ERROR_IF (abs(pnd_data(ip, intarr[ng - 1])) * ext_l0[f] >
                threshold_rsec * abs(bulkext(ip, f)),
                "Contribution of edge bin to total extinction too high"
                " (", contrib * 1e2, "% of ", abs(bulkext(ip, f)),
                ") at large size edge\n"
                "at atm level #", ip, " and freq point #", f, ".\n"
                "  Need to add larger sized particles or refine the size"
                " grid on the large size edge!\n")
          }
        }
      }
    }
  }
}
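/* Illustrative sketch (not part of ARTS): a condensed version of the two
   acceptance criteria described in the long comment inside pndFromPsd above,
   for a single atmospheric level and a single frequency. "ext" holds the
   per-element extinction cross sections on the sorted size grid, "psd" the
   PSD values and "pnd" the bin-integrated number densities; the thresholds
   play the roles of threshold_bext and threshold_rsec. The don't-apply rules
   of the real method (low bulk extinction, low relative pnd) are omitted here
   for brevity. Uses std::vector. */
static bool size_grid_covers_bulk_ext_sketch(const std::vector<double>& ext,
                                             const std::vector<double>& psd,
                                             const std::vector<double>& pnd,
                                             double threshold_bext,
                                             double threshold_rsec) {
  const size_t n = ext.size();
  double bulkext = 0.0;
  for (size_t i = 0; i < n; ++i) bulkext += pnd[i] * ext[i];
  if (bulkext <= threshold_bext) return true;  // too little extinction to judge
  // (a) psd*ext must decrease towards both edges of the size grid
  if (psd[0] * ext[0] >= psd[1] * ext[1]) return false;
  if (psd[n - 1] * ext[n - 1] >= psd[n - 2] * ext[n - 2]) return false;
  // (b) each edge bin may contribute only a small fraction of the bulk value
  if (pnd[0] * ext[0] > threshold_rsec * bulkext) return false;
  if (pnd[n - 1] * ext[n - 1] > threshold_rsec * bulkext) return false;
  return true;
}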

/* Workspace method: Doxygen documentation will be auto-generated */
void pnd_fieldCalcFromParticleBulkProps(
    Workspace& ws,
    Tensor4& pnd_field,
    ArrayOfTensor4& dpnd_field_dx,
    const Index& atmosphere_dim,
    const Vector& p_grid,
    const Vector& lat_grid,
    const Vector& lon_grid,
    const Tensor3& t_field,
    const Index& cloudbox_on,
    const ArrayOfIndex& cloudbox_limits,
    const ArrayOfString& scat_species,
    const ArrayOfArrayOfSingleScatteringData& scat_data,
    const ArrayOfArrayOfScatteringMetaData& scat_meta,
    const Tensor4& particle_bulkprop_field,
    const ArrayOfString& particle_bulkprop_names,
    const ArrayOfAgenda& pnd_agenda_array,
    const ArrayOfArrayOfString& pnd_agenda_array_input_names,
    const Index& jacobian_do,
    const ArrayOfRetrievalQuantity& jacobian_quantities,
    const Verbosity&) {

  // Do nothing if cloudbox is inactive
  if (!cloudbox_on) {
    return;
  }

  ARTS_USER_ERROR_IF (particle_bulkprop_field.empty(),
      "*particle_bulkprop_field* is empty.");

  // Checks (not totally complete, but should cover most mistakes)
  chk_if_in_range("atmosphere_dim", atmosphere_dim, 1, 3);
  chk_atm_grids(atmosphere_dim, p_grid, lat_grid, lon_grid);
  chk_atm_field("t_field", t_field, atmosphere_dim, p_grid, lat_grid, lon_grid);
  chk_atm_field("particle_bulkprop_field",
                particle_bulkprop_field,
                atmosphere_dim,
                particle_bulkprop_names.nelem(),
                p_grid,
                lat_grid,
                lon_grid);

  // Number of scattering species
  const Index nss = scat_data.nelem();

  ARTS_USER_ERROR_IF (particle_bulkprop_names.nelem() == 0 ||
      particle_bulkprop_names.nelem() != particle_bulkprop_field.nbooks(),
      "Number of fields in *particle_bulkprop_field*"
      " inconsistent with number of names in"
      " *particle_bulkprop_names*.");
  ARTS_USER_ERROR_IF (particle_bulkprop_field.nbooks() < nss,
      "At least one field per scattering species required"
      " in *particle_bulkprop_field*.");

  ARTS_USER_ERROR_IF (cloudbox_limits.nelem() != 2 * atmosphere_dim,
      "Length of *cloudbox_limits* incorrect with respect "
      "to *atmosphere_dim*.");
  ARTS_USER_ERROR_IF (cloudbox_limits[1] <= cloudbox_limits[0] || cloudbox_limits[0] < 0 ||
      cloudbox_limits[1] >= p_grid.nelem(),
      "Invalid data in pressure part of *cloudbox_limits*.");
  if (atmosphere_dim > 1) {
    ARTS_USER_ERROR_IF (cloudbox_limits[3] <= cloudbox_limits[2] || cloudbox_limits[2] < 0 ||
        cloudbox_limits[3] >= lat_grid.nelem(),
        "Invalid data in latitude part of *cloudbox_limits*.");
    if (atmosphere_dim > 2) {
      ARTS_USER_ERROR_IF (cloudbox_limits[5] <= cloudbox_limits[4] || cloudbox_limits[4] < 0 ||
          cloudbox_limits[5] >= lon_grid.nelem(),
          "Invalid data in longitude part of *cloudbox_limits*.");
    }
  }

  ARTS_USER_ERROR_IF (nss < 1, "*scat_data* is empty!");
  ARTS_USER_ERROR_IF (scat_species.nelem() != nss,
      "*scat_data* and *scat_species* are inconsistent in size.");
  ARTS_USER_ERROR_IF (scat_meta.nelem() != nss,
      "*scat_data* and *scat_meta* are inconsistent in size.");
  ARTS_USER_ERROR_IF (pnd_agenda_array.nelem() != nss,
      "*scat_data* and *pnd_agenda_array* are inconsistent "
      "in size.");
  ARTS_USER_ERROR_IF (pnd_agenda_array_input_names.nelem() != nss,
      "*scat_data* and *pnd_agenda_array_input_names* are "
      "inconsistent in size.");
  // Further checks of scat_data vs. scat_meta below

  // Effective lengths of cloudbox
  const Index np = cloudbox_limits[1] - cloudbox_limits[0] + 1;
  Index np_nonzero = np;                 // Can be changed below
  Index ip_offset = cloudbox_limits[0];  // Can be changed below
  Index pf_offset = 0;                   // Can be changed below
  Index nlat = 1;
  Index ilat_offset = 0;
  if (atmosphere_dim > 1) {
    nlat = cloudbox_limits[3] - cloudbox_limits[2] + 1;
    ilat_offset = cloudbox_limits[2];
  }
  Index nlon = 1;
  Index ilon_offset = 0;
  if (atmosphere_dim > 2) {
    nlon = cloudbox_limits[5] - cloudbox_limits[4] + 1;
    ilon_offset = cloudbox_limits[4];
  }

  // Check that *particle_bulkprop_field* contains zeros outside and at
  // cloudbox boundaries
  constexpr std::string_view estring =
      "*particle_bulkprop_field* is allowed to contain"
      " non-zero values only inside the cloudbox.";
  // Pressure end ranges
  if (cloudbox_limits[0] != 0) {
    ARTS_USER_ERROR_IF (max(particle_bulkprop_field(
                            joker, Range(0, ip_offset + 1), joker, joker)) > 0 ||
                        min(particle_bulkprop_field(
                            joker, Range(0, ip_offset + 1), joker, joker)) < 0,
                        estring);
    np_nonzero--;
    ip_offset++;
    pf_offset = 1;
  }
  if (cloudbox_limits[1] != p_grid.nelem() - 1) {
    const Index np_above = p_grid.nelem() + 1 - (np + ip_offset);
    ARTS_USER_ERROR_IF (max(particle_bulkprop_field(
                            joker, Range(cloudbox_limits[1], np_above), joker, joker)) > 0 ||
                        min(particle_bulkprop_field(
                            joker, Range(cloudbox_limits[1], np_above), joker, joker)) < 0,
                        estring);
    np_nonzero--;
  }

  // Cumulative number of scattering elements
  ArrayOfIndex ncumse(nss + 1);
  ncumse[0] = 0;
  for (Index i = 0; i < nss; i++) {
    ARTS_USER_ERROR_IF (scat_data[i].nelem() != scat_meta[i].nelem(),
        "*scat_data* and *scat_meta* have inconsistent sizes.");
    ncumse[i + 1] = ncumse[i] + scat_data[i].nelem();
  }

  // Allocate output variables
  //
  pnd_field.resize(ncumse[nss], np, nlat, nlon);
  pnd_field = 0.0;
  //
  // Help variables for partial derivatives
  Index nq = 0;
  ArrayOfArrayOfIndex scatspecies_to_jq;
  //
  if (!jacobian_do) {
    dpnd_field_dx.resize(0);
  } else {
    nq = jacobian_quantities.nelem();
    dpnd_field_dx.resize(nq);
    scatspecies_to_jq.resize(nss);
    //
    for (Index iq = 0; iq < nq; iq++) {
      if (jacobian_quantities[iq] == Jacobian::Special::ScatteringString) {
        const Index ihit =
            find_first(scat_species, jacobian_quantities[iq].Subtag());
        ARTS_USER_ERROR_IF (ihit < 0,
            "Jacobian quantity with index ", iq, " refers to\n"
            "  ", jacobian_quantities[iq].Subtag(),
            "\nbut this species could not be found in *scat_species*.")
        scatspecies_to_jq[ihit].push_back(iq);
        dpnd_field_dx[iq].resize(ncumse[nss], np, nlat, nlon);
        dpnd_field_dx[iq] = 0.0;
      }
    }
  }

  // Extract data from pnd-agenda array
  for (Index is = 0; is < nss; is++) {
    // Index range with respect to pnd_field
    Range se_range(ncumse[is], ncumse[is + 1] - ncumse[is]);

    // Determine how pnd_agenda_array_input_names are related to input fields
    //
    const Index nin = pnd_agenda_array_input_names[is].nelem();
    ArrayOfIndex i_pbulkprop(nin);
    //
    for (Index i = 0; i < nin; i++) {
      i_pbulkprop[i] = find_first(particle_bulkprop_names,
                                  pnd_agenda_array_input_names[is][i]);
      ARTS_USER_ERROR_IF (i_pbulkprop[i] < 0,
          "Pnd-agenda with index ", is, " is set to require \"",
          pnd_agenda_array_input_names[is][i], "\",\nbut this quantity "
          "could not be found in *particle_bulkprop_names*.\n"
          "(Note that temperature must be written as \"Temperature\")")
    }

    // Set *dpnd_data_dx_names*
    //
    Index ndx = 0;
    ArrayOfString dpnd_data_dx_names(0);
    //
    if (jacobian_do) {
      ndx = scatspecies_to_jq[is].nelem();
      dpnd_data_dx_names.resize(ndx);
      for (Index ix = 0; ix < ndx; ix++) {
        dpnd_data_dx_names[ix] =
            jacobian_quantities[scatspecies_to_jq[is][ix]].SubSubtag();
      }
    }

    // Loop lat/lon positions and call *pnd_agenda*
    for (Index ilon = 0; ilon < nlon; ilon++) {
      for (Index ilat = 0; ilat < nlat; ilat++) {

        // Nothing to do for lat/lon end points, if not 1D
        if ((nlat > 1 && (ilat == 0 || ilat == nlat - 1)) ||
            (nlon > 1 && (ilon == 0 || ilon == nlon - 1))) {
          continue;
        }

        Matrix pnd_agenda_input(np_nonzero, nin);
        //
        for (Index i = 0; i < nin; i++) {
          for (Index ip = 0; ip < np_nonzero; ip++) {
            pnd_agenda_input(ip, i) =
                particle_bulkprop_field(i_pbulkprop[i],
                                        ip_offset + ip,
                                        ilat_offset + ilat,
                                        ilon_offset + ilon);
          }
        }

        Vector pnd_agenda_input_t(np);
        //
        for (Index ip = 0; ip < np_nonzero; ip++) {
          pnd_agenda_input_t[ip] =
              t_field(ip_offset + ip, ilat_offset + ilat, ilon_offset + ilon);
        }

        // Call pnd-agenda array
        Matrix pnd_data;
        Tensor3 dpnd_data_dx;
        //
        pnd_agenda_arrayExecute(ws,
                                pnd_data,
                                dpnd_data_dx,
                                is,
                                pnd_agenda_input_t,
                                pnd_agenda_input,
                                pnd_agenda_array_input_names[is],
                                dpnd_data_dx_names,
                                pnd_agenda_array);

        // Copy to output variables
        for (Index ip = 0; ip < np_nonzero; ip++) {
          pnd_field(se_range, pf_offset+ip, ilat, ilon) = pnd_data(ip, joker);
        }
        for (Index ix = 0; ix < ndx; ix++) {
          for (Index ip = 0; ip < np_nonzero; ip++) {
            dpnd_field_dx[scatspecies_to_jq[is][ix]](se_range, pf_offset+ip, ilat, ilon) =
                dpnd_data_dx(ix, ip, joker);
          }
        }
      }
    }
  }
}
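/* Illustrative sketch (not part of ARTS): pnd_fieldCalcFromParticleBulkProps
   stacks the scattering elements of all scattering species along the first
   dimension of pnd_field. Species "is" occupies the row block
   [ncumse[is], ncumse[is+1]), where ncumse holds the cumulative number of
   scattering elements as built in the loop above. The helper below shows the
   same bookkeeping for a plain vector of element counts; the function name is
   hypothetical. E.g. counts {3, 5, 2} give offsets {0, 3, 8, 10}. Uses
   std::vector. */
static std::vector<long> cumulative_offsets_sketch(
    const std::vector<long>& n_elements_per_species) {
  // offsets[0] = 0, offsets[i+1] = offsets[i] + n_elements_per_species[i]
  std::vector<long> offsets(n_elements_per_species.size() + 1, 0);
  for (size_t i = 0; i < n_elements_per_species.size(); ++i)
    offsets[i + 1] = offsets[i] + n_elements_per_species[i];
  return offsets;  // species i occupies rows [offsets[i], offsets[i+1])
}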

/* Workspace method: Doxygen documentation will be auto-generated */
void ScatSpeciesSizeMassInfo(Vector& scat_species_x,
                             Numeric& scat_species_a,
                             Numeric& scat_species_b,
                             const ArrayOfArrayOfScatteringMetaData& scat_meta,
                             const Index& species_index,
                             const String& x_unit,
                             const Numeric& x_fit_start,
                             const Numeric& x_fit_end,
                             const Index& do_only_x,
                             const Verbosity&) {
  // Checks
  const Index nss = scat_meta.nelem();
  ARTS_USER_ERROR_IF (nss == 0, "*scat_meta* is empty!");
  ARTS_USER_ERROR_IF (nss < species_index + 1,
      "Selected scattering species index is ", species_index, " but this "
      "is not allowed since *scat_meta* has only ", nss, " elements.")
  //
  const Index nse = scat_meta[species_index].nelem();
  ARTS_USER_ERROR_IF (nse < 2,
      "The scattering species must have at least two "
      "elements to use this method.");

  // Extract particle masses
  //
  Vector mass(nse);
  //
  for (Index i = 0; i < nse; i++) {
    mass[i] = scat_meta[species_index][i].mass;
  }

  // Create size grid
  //
  scat_species_x.resize(nse);
  //
  if (x_unit == "dveq") {
    for (Index i = 0; i < nse; i++) {
      scat_species_x[i] = scat_meta[species_index][i].diameter_volume_equ;
    }
    //
    if (do_only_x) {
      scat_species_a = -1;
      scat_species_b = -1;
    } else
      derive_scat_species_a_and_b(scat_species_a,
                                  scat_species_b,
                                  scat_species_x,
                                  mass,
                                  x_fit_start,
                                  x_fit_end);
  }

  else if (x_unit == "dmax") {
    for (Index i = 0; i < nse; i++) {
      scat_species_x[i] = scat_meta[species_index][i].diameter_max;
    }
    //
    if (do_only_x) {
      scat_species_a = -1;
      scat_species_b = -1;
    } else
      derive_scat_species_a_and_b(scat_species_a,
                                  scat_species_b,
                                  scat_species_x,
                                  mass,
                                  x_fit_start,
                                  x_fit_end);
  }

  else if (x_unit == "area") {
    for (Index i = 0; i < nse; i++) {
      scat_species_x[i] =
          scat_meta[species_index][i].diameter_area_equ_aerodynamical;
    }
    //
    if (do_only_x) {
      scat_species_a = -1;
      scat_species_b = -1;
    } else
      derive_scat_species_a_and_b(scat_species_a,
                                  scat_species_b,
                                  scat_species_x,
                                  mass,
                                  x_fit_start,
                                  x_fit_end);
  }

  else if (x_unit == "mass") {
    scat_species_x = mass;
    //
    scat_species_a = 1;
    scat_species_b = 1;
  }

  else {
    ARTS_USER_ERROR ("You have selected the x_unit: ", x_unit, "\nwhile accepted "
                     "choices are: \"dveq\", \"dmax\", \"mass\" and \"area\"")
  }
}
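/* Illustrative sketch (not part of ARTS): scat_species_a and scat_species_b
   describe a power-law mass-size relation of the form m = a * x^b. One common
   way to obtain a and b from the (size, mass) pairs of the scattering
   elements is an ordinary least-squares fit in log-log space, restricted to
   sizes inside [x_fit_start, x_fit_end]. The actual fit is performed by
   derive_scat_species_a_and_b() in microphysics.cc and may differ in detail.
   Uses std::vector and <cmath>. */
static void fit_mass_size_relation_sketch(double& a, double& b,
                                          const std::vector<double>& x,
                                          const std::vector<double>& mass,
                                          double x_fit_start,
                                          double x_fit_end) {
  // Fit log(m) = log(a) + b * log(x) by least squares over the selected range
  double sx = 0, sy = 0, sxx = 0, sxy = 0;
  size_t n = 0;
  for (size_t i = 0; i < x.size(); ++i) {
    if (x[i] < x_fit_start || x[i] > x_fit_end) continue;
    const double lx = std::log(x[i]), ly = std::log(mass[i]);
    sx += lx; sy += ly; sxx += lx * lx; sxy += lx * ly;
    ++n;
  }
  if (n < 2) { a = -1; b = -1; return; }  // not enough points in the fit range
  b = (n * sxy - sx * sy) / (n * sxx - sx * sx);
  a = std::exp((sy - b * sx) / n);
}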