ARTS 2.5.0 (git: 9ee3ac6c)
m_microphysics.cc
Go to the documentation of this file.
1/* Copyright (C) 2011-2017
2 Jana Mendrok <jana.mendrok@gmail.com>
3 Daniel Kreyling <daniel.kreyling@nict.go.jp>
4 Manfred Brath <manfred.brath@uni-hamburg.de>
5 Patrick Eriksson <patrick.eriksson@chalmers.se>
6
7 This program is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 2, or (at your option) any
10 later version.
11
12 This program is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with this program; if not, write to the Free Software
19 Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307,
20 USA. */
21
22/*===========================================================================
23 === File description
24 ===========================================================================*/
25
37/*===========================================================================
38 === External declarations
39 ===========================================================================*/
40#include <cmath>
41#include <cstdlib>
42#include <stdexcept>
43
44#include "array.h"
45#include "arts.h"
46#include "auto_md.h"
47#include "check_input.h"
48#include "cloudbox.h"
49#include "file.h"
50#include "interpolation.h"
51#include "lin_alg.h"
52#include "logic.h"
53#include "math_funcs.h"
54#include "messages.h"
55#include "microphysics.h"
56#include "optproperties.h"
57#include "parameters.h"
58#include "physics_funcs.h"
59#include "psd.h"
60#include "rte.h"
61#include "sorting.h"
62#include "special_interp.h"
63#include "xml_io.h"
64
65/*===========================================================================
66 === The functions (in alphabetical order)
67 ===========================================================================*/
68
69/* Workspace method: Doxygen documentation will be auto-generated */
71 Matrix& particle_masses,
72 const ArrayOfArrayOfScatteringMetaData& scat_meta,
73 const Verbosity&) {
74 const Index np_total = TotalNumberOfElements(scat_meta);
75
76 particle_masses.resize(np_total, 1);
77
78 Index i_se_flat = 0;
79 for (Index i_ss = 0; i_ss < scat_meta.nelem(); i_ss++) {
80 for (Index i_se = 0; i_se < scat_meta[i_ss].nelem(); i_se++) {
81 ARTS_USER_ERROR_IF (std::isnan(scat_meta[i_ss][i_se].mass) ||
82 scat_meta[i_ss][i_se].mass <= 0 || scat_meta[i_ss][i_se].mass > 1.,
83 "A presumably incorrect value found for "
84 "scat_meta[", i_ss, "][", i_se, "].mass.\n"
85 "The value is ", scat_meta[i_ss][i_se].mass)
86
87 particle_masses(i_se_flat, 0) = scat_meta[i_ss][i_se].mass;
88
89 i_se_flat++;
90 }
91 }
92}
93
94/* Workspace method: Doxygen documentation will be auto-generated */
96 Matrix& particle_masses,
97 // WS Input:
98 const ArrayOfArrayOfScatteringMetaData& scat_meta,
99 const Verbosity&) {
100 // resize particle_masses to required diemsions and properly initialize values
101 particle_masses.resize(TotalNumberOfElements(scat_meta), scat_meta.nelem());
102 particle_masses = 0.;
103
104 // calculate and set particle_masses
105 Index i_se_flat = 0;
106 for (Index i_ss = 0; i_ss < scat_meta.nelem(); i_ss++) {
107 for (Index i_se = 0; i_se < scat_meta[i_ss].nelem(); i_se++) {
108 ARTS_USER_ERROR_IF (std::isnan(scat_meta[i_ss][i_se].mass) ||
109 scat_meta[i_ss][i_se].mass <= 0 || scat_meta[i_ss][i_se].mass > 1.,
110 "A presumably incorrect value found for "
111 "scat_meta[", i_ss, "][", i_se, "].mass.\n"
112 "The value is ", scat_meta[i_ss][i_se].mass)
113
114 particle_masses(i_se_flat, i_ss) = scat_meta[i_ss][i_se].mass;
115 i_se_flat++;
116 }
117 }
118}
119
120/* Workspace method: Doxygen documentation will be auto-generated */
121void pndFromPsdBasic(Matrix& pnd_data,
122 Tensor3& dpnd_data_dx,
123 const Vector& pnd_size_grid,
124 const Matrix& psd_data,
125 const Vector& psd_size_grid,
126 const Tensor3& dpsd_data_dx,
127 const Index& quad_order,
128 const Verbosity&) {
129 // Some sizes
130 const Index np = psd_data.nrows();
131 const Index ng = psd_size_grid.nelem();
132 Index ndx = 0;
133 const bool do_dx = !dpsd_data_dx.empty();
134
135 // Checks
136 ARTS_USER_ERROR_IF (ng < 2,
137 "The method requires that length of *psd_size_grid* is >= 2.");
138 ARTS_USER_ERROR_IF (ng != pnd_size_grid.nelem(),
139 "So far, the method requires that *psd_size_grid* and "
140 "*pnd_size_grid* have same length.");
141 for (Index i = 0; i < ng; i++) {
142 ARTS_USER_ERROR_IF (psd_size_grid[i] != pnd_size_grid[i],
143 "So far, the method requires that *psd_size_grid* and "
144 "*pnd_size_grid* are identical.");
145 }
146 ARTS_USER_ERROR_IF (psd_data.ncols() != ng,
147 "Number of columns in *psd_data* and length of "
148 "*psd_size_grid* must match.");
149
150 pnd_data.resize(np, ng);
151 if (do_dx) {
152 ARTS_USER_ERROR_IF (dpsd_data_dx.ncols() != ng,
153 "Number of columns in *dpsd_data_dx* and length of "
154 "*psd_size_grid* must match.");
155 ndx = dpsd_data_dx.npages();
156 dpnd_data_dx.resize(ndx, np, ng);
157 } else {
158 dpnd_data_dx.resize(0, 0, 0);
159 }
160
161 // Get sorted version of psd_size_grid (and, since pnd_size_grid so far is
162 // identical, of this as well implicitly)
163 ArrayOfIndex intarr;
164 Vector psd_size_grid_sorted(ng);
165 get_sorted_indexes(intarr, psd_size_grid);
166 for (Index i = 0; i < ng; i++)
167 psd_size_grid_sorted[i] = psd_size_grid[intarr[i]];
168
169 ARTS_USER_ERROR_IF (!is_increasing(psd_size_grid_sorted),
170 "*psd_size_grid* is not allowed to contain "
171 "duplicate values.");
172
173 // Calculate pnd by intrgation of psd for given nodes/bins
174 Vector quadweights(ng);
175 bin_quadweights(quadweights, psd_size_grid_sorted, quad_order);
176
177 for (Index i = 0; i < ng; i++) {
178 for (Index ip = 0; ip < np; ip++) {
179 pnd_data(ip, intarr[i]) = quadweights[i] * psd_data(ip, intarr[i]);
180 }
181
182 if (do_dx) {
183 for (Index ip = 0; ip < np; ip++) {
184 for (Index ix = 0; ix < ndx; ix++) {
185 dpnd_data_dx(ix, ip, intarr[i]) =
186 quadweights[i] * dpsd_data_dx(ix, ip, intarr[i]);
187 }
188 }
189 }
190 }
191}
192
193/* Workspace method: Doxygen documentation will be auto-generated */
194void pndFromPsd(Matrix& pnd_data,
195 Tensor3& dpnd_data_dx,
196 const Vector& pnd_size_grid,
197 const Matrix& psd_data,
198 const Vector& psd_size_grid,
199 const Tensor3& dpsd_data_dx,
200 const ArrayOfArrayOfSingleScatteringData& scat_data,
201 const Vector& f_grid,
202 const Index& scat_data_checked,
203 const Index& quad_order,
204 const Index& scat_index,
205 const Numeric& threshold_rsec,
206 const Numeric& threshold_bext,
207 const Numeric& threshold_rpnd,
208 const Verbosity&) {
209 // Some sizes
210 const Index np = psd_data.nrows();
211 const Index ng = psd_size_grid.nelem();
212 Index ndx = 0;
213 const bool do_dx = !dpsd_data_dx.empty();
214
215 // Checks
216 ARTS_USER_ERROR_IF (ng < 3,
217 "The method requires that length of *psd_size_grid* is >= 3.");
218 ARTS_USER_ERROR_IF (ng != pnd_size_grid.nelem(),
219 "So far, the method requires that *psd_size_grid* and"
220 " *pnd_size_grid* have the same length.");
221 for (Index i = 0; i < ng; i++) {
222 ARTS_USER_ERROR_IF (psd_size_grid[i] != pnd_size_grid[i],
223 "So far, the method requires that *psd_size_grid* and"
224 " *pnd_size_grid* are identical.");
225 }
226 ARTS_USER_ERROR_IF (psd_data.ncols() != ng,
227 "Number of columns in *psd_data* and length of"
228 " *psd_size_grid* must match.");
229
230 pnd_data.resize(np, ng);
231 if (do_dx) {
232 ARTS_USER_ERROR_IF (dpsd_data_dx.ncols() != ng,
233 "Number of columns in *dpsd_data_dx* and length of"
234 " *psd_size_grid* must match.");
235 ndx = dpsd_data_dx.npages();
236 dpnd_data_dx.resize(ndx, np, ng);
237 } else {
238 dpnd_data_dx.resize(0, 0, 0);
239 }
240
241 ARTS_USER_ERROR_IF (!scat_data_checked,
242 "*scat_data* must have passed a consistency check"
243 " (scat_data_checked=1).\n"
244 "Alternatively, use *pndFromPsdBasic*.");
245 ARTS_USER_ERROR_IF (scat_index >= scat_data.nelem(),
246 "*scat_index* exceeds the number of available"
247 " scattering species.");
248 ARTS_USER_ERROR_IF (scat_data[scat_index].nelem() != ng,
249 "Number of scattering elements in this scattering"
250 " species (*scat_index*) inconsistent with length of"
251 " *pnd_size_grid*.");
252
253 // Get sorted version of psd_size_grid (and, since pnd_size_grid so far is
254 // identical, of this as well implicitly)
255 ArrayOfIndex intarr;
256 Vector psd_size_grid_sorted(ng);
257 get_sorted_indexes(intarr, psd_size_grid);
258 for (Index i = 0; i < ng; i++)
259 psd_size_grid_sorted[i] = psd_size_grid[intarr[i]];
260
261 ARTS_USER_ERROR_IF (!is_increasing(psd_size_grid_sorted),
262 "*psd_size_grid* is not allowed to contain "
263 "duplicate values.");
264
265 // Calculate pnd by integration of psd for given nodes/bins
266 Vector quadweights(ng);
267 bin_quadweights(quadweights, psd_size_grid_sorted, quad_order);
268
269 for (Index i = 0; i < ng;
270 i++) //loop over pnd_size_grid aka scattering elements
271 {
272 for (Index ip = 0; ip < np; ip++) //loop over pressure levels
273 {
274 pnd_data(ip, intarr[i]) = quadweights[i] * psd_data(ip, intarr[i]);
275 }
276
277 if (do_dx) {
278 for (Index ip = 0; ip < np; ip++) {
279 for (Index ix = 0; ix < ndx; ix++) {
280 dpnd_data_dx(ix, ip, intarr[i]) =
281 quadweights[i] * dpsd_data_dx(ix, ip, intarr[i]);
282 }
283 }
284 }
285 }
286
287 ArrayOfSingleScatteringData sds = scat_data[scat_index];
288 Index fstart = 0;
289 Index nf = f_grid.nelem();
290 Matrix bulkext(np, nf, 0.);
291 Vector ext(nf), ext_s0(nf), ext_s1(nf), ext_l0(nf), ext_l1(nf);
292
293 // check that pnd_size_grid and scat_data cover (scalar) bulk extinction properly.
294 // (formerly we used mass. but heavy particles might not necessarily
295 // contribute much to optprops. same is valid for total particle number.)
296 //
297 // We do that by
298 // (a) checking that psd*ext decreases at the edges, i.e. to increase the
299 // probability that the main extinction peak is covered and in order to
300 // avoid passing check (b) through making the edge bins tiny
301 // (b) checking that the extinction contributions of the edge bins is small
302 // compared to the total bulk bin.
303 // The tests are somewhat over-sensitive on the small particle side in the
304 // sense that the checks are triggered easily by low mass density cases - which
305 // have their main extinction contributed by small particles, but for which
306 // the total extinction is quite low. Hence, there is also a total extinction
307 // threshold, below which the checks are not applied. Furthermore, such low
308 // densities do typically not occur throughout the whole atmospheric profile,
309 // but higher density layers might exists, meaning the low density layers
310 // contribute very little to the total optical properties, hence should not
311 // dominate the check criteria (no need to be very strict with them if they
312 // hardly contribute anyways). Therefore, checks are also not applied if the
313 // edge bin number density at a given atmospheric level is low compared to the
314 // maximum number of this scattering element in the profile.
315 //
316 // Technically, we need to cover all freqs separately (as optprops vary
317 // strongly with freq). We indeed do that here.
318 // FIXME: Shall a single freq or reduced freq-number option be implemented?
319 // If so, the don't-apply-test criteria need to be checked again though (so it
320 // does not happen that effectively all of the reduced-freq testing is skipped
321 // due to them).
322 //
323 // Technically, we also need to use identical temperatures (and preferably the
324 // ones used for deriving psd). But all scat elements can be on different
325 // T-grids, ie we would need to interpolate. Instead we just use the first in
326 // line.
327 // FIXME: Maybe refine in future allowing to select between low, high, median
328 // grid point or even specify one temp to use.
329 //
330 // And, technically ext might be directional dependent (for oriented
331 // particles). Also this might be different for each scat element. However, at
332 // least for now, we only check the 0-direction.
333 // FIXME: Check further directions?
334
335 for (Index ise = 0; ise < ng;
336 ise++) //loop over pnd_size_grid aka scattering elements
337 {
338 if (sds[intarr[ise]].ext_mat_data.nshelves() > 1)
339 ext = sds[intarr[ise]].ext_mat_data(joker, 0, 0, 0, 0);
340 else
341 ext = sds[intarr[ise]].ext_mat_data(0, 0, 0, 0, 0);
342
343 // keep the ext data of the edge bins
344 if (ise == 0)
345 ext_s0 = ext;
346 else if (ise == 1)
347 ext_s1 = ext;
348 else if (ise == ng - 2)
349 ext_l1 = ext;
350 else if (ise == ng - 1)
351 ext_l0 = ext;
352
353 for (Index ip = 0; ip < np; ip++) //loop over pressure levels
354 if (abs(pnd_data(ip, joker).sum()) > 0.)
355 for (Index f = fstart; f < (fstart + nf); f++)
356 bulkext(ip, f) += pnd_data(ip, intarr[ise]) * ext[f];
357 }
358
359 Numeric max0 = 0, max1 = 0;
360 for (Index ip = 0; ip < np; ip++) //loop over pressure levels
361 {
362 max0 = max(abs(pnd_data(ip, intarr[0])), max0);
363 max1 = max(abs(pnd_data(ip, intarr[ng - 1])), max1);
364 }
365
366 Numeric contrib;
367 for (Index ip = 0; ip < np; ip++) //loop over pressure levels
368 {
369 if (abs(pnd_data(ip, joker).sum()) > 0.) {
370 for (Index f = fstart; f < (fstart + nf); f++) {
371 /* for( Index ise=0; ise<ng; ise++ )
372 {
373 if( sds[ise].ext_mat_data.nshelves()>1 )
374 ext = sds[ise].ext_mat_data(joker,0,0,0,0);
375 else
376 ext = sds[ise].ext_mat_data(0,0,0,0,0);
377 cout << " se #" << ise << ": contrib = pnd*ext/bext = "
378 << abs(pnd_data(ip,ise)) << "*" << ext[f] << "/"
379 << abs(bulkext(ip,f)) << " = "
380 << 1e2*abs(pnd_data(ip,ise))*ext[f]/abs(bulkext(ip,f))
381 << "%\n";
382 }*/
383
384 // check that bin-width normalized extinction (or ext*psd) is
385 // decreasing.
386 if (abs(bulkext(ip, f)) > 1e-2 * threshold_bext) {
387 ARTS_USER_ERROR_IF (abs(psd_data(ip, intarr[0])) > 0. and
388 ext_s0[f] * abs(psd_data(ip, intarr[0])) >=
389 ext_s1[f] * abs(psd_data(ip, intarr[1])),
390 " Bin-width normalized extinction (ext*psd) not decreasing"
391 " at small size edge\n"
392 " at atm level #", ip, " and freq point #", f, ".\n"
393 " ext_s0=", ext_s0[f],
394 ", psd_s0=", abs(psd_data(ip, intarr[0])),
395 ", ext_s0*psd_s0=", ext_s0[f] * abs(psd_data(ip, intarr[0])),
396 "\n LARGER EQUAL\n"
397 " ext_s1=", ext_s1[f],
398 ", psd_s1=", abs(psd_data(ip, intarr[1])),
399 ", ext_s1*psd_s1=", ext_s1[f] * abs(psd_data(ip, intarr[1])),
400 "\n"
401 " Total bulk ext = ", abs(bulkext(ip, f)), "\n"
402 " Need to add smaller sized particles!\n")
403
404 ARTS_USER_ERROR_IF (abs(psd_data(ip, intarr[ng - 1])) > 0. and
405 ext_l0[f] * abs(psd_data(ip, intarr[ng - 1])) >=
406 ext_l1[f] * abs(psd_data(ip, intarr[ng - 2])),
407 "Bin-width normalized extinction (ext*psd) not decreasing"
408 " at large size edge\n"
409 "at atm level #", ip, " and freq point #", f, ".\n"
410 " ext_l0=", ext_l0[f],
411 ", psd_l0=", abs(psd_data(ip, intarr[ng - 1])),
412 ", ext_l0*psd_l0=",
413 ext_l0[f] * abs(psd_data(ip, intarr[ng - 1])),
414 "\n LARGER EQUAL\n"
415 " ext_l1=", ext_l1[f],
416 ", psd_l1=", abs(psd_data(ip, intarr[ng - 2])),
417 ", ext_l1*psd_l1=",
418 ext_l1[f] * abs(psd_data(ip, intarr[ng - 2])), "\n"
419 " Total bulk ext = ", abs(bulkext(ip, f)), "\n"
420 " Need to add larger sized particles!\n")
421 }
422
423 // check that contribution of edge bins to total extinction is
424 // sufficiently small
425 if (abs(bulkext(ip, f)) > threshold_bext) {
426 if (abs(pnd_data(ip, intarr[0])) > threshold_rpnd * max0) {
427 contrib =
428 abs(pnd_data(ip, intarr[0])) * ext_s0[f] / abs(bulkext(ip, f));
429 //cout << " small edge contrib = pnd*ext/bext = "
430 // << abs(pnd_data(ip,intarr[0])) << "*" << ext_s0[f] << "/"
431 // << abs(bulkext(ip,f)) << " = " << contrib << "\n";
432 ARTS_USER_ERROR_IF (abs(pnd_data(ip, intarr[0])) * ext_s0[f] >
433 threshold_rsec * abs(bulkext(ip, f)),
434 "Contribution of edge bin to total extinction too high"
435 " (", contrib * 1e2, "% of ", abs(bulkext(ip, f)),
436 ") at small size edge\n"
437 "at atm level #", ip, " and freq point #", f, ".\n"
438 " Need to add smaller sized particles or refine the size"
439 " grid on the small size edge!\n")
440 }
441 if (abs(pnd_data(ip, intarr[ng - 1])) > threshold_rpnd * max1) {
442 contrib = abs(pnd_data(ip, intarr[ng - 1])) * ext_l0[f] /
443 abs(bulkext(ip, f));
444 //cout << " large edge contrib = pnd*ext/bext = "
445 // << abs(pnd_data(ip,ng-1)) << "*" << ext_l0[f] << "/"
446 // << abs(bulkext(ip,f)) << " = " << contrib << "\n";
447 ARTS_USER_ERROR_IF (abs(pnd_data(ip, intarr[ng - 1])) * ext_l0[f] >
448 threshold_rsec * abs(bulkext(ip, f)),
449 "Contribution of edge bin to total extinction too high"
450 " (", contrib * 1e2, "% of ", abs(bulkext(ip, f)),
451 ") at large size edge\n"
452 "at atm level #", ip, " and freq point #", f, ".\n"
453 " Need to add larger sized particles or refine the size"
454 " grid on the large size edge!\n")
455 }
456 }
457 }
458 }
459 }
460}
461
462
463
464/* Workspace method: Doxygen documentation will be auto-generated */
466 Workspace& ws,
467 Tensor4& pnd_field,
468 ArrayOfTensor4& dpnd_field_dx,
469 const Index& atmosphere_dim,
470 const Vector& p_grid,
471 const Vector& lat_grid,
472 const Vector& lon_grid,
473 const Tensor3& t_field,
474 const Index& cloudbox_on,
475 const ArrayOfIndex& cloudbox_limits,
476 const ArrayOfString& scat_species,
477 const ArrayOfArrayOfSingleScatteringData& scat_data,
478 const ArrayOfArrayOfScatteringMetaData& scat_meta,
479 const Tensor4& particle_bulkprop_field,
480 const ArrayOfString& particle_bulkprop_names,
481 const ArrayOfAgenda& pnd_agenda_array,
482 const ArrayOfArrayOfString& pnd_agenda_array_input_names,
483 const Index& jacobian_do,
484 const ArrayOfRetrievalQuantity& jacobian_quantities,
485 const Verbosity&) {
486
487 // Do nothing if cloudbox is inactive
488 if (!cloudbox_on) {
489 return;
490 }
491
492 ARTS_USER_ERROR_IF (particle_bulkprop_field.empty(),
493 "*particle_bulkprop_field* is empty.");
494
495 // Checks (not totally complete, but should cover most mistakes)
496 chk_if_in_range("atmosphere_dim", atmosphere_dim, 1, 3);
497 chk_atm_grids(atmosphere_dim, p_grid, lat_grid, lon_grid);
498 chk_atm_field("t_field", t_field, atmosphere_dim, p_grid, lat_grid, lon_grid);
499 chk_atm_field("particle_bulkprop_field",
500 particle_bulkprop_field,
501 atmosphere_dim,
502 particle_bulkprop_names.nelem(),
503 p_grid,
504 lat_grid,
505 lon_grid);
506
507 // Number of scattering species
508 const Index nss = scat_data.nelem();
509
510 ARTS_USER_ERROR_IF (particle_bulkprop_names.nelem() == 0 ||
511 particle_bulkprop_names.nelem() != particle_bulkprop_field.nbooks(),
512 "Number of fields in *particle_bulkprop_field*"
513 " inconsistent with number of names in"
514 " *particle_bulkprop_names*.");
515 ARTS_USER_ERROR_IF (particle_bulkprop_field.nbooks() < nss,
516 "At least one field per scattering species required"
517 " in *particle_bulkprop_field*.");
518
519 ARTS_USER_ERROR_IF (cloudbox_limits.nelem() != 2 * atmosphere_dim,
520 "Length of *cloudbox_limits* incorrect with respect "
521 "to *atmosphere_dim*.");
522 ARTS_USER_ERROR_IF (cloudbox_limits[1] <= cloudbox_limits[0] || cloudbox_limits[0] < 0 ||
523 cloudbox_limits[1] >= p_grid.nelem(),
524 "Invalid data in pressure part of *cloudbox_limits*.");
525 if (atmosphere_dim > 1) {
526 ARTS_USER_ERROR_IF (cloudbox_limits[3] <= cloudbox_limits[2] || cloudbox_limits[2] < 0 ||
527 cloudbox_limits[3] >= lat_grid.nelem(),
528 "Invalid data in latitude part of *cloudbox_limits*.");
529 if (atmosphere_dim > 2) {
530 ARTS_USER_ERROR_IF (cloudbox_limits[5] <= cloudbox_limits[4] || cloudbox_limits[4] < 0 ||
531 cloudbox_limits[5] >= lon_grid.nelem(),
532 "Invalid data in longitude part of *cloudbox_limits*.");
533 }
534 }
535
536 ARTS_USER_ERROR_IF (nss < 1, "*scat_data* is empty!.");
537 ARTS_USER_ERROR_IF (scat_species.nelem() != nss,
538 "*scat_data* and *scat_species* are inconsistent in size.");
539 ARTS_USER_ERROR_IF (scat_meta.nelem() != nss,
540 "*scat_data* and *scat_meta* are inconsistent in size.");
541 ARTS_USER_ERROR_IF (pnd_agenda_array.nelem() != nss,
542 "*scat_data* and *pnd_agenda_array* are inconsistent "
543 "in size.");
544 ARTS_USER_ERROR_IF (pnd_agenda_array_input_names.nelem() != nss,
545 "*scat_data* and *pnd_agenda_array_input_names* are "
546 "inconsistent in size.");
547 // Further checks of scat_data vs. scat_meta below
548
549 // Effective lengths of cloudbox
550 const Index np = cloudbox_limits[1] - cloudbox_limits[0] + 1;
551 Index np_nonzero = np; // Can be changed below
552 Index ip_offset = cloudbox_limits[0]; // Can be changed below
553 Index pf_offset = 0; // Can be changed below
554 Index nlat = 1;
555 Index ilat_offset = 0;
556 if (atmosphere_dim > 1) {
557 nlat = cloudbox_limits[3] - cloudbox_limits[2] + 1;
558 ilat_offset = cloudbox_limits[2];
559 }
560 Index nlon = 1;
561 Index ilon_offset = 0;
562 if (atmosphere_dim > 2) {
563 nlon = cloudbox_limits[5] - cloudbox_limits[4] + 1;
564 ilon_offset = cloudbox_limits[4];
565 }
566
567 // Check that *particle_bulkprop_field* contains zeros outside and at
568 // cloudbox boundaries
569 constexpr std::string_view estring =
570 "*particle_bulkprop_field* allowed to contain"
571 " non-zero values only inside the cloudbox.";
572 // Pressure end ranges
573 if (cloudbox_limits[0] != 0) {
574 ARTS_USER_ERROR_IF (max(particle_bulkprop_field(
575 joker, Range(0, ip_offset + 1), joker, joker)) > 0 ||
576 min(particle_bulkprop_field(
577 joker, Range(0, ip_offset + 1), joker, joker)) < 0,
578 estring);
579 np_nonzero--;
580 ip_offset++;
581 pf_offset = 1;
582 }
583 if (cloudbox_limits[1] != p_grid.nelem() - 1) {
584 const Index np_above = p_grid.nelem() + 1 - (np + ip_offset);
585 ARTS_USER_ERROR_IF (max(particle_bulkprop_field(
586 joker, Range(cloudbox_limits[1], np_above), joker, joker)) > 0 ||
587 min(particle_bulkprop_field(
588 joker, Range(cloudbox_limits[1], np_above), joker, joker)) < 0,
589 estring);
590 np_nonzero--;
591 }
592
593 // Cumulative number of scattering elements
594 ArrayOfIndex ncumse(nss + 1);
595 ncumse[0] = 0;
596 for (Index i = 0; i < nss; i++) {
597 ARTS_USER_ERROR_IF (scat_data[i].nelem() != scat_meta[i].nelem(),
598 "*scat_data* and *scat_meta* have inconsistent sizes.");
599 ncumse[i + 1] = ncumse[i] + scat_data[i].nelem();
600 }
601
602 // Allocate output variables
603 //
604 pnd_field.resize(ncumse[nss], np, nlat, nlon);
605 pnd_field = 0.0;
606 //
607 // Help variables for partial derivatives
608 Index nq = 0;
609 ArrayOfArrayOfIndex scatspecies_to_jq;
610 //
611 if (!jacobian_do) {
612 dpnd_field_dx.resize(0);
613 } else {
614 nq = jacobian_quantities.nelem();
615 dpnd_field_dx.resize(nq);
616 scatspecies_to_jq.resize(nss);
617 //
618 for (Index iq = 0; iq < nq; iq++) {
619 if (jacobian_quantities[iq] == Jacobian::Special::ScatteringString) {
620 const Index ihit =
621 find_first(scat_species, jacobian_quantities[iq].Subtag());
622 ARTS_USER_ERROR_IF (ihit < 0,
623 "Jacobian quantity with index ", iq, " refers to\n"
624 " ", jacobian_quantities[iq].Subtag(),
625 "\nbut this species could not be found in *scat_species*.")
626 scatspecies_to_jq[ihit].push_back(iq);
627 dpnd_field_dx[iq].resize(ncumse[nss], np, nlat, nlon);
628 dpnd_field_dx[iq] = 0.0;
629 }
630 }
631 }
632
633 // Extract data from pnd-agenda array
634 for (Index is = 0; is < nss; is++) {
635 // Index range with respect to pnd_field
636 Range se_range(ncumse[is], ncumse[is + 1] - ncumse[is]);
637
638 // Determine how pnd_agenda_array_input_names are related to input fields
639 //
640 const Index nin = pnd_agenda_array_input_names[is].nelem();
641 ArrayOfIndex i_pbulkprop(nin);
642 //
643 for (Index i = 0; i < nin; i++) {
644 i_pbulkprop[i] = find_first(particle_bulkprop_names,
645 pnd_agenda_array_input_names[is][i]);
646 ARTS_USER_ERROR_IF (i_pbulkprop[i] < 0,
647 "Pnd-agenda with index ", is, " is set to require \"",
648 pnd_agenda_array_input_names[is][i], "\",\nbut this quantity "
649 "could not found in *particle_bulkprop_names*.\n"
650 "(Note that temperature must be written as \"Temperature\")")
651 }
652
653 // Set *dpnd_data_dx_names*
654 //
655 Index ndx = 0;
656 ArrayOfString dpnd_data_dx_names(0);
657 //
658 if (jacobian_do) {
659 ndx = scatspecies_to_jq[is].nelem();
660 dpnd_data_dx_names.resize(ndx);
661 for (Index ix = 0; ix < ndx; ix++) {
662 dpnd_data_dx_names[ix] =
663 jacobian_quantities[scatspecies_to_jq[is][ix]].SubSubtag();
664 }
665 }
666
667 // Loop lat/lon positions and call *pnd_agenda*
668 for (Index ilon = 0; ilon < nlon; ilon++) {
669 for (Index ilat = 0; ilat < nlat; ilat++) {
670
671 // Nothing to do for lat/lon end points, if not 1D
672 if ((nlat > 1 && (ilat == 0 || ilat == nlat - 1)) ||
673 (nlon > 1 && (ilon == 0 || ilon == nlon - 1))) {
674 continue;
675 }
676
677 Matrix pnd_agenda_input(np_nonzero, nin);
678 //
679 for (Index i = 0; i < nin; i++) {
680 for (Index ip = 0; ip < np_nonzero; ip++) {
681 pnd_agenda_input(ip, i) =
682 particle_bulkprop_field(i_pbulkprop[i],
683 ip_offset + ip,
684 ilat_offset + ilat,
685 ilon_offset + ilon);
686 }
687 }
688
689 Vector pnd_agenda_input_t(np);
690 //
691 for (Index ip = 0; ip < np_nonzero; ip++) {
692 pnd_agenda_input_t[ip] =
693 t_field(ip_offset + ip, ilat_offset + ilat, ilon_offset + ilon);
694 }
695
696 // Call pnd-agenda array
697 Matrix pnd_data;
698 Tensor3 dpnd_data_dx;
699 //
701 pnd_data,
702 dpnd_data_dx,
703 is,
704 pnd_agenda_input_t,
705 pnd_agenda_input,
706 pnd_agenda_array_input_names[is],
707 dpnd_data_dx_names,
708 pnd_agenda_array);
709
710 // Copy to output variables
711 for (Index ip = 0; ip < np_nonzero; ip++) {
712 pnd_field(se_range, pf_offset+ip, ilat, ilon) = pnd_data(ip, joker);
713 }
714 for (Index ix = 0; ix < ndx; ix++) {
715 for (Index ip = 0; ip < np_nonzero; ip++) {
716 dpnd_field_dx[scatspecies_to_jq[is][ix]](se_range, pf_offset+ip, ilat, ilon) =
717 dpnd_data_dx(ix, ip, joker);
718 }
719 }
720 }
721 }
722 }
723}
724
725
726
727/* Workspace method: Doxygen documentation will be auto-generated */
728void ScatSpeciesSizeMassInfo(Vector& scat_species_x,
729 Numeric& scat_species_a,
730 Numeric& scat_species_b,
731 const ArrayOfArrayOfScatteringMetaData& scat_meta,
732 const Index& species_index,
733 const String& x_unit,
734 const Numeric& x_fit_start,
735 const Numeric& x_fit_end,
736 const Index& do_only_x,
737 const Verbosity&) {
738 // Checks
739 const Index nss = scat_meta.nelem();
740 ARTS_USER_ERROR_IF (nss == 0, "*scat_meta* is empty!");
741 ARTS_USER_ERROR_IF (nss < species_index + 1,
742 "Selected scattering species index is ", species_index,
743 " but this "
744 "is not allowed since *scat_meta* has only ", nss, " elements.")
745 //
746 const Index nse = scat_meta[species_index].nelem();
747 ARTS_USER_ERROR_IF (nse < 2,
748 "The scattering species must have at least two "
749 "elements to use this method.");
750
751 // Extract particle masses
752 //
753 Vector mass(nse);
754 //
755 for (Index i = 0; i < nse; i++) {
756 mass[i] = scat_meta[species_index][i].mass;
757 }
758
759 // Create size grid
760 //
761 scat_species_x.resize(nse);
762 //
763 if (x_unit == "dveq") {
764 for (Index i = 0; i < nse; i++) {
765 scat_species_x[i] = scat_meta[species_index][i].diameter_volume_equ;
766 }
767 //
768 if (do_only_x) {
769 scat_species_a = -1;
770 scat_species_b = -1;
771 } else
772 derive_scat_species_a_and_b(scat_species_a,
773 scat_species_b,
774 scat_species_x,
775 mass,
776 x_fit_start,
777 x_fit_end);
778 }
779
780 else if (x_unit == "dmax") {
781 for (Index i = 0; i < nse; i++) {
782 scat_species_x[i] = scat_meta[species_index][i].diameter_max;
783 }
784 //
785 if (do_only_x) {
786 scat_species_a = -1;
787 scat_species_b = -1;
788 } else
789 derive_scat_species_a_and_b(scat_species_a,
790 scat_species_b,
791 scat_species_x,
792 mass,
793 x_fit_start,
794 x_fit_end);
795 }
796
797 else if (x_unit == "area") {
798 for (Index i = 0; i < nse; i++) {
799 scat_species_x[i] =
800 scat_meta[species_index][i].diameter_area_equ_aerodynamical;
801 }
802 //
803 if (do_only_x) {
804 scat_species_a = -1;
805 scat_species_b = -1;
806 } else
807 derive_scat_species_a_and_b(scat_species_a,
808 scat_species_b,
809 scat_species_x,
810 mass,
811 x_fit_start,
812 x_fit_end);
813 }
814
815 else if (x_unit == "mass") {
816 scat_species_x = mass;
817 //
818 scat_species_a = 1;
819 scat_species_b = 1;
820 }
821
822 else {
823 ARTS_USER_ERROR ("You have selected the x_unit: ", x_unit,
824 "while accepted choices are: \"dveq\", \"dmax\", \"mass\" and \"area\"")
825 }
826}
827
This file contains the definition of Array.
Index TotalNumberOfElements(const Array< Array< base > > &aa)
Determine total number of elements in an ArrayOfArray.
Definition: array.h:343
Index find_first(const Array< base > &x, const base &w)
Find first occurrence.
Definition: array.h:292
The global header file for ARTS.
void pnd_agenda_arrayExecute(Workspace &ws, Matrix &pnd_data, Tensor3 &dpnd_data_dx, const Index agenda_array_index, const Vector &pnd_agenda_input_t, const Matrix &pnd_agenda_input, const ArrayOfString &pnd_agenda_input_names, const ArrayOfString &dpnd_data_dx_names, const ArrayOfAgenda &input_agenda_array)
Definition: auto_md.cc:24073
void chk_atm_grids(const Index &dim, ConstVectorView p_grid, ConstVectorView lat_grid, ConstVectorView lon_grid)
chk_atm_grids
void chk_if_in_range(const String &x_name, const Index &x, const Index &x_low, const Index &x_high)
chk_if_in_range
Definition: check_input.cc:86
void chk_atm_field(const String &x_name, ConstTensor3View x, const Index &dim, ConstVectorView p_grid, ConstVectorView lat_grid, ConstVectorView lon_grid, const bool &chk_lat90)
chk_atm_field (simple fields)
This can be used to make arrays out of anything.
Definition: array.h:107
Index nelem() const ARTS_NOEXCEPT
Number of elements.
Definition: array.h:195
Index nrows() const ARTS_NOEXCEPT
Returns the number of rows.
Definition: matpackI.cc:433
Index ncols() const ARTS_NOEXCEPT
Returns the number of columns.
Definition: matpackI.cc:436
Index npages() const
Returns the number of pages.
Definition: matpackIII.h:144
Index ncols() const
Returns the number of columns.
Definition: matpackIII.h:150
bool empty() const
Check if variable is empty.
Definition: matpackIII.cc:36
Index nbooks() const
Returns the number of books.
Definition: matpackIV.cc:55
bool empty() const
Check if variable is empty.
Definition: matpackIV.cc:50
Index nelem() const ARTS_NOEXCEPT
Returns the number of elements.
Definition: matpackI.cc:48
The Matrix class.
Definition: matpackI.h:1225
void resize(Index r, Index c)
Resize function.
Definition: matpackI.cc:1056
The range class.
Definition: matpackI.h:165
The Tensor3 class.
Definition: matpackIII.h:339
void resize(Index p, Index r, Index c)
Resize function.
Definition: matpackIII.cc:658
The Tensor4 class.
Definition: matpackIV.h:421
void resize(Index b, Index p, Index r, Index c)
Resize function.
Definition: matpackIV.cc:1058
The Vector class.
Definition: matpackI.h:876
void resize(Index n)
Resize function.
Definition: matpackI.cc:408
Workspace class.
Definition: workspace_ng.h:40
void bin_quadweights(Vector &w, const Vector &x, const Index &order)
Definition: cloudbox.cc:572
Internal cloudbox functions.
#define ARTS_USER_ERROR(...)
Definition: debug.h:150
#define ARTS_USER_ERROR_IF(condition,...)
Definition: debug.h:134
This file contains basic functions to handle ASCII files.
Header file for interpolation.cc.
#define abs(x)
#define min(a, b)
#define max(a, b)
Linear algebra functions.
bool is_increasing(ConstVectorView x)
Checks if a vector is sorted and strictly increasing.
Definition: logic.cc:215
Header file for logic.cc.
void pndFromPsdBasic(Matrix &pnd_data, Tensor3 &dpnd_data_dx, const Vector &pnd_size_grid, const Matrix &psd_data, const Vector &psd_size_grid, const Tensor3 &dpsd_data_dx, const Index &quad_order, const Verbosity &)
WORKSPACE METHOD: pndFromPsdBasic.
void particle_massesFromMetaDataSingleCategory(Matrix &particle_masses, const ArrayOfArrayOfScatteringMetaData &scat_meta, const Verbosity &)
WORKSPACE METHOD: particle_massesFromMetaDataSingleCategory.
void pndFromPsd(Matrix &pnd_data, Tensor3 &dpnd_data_dx, const Vector &pnd_size_grid, const Matrix &psd_data, const Vector &psd_size_grid, const Tensor3 &dpsd_data_dx, const ArrayOfArrayOfSingleScatteringData &scat_data, const Vector &f_grid, const Index &scat_data_checked, const Index &quad_order, const Index &scat_index, const Numeric &threshold_rsec, const Numeric &threshold_bext, const Numeric &threshold_rpnd, const Verbosity &)
WORKSPACE METHOD: pndFromPsd.
void ScatSpeciesSizeMassInfo(Vector &scat_species_x, Numeric &scat_species_a, Numeric &scat_species_b, const ArrayOfArrayOfScatteringMetaData &scat_meta, const Index &species_index, const String &x_unit, const Numeric &x_fit_start, const Numeric &x_fit_end, const Index &do_only_x, const Verbosity &)
WORKSPACE METHOD: ScatSpeciesSizeMassInfo.
void pnd_fieldCalcFromParticleBulkProps(Workspace &ws, Tensor4 &pnd_field, ArrayOfTensor4 &dpnd_field_dx, const Index &atmosphere_dim, const Vector &p_grid, const Vector &lat_grid, const Vector &lon_grid, const Tensor3 &t_field, const Index &cloudbox_on, const ArrayOfIndex &cloudbox_limits, const ArrayOfString &scat_species, const ArrayOfArrayOfSingleScatteringData &scat_data, const ArrayOfArrayOfScatteringMetaData &scat_meta, const Tensor4 &particle_bulkprop_field, const ArrayOfString &particle_bulkprop_names, const ArrayOfAgenda &pnd_agenda_array, const ArrayOfArrayOfString &pnd_agenda_array_input_names, const Index &jacobian_do, const ArrayOfRetrievalQuantity &jacobian_quantities, const Verbosity &)
WORKSPACE METHOD: pnd_fieldCalcFromParticleBulkProps.
void particle_massesFromMetaData(Matrix &particle_masses, const ArrayOfArrayOfScatteringMetaData &scat_meta, const Verbosity &)
WORKSPACE METHOD: particle_massesFromMetaData.
INDEX Index
The type to use for all integer numbers and indices.
Definition: matpack.h:39
NUMERIC Numeric
The type to use for all floating point numbers.
Definition: matpack.h:33
const Joker joker
Declarations having to do with the four output streams.
void derive_scat_species_a_and_b(Numeric &a, Numeric &b, const Vector &x, const Vector &mass, const Numeric &x_fit_start, const Numeric &x_fit_end)
Definition: microphysics.cc:77
Internal functions for microphysics calculations (size distributions etc.)
Index nelem(const Lines &l)
Number of lines.
Vector mass(const ConstVectorView &atmospheric_vmrs, const ArrayOfArrayOfSpeciesTag &atmospheric_species, const ArrayOfSpecies &lineshape_species, const SpeciesIsotopologueRatios &ir) ARTS_NOEXCEPT
Returns a mass vector for this model's main calculations.
void sum(PropagationMatrix &pm, const ComplexVectorView &abs, const PolarizationVector &polvec, const bool do_phase) ARTS_NOEXCEPT
Sums the Zeeman components into a propagation matrix.
Definition: zeemandata.cc:375
constexpr bool isnan(double d) noexcept
Definition: nonstd.h:39
Scattering database structure and functions.
This file contains header information for the dealing with command line parameters.
This file contains declarations of functions of physical character.
Internal functions associated with size distributions.
Declaration of functions in rte.cc.
Contains sorting routines.
void get_sorted_indexes(ArrayOfIndex &sorted, const T &data)
get_sorted_indexes
Definition: sorting.h:73
Header file for special_interp.cc.
This file contains basic functions to handle XML data files.