|
16 | 16 |
|
17 | 17 | # Now we need to use some means to condense the u and v fields in the same way into
|
18 | 18 | # having 1 time point, not 720 - for example we can just pick a time value out:
|
19 |
| -chosen_time = "2006-01-16 00:00:00" |
| 19 | +print("Times are", v.construct("T").data.datetime_as_string) |
| 20 | +chosen_time = "2006-01-15 23:30:00" # 720 choices to choose from! |
20 | 21 | v_1 = v.subspace(T=cf.dt(chosen_time))
|
21 | 22 | u_1 = u.subspace(T=cf.dt(chosen_time))
|
22 | 23 | v_1 = v_1.squeeze()
|
|
32 | 33 | # Note that there appear to be some really large vectors all pointing in the
|
33 | 34 | # same direction which are spamming the plot. We need to remove these. By
|
34 | 35 | # looking at the data we can see what these are and work out how to remove them:
|
35 |
| -print(u.data) |
36 |
| -print(u[:10].data.array) |
37 | 36 |
|
38 | 37 | # ... shows more of the array
|
39 | 38 |
|
|
52 | 51 | # This field also needs masking for those data points.
|
53 | 52 | w_2 = w_1.where(cf.lt(-9e+03), cf.masked)
|
54 | 53 | print(w_2)
|
55 |
| -print(w_2, w_2[:10].data.array) |
| 54 | + |
| 55 | + |
| 56 | +# Plot divergence in the background |
| 57 | +div = cf.div_xy(u_2, v_2, radius="earth") |
| 58 | + |
56 | 59 |
|
57 | 60 | # Our final basic plot:
|
58 | 61 | cfp.mapset(resolution="10m") # makes UK coastline more high-res
|
59 |
| -cfp.gopen(file="irish-sea-currents.png") |
60 |
| -# BTW ignore the warnings below - they aren't relevant. |
61 |
| -cfp.vect(u=u_2, v=v_2, stride=5, scale=2, key_length=1) |
62 |
| -cfp.levs(min=-5, max=5, step=0.5) |
63 |
| -cfp.con(w_1, blockfill=True, lines=False) |
| 62 | +cfp.gopen(file=f"irish-sea-currents-with-divergence-{chosen_time}.png") |
| 63 | +cfp.cscale("ncl_default") |
| 64 | +cfp.vect(u=u_2, v=v_2, stride=6, scale=3, key_length=1) |
| 65 | +cfp.con(div, lines=False) |
64 | 66 | cfp.gclose()
|
65 |
| - |
66 |
| -# Ideas for TODOs: |
67 |
| -# investigate different days (do this by changing the 'T=cf.dt("2006-01-16 00:00:00")' datetime |
68 |
| -# values to different ones in the time coordinate data so you look at different days, or replace it |
69 |
| -# with a collapse over some stat e.g. mean to show the mean over all the times, |
70 |
| -# calculate divergence, calculate curl / relative vorticity, calculate absolute vorticity, |
71 |
| -# explore the other dataset as well (that covers other dates/times) - you could compare the |
72 |
| -# two to effectively compare the currents across different dates. |
0 commit comments