|
1 | 1 | #!/usr/bin/env python3 |
2 | 2 | # -*- coding: utf-8 -*- |
3 | 3 | """ |
4 | | -Created on Wed Nov 20 11:06:41 2019 |
| 4 | +The observational TC data are obtained from Prof. Kerry Emanuel’s website (https://emanuel.mit.edu/products). Click “Global Tropical Cyclone Data in NETCDF format (updated through 2018)” and the files will be downloaded. The TC locations are tracked 6-hourly. |
5 | 5 |
6 | | -@author: bala635 |
7 | | -The observational TC data are obtained from Prof. Kerry Emanuel’s website (https://emanuel.mit.edu/products). Click “Global Tropical Cyclone Data in NETCDF format (updated through 2018)” the files will be downloaded. |
| 6 | +Another data source is IBTrACS from NOAA, which provides 3-hourly track data. |
| 7 | +
| 8 | +Note: the TC density maps created from the two data sources have identical spatial distributions, but the values from IBTrACS are roughly twice those from MIT because of the higher sampling frequency (a storm tracked 3-hourly contributes twice as many track points as the same storm tracked 6-hourly). Because the current TC analysis uses 6-hourly E3SM output, the MIT data are used for consistency. |
8 | 9 | """ |
9 | 10 |
| 11 | +import os |
10 | 12 | import sys |
11 | 13 | import warnings |
| 14 | +from datetime import datetime, timedelta |
12 | 15 |
13 | 16 | if not sys.warnoptions: |
14 | 17 |     warnings.simplefilter("ignore") |
15 | 18 | import numpy as np |
16 | 19 | from netCDF4 import Dataset as netcdffile |
17 | 20 |
18 | | -################################################### |
19 | | -################################################### |
20 | | - |
21 | 21 | all_lon = [] |
22 | 22 | all_lat = [] |
23 | 23 |
24 | 24 | ################################################### |
25 | | - |
26 | | -nc = netcdffile('./attracks.nc') |
27 | | -latmc = np.squeeze(nc['latmc'][:,:]) |
28 | | -longmc = np.squeeze(nc['longmc'][:,:]) |
29 | | -vsmc = np.squeeze(nc['vsmc'][:,:]) |
30 | | -yearic = np.squeeze(nc['yearic'][:]) |
31 | | - |
32 | | -for i in range(0,np.shape(latmc)[0]): |
33 | | -    for j in range(0,np.shape(latmc)[1]): |
34 | | - |
35 | | -        if yearic[j] > 1980 and np.abs(latmc[i,j]) > 0 and np.abs(longmc[i,j]) > 25: |
36 | | - |
37 | | -            all_lon.append(longmc[i,j]) |
38 | | -            all_lat.append(latmc[i,j]) |
39 | | - |
40 | | -################################################### |
41 | | - |
42 | | -nc = netcdffile('./eptracks.nc') |
43 | | -latmc = np.squeeze(nc['latmc'][:,:]) |
44 | | -longmc = np.squeeze(nc['longmc'][:,:]) |
45 | | -vsmc = np.squeeze(nc['vsmc'][:,:]) |
46 | | -yearic = np.squeeze(nc['yearic'][:]) |
47 | | - |
48 | | -for i in range(0,np.shape(latmc)[0]): |
49 | | -    for j in range(0,np.shape(latmc)[1]): |
50 | | - |
51 | | -        if yearic[j] > 1980 and np.abs(latmc[i,j]) > 0 and np.abs(longmc[i,j]) > 25: |
52 | | - |
53 | | -            all_lon.append(longmc[i,j]) |
54 | | -            all_lat.append(latmc[i,j]) |
55 | | - |
56 | | -################################################### |
57 | | - |
58 | | -nc = netcdffile('./wptracks.nc') |
59 | | -latmc = np.squeeze(nc['latmc'][:,:]) |
60 | | -longmc = np.squeeze(nc['longmc'][:,:]) |
61 | | -vsmc = np.squeeze(nc['vsmc'][:,:]) |
62 | | -yearic = np.squeeze(nc['yearic'][:]) |
63 | | - |
64 | | -for i in range(0,np.shape(latmc)[0]): |
65 | | -    for j in range(0,np.shape(latmc)[1]): |
66 | | - |
67 | | -        if yearic[j] > 1980 and np.abs(latmc[i,j]) > 0 and np.abs(longmc[i,j]) > 25: |
68 | | - |
69 | | -            all_lon.append(longmc[i,j]) |
70 | | -            all_lat.append(latmc[i,j]) |
71 | | - |
72 | | -################################################### |
73 | | - |
74 | | -nc = netcdffile('./iotracks.nc') |
75 | | -latmc = np.squeeze(nc['latmc'][:,:]) |
76 | | -longmc = np.squeeze(nc['longmc'][:,:]) |
77 | | -vsmc = np.squeeze(nc['vsmc'][:,:]) |
78 | | -yearic = np.squeeze(nc['yearic'][:]) |
79 | | - |
80 | | -for i in range(0,np.shape(latmc)[0]): |
81 | | -    for j in range(0,np.shape(latmc)[1]): |
82 | | - |
83 | | -        if yearic[j] > 1980 and np.abs(latmc[i,j]) > 0 and np.abs(longmc[i,j]) > 25: |
84 | | - |
85 | | -            all_lon.append(longmc[i,j]) |
86 | | -            all_lat.append(latmc[i,j]) |
87 | | - |
88 | | -################################################### |
89 | | - |
90 | | -nc = netcdffile('./shtracks.nc') |
91 | | -latmc = np.squeeze(nc['latmc'][:,:]) |
92 | | -longmc = np.squeeze(nc['longmc'][:,:]) |
93 | | -vsmc = np.squeeze(nc['vsmc'][:,:]) |
94 | | -yearic = np.squeeze(nc['yearic'][:]) |
95 | | - |
96 | | -for i in range(0,np.shape(latmc)[0]): |
97 | | -    for j in range(0,np.shape(latmc)[1]): |
98 | | - |
99 | | -        if yearic[j] > 1980 and np.abs(latmc[i,j]) > 0 and np.abs(longmc[i,j]) > 25: |
100 | | - |
101 | | -            all_lon.append(longmc[i,j]) |
102 | | -            all_lat.append(latmc[i,j]) |
103 | | - |
104 | | -################################################### |
| 25 | +origin_path = '/Users/zhang40/Documents/ACME/e3sm_tc_diags' |
| 26 | +start_yr = 1979 |
| 27 | +end_yr = 2018 |
| 28 | + |
| 29 | +# Data source toggle: True uses IBTrACS (3-hourly), False uses the MIT data (6-hourly) |
| 30 | +ibtracs = True |
| 31 | + |
| 32 | + |
| 33 | +if ibtracs: |
| 34 | +    data_name = 'IBTrACS' |
| 35 | +    print('Using IBTrACS 3-hourly track data') |
| 36 | +    basins = ["NA", "WP", "EP", "NI", "SI", "SP"] |
| 37 | +    for basin in basins: |
| 38 | +        nc = netcdffile( |
| 39 | +            os.path.join(origin_path, "IBTrACS.{}.v04r00.nc".format(basin)) |
| 40 | +        ) |
| 41 | +        latmc = np.squeeze(nc["lat"][:, :]) |
| 42 | +        longmc = np.squeeze(nc["lon"][:, :]) |
| 43 | +        time = np.squeeze(nc["time"][:, :])  # days since 1858-11-17 00:00 UTC |
| 44 | +        yearic = np.zeros(np.shape(latmc)[0]) |
| 45 | +
| 46 | +        for i in range(0, np.shape(time)[0]): |
| 47 | +            for j in range(0, np.shape(time)[1]): |
| 48 | +                if not np.ma.is_masked(time[i, j]): |
| 49 | +                    day_hurr = datetime(1858, 11, 17, 0, 0, 0) + timedelta(float(time[i, j])) |
| 50 | +                    yearic[i] = day_hurr.year |
| 51 | +                    if start_yr <= yearic[i] <= end_yr and np.abs(latmc[i, j]) > 0 and np.abs(longmc[i, j]) > 0: |
| 52 | +                        if longmc[i, j] < 0:  # wrap [-180, 180) longitudes to [0, 360) |
| 53 | +                            longmc[i, j] = 360 + longmc[i, j] |
| 54 | +                        all_lat.append(latmc[i, j]) |
| 55 | +                        all_lon.append(longmc[i, j]) |
| 56 | +        print(len(all_lat)) |
| 57 | + |
| 58 | + |
| 59 | +else: |
| 60 | +    print('Using MIT 6-hourly track data') |
| 61 | +    data_name = 'MIT' |
| 62 | +    basins = ['at', 'ep', 'wp', 'io', 'sh'] |
| 63 | +    for basin in basins: |
| 64 | +        nc = netcdffile('{}/{}tracks.nc'.format(origin_path, basin)) |
| 65 | +        latmc = np.squeeze(nc['latmc'][:, :]) |
| 66 | +        longmc = np.squeeze(nc['longmc'][:, :]) |
| 67 | +        vsmc = np.squeeze(nc['vsmc'][:, :])  # not used below |
| 68 | +        yearic = np.squeeze(nc['yearic'][:]) |
| 69 | +
| 70 | +        for i in range(0, np.shape(latmc)[0]): |
| 71 | +            for j in range(0, np.shape(latmc)[1]): |
| 72 | +
| 73 | +                if start_yr <= yearic[j] <= end_yr and np.abs(latmc[i, j]) > 0 and np.abs(longmc[i, j]) > 0: |
| 74 | +                    all_lon.append(longmc[i, j]) |
| 75 | +                    all_lat.append(latmc[i, j]) |
| 76 | +        print(len(all_lon)) |
105 | 77 |
106 | 78 | all_lat = np.asarray(all_lat) |
107 | 79 | all_lon = np.asarray(all_lon) |
111 | 83 |
112 | 84 | import pandas as pd |
113 | 85 |
114 | | -raw_data = {'lon': all_lon_new, |
| 86 | +raw_data = { |
| 87 | +    'lon': all_lon_new, |
115 | 88 |     'lat': all_lat_new} |
116 | 89 |
117 | 90 | df = pd.DataFrame.from_dict(raw_data) |
118 | | -# columns = ['SST', 'STRAT','TCHP','TDY_SALT','TDY_NOSALT','INT','LAT','LON','SHR','DIV','PER','DELTA_INT'] |
119 | | -df.to_csv('cyclones_all_obs.csv', sep='\t') |
| 91 | +print(data_name) |
| 92 | +out_file = os.path.join(origin_path, 'cyclones_hist_{}_{}_{}.csv'.format(data_name, start_yr, end_yr)) |
| 93 | +with open(out_file, 'w') as file: |
| 94 | +    file.write('start {}\n'.format(len(all_lon))) |
| 95 | +    df.to_csv(file, header=False, sep='\t') |
120 | 96 |
121 | 97 | ################################################### |
122 | 98 | # Convert the .csv file to .nc by calling tempest-extremes from the command line: |
123 | | -#tempestextremes/bin/HistogramNodes --in cyclones_all_obs.csv --iloncol 3 --ilatcol 4 --out cyclones_all_obs.nc |
| 99 | +#tempestextremes/bin/HistogramNodes --in cyclones_hist_IBTrACS_1979_2018.csv --iloncol 2 --ilatcol 3 --out cyclones_hist_IBTrACS_1979_2018.nc |
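For reference, the CSV written above begins with a "start N" line and then holds tab-separated rows of pandas index, longitude, and latitude, which is the layout the HistogramNodes command above reads. The snippet below is a minimal sketch, separate from the script itself, for sanity-checking that step; the file names are taken from the example command above, and no variable names inside the HistogramNodes output are assumed (they are only listed).

# Sanity checks for the CSV written above and the HistogramNodes output.
from datetime import datetime, timedelta

import pandas as pd
from netCDF4 import Dataset

# The IBTrACS "time" variable is decoded above as days since 1858-11-17 00:00 UTC
# (the Modified Julian Date epoch); for example, day 58119 is 2018-01-01.
print(datetime(1858, 11, 17) + timedelta(58119.0))  # -> 2018-01-01 00:00:00

# Read the CSV back, skipping the leading "start N" line; the remaining rows
# are the pandas index, longitude, and latitude, tab-separated.
csv_file = 'cyclones_hist_IBTrACS_1979_2018.csv'
df = pd.read_csv(csv_file, sep='\t', skiprows=1, header=None,
                 names=['idx', 'lon', 'lat'])
print(len(df), df['lon'].min(), df['lon'].max())  # point count and longitude range

# After running HistogramNodes, list what the output file actually contains.
nc = Dataset('cyclones_hist_IBTrACS_1979_2018.nc')
print(nc.variables.keys())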