import tensorflow as tf
from tensorflow.keras.models import Sequential, load_model, Model
from tensorflow.keras.layers import Dense, Activation, Embedding, Flatten, Dropout, TimeDistributed, Reshape, Lambda
from tensorflow.keras.layers import LSTM, ConvLSTM2D, Conv2D, ZeroPadding2D
import numpy as np
import xarray as xr
from glob import glob
import matplotlib.pyplot as plt
import warnings
from datetime import datetime, timedelta
warnings.filterwarnings("ignore")


# Input radar grids (netCDF) and output location for the TFRecords
radar_grids_path = '/home/rjackson/bebop_earthscience/rjackson/deep_learning/2005/'
tfrecords_path = '/home/rjackson/tfrecords/2005/'
num_frames_in_past = 3
my_shape = (201, 201)
is_training = True
shuffle = False

def create_tf_record(radar_list, scan_no):
    """Read one radar grid file and write it out as a single TFRecord."""
    # Get radar file from bebop
    # Normalization: -20 dBZ = 0, 60 dBZ = 1
    try:
        grid = xr.open_dataset(radar_list[scan_no])
        Zn = grid.Znorm.fillna(-20).values
        if np.nanmax(Zn) > 0:
            Zn = (Zn + 20.) / 80.
        else:
            Zn = np.zeros_like(Zn)
    except (OSError, KeyError, ValueError):
        # Skip files that cannot be opened or that lack the Znorm field
        return
    times = grid.time.values
    grid.close()
    shp = Zn.shape
    my_shape = shp  # local only; does not modify the module-level my_shape
    width = shp[0]
    height = shp[1]

    #if dt_future > timedelta(minutes=60):
    #    print("Data not continuous")
    #    return

    fname = tfrecords_path + radar_list[scan_no][-16:] + '.tfrecord'
    writer = tf.io.TFRecordWriter(fname)  # tf.python_io.TFRecordWriter in older TF 1.x

    #norm = norm.SerializeToString()
    example = tf.train.Example(features=tf.train.Features(
        feature={
            'width': _int64_feature(width),
            'height': _int64_feature(height),
            'image_raw': _bytes_feature(Zn),
            # Store times as floating-point seconds since the epoch so they fit in a FloatList
            'time': _float_feature(times.astype('datetime64[s]').astype(np.float64)),
        }))
    writer.write(example.SerializeToString())
    print(times)
    writer.close()

def _int64_feature(value):
    """Creates a tf.train.Feature from an int64 value."""
    if value is None:
        value = []
    if not isinstance(value, list):
        value = [value]
    return tf.train.Feature(int64_list=tf.train.Int64List(value=value))


def _bytes_feature(value):
    """Creates a tf.train.Feature from a bytes value."""
    if value is None:
        value = []
    if isinstance(value, np.ndarray):
        # Flatten the array and serialize it to raw bytes
        value = value.reshape(-1).tobytes()
    if not isinstance(value, list):
        value = [value]
    return tf.train.Feature(bytes_list=tf.train.BytesList(value=value))

def _float_feature(value):
    """Creates a tf.train.Feature from a float value."""
    if value is None:
        value = []
    if isinstance(value, np.ndarray):
        # Flatten to a plain list of floats; FloatList does not accept raw bytes
        value = value.reshape(-1).tolist()
    if not isinstance(value, list):
        value = [value]
    return tf.train.Feature(float_list=tf.train.FloatList(value=value))

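# Illustrative sketch (an assumption, not part of the original script): one way the
# records written above could be parsed back with tf.data. The feature names match
# the ones used in create_tf_record; _parse_radar_example is a hypothetical helper,
# and decode_raw assumes the grid was stored as float64 (adjust if Znorm is float32).
def _parse_radar_example(serialized):
    features = {
        'width': tf.io.FixedLenFeature([], tf.int64),
        'height': tf.io.FixedLenFeature([], tf.int64),
        'image_raw': tf.io.FixedLenFeature([], tf.string),
        'time': tf.io.VarLenFeature(tf.float32),
    }
    parsed = tf.io.parse_single_example(serialized, features)
    image = tf.io.decode_raw(parsed['image_raw'], tf.float64)
    return image, parsed['width'], parsed['height'], parsed['time']
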
if __name__ == "__main__":
    file_list = sorted(glob(radar_grids_path + '/**/*.cdf', recursive=True))
    print("About to process %d files" % len(file_list))
    for i in range(len(file_list)):
        create_tf_record(file_list, i)
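
# Illustrative usage sketch (an assumption, not part of the original script): build a
# tf.data pipeline from the written records and undo the normalization, where a value
# of Zn in [0, 1] corresponds to a reflectivity of Z = Zn * 80. - 20. in dBZ.
#
# dataset = tf.data.TFRecordDataset(sorted(glob(tfrecords_path + '*.tfrecord')))
# dataset = dataset.map(_parse_radar_example)
# for image, width, height, time in dataset.take(1):
#     refl_dbz = image * 80. - 20.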