Newer
Older
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
#!/usr/bin/env python3
########################
#Author: Minyan Zhong, Herresh Fattahi
# For Geocoding SLCs
#######################
import os, sys, glob
import argparse
import configparser
import datetime
import numpy as np
import isce
import isceobj
from isceobj.Sensor.TOPS.Sentinel1 import Sentinel1
from Stack import config, run, sentinelSLC
# Extended usage notes printed when the -H/--hh flag is given (see customArgparseAction).
helpstr= '''
Processor for Sentinel-1 data using ISCE software.
For a full list of different options, try sentinelApp.py -h
sentinelApp.py generates all configuration and run files required to be executed for Sentinel-1 TOPS data.
Following are required to start processing:
1) a folder that includes Sentinel-1 SLCs,
2) a DEM (Digital Elevation Model)
3) a folder that includes precise orbits (use dloadOrbits.py to download or to update your orbit folder)
4) a folder for Sentinel-1 Aux files (which is used for correcting the Elevation Antenna Pattern).
5) bounding box as South North West East.
Note that sentinelApp.py does not process any data. It only prepares a lot of input files for processing and a lot of run files. Then you need to execute all those generated run files in order. To know what is really going on, after running sentinelApp.py, look at each run file generated by sentinelApp.py. Each run file actually has several commands that are independent from each other and can be executed in parallel. The config files for each run file include the processing options to execute a specific command/function.
'''
class customArgparseAction(argparse.Action):
    """Argparse action bound to -H/--hh: print the extended help and exit."""

    def __call__(self, parser, args, values, option_string=None):
        # Show the module-level detailed usage notes, then stop parsing.
        print(helpstr)
        parser.exit()
def createParser():
    """Build the command-line parser for preparing Sentinel-1 SLC processing.

    Returns:
        argparse.ArgumentParser: parser exposing the SLC/orbit/aux/DEM paths,
        working directory, swath selection, bounding box, polarization,
        update mode, and multilook settings.
    """
    parser = argparse.ArgumentParser(description='Preparing the directory structure and config files for the processing of Sentinel data')
    parser.add_argument('-H', '--hh', nargs=0, action=customArgparseAction,
                        help='Display detailed help information.')
    parser.add_argument('-s', '--slc_directory', dest='slc_dirname', type=str, required=True,
                        help='Directory with all Sentinel SLCs')
    parser.add_argument('-o', '--orbit_directory', dest='orbit_dirname', type=str, required=True,
                        help='Directory with all orbits')
    # BUG FIX: help text previously duplicated the orbit-directory description.
    parser.add_argument('-a', '--aux_directory', dest='aux_dirname', type=str, required=True,
                        help='Directory with all Sentinel-1 aux (instrument) files')
    parser.add_argument('-w', '--working_directory', dest='work_dir', type=str, default='./',
                        help='Working directory ')
    # BUG FIX: help text previously said 'Directory with secondary acquisition'.
    parser.add_argument('-d', '--dem', dest='dem', type=str, required=True,
                        help='Path to the DEM (Digital Elevation Model) file')
    parser.add_argument('-n', '--swath_num', dest='swath_num', type=str, default='1 2 3',
                        help='A list of swaths to be processed')
    parser.add_argument('-b', '--bbox', dest='bbox', type=str, default=None,
                        help='Lat/Lon Bounding SNWE')
    parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;',
                        help='text command to be added to the beginning of each line of the run files. Example : source ~/.bash_profile;')
    parser.add_argument('-p', '--polarization', dest='polarization', type=str, default='vv',
                        help='SAR data polarization')
    parser.add_argument('-u', '--update', dest='update', type=int, default=0,
                        help='re-run (0) or update (1)')
    parser.add_argument('-z', '--azimuth_looks', dest='azimuthLooks', type=str, default='3',
                        help='Number of looks in azimuth for interferogram multi-looking')
    parser.add_argument('-r', '--range_looks', dest='rangeLooks', type=str, default='9',
                        help='Number of looks in range for interferogram multi-looking')
    return parser
def cmdLineParse(iargs=None):
    """Parse command-line arguments and normalize every path option to an
    absolute path.

    Args:
        iargs: optional list of argument strings (defaults to sys.argv).

    Returns:
        argparse.Namespace with absolute slc/orbit/aux/work/dem paths.
    """
    inps = createParser().parse_args(args=iargs)
    # All five path-valued options are made absolute in one pass.
    for attr in ('slc_dirname', 'orbit_dirname', 'aux_dirname', 'work_dir', 'dem'):
        setattr(inps, attr, os.path.abspath(getattr(inps, attr)))
    return inps
####################################
def get_dates(inps):
# Given the SLC directory This function extracts the acquisition dates
# and prepares a dictionary of sentinel slc files such that keys are
# acquisition dates and values are object instances of sentinelSLC class
# which is defined in Stack.py
if inps.bbox is not None:
bbox = [float(val) for val in inps.bbox.split()]
if os.path.isfile(inps.slc_dirname):
print('reading SAFE files from: ' + inps.slc_dirname)
SAFE_files = []
for line in open(inps.slc_dirname):
SAFE_files.append(str.replace(line,'\n','').strip())
else:
SAFE_files = glob.glob(os.path.join(inps.slc_dirname,'S1*_IW_SLC*zip')) # changed to zip file by Minyan Zhong
if len(SAFE_files) == 0:
raise Exception('No SAFE file found')
else:
print ("Number of SAFE files found: "+str(len(SAFE_files)))
################################
# write down the list of SAFE files in a txt file:
f = open('SAFE_files.txt','w')
for safe in SAFE_files:
f.write(safe + '\n')
f.close()
################################
# group the files based on dates
safe_dict={}
for safe in SAFE_files:
safeObj=sentinelSLC(safe)
safeObj.get_dates()
safeObj.get_orbit(inps.orbit_dirname, inps.work_dir)
if safeObj.date not in safe_dict.keys():
safe_dict[safeObj.date]=safeObj
else:
safe_dict[safeObj.date].safe_file = safe_dict[safeObj.date].safe_file + ' ' + safe
################################
dateList = [key for key in safe_dict.keys()]
dateList.sort()
print ("*****************************************")
print ("Number of dates : " +str(len(dateList)))
print ("List of dates : ")
print (dateList)
################################
#get the files covering the bounding box
S=[]
N=[]
W=[]
E=[]
safe_dict_bbox={}
print ('date south north west east')
for date in dateList:
#safe_dict[date].get_lat_lon()
safe_dict[date].get_lat_lon_v2()
#safe_dict[date].get_lat_lon_v3(inps)
S.append(safe_dict[date].SNWE[0])
N.append(safe_dict[date].SNWE[1])
W.append(safe_dict[date].SNWE[2])
E.append(safe_dict[date].SNWE[3])
print (date, safe_dict[date].SNWE[0],safe_dict[date].SNWE[1],safe_dict[date].SNWE[2],safe_dict[date].SNWE[3])
if inps.bbox is not None:
if safe_dict[date].SNWE[0] <= bbox[0] and safe_dict[date].SNWE[1] >= bbox[1] and safe_dict[date].SNWE[2] <= bbox[2] and safe_dict[date].SNWE[3] >=bbox[3]:
safe_dict_bbox[date] = safe_dict[date]
print ("*****************************************")
################################
print ('All dates')
print (dateList)
if inps.bbox is not None:
safe_dict = safe_dict_bbox
dateList = [key for key in safe_dict.keys()]
dateList.sort()
print ('dates covering the bbox')
print (dateList)
return dateList, safe_dict
def checkCurrentStatus(inps):
    """Compare available acquisitions against already-processed SLCs.

    In update mode (inps.update truthy) and when an slc/ folder with prior
    results exists under the working directory, keep only the acquisition
    dates that have not been processed yet.

    Returns:
        tuple: (sorted list of dates to process, matching safe_dict subset).
    """
    acquisitionDates, safe_dict = get_dates(inps)
    slcDir = os.path.join(inps.work_dir, 'slc')
    if os.path.exists(slcDir):
        # Dates already processed are the basenames of entries under slc/.
        existed_dates = sorted(os.path.basename(slc)
                               for slc in glob.glob(os.path.join(slcDir, '*')))
        if inps.update and len(existed_dates) > 0:
            print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
            print('')
            print('Old processed acquisitions are found: ')
            print(existed_dates)
            print('')
            print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
            acquisitionDates = sorted(set(acquisitionDates) - set(existed_dates))
            if len(acquisitionDates) > 0:
                print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
                print('')
                print('New acquisitions are found and will be processed: ')
                print(acquisitionDates)
                print('')
                print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
            else:
                print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
                print('')
                print('No new acquisition: ')
                print('')
                print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
            # Narrow safe_dict to the dates that remain to be processed.
            safe_dict = {d: safe_dict[d] for d in acquisitionDates}
    else:
        print('No existing processed slc are identified. All the slcs will be processed.')
    return acquisitionDates, safe_dict
def slcSimple(inps, acquisitionDates, safe_dict, mergeSLC=False):
    """Write the run file(s) for unpacking (and optionally merging) SLCs.

    Args:
        inps: parsed command-line namespace.
        acquisitionDates: list of dates to process.
        safe_dict: date -> sentinelSLC mapping.
        mergeSLC: when True, also emit a second run file that merges the SLCs.

    Returns:
        int: the number of run files written (1 or 2).
    """
    #############################
    step = 1
    runObj = run()
    runObj.configure(inps, 'run_' + str(step))
    runObj.unpackSLC(acquisitionDates, safe_dict)
    runObj.finalize()

    if mergeSLC:
        step += 1
        runObj = run()
        runObj.configure(inps, 'run_' + str(step))
        runObj.mergeSLC(acquisitionDates, virtual='False')
        runObj.finalize()

    return step
def main(iargs=None):
    """Entry point: parse arguments, refuse to clobber an existing run_files
    folder, then generate the unpack/merge run files."""
    inps = cmdLineParse(iargs)
    run_files_dir = os.path.join(inps.work_dir, 'run_files')
    if os.path.exists(run_files_dir):
        # Abort rather than overwrite previously generated run files.
        print('')
        print('**************************')
        print('run_files folder exists.')
        print(run_files_dir, ' already exists.')
        print('Please remove or rename this folder and try again.')
        print('')
        print('**************************')
        sys.exit(1)
    acquisitionDates, safe_dict = checkCurrentStatus(inps)
    slcSimple(inps, acquisitionDates, safe_dict, mergeSLC=True)
if __name__ == "__main__":
    # Script entry point: generate config and run files for Sentinel-1 SLCs.
    main()