 # limitations under the License.
 import itertools
 import json
+import logging
 import os
 from os import path
 import threading
 from ._fatomic import atomic_open
-from .utils import write_to_file
+from . import exceptions
+from .utils import complete
 from .utils import strp_timestamp
 from .utils import strf_timestamp
+from .utils import write_to_file
+
+
+_logger = logging.getLogger(__name__)


 class _SyncTool(object):
@@ -35,6 +41,8 @@ def __init__(self, client, destination, aoi, scene_type, products,
         self.workspace = filters.get('workspace', None)
         self._init()
         self.sync_file = path.join(self.destination, 'sync.json')
+        self.error_handler = _logger.exception
+        self._cancel = False

     def _init(self):
         dest = self.destination
@@ -87,7 +95,7 @@ def sync(self, callback):
         summary = _SyncSummary(self._scene_count * len(self.products))

         all_scenes = self.get_scenes_to_sync()
-        while True:
+        while not self._cancel:
             # bite off chunks of work to not bog down on too many queued jobs
             scenes = list(itertools.islice(all_scenes, 100))
             if not scenes:
@@ -100,17 +108,26 @@ def sync(self, callback):
             for h in handlers:
                 h.run(self.client, self.scene_type, self.products)
             # synchronously await them and then write metadata
-            for h in handlers:
-                h.finish()
+            complete(handlers, self._future_handler, self.client)

-        if summary.latest:
+        if summary.latest and not self._cancel:
             sync = self._read_sync_file()
             sync['latest'] = strf_timestamp(summary.latest)
             with atomic_open(self.sync_file, 'wb') as fp:
                 fp.write(json.dumps(sync, indent=2).encode('utf-8'))

         return summary

+    def _future_handler(self, futures):
+        for f in futures:
+            try:
+                f.finish()
+            except exceptions.RequestCancelled:
+                self._cancel = True
+                break
+            except:
+                self.error_handler('Unexpected error')
+

 class _SyncSummary(object):
     '''Keep track of summary state, thread safe.'''
@@ -137,19 +154,40 @@ def __init__(self, destination, summary, metadata, user_callback):
         self.summary = summary
         self.metadata = metadata
         self.user_callback = user_callback or (lambda *args: None)
+        self._cancel = False
+        self.futures = []

     def run(self, client, scene_type, products):
-        self.futures = []
+        '''start asynchronous execution, must call finish to await'''
+        if self._cancel:
+            return
         for product in products:
             self.futures.extend(client.fetch_scene_geotiffs(
                 [self.metadata['id']],
                 scene_type, product,
                 callback=self))

+    def cancel(self):
+        '''cancel pending downloads'''
+        self._cancel = True
+        futures = getattr(self, 'futures', [])
+        for f in futures:
+            f.cancel()
+
     def finish(self):
+        '''await pending downloads and write out metadata
+        @todo this is not an atomic operation - it's possible that one
+        product gets downloaded and the other fails.
+        '''
+        if self._cancel:
+            return
+
         for f in self.futures:
             f.await()

+        if self._cancel:
+            return
+
         # write out metadata
         metadata = os.path.join(self.destination,
                                 '%s_metadata.json' % self.metadata['id'])
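
Note: the diff only shows the call site of `complete` (imported from `.utils`); its body is not part of this change. As a rough, hypothetical sketch of the contract this code appears to rely on — hand the pending handlers to the supplied check function and give the caller a chance to cancel — it might look something like the following (an assumption based solely on the call `complete(handlers, self._future_handler, self.client)`, not the actual `.utils` implementation):

# Hypothetical sketch only: the real `.utils.complete` is not shown in
# this diff, so the body below is an assumption inferred from its call
# site, complete(handlers, self._future_handler, self.client).
def complete(futures, check, client):
    try:
        # hand the pending handlers/futures to the caller's check
        # function, which awaits each one (here: _future_handler)
        check(futures)
    except KeyboardInterrupt:
        # on interrupt, cancel whatever is still pending before
        # propagating; `client` is accepted to match the call site
        # but is unused in this sketch
        for f in futures:
            f.cancel()
        raise

Whatever `complete` actually does, the cancellation flow added by this diff is self-contained: `_future_handler` sets `self._cancel` when it sees `exceptions.RequestCancelled`, the `while not self._cancel` loop in `sync()` then stops queueing new chunks, and the handler's `finish()` returns early instead of writing metadata.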