# Sync configuration, all injected by the release pipeline via environment
# variables.  STORAGE_ACCOUNT / STORAGE_CONTAINER identify the target Azure
# blob container; COMMIT_NUM is how many commits back to diff when detecting
# index.json changes (defaults to 1 when unset or empty); BLOB_PREFIX is an
# optional virtual-directory prefix prepended to every uploaded blob name.
STORAGE_ACCOUNT = os.getenv('AZURE_EXTENSION_TARGET_STORAGE_ACCOUNT')
STORAGE_CONTAINER = os.getenv('AZURE_EXTENSION_TARGET_STORAGE_CONTAINER')
COMMIT_NUM = os.getenv('AZURE_EXTENSION_COMMIT_NUM') or 1
BLOB_PREFIX = os.getenv('AZURE_EXTENSION_BLOB_PREFIX')
1819
1920
def _get_updated_extension_filenames():
    """Diff ``src/index.json`` against ``HEAD~COMMIT_NUM`` and return two sets.

    Returns:
        tuple(set, set): ``(added, deleted)`` wheel filenames.  Sets (rather
        than lists) de-duplicate repeated entries so the caller can subtract
        them to find the net changes.
    """
    filename_pattern = r'"filename":\s+"(.*?)"'
    diff_cmd = 'git --no-pager diff --diff-filter=ACMRT HEAD~{} -- src/index.json'.format(COMMIT_NUM)
    diff_lines = check_output(diff_cmd.split()).decode('utf-8').splitlines()

    def _changed_filenames(marker, file_header):
        # Pull the filename out of each changed line, ignoring the
        # '+++'/'---' file-header lines of the unified diff.
        return {re.findall(filename_pattern, line)[0]
                for line in diff_lines
                if line.startswith(marker)
                and not line.startswith(file_header)
                and 'filename' in line}

    return _changed_filenames('+', '+++'), _changed_filenames('-', '---')
2728
2829
def _sync_wheel(ext, updated_indexes, failed_urls, client, overwrite, temp_dir):
    """Download one extension wheel and upload it to the target container.

    On success, rewrites ``ext['downloadUrl']`` to the new blob URL and
    appends ``ext`` to ``updated_indexes``.  A failed download is recorded in
    ``failed_urls`` and the wheel is skipped instead of aborting the sync.
    """
    download_url = ext['downloadUrl']
    whl_file = download_url.split('/')[-1]
    whl_path = os.path.join(temp_dir, whl_file)
    # Honor the optional virtual-directory prefix for the uploaded blob.
    blob_name = f'{BLOB_PREFIX}/{whl_file}' if BLOB_PREFIX else whl_file
    try:
        download_file(download_url, whl_path)
    except Exception:
        # Best-effort: remember the failure and let the caller report it.
        failed_urls.append(download_url)
        return
    if not overwrite and client.exists(container_name=STORAGE_CONTAINER, blob_name=blob_name):
        print("Skipping '{}' as it already exists...".format(whl_file))
        return
    client.create_blob_from_path(container_name=STORAGE_CONTAINER, blob_name=blob_name,
                                 file_path=os.path.abspath(whl_path))
    # Point the index entry at the freshly uploaded blob.
    ext['downloadUrl'] = client.make_blob_url(container_name=STORAGE_CONTAINER, blob_name=blob_name)
    updated_indexes.append(ext)
@@ -104,12 +106,17 @@ def main():
104106 import tempfile
105107 from azure .storage .blob import BlockBlobService
106108
107- added_ext_filenames = []
108- deleted_ext_filenames = []
109+ net_added_ext_filenames = []
110+ net_deleted_ext_filenames = []
109111 sync_all = (os .getenv ('AZURE_SYNC_ALL_EXTENSIONS' ) and os .getenv ('AZURE_SYNC_ALL_EXTENSIONS' ).lower () == 'true' )
110112 if not sync_all :
111113 added_ext_filenames , deleted_ext_filenames = _get_updated_extension_filenames ()
112- if not added_ext_filenames and not deleted_ext_filenames :
114+ # when there are large amount of changes, for instance deleting a lot of old versions of extensions,
115+ # git may not accurately recognize the right changes, so we need to compare added filenames and deleted filenames
116+ # to get the real changed ones.
117+ net_added_ext_filenames = added_ext_filenames - deleted_ext_filenames
118+ net_deleted_ext_filenames = deleted_ext_filenames - added_ext_filenames
119+ if not net_added_ext_filenames and not net_deleted_ext_filenames :
113120 print ('index.json not changed. End task.' )
114121 return
115122 temp_dir = tempfile .mkdtemp ()
@@ -119,25 +126,33 @@ def main():
119126 target_index = DEFAULT_TARGET_INDEX_URL
120127 os .mkdir (os .path .join (temp_dir , 'target' ))
121128 target_index_path = os .path .join (temp_dir , 'target' , 'index.json' )
122- download_file (target_index , target_index_path )
123-
129+ try :
130+ download_file (target_index , target_index_path )
131+ except Exception as ex :
132+ if sync_all and '404' in str (ex ):
133+ initial_index = {"extensions" : {}, "formatVersion" : "1" }
134+ open (target_index_path , 'w' ).write (json .dumps (initial_index , indent = 4 , sort_keys = True ))
135+ else :
136+ raise
124137 client = BlockBlobService (account_name = STORAGE_ACCOUNT , account_key = STORAGE_ACCOUNT_KEY )
125138 updated_indexes = []
126139 failed_urls = []
127140 if sync_all :
128141 print ('Syncing all extensions...\n ' )
129142 # backup the old index.json
130- client .create_blob_from_path (container_name = STORAGE_CONTAINER , blob_name = 'index.json.sav' ,
143+ backup_index_name = f'{ BLOB_PREFIX } /index.json.sav' if BLOB_PREFIX else 'index.json.sav'
144+ client .create_blob_from_path (container_name = STORAGE_CONTAINER , blob_name = backup_index_name ,
131145 file_path = os .path .abspath (target_index_path ))
132- inital_index = {"extensions" : {}, "formatVersion" : "1" }
133- open (target_index_path , 'w' ).write (json .dumps (inital_index , indent = 4 , sort_keys = True ))
146+ # start with an empty index.json to sync all extensions
147+ initial_index = {"extensions" : {}, "formatVersion" : "1" }
148+ open (target_index_path , 'w' ).write (json .dumps (initial_index , indent = 4 , sort_keys = True ))
134149 for extension_name in current_extensions .keys ():
135150 for ext in current_extensions [extension_name ]:
136151 print ('Uploading {}' .format (ext ['filename' ]))
137152 _sync_wheel (ext , updated_indexes , failed_urls , client , True , temp_dir )
138153 else :
139154 NAME_REGEX = r'^(.*?)-\d+.\d+.\d+'
140- for filename in added_ext_filenames :
155+ for filename in net_added_ext_filenames :
141156 extension_name = re .findall (NAME_REGEX , filename )[0 ].replace ('_' , '-' )
142157 print ('Uploading {}' .format (filename ))
143158 ext = current_extensions [extension_name ][- 1 ]
@@ -147,17 +162,19 @@ def main():
147162 _sync_wheel (ext , updated_indexes , failed_urls , client , True , temp_dir )
148163
149164 print ("" )
150- _update_target_extension_index (updated_indexes , deleted_ext_filenames , target_index_path )
151- client .create_blob_from_path (container_name = STORAGE_CONTAINER , blob_name = 'index.json' ,
165+ _update_target_extension_index (updated_indexes , net_deleted_ext_filenames , target_index_path )
166+ index_name = f'{ BLOB_PREFIX } /index.json' if BLOB_PREFIX else 'index.json'
167+ client .create_blob_from_path (container_name = STORAGE_CONTAINER , blob_name = index_name ,
152168 file_path = os .path .abspath (target_index_path ))
169+ print ("\n Sync finished." )
153170 if updated_indexes :
154- print ("\n Sync finished, extensions available in:" )
171+ print ("New extensions available in:" )
155172 for updated_index in updated_indexes :
156173 print (updated_index ['downloadUrl' ])
157174 shutil .rmtree (temp_dir )
158175
159176 if failed_urls :
160- print ("\n Failed to donwload and sync the following files. They are skipped:" )
177+ print ("\n Failed to download and sync the following files. They are skipped:" )
161178 for url in failed_urls :
162179 print (url )
163180 print ("" )
0 commit comments