@@ -972,28 +972,21 @@ void IsolateReloadContext::CheckpointClasses() {
   // Copy the size of the class table.
   saved_num_cids_ = I->class_table()->NumCids();

-  // Copy of the class table.
-  ClassAndSize* local_saved_class_table = reinterpret_cast<ClassAndSize*>(
-      malloc(sizeof(ClassAndSize) * saved_num_cids_));
+  ClassAndSize* saved_class_table = nullptr;
+  class_table->CopyBeforeHotReload(&saved_class_table, &saved_num_cids_);

   // Copy classes into saved_class_table_ first. Make sure there are no
   // safepoints until saved_class_table_ is filled up and saved so class raw
   // pointers in saved_class_table_ are properly visited by GC.
   {
     NoSafepointScope no_safepoint_scope(Thread::Current());

-    for (intptr_t i = 0; i < saved_num_cids_; i++) {
-      if (class_table->IsValidIndex(i) && class_table->HasValidClassAt(i)) {
-        // Copy the class into the saved class table.
-        local_saved_class_table[i] = class_table->PairAt(i);
-      } else {
-        // No class at this index, mark it as NULL.
-        local_saved_class_table[i] = ClassAndSize(NULL);
-      }
-    }
+    // The saved_class_table_ is now the source of truth for GC.
+    AtomicOperations::StoreRelease(&saved_class_table_, saved_class_table);

-    // Elements of saved_class_table_ are now visible to GC.
-    saved_class_table_ = local_saved_class_table;
+    // We can therefore wipe out all of the old entries (if that table is used
+    // for GC during the hot-reload we have a bug).
+    class_table->ResetBeforeHotReload();
   }

   // Add classes to the set. Set is stored in the Array, so adding an element
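
The new checkpoint protocol above boils down to three steps: snapshot the live class table, publish the snapshot with a release store so the GC can switch to it, and only then wipe the live entries. Below is a minimal, self-contained sketch of that protocol; the simplified ClassAndSize struct and the method bodies are illustrative assumptions only (the real CopyBeforeHotReload/ResetBeforeHotReload live in class_table.h/.cc and are not shown in this diff).

// Sketch only, not VM code: models the checkpoint hand-off used above.
#include <atomic>
#include <cstdint>
#include <cstdlib>
#include <cstring>

struct ClassAndSize {   // stand-in for the VM's (RawClass*, size) pair
  void* raw_class;
  intptr_t size;
};

class ClassTable {
 public:
  // Copy the live entries into freshly allocated memory owned by the caller.
  void CopyBeforeHotReload(ClassAndSize** copy, intptr_t* copy_num_cids) {
    *copy_num_cids = num_cids_;
    *copy = static_cast<ClassAndSize*>(
        std::malloc(sizeof(ClassAndSize) * num_cids_));
    std::memcpy(*copy, table_, sizeof(ClassAndSize) * num_cids_);
  }

  // Clear the live entries; after this point the published snapshot is the
  // only table the GC may consult for objects allocated before the reload.
  void ResetBeforeHotReload() {
    for (intptr_t i = 0; i < num_cids_; i++) {
      table_[i] = ClassAndSize{nullptr, 0};
    }
  }

  intptr_t num_cids_ = 0;
  ClassAndSize* table_ = nullptr;
};

// Publication point, analogous to AtomicOperations::StoreRelease above: the
// snapshot must be fully written before the pointer becomes visible.
std::atomic<ClassAndSize*> saved_class_table{nullptr};

void CheckpointClasses(ClassTable* class_table) {
  // The real code does this under a NoSafepointScope, as the diff shows.
  ClassAndSize* snapshot = nullptr;
  intptr_t num_cids = 0;
  class_table->CopyBeforeHotReload(&snapshot, &num_cids);
  saved_class_table.store(snapshot, std::memory_order_release);  // publish
  class_table->ResetBeforeHotReload();  // safe only after the publish
}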
@@ -1026,20 +1019,6 @@ bool IsolateReloadContext::ScriptModifiedSince(const Script& script,
   return (*file_modified_callback_)(url_chars, since);
 }

-static void PropagateLibraryModified(
-    const ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>* imported_by,
-    intptr_t lib_index,
-    BitVector* modified_libs) {
-  ZoneGrowableArray<intptr_t>* dep_libs = (*imported_by)[lib_index];
-  for (intptr_t i = 0; i < dep_libs->length(); i++) {
-    intptr_t dep_lib_index = (*dep_libs)[i];
-    if (!modified_libs->Contains(dep_lib_index)) {
-      modified_libs->Add(dep_lib_index);
-      PropagateLibraryModified(imported_by, dep_lib_index, modified_libs);
-    }
-  }
-}
-
 static bool ContainsScriptUri(const GrowableArray<const char*>& seen_uris,
                               const char* uri) {
   for (intptr_t i = 0; i < seen_uris.length(); i++) {
@@ -1112,109 +1091,6 @@ void IsolateReloadContext::FindModifiedSources(
   }
 }

-BitVector* IsolateReloadContext::FindModifiedLibraries(bool force_reload,
-                                                        bool root_lib_modified) {
-  Thread* thread = Thread::Current();
-  int64_t last_reload = I->last_reload_timestamp();
-
-  const GrowableObjectArray& libs =
-      GrowableObjectArray::Handle(object_store()->libraries());
-  Library& lib = Library::Handle();
-  Array& scripts = Array::Handle();
-  Script& script = Script::Handle();
-  intptr_t num_libs = libs.Length();
-
-  // Construct the imported-by graph.
-  ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>* imported_by = new (zone_)
-      ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>(zone_, num_libs);
-  imported_by->SetLength(num_libs);
-  for (intptr_t i = 0; i < num_libs; i++) {
-    (*imported_by)[i] = new (zone_) ZoneGrowableArray<intptr_t>(zone_, 0);
-  }
-  Array& ports = Array::Handle();
-  Namespace& ns = Namespace::Handle();
-  Library& target = Library::Handle();
-
-  for (intptr_t lib_idx = 0; lib_idx < num_libs; lib_idx++) {
-    lib ^= libs.At(lib_idx);
-    ASSERT(lib_idx == lib.index());
-    if (lib.is_dart_scheme()) {
-      // We don't care about imports among dart scheme libraries.
-      continue;
-    }
-
-    // Add imports to the import-by graph.
-    ports = lib.imports();
-    for (intptr_t import_idx = 0; import_idx < ports.Length(); import_idx++) {
-      ns ^= ports.At(import_idx);
-      if (!ns.IsNull()) {
-        target = ns.library();
-        (*imported_by)[target.index()]->Add(lib.index());
-      }
-    }
-
-    // Add exports to the import-by graph.
-    ports = lib.exports();
-    for (intptr_t export_idx = 0; export_idx < ports.Length(); export_idx++) {
-      ns ^= ports.At(export_idx);
-      if (!ns.IsNull()) {
-        target = ns.library();
-        (*imported_by)[target.index()]->Add(lib.index());
-      }
-    }
-
-    // Add prefixed imports to the import-by graph.
-    DictionaryIterator entries(lib);
-    Object& entry = Object::Handle();
-    LibraryPrefix& prefix = LibraryPrefix::Handle();
-    while (entries.HasNext()) {
-      entry = entries.GetNext();
-      if (entry.IsLibraryPrefix()) {
-        prefix ^= entry.raw();
-        ports = prefix.imports();
-        for (intptr_t import_idx = 0; import_idx < ports.Length();
-             import_idx++) {
-          ns ^= ports.At(import_idx);
-          if (!ns.IsNull()) {
-            target = ns.library();
-            (*imported_by)[target.index()]->Add(lib.index());
-          }
-        }
-      }
-    }
-  }
-
-  BitVector* modified_libs = new (Z) BitVector(Z, num_libs);
-
-  if (root_lib_modified) {
-    // The root library was either moved or replaced. Mark it as modified to
-    // force a reload of the potential root library replacement.
-    lib = object_store()->root_library();
-    modified_libs->Add(lib.index());
-  }
-
-  for (intptr_t lib_idx = 0; lib_idx < num_libs; lib_idx++) {
-    lib ^= libs.At(lib_idx);
-    if (lib.is_dart_scheme() || modified_libs->Contains(lib_idx)) {
-      // We don't consider dart scheme libraries during reload. If
-      // the modified libs set already contains this library, then we
-      // have already visited it.
-      continue;
-    }
-    scripts = lib.LoadedScripts();
-    for (intptr_t script_idx = 0; script_idx < scripts.Length(); script_idx++) {
-      script ^= scripts.At(script_idx);
-      if (force_reload || ScriptModifiedSince(script, last_reload)) {
-        modified_libs->Add(lib_idx);
-        PropagateLibraryModified(imported_by, lib_idx, modified_libs);
-        break;
-      }
-    }
-  }
-
-  return modified_libs;
-}
-
 void IsolateReloadContext::CheckpointLibraries() {
   TIMELINE_SCOPE(CheckpointLibraries);
   TIR_Print("---- CHECKPOINTING LIBRARIES\n");
@@ -1272,16 +1148,8 @@ void IsolateReloadContext::RollbackClasses() {
   TIR_Print("---- ROLLING BACK CLASS TABLE\n");
   ASSERT(saved_num_cids_ > 0);
   ASSERT(saved_class_table_ != NULL);
-  ClassTable* class_table = I->class_table();
-  class_table->SetNumCids(saved_num_cids_);
-  // Overwrite classes in class table with the saved classes.
-  for (intptr_t i = 0; i < saved_num_cids_; i++) {
-    if (class_table->IsValidIndex(i)) {
-      class_table->SetAt(i, saved_class_table_[i].get_raw_class());
-    }
-  }

-  DiscardSavedClassTable();
+  DiscardSavedClassTable(/*is_rollback=*/true);
 }

 void IsolateReloadContext::RollbackLibraries() {
@@ -1676,7 +1544,7 @@ void IsolateReloadContext::MorphInstancesAndApplyNewClassTable() {
   TIMELINE_SCOPE(MorphInstances);
   if (!HasInstanceMorphers()) {
     // Fast path: no class had a shape change.
-    DiscardSavedClassTable();
+    DiscardSavedClassTable(/*is_rollback=*/false);
     return;
   }

@@ -1698,7 +1566,7 @@ void IsolateReloadContext::MorphInstancesAndApplyNewClassTable() {
   intptr_t count = locator.count();
   if (count == 0) {
     // Fast path: classes with shape change have no instances.
-    DiscardSavedClassTable();
+    DiscardSavedClassTable(/*is_rollback=*/false);
     return;
   }

@@ -1742,8 +1610,10 @@ void IsolateReloadContext::MorphInstancesAndApplyNewClassTable() {
     saved_class_table_[i] = ClassAndSize(nullptr, -1);
   }
 #endif
-  free(saved_class_table_);
-  saved_class_table_ = nullptr;
+
+  // We accepted the hot-reload and morphed instances. So now we can commit to
+  // the changed class table and delete the saved one.
+  DiscardSavedClassTable(/*is_rollback=*/false);

   Become::ElementsForwardIdentity(before, after);
   // The heap now contains only instances with the new size. Ordinary GC is safe
@@ -1808,7 +1678,7 @@ RawClass* IsolateReloadContext::FindOriginalClass(const Class& cls) {

 RawClass* IsolateReloadContext::GetClassForHeapWalkAt(intptr_t cid) {
   ClassAndSize* class_table =
-      AtomicOperations::LoadRelaxed(&saved_class_table_);
+      AtomicOperations::LoadAcquire(&saved_class_table_);
   if (class_table != NULL) {
     ASSERT(cid > 0);
     ASSERT(cid < saved_num_cids_);
@@ -1820,7 +1690,7 @@ RawClass* IsolateReloadContext::GetClassForHeapWalkAt(intptr_t cid) {

 intptr_t IsolateReloadContext::GetClassSizeForHeapWalkAt(intptr_t cid) {
   ClassAndSize* class_table =
-      AtomicOperations::LoadRelaxed(&saved_class_table_);
+      AtomicOperations::LoadAcquire(&saved_class_table_);
   if (class_table != NULL) {
     ASSERT(cid > 0);
     ASSERT(cid < saved_num_cids_);
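
The switch from LoadRelaxed to LoadAcquire in the two heap-walk accessors pairs with the StoreRelease used to publish (and later clear) saved_class_table_: a thread that observes a non-null snapshot pointer must also observe every entry written into the snapshot before it was published. The same pairing, expressed with std::atomic as an illustrative equivalent of the VM's AtomicOperations wrappers (not the VM's implementation):

#include <atomic>
#include <cstdint>

struct ClassAndSize {
  void* raw_class;
  intptr_t size;
};

std::atomic<ClassAndSize*> saved_class_table{nullptr};

// Writer side (checkpoint / discard): release store, so all prior writes to
// the snapshot entries happen-before any acquire load that sees the pointer.
void PublishSnapshot(ClassAndSize* snapshot) {
  saved_class_table.store(snapshot, std::memory_order_release);
}

// Reader side (GC heap walk): acquire load. If it returns non-null, the
// reader is guaranteed to see fully initialized entries; a relaxed load
// gives no such guarantee on weakly ordered hardware.
ClassAndSize* GetSnapshotForHeapWalk() {
  return saved_class_table.load(std::memory_order_acquire);
}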
@@ -1830,14 +1700,12 @@ intptr_t IsolateReloadContext::GetClassSizeForHeapWalkAt(intptr_t cid) {
   }
 }

-void IsolateReloadContext::DiscardSavedClassTable() {
+void IsolateReloadContext::DiscardSavedClassTable(bool is_rollback) {
   ClassAndSize* local_saved_class_table = saved_class_table_;
-  saved_class_table_ = nullptr;
-  // Can't free this table immediately as another thread (e.g., concurrent
-  // marker or sweeper) may be between loading the table pointer and loading the
-  // table element. The table will be freed at the next major GC or isolate
-  // shutdown.
-  I->class_table()->AddOldTable(local_saved_class_table);
+  I->class_table()->ResetAfterHotReload(local_saved_class_table,
+                                        saved_num_cids_, is_rollback);
+  AtomicOperations::StoreRelease(&saved_class_table_,
+                                 static_cast<ClassAndSize*>(nullptr));
 }

 RawLibrary* IsolateReloadContext::saved_root_library() const {
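
With the is_rollback flag, both outcomes of a reload now funnel through a single ClassTable entry point: the commit paths in MorphInstancesAndApplyNewClassTable pass false, RollbackClasses passes true. The body of ResetAfterHotReload is not part of this diff; the sketch below is one plausible shape, inferred from the call site, the old RollbackClasses() loop, and the removed comment explaining why the snapshot cannot be freed immediately.

// Hypothetical sketch only; the real ResetAfterHotReload lives in
// class_table.cc and may differ.
#include <cstdint>
#include <vector>

struct ClassAndSize {
  void* raw_class;
  intptr_t size;
};

class ClassTable {
 public:
  void ResetAfterHotReload(ClassAndSize* old_table,
                           intptr_t num_old_cids,
                           bool is_rollback) {
    if (is_rollback) {
      // Reload failed: restore the checkpointed classes, which is what the
      // removed RollbackClasses() loop used to do inline.
      num_cids_ = num_old_cids;
      for (intptr_t i = 0; i < num_old_cids; i++) {
        table_[i] = old_table[i];
      }
    }
    // Commit or rollback, the snapshot cannot be freed right away: a
    // concurrent marker or sweeper may sit between loading the table pointer
    // and loading an element (the rationale in the removed comment). Park it
    // until the next major GC or isolate shutdown.
    old_tables_.push_back(old_table);
  }

 private:
  intptr_t num_cids_ = 0;
  ClassAndSize* table_ = nullptr;
  std::vector<ClassAndSize*> old_tables_;  // freed at a later safe point
};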