@@ -278,6 +278,7 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
278
278
.For = > return forExpr (mod , scope , rl , node .castTag (.For ).? ),
279
279
.ArrayAccess = > return arrayAccess (mod , scope , rl , node .castTag (.ArrayAccess ).? ),
280
280
.Slice = > return rlWrap (mod , scope , rl , try sliceExpr (mod , scope , node .castTag (.Slice ).? )),
281
+ .Try = > return tryExpr (mod , scope , rl , node .castTag (.Try ).? ),
281
282
.Catch = > return catchExpr (mod , scope , rl , node .castTag (.Catch ).? ),
282
283
.Comptime = > return comptimeKeyword (mod , scope , rl , node .castTag (.Comptime ).? ),
283
284
.OrElse = > return orelseExpr (mod , scope , rl , node .castTag (.OrElse ).? ),
@@ -286,7 +287,6 @@ pub fn expr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node) InnerEr
286
287
.Defer = > return mod .failNode (scope , node , "TODO implement astgen.expr for .Defer" , .{}),
287
288
.Await = > return mod .failNode (scope , node , "TODO implement astgen.expr for .Await" , .{}),
288
289
.Resume = > return mod .failNode (scope , node , "TODO implement astgen.expr for .Resume" , .{}),
289
- .Try = > return mod .failNode (scope , node , "TODO implement astgen.expr for .Try" , .{}),
290
290
.ArrayInitializer = > return mod .failNode (scope , node , "TODO implement astgen.expr for .ArrayInitializer" , .{}),
291
291
.ArrayInitializerDot = > return mod .failNode (scope , node , "TODO implement astgen.expr for .ArrayInitializerDot" , .{}),
292
292
.StructInitializer = > return mod .failNode (scope , node , "TODO implement astgen.expr for .StructInitializer" , .{}),
@@ -870,6 +870,94 @@ fn errorType(mod: *Module, scope: *Scope, node: *ast.Node.OneToken) InnerError!*
870
870
});
871
871
}
872
872
873
/// AST-gen for a `try` prefix expression: `try operand`.
///
/// Lowers to a ZIR block containing a `condbr` on `iserr(operand)`:
///   - then-branch (error case): unwraps the error code and emits
///     `return @as(ret_type, err)` so the error propagates to the caller;
///   - else-branch (success case): breaks out of the block with the
///     unwrapped payload pointer.
/// The operand is evaluated once, by reference, so both branches unwrap
/// from the same `operand_ptr`. Returns the block instruction wrapped
/// for the requested result location via `rlWrapPtr`.
fn tryExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.SimplePrefixOp) InnerError!*zir.Inst {
    const tree = scope.tree();
    const src = tree.token_locs[node.op_token].start;

    // Evaluate the operand once, as an lvalue, so the error-union is not
    // recomputed when each branch unwraps from it below.
    const operand_ptr = try expr(mod, scope, .ref, node.rhs);
    // TODO we could avoid an unnecessary copy if .iserr, .isnull took a pointer
    const err_union = try addZIRUnOp(mod, scope, src, .deref, operand_ptr);
    const cond = try addZIRUnOp(mod, scope, src, .iserr, err_union);

    var block_scope: Scope.GenZIR = .{
        .parent = scope,
        .decl = scope.decl().?,
        .arena = scope.arena(),
        .instructions = .{},
    };
    defer block_scope.instructions.deinit(mod.gpa);

    const condbr = try addZIRInstSpecial(mod, &block_scope.base, src, zir.Inst.CondBr, .{
        .condition = cond,
        .then_body = undefined, // populated below
        .else_body = undefined, // populated below
    }, .{});

    // The enclosing block both branches break out of / return from.
    const block = try addZIRInstBlock(mod, scope, src, .block, .{
        .instructions = try block_scope.arena.dupe(*zir.Inst, block_scope.instructions.items),
    });

    // NOTE(review): the original computed a `branch_rl: ResultLoc` here
    // (forwarding `rl` or redirecting inferred pointers to `.{ .block_ptr =
    // block }`), but never used it: unlike `catch`/`orelse`, `try` has no
    // rhs sub-expression to generate with a branch result location. The
    // dead local has been removed.

    var then_scope: Scope.GenZIR = .{
        .parent = scope,
        .decl = block_scope.decl,
        .arena = block_scope.arena,
        .instructions = .{},
    };
    defer then_scope.instructions.deinit(mod.gpa);

    // Error branch: bind the unwrapped error code into a sub-scope so the
    // `return` expression below can reference it.
    var err_val_scope: Scope.LocalVal = undefined;
    const then_sub_scope = blk: {
        const unwrapped_err_ptr = try addZIRUnOp(mod, &then_scope.base, src, .unwrap_err_code, operand_ptr);
        err_val_scope = .{
            .parent = &then_scope.base,
            .gen_zir = &then_scope,
            // TODO is the name unnecessary?
            .name = "",
            .inst = try addZIRUnOp(mod, &then_scope.base, src, .deref, unwrapped_err_ptr),
        };
        break :blk &err_val_scope.base;
    };

    // Emit `return @as(ret_type, err)` — propagate the error to the caller.
    // The break's operand is the (noreturn) return instruction.
    _ = try addZIRInst(mod, &then_scope.base, src, zir.Inst.Break, .{
        .block = block,
        .operand = try addZIRUnOp(mod, then_sub_scope, src, .@"return", try addZIRBinOp(
            mod,
            then_sub_scope,
            src,
            .as,
            try addZIRNoOp(mod, then_sub_scope, src, .ret_type),
            err_val_scope.inst,
        )),
    }, .{});

    var else_scope: Scope.GenZIR = .{
        .parent = scope,
        .decl = block_scope.decl,
        .arena = block_scope.arena,
        .instructions = .{},
    };
    defer else_scope.instructions.deinit(mod.gpa);

    // Success branch: break out of the block with the payload. The unwrap
    // is safe (unchecked) because this branch only runs when iserr is false.
    const unwrapped_payload = try addZIRUnOp(mod, &else_scope.base, src, .unwrap_err_unsafe, operand_ptr);
    _ = try addZIRInst(mod, &else_scope.base, src, zir.Inst.Break, .{
        .block = block,
        .operand = unwrapped_payload,
    }, .{});

    // Now that both branch bodies are complete, patch them into the condbr.
    condbr.positionals.then_body = .{ .instructions = try then_scope.arena.dupe(*zir.Inst, then_scope.instructions.items) };
    condbr.positionals.else_body = .{ .instructions = try else_scope.arena.dupe(*zir.Inst, else_scope.instructions.items) };
    return rlWrapPtr(mod, scope, rl, &block.base);
}
960
+
873
961
/// AST-gen for a `catch` binary expression: `lhs catch |payload| rhs`.
///
/// Delegates to the shared `orelseCatchExpr` lowering, selecting the
/// error-union flavor: the condition instruction is `.iserr` and the
/// success-case unwrap is `.unwrap_err_unsafe` (`orelse` presumably passes
/// the optional-flavored equivalents — confirm at its call site).
fn catchExpr(mod: *Module, scope: *Scope, rl: ResultLoc, node: *ast.Node.Catch) InnerError!*zir.Inst {
    return orelseCatchExpr(mod, scope, rl, node.lhs, node.op_token, .iserr, .unwrap_err_unsafe, node.rhs, node.payload);
}
0 commit comments