diff --git a/COPYRIGHT b/COPYRIGHT new file mode 100644 index 0000000..3c3098f --- /dev/null +++ b/COPYRIGHT @@ -0,0 +1,7 @@ +Copyright (C) 2026 Mark Pro + +The `dig` source code and repository contents are copyright Mark Pro and +contributors, unless otherwise noted. + +`dig` is licensed under the GNU General Public License, version 3 or, at your +option, any later version. See the LICENSE file for the full license text. diff --git a/Cargo.toml b/Cargo.toml index 1e9e836..c80f1f0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,6 +2,7 @@ name = "dig" version = "0.0.1" edition = "2024" +license = "GPL-3.0-or-later" [dependencies] clap = { version = "4.6.0", features = ["derive"] } diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..f288702 --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. 
Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. 
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. 
+ + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. 
Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. 
+ + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. 
+ + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. 
If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. 
+ + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. 
+ + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. + + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the 
material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. 
+ + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. 
If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. 
+ + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. 
The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. 
THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. 
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/philosophy/why-not-lgpl.html>.
diff --git a/README.md b/README.md index f7105da..2cd8b23 100644 --- a/README.md +++ b/README.md @@ -166,3 +166,15 @@ dig sync --continue If the next descendant also conflicts, repeat the same process and run `dig sync --continue` again. While an operation is paused, start by finishing or aborting that rebase before running more `dig` workflow commands. If you abort with `git rebase --abort`, rerun the original `dig` command after the rebase state has been cleared. + +## License + +`dig` is licensed under the GNU General Public License, version 3 or, at your option, any later version. See [LICENSE](LICENSE) for the full text. + +Copyright (C) 2026 Mark Pro. See [COPYRIGHT](COPYRIGHT) for the project copyright notice. + +Commercial use of `dig` is allowed. You may use `dig` in commercial environments, on private repositories, and on proprietary codebases. + +Using `dig` as a tool against a repository does not by itself change the license of that repository or require that repository to be open source. In other words, running `dig` on your project does not impose the GPL on your project's source code merely because `dig` was used as part of the workflow. + +If you modify and redistribute `dig` itself, or distribute a larger combined work that incorporates `dig`'s GPL-covered code, those distributions must comply with the GPL. 
diff --git a/src/cli/pr/mod.rs b/src/cli/pr/mod.rs index 4816bc1..7af5128 100644 --- a/src/cli/pr/mod.rs +++ b/src/cli/pr/mod.rs @@ -4,7 +4,8 @@ use clap::{Args, Subcommand}; use crate::core::git; use crate::core::pr::{ - self, PrOptions, PrOutcomeKind, TrackedPullRequestListNode, TrackedPullRequestListView, + self, PrMergeOutcome, PrOptions, PrOutcomeKind, RetargetedPullRequest, + TrackedPullRequestListNode, TrackedPullRequestListView, }; use super::CommandOutcome; @@ -36,6 +37,9 @@ pub struct PrArgs { pub enum PrCommand { /// List open pull requests that are tracked by dig List(PrListArgs), + + /// Merge the current tracked pull request on GitHub + Merge(PrMergeArgs), } #[derive(Args, Debug, Clone, Default)] @@ -45,9 +49,13 @@ pub struct PrListArgs { pub view: bool, } +#[derive(Args, Debug, Clone, Default)] +pub struct PrMergeArgs {} + pub fn execute(args: PrArgs) -> io::Result { match args.command.clone() { Some(PrCommand::List(list_args)) => execute_list(list_args), + Some(PrCommand::Merge(_)) => execute_merge(), None => execute_current(args), } } @@ -130,6 +138,18 @@ fn execute_list(args: PrListArgs) -> io::Result { }) } +fn execute_merge() -> io::Result { + let outcome = pr::merge_current_pull_request()?; + let output = format_pr_merge_output(&outcome); + if !output.is_empty() { + println!("{output}"); + } + + Ok(CommandOutcome { + status: outcome.status, + }) +} + fn render_pull_request_list(view: &TrackedPullRequestListView) -> String { common::render_tree( view.root_label.clone(), @@ -146,6 +166,40 @@ fn format_pull_request_label(node: &TrackedPullRequestListNode) -> String { ) } +fn format_pr_merge_output(outcome: &PrMergeOutcome) -> String { + let mut sections = Vec::new(); + + let retargeted = format_retargeted_pull_requests(&outcome.retargeted_pull_requests); + if !retargeted.is_empty() { + sections.push(retargeted); + } + + sections.push(format!( + "Merged pull request #{} for '{}' into '{}'.", + outcome.pull_request_number, outcome.branch_name, 
outcome.base_branch_name + )); + + common::join_sections(§ions) +} + +fn format_retargeted_pull_requests(retargeted: &[RetargetedPullRequest]) -> String { + if retargeted.is_empty() { + return String::new(); + } + + let mut lines = vec!["Retargeted child pull requests:".to_string()]; + for pull_request in retargeted { + lines.push(format!( + "- #{} for {} to {}", + pull_request.pull_request_number, + pull_request.branch_name, + pull_request.new_base_branch_name + )); + } + + lines.join("\n") +} + impl From for PrOptions { fn from(args: PrArgs) -> Self { Self { @@ -159,9 +213,13 @@ impl From for PrOptions { #[cfg(test)] mod tests { - use super::{PrArgs, PrCommand, PrListArgs, render_pull_request_list}; + use super::{ + PrArgs, PrCommand, PrListArgs, PrMergeArgs, RetargetedPullRequest, format_pr_merge_output, + render_pull_request_list, + }; + use crate::core::git; use crate::core::pr::PrOptions; - use crate::core::pr::{TrackedPullRequestListNode, TrackedPullRequestListView}; + use crate::core::pr::{PrMergeOutcome, TrackedPullRequestListNode, TrackedPullRequestListView}; #[test] fn converts_cli_args_into_core_pr_options() { @@ -191,6 +249,24 @@ mod tests { .unwrap() { PrCommand::List(args) => assert!(args.view), + PrCommand::Merge(_) => unreachable!(), + } + } + + #[test] + fn preserves_pr_merge_subcommand_args() { + match (PrArgs { + command: Some(PrCommand::Merge(PrMergeArgs::default())), + title: None, + body: None, + draft: false, + view: false, + }) + .command + .unwrap() + { + PrCommand::Merge(_) => {} + _ => unreachable!(), } } @@ -224,4 +300,28 @@ mod tests { ) ); } + + #[test] + fn formats_pr_merge_output_with_retargeted_children() { + let output = format_pr_merge_output(&PrMergeOutcome { + status: git::success_status().unwrap(), + branch_name: "feat/auth".into(), + base_branch_name: "main".into(), + pull_request_number: 123, + retargeted_pull_requests: vec![RetargetedPullRequest { + branch_name: "feat/auth-ui".into(), + pull_request_number: 124, + 
new_base_branch_name: "main".into(), + }], + }); + + assert_eq!( + output, + concat!( + "Retargeted child pull requests:\n", + "- #124 for feat/auth-ui to main\n\n", + "Merged pull request #123 for 'feat/auth' into 'main'." + ) + ); + } } diff --git a/src/cli/sync/mod.rs b/src/cli/sync/mod.rs index ff74c15..bf9fd04 100644 --- a/src/cli/sync/mod.rs +++ b/src/cli/sync/mod.rs @@ -165,6 +165,25 @@ pub fn execute(args: SyncArgs) -> io::Result { } } + if final_status.success() { + let pull_request_update_plan = + sync::plan_pull_request_updates(&restacked_branch_names)?; + if !pull_request_update_plan.actions.is_empty() { + if printed_output { + println!(); + } + + let updated_pull_requests = + sync::execute_pull_request_update_plan(&pull_request_update_plan)?; + let output = + format_pull_request_update_success_output(&updated_pull_requests); + if !output.is_empty() { + println!("{output}"); + printed_output = true; + } + } + } + if final_status.success() { let push_plan = sync::plan_remote_pushes(&restacked_branch_names, &excluded_branch_names)?; @@ -241,6 +260,20 @@ impl From for SyncOptions { fn format_full_sync_summary(outcome: &sync::FullSyncOutcome) -> String { let mut sections = Vec::new(); + if !outcome.repaired_pull_requests.is_empty() { + let mut lines = vec!["Recovered pull requests:".to_string()]; + for repair in &outcome.repaired_pull_requests { + lines.push(format!( + "- {} (#{}): reopened as draft and retargeted from {} to {}", + repair.branch_name, + repair.pull_request_number, + repair.old_base_branch_name, + repair.new_base_branch_name + )); + } + sections.push(lines.join("\n")); + } + if !outcome.deleted_branches.is_empty() { let mut lines = vec!["Deleted locally and no longer tracked by dig:".to_string()]; for branch_name in &outcome.deleted_branches { @@ -280,6 +313,24 @@ fn format_remote_push_plan(plan: &sync::RemotePushPlan) -> String { lines.join("\n") } +fn format_pull_request_update_success_output( + updated_pull_requests: 
&[sync::PullRequestUpdateAction], +) -> String { + if updated_pull_requests.is_empty() { + return String::new(); + } + + let mut lines = vec!["Updated pull requests:".to_string()]; + for action in updated_pull_requests { + lines.push(format!( + "- retargeted #{} for {} to {}", + action.pull_request_number, action.branch_name, action.new_base_branch_name + )); + } + + lines.join("\n") +} + fn confirm_remote_pushes() -> io::Result { common::confirm_yes_no("Push these remote updates? [y/N] ") } diff --git a/src/core/clean/plan.rs b/src/core/clean/plan.rs index a21dd55..d8859cc 100644 --- a/src/core/clean/plan.rs +++ b/src/core/clean/plan.rs @@ -206,12 +206,17 @@ fn evaluate_integrated_branch( let graph = BranchGraph::new(state); - let Some(parent_branch_name) = graph.parent_branch_name(node, trunk_branch) else { - return Ok(BranchEvaluation::Blocked(BlockedBranch { - branch_name: node.branch_name.clone(), - reason: CleanBlockReason::ParentMissingFromDig, - })); - }; + let (local_parent_base, resolved_parent) = + match deleted_local::resolve_replacement_parent(state, trunk_branch, &node.parent) { + Ok(resolved) => resolved, + Err(_) => { + return Ok(BranchEvaluation::Blocked(BlockedBranch { + branch_name: node.branch_name.clone(), + reason: CleanBlockReason::ParentMissingFromDig, + })); + } + }; + let parent_branch_name = local_parent_base.branch_name.clone(); if !git::branch_exists(&parent_branch_name)? 
{ return Ok(BranchEvaluation::Blocked(BlockedBranch { @@ -232,7 +237,6 @@ fn evaluate_integrated_branch( })); } - let local_parent_base = RestackBaseTarget::local(&parent_branch_name); let tracked_pull_request_number = node.pull_request.as_ref().map(|pr| pr.number); let parent_base = if branch_is_integrated_for_pull_request( local_parent_base.rebase_ref(), @@ -274,7 +278,7 @@ fn evaluate_integrated_branch( node.id, &node.branch_name, &parent_base, - &node.parent, + &resolved_parent, )?; Ok(BranchEvaluation::Cleanable(CleanCandidate { diff --git a/src/core/deleted_local.rs b/src/core/deleted_local.rs index 97dd859..f2b6458 100644 --- a/src/core/deleted_local.rs +++ b/src/core/deleted_local.rs @@ -201,7 +201,7 @@ pub(crate) fn plan_deleted_local_step_for_branch( plan_deleted_local_step(state, trunk_branch, node.id) } -fn resolve_replacement_parent( +pub(crate) fn resolve_replacement_parent( state: &DigState, trunk_branch: &str, parent: &ParentRef, @@ -215,7 +215,7 @@ fn resolve_replacement_parent( } ParentRef::Branch { node_id } => { let parent_node = state - .find_branch_by_id(node_id) + .find_any_branch_by_id(node_id) .ok_or_else(|| io::Error::other("tracked parent branch was not found"))?; if !parent_node.archived && git::branch_exists(&parent_node.branch_name)? 
{ diff --git a/src/core/gh.rs b/src/core/gh.rs index a648de4..fdb84b7 100644 --- a/src/core/gh.rs +++ b/src/core/gh.rs @@ -19,6 +19,26 @@ pub struct PullRequestDetails { pub url: String, } +#[derive(Debug, Clone, Copy, PartialEq, Eq, Deserialize)] +#[serde(rename_all = "SCREAMING_SNAKE_CASE")] +pub enum PullRequestState { + Open, + Closed, + Merged, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PullRequestStatus { + pub number: u64, + pub state: PullRequestState, + pub merged_at: Option, + pub base_ref_name: String, + pub head_ref_name: String, + pub head_ref_oid: Option, + pub is_draft: bool, + pub url: String, +} + #[derive(Debug, Clone, PartialEq, Eq)] pub struct CreatePullRequestOptions { pub base_branch_name: String, @@ -75,6 +95,23 @@ struct PullRequestDetailsRecord { url: String, } +#[derive(Debug, Deserialize)] +struct PullRequestStatusRecord { + number: u64, + state: PullRequestState, + #[serde(rename = "mergedAt")] + merged_at: Option, + #[serde(rename = "baseRefName")] + base_ref_name: String, + #[serde(rename = "headRefName")] + head_ref_name: String, + #[serde(rename = "headRefOid", default)] + head_ref_oid: Option, + #[serde(rename = "isDraft", default)] + is_draft: bool, + url: String, +} + pub fn list_open_pull_requests_for_head(branch_name: &str) -> io::Result> { let output = run_gh_capture_output(&[ "pr".to_string(), @@ -132,6 +169,18 @@ pub fn create_pull_request(options: &CreatePullRequestOptions) -> io::Result io::Result { + let output = run_gh_capture_output(&[ + "pr".to_string(), + "view".to_string(), + number.to_string(), + "--json".to_string(), + "number,state,mergedAt,baseRefName,headRefName,headRefOid,isDraft,url".to_string(), + ])?; + + parse_pull_request_status(&output.stdout) +} + pub fn list_open_pull_requests() -> io::Result> { let output = run_gh_capture_output(&[ "pr".to_string(), @@ -145,6 +194,55 @@ pub fn list_open_pull_requests() -> io::Result> { parse_open_pull_request_details(&output.stdout) } +pub fn 
reopen_pull_request(number: u64) -> io::Result<()> { + run_gh_command( + "gh pr reopen", + &["pr".to_string(), "reopen".to_string(), number.to_string()], + ) +} + +pub fn mark_pull_request_as_draft(number: u64) -> io::Result<()> { + run_gh_command( + "gh pr ready --undo", + &[ + "pr".to_string(), + "ready".to_string(), + number.to_string(), + "--undo".to_string(), + ], + ) +} + +pub fn retarget_pull_request_base(number: u64, base_branch_name: &str) -> io::Result<()> { + run_gh_command( + "gh pr edit --base", + &[ + "pr".to_string(), + "edit".to_string(), + number.to_string(), + "--base".to_string(), + base_branch_name.to_string(), + ], + ) +} + +pub fn edit_pull_request_base(number: u64, base_branch_name: &str) -> io::Result<()> { + retarget_pull_request_base(number, base_branch_name) +} + +pub fn merge_pull_request(number: u64) -> io::Result<()> { + run_gh_command( + "gh pr merge", + &[ + "pr".to_string(), + "merge".to_string(), + number.to_string(), + "--squash".to_string(), + "--delete-branch".to_string(), + ], + ) +} + pub fn open_current_pull_request_in_browser() -> io::Result<()> { run_gh_command( "gh pr view --web", @@ -209,6 +307,22 @@ fn parse_open_pull_request_details(stdout: &str) -> io::Result io::Result { + let record: PullRequestStatusRecord = serde_json::from_str(stdout) + .map_err(|err| io::Error::new(io::ErrorKind::InvalidData, err))?; + + Ok(PullRequestStatus { + number: record.number, + state: record.state, + merged_at: record.merged_at, + base_ref_name: record.base_ref_name, + head_ref_name: record.head_ref_name, + head_ref_oid: record.head_ref_oid, + is_draft: record.is_draft, + url: record.url, + }) +} + fn find_pull_request_url(output: &str) -> Option { output .split_whitespace() @@ -369,8 +483,8 @@ fn looks_like_auth_error(message: &str) -> bool { #[cfg(test)] mod tests { use super::{ - find_pull_request_url, parse_open_pull_request_details, parse_open_pull_requests, - pull_request_number_from_url, + PullRequestState, find_pull_request_url, 
parse_open_pull_request_details, + parse_open_pull_requests, parse_pull_request_status, pull_request_number_from_url, }; #[test] @@ -395,6 +509,22 @@ mod tests { assert_eq!(pull_requests[0].title, "Auth PR"); } + #[test] + fn parses_pull_request_status_output() { + let pull_request = parse_pull_request_status( + r#"{"number":123,"state":"CLOSED","mergedAt":null,"baseRefName":"main","headRefName":"feat/auth","isDraft":false,"url":"https://github.com/acme/dig/pull/123"}"#, + ) + .unwrap(); + + assert_eq!(pull_request.number, 123); + assert_eq!(pull_request.state, PullRequestState::Closed); + assert_eq!(pull_request.base_ref_name, "main"); + assert_eq!(pull_request.head_ref_name, "feat/auth"); + assert_eq!(pull_request.head_ref_oid, None); + assert!(!pull_request.is_draft); + assert_eq!(pull_request.merged_at, None); + } + #[test] fn extracts_pull_request_url_and_number_from_create_output() { let url = find_pull_request_url( diff --git a/src/core/git.rs b/src/core/git.rs index d71805c..f949bef 100644 --- a/src/core/git.rs +++ b/src/core/git.rs @@ -370,6 +370,22 @@ pub fn push_branch_to_remote(target: &BranchPushTarget) -> io::Result io::Result { + let output = Command::new("git") + .args([ + "push", + remote_name, + &format!("{source_ref}:refs/heads/{target_branch_name}"), + ]) + .output()?; + + output_to_git_command_output(output) +} + pub fn force_push_branch_to_remote_with_lease( target: &BranchPushTarget, ) -> io::Result { @@ -386,6 +402,18 @@ pub fn force_push_branch_to_remote_with_lease( output_to_git_command_output(output) } +pub fn delete_branch_from_remote(target: &BranchPushTarget) -> io::Result { + let output = Command::new("git") + .args([ + "push", + &target.remote_name, + &format!(":refs/heads/{}", target.branch_name), + ]) + .output()?; + + output_to_git_command_output(output) +} + pub fn fetch_remote(remote_name: &str) -> io::Result { let output = Command::new("git") .args(["fetch", "--prune", remote_name]) diff --git a/src/core/pr.rs b/src/core/pr.rs 
index 5b5b85e..f0de6dc 100644 --- a/src/core/pr.rs +++ b/src/core/pr.rs @@ -2,7 +2,9 @@ use std::collections::HashMap; use std::io; use std::process::ExitStatus; -use crate::core::gh::{self, CreatePullRequestOptions, PullRequestDetails, PullRequestSummary}; +use crate::core::gh::{ + self, CreatePullRequestOptions, PullRequestDetails, PullRequestState, PullRequestSummary, +}; use crate::core::git; use crate::core::graph::BranchGraph; use crate::core::store::{ @@ -55,6 +57,22 @@ pub struct PrListOutcome { pub pull_requests: Vec, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct RetargetedPullRequest { + pub branch_name: String, + pub pull_request_number: u64, + pub new_base_branch_name: String, +} + +#[derive(Debug)] +pub struct PrMergeOutcome { + pub status: ExitStatus, + pub branch_name: String, + pub base_branch_name: String, + pub pull_request_number: u64, + pub retargeted_pull_requests: Vec, +} + #[derive(Debug, Clone, PartialEq, Eq)] enum PrTrackingAction { Create, @@ -209,6 +227,73 @@ pub fn current_branch_push_target_for_create() -> io::Result io::Result { + let session = open_initialized("dig is not initialized; run 'dig init' first")?; + workflow::ensure_no_pending_operation(&session.paths, "pr")?; + git::ensure_no_in_progress_operations(&session.repo, "pr")?; + + let branch_name = git::current_branch_name_if_any()?.ok_or_else(|| { + io::Error::other("dig pr requires a named branch; detached HEAD is not supported") + })?; + let node = session + .state + .find_branch_by_name(&branch_name) + .cloned() + .ok_or_else(|| { + io::Error::other(format!("branch '{}' is not tracked by dig", branch_name)) + })?; + let pull_request = node.pull_request.clone().ok_or_else(|| { + io::Error::other(format!( + "branch '{}' does not track a pull request", + branch_name + )) + })?; + + let graph = BranchGraph::new(&session.state); + let base_branch_name = graph + .parent_branch_name(&node, &session.config.trunk_branch) + .ok_or_else(|| { + io::Error::other(format!( + 
"tracked parent branch for '{}' was not found", + branch_name + )) + })?; + + let mut retargeted_pull_requests = Vec::new(); + for child_id in graph.active_children_ids(node.id) { + let Some(child_node) = session.state.find_branch_by_id(child_id) else { + continue; + }; + let Some(child_pull_request) = child_node.pull_request.as_ref() else { + continue; + }; + + let child_pull_request_status = gh::view_pull_request(child_pull_request.number)?; + if child_pull_request_status.state != PullRequestState::Open + || child_pull_request_status.base_ref_name == base_branch_name + { + continue; + } + + gh::edit_pull_request_base(child_pull_request.number, &base_branch_name)?; + retargeted_pull_requests.push(RetargetedPullRequest { + branch_name: child_node.branch_name.clone(), + pull_request_number: child_pull_request.number, + new_base_branch_name: base_branch_name.clone(), + }); + } + + gh::merge_pull_request(pull_request.number)?; + + Ok(PrMergeOutcome { + status: git::success_status()?, + branch_name, + base_branch_name, + pull_request_number: pull_request.number, + retargeted_pull_requests, + }) +} + pub fn list_open_tracked_pull_requests() -> io::Result { open_initialized("dig is not initialized; run 'dig init' first")?; let open_pull_requests = gh::list_open_pull_requests()?; diff --git a/src/core/restack.rs b/src/core/restack.rs index dd0ebcb..9981ea6 100644 --- a/src/core/restack.rs +++ b/src/core/restack.rs @@ -201,13 +201,53 @@ pub fn plan_after_deleted_branch( deleted_branch_name: &str, new_parent_base: &RestackBaseTarget, new_parent: &ParentRef, +) -> io::Result> { + plan_after_deleted_branch_with_optional_old_upstream_override( + state, + deleted_node_id, + deleted_branch_name, + new_parent_base, + new_parent, + None, + ) +} + +#[cfg(test)] +pub fn plan_after_deleted_branch_with_old_upstream_override( + state: &DigState, + deleted_node_id: Uuid, + deleted_branch_name: &str, + new_parent_base: &RestackBaseTarget, + new_parent: &ParentRef, + 
old_upstream_oid_override: &str, +) -> io::Result> { + plan_after_deleted_branch_with_optional_old_upstream_override( + state, + deleted_node_id, + deleted_branch_name, + new_parent_base, + new_parent, + Some(old_upstream_oid_override), + ) +} + +fn plan_after_deleted_branch_with_optional_old_upstream_override( + state: &DigState, + deleted_node_id: Uuid, + deleted_branch_name: &str, + new_parent_base: &RestackBaseTarget, + new_parent: &ParentRef, + old_upstream_oid_override: Option<&str>, ) -> io::Result> { let graph = BranchGraph::new(state); let mut actions = Vec::new(); for child_id in graph.active_children_ids(deleted_node_id) { let child = load_active_branch_node(state, child_id)?; - let old_upstream_oid = git::merge_base(new_parent_base.rebase_ref(), &child.branch_name)?; + let old_upstream_oid = match old_upstream_oid_override { + Some(old_upstream_oid_override) => old_upstream_oid_override.to_string(), + None => git::merge_base(new_parent_base.rebase_ref(), &child.branch_name)?, + }; let old_head_oid = git::ref_oid(&child.branch_name)?; actions.push(RestackAction { node_id: child_id, @@ -397,7 +437,7 @@ fn load_active_branch_node( mod tests { use super::{ RestackAction, RestackBaseTarget, plan_after_branch_advance, plan_after_branch_reparent, - previews_for_actions, + plan_after_deleted_branch_with_old_upstream_override, previews_for_actions, }; use crate::core::git; use crate::core::store::types::DIG_STATE_VERSION; @@ -533,6 +573,64 @@ mod tests { }); } + #[test] + fn uses_deleted_branch_head_override_when_promoting_child() { + with_temp_repo("dig-restack", |repo| { + initialize_main_repo(repo); + git_ok(repo, &["checkout", "-b", "feat/auth"]); + commit_file(repo, "auth.txt", "auth\n", "feat: auth"); + let deleted_branch_head_oid = git::ref_oid("feat/auth").unwrap(); + git_ok(repo, &["checkout", "-b", "feat/auth-ui"]); + commit_file(repo, "ui.txt", "ui\n", "feat: ui"); + + let parent_id = Uuid::new_v4(); + let child_id = Uuid::new_v4(); + let state = 
crate::core::store::types::DigState { + version: DIG_STATE_VERSION, + nodes: vec![ + BranchNode { + id: parent_id, + branch_name: "feat/auth".into(), + parent: ParentRef::Trunk, + base_ref: "main".into(), + fork_point_oid: "root".into(), + head_oid_at_creation: "root".into(), + created_at_unix_secs: 1, + pull_request: None, + archived: false, + }, + BranchNode { + id: child_id, + branch_name: "feat/auth-ui".into(), + parent: ParentRef::Branch { node_id: parent_id }, + base_ref: "feat/auth".into(), + fork_point_oid: deleted_branch_head_oid.clone(), + head_oid_at_creation: deleted_branch_head_oid.clone(), + created_at_unix_secs: 2, + pull_request: None, + archived: false, + }, + ], + }; + + let planned = plan_after_deleted_branch_with_old_upstream_override( + &state, + parent_id, + "feat/auth", + &RestackBaseTarget::local("main"), + &ParentRef::Trunk, + &deleted_branch_head_oid, + ) + .unwrap(); + + assert_eq!(planned.len(), 1); + assert_eq!(planned[0].branch_name, "feat/auth-ui"); + assert_eq!(planned[0].old_upstream_branch_name, "feat/auth"); + assert_eq!(planned[0].old_upstream_oid, deleted_branch_head_oid); + assert_eq!(planned[0].new_base.branch_name, "main"); + }); + } + #[test] fn plans_restack_after_branch_reparent_with_parent_change_only_on_target_branch() { with_temp_repo("dig-restack", |repo| { diff --git a/src/core/store/types.rs b/src/core/store/types.rs index ea10384..e5edc29 100644 --- a/src/core/store/types.rs +++ b/src/core/store/types.rs @@ -53,6 +53,10 @@ impl DigState { .find(|node| !node.archived && node.id == node_id) } + pub fn find_any_branch_by_id(&self, node_id: Uuid) -> Option<&BranchNode> { + self.nodes.iter().find(|node| node.id == node_id) + } + pub fn find_branch_by_id_mut(&mut self, node_id: Uuid) -> Option<&mut BranchNode> { self.nodes .iter_mut() diff --git a/src/core/sync.rs b/src/core/sync.rs index fd904ce..27c81eb 100644 --- a/src/core/sync.rs +++ b/src/core/sync.rs @@ -4,11 +4,13 @@ use std::process::ExitStatus; use 
crate::core::clean::{self, CleanOptions, CleanPlanMode}; use crate::core::deleted_local; +use crate::core::gh::{self, PullRequestState, PullRequestStatus}; use crate::core::graph::BranchGraph; use crate::core::restack::{self, RestackAction, RestackPreview}; +use crate::core::store::types::DigState; use crate::core::store::{ - PendingOperationKind, PendingOperationState, PendingSyncOperation, PendingSyncPhase, - clear_operation, load_operation, open_initialized, + BranchNode, ParentRef, PendingOperationKind, PendingOperationState, PendingSyncOperation, + PendingSyncPhase, clear_operation, load_operation, open_initialized, }; use crate::core::workflow; use crate::core::{adopt, commit, git, merge, orphan, reparent}; @@ -34,6 +36,7 @@ pub enum SyncCompletion { #[derive(Debug)] pub struct FullSyncOutcome { + pub repaired_pull_requests: Vec, pub deleted_branches: Vec, pub restacked_branches: Vec, pub cleanup_plan: clean::CleanPlan, @@ -73,8 +76,45 @@ pub struct RemotePushOutcome { pub failure_output: Option, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PullRequestRepairOutcome { + pub branch_name: String, + pub pull_request_number: u64, + pub old_base_branch_name: String, + pub new_base_branch_name: String, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PullRequestUpdateAction { + pub branch_name: String, + pub pull_request_number: u64, + pub new_base_branch_name: String, +} + +#[derive(Debug, Clone, Default, PartialEq, Eq)] +pub struct PullRequestUpdatePlan { + pub actions: Vec, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct PendingPullRequestRepair { + branch_name: String, + pull_request_number: u64, + old_base_branch_name: String, + new_base_branch_name: String, + was_draft: bool, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +struct ParentPullRequestRepairPlan { + remote_target: git::BranchPushTarget, + restore_source_ref: String, + new_base_branch_name: String, +} + #[derive(Debug, Default, Clone)] struct LocalSyncProgress { + 
repaired_pull_requests: Vec, deleted_branches: Vec, restacked_branches: Vec, } @@ -83,6 +123,7 @@ struct LocalSyncProgress { struct LocalSyncOutcome { status: ExitStatus, remote_sync_enabled: bool, + repaired_pull_requests: Vec, deleted_branches: Vec, restacked_branches: Vec, failure_output: Option, @@ -211,12 +252,22 @@ fn run_full_sync() -> io::Result { workflow::ensure_ready_for_operation(&session.repo, "sync")?; workflow::ensure_no_pending_operation(&session.paths, "sync")?; let remote_sync_enabled = fetch_sync_remotes(&session)?; - + let repaired_pull_requests = if remote_sync_enabled { + repair_closed_pull_requests_for_deleted_parent_branches(&session)? + } else { + Vec::new() + }; let original_branch = git::current_branch_name()?; + if remote_sync_enabled { + delete_local_branches_merged_into_deleted_parent_branches(&session, &original_branch)?; + } let outcome = execute_local_sync( &mut session, original_branch, - LocalSyncProgress::default(), + LocalSyncProgress { + repaired_pull_requests, + ..LocalSyncProgress::default() + }, remote_sync_enabled, )?; @@ -229,6 +280,7 @@ fn resume_full_sync( ) -> io::Result { let mut session = open_initialized("dig is not initialized; run 'dig init' first")?; let mut progress = LocalSyncProgress { + repaired_pull_requests: Vec::new(), deleted_branches: payload.deleted_branches, restacked_branches: payload.restacked_branches, }; @@ -246,6 +298,7 @@ fn resume_full_sync( return Ok(LocalSyncOutcome { status: restack_outcome.status, remote_sync_enabled: payload.remote_sync_enabled, + repaired_pull_requests: progress.repaired_pull_requests, deleted_branches: progress.deleted_branches, restacked_branches: progress.restacked_branches, failure_output: restack_outcome.failure_output, @@ -280,6 +333,7 @@ fn finalize_full_sync_outcome(outcome: LocalSyncOutcome) -> io::Result io::Result> { + let graph = BranchGraph::new(&session.state); + let mut candidates = session + .state + .nodes + .iter() + .filter(|node| !node.archived) + 
.cloned() + .collect::>(); + + candidates.sort_by(|left, right| { + graph + .branch_depth(left.id) + .cmp(&graph.branch_depth(right.id)) + .then_with(|| left.branch_name.cmp(&right.branch_name)) + }); + + let mut repaired_pull_requests = Vec::new(); + for node in candidates { + let Some(parent_plan) = plan_parent_pull_request_repair(session, &node)? else { + continue; + }; + + let pending_repairs = plan_pull_request_repairs_for_children( + session, + &node, + &parent_plan.new_base_branch_name, + )?; + if pending_repairs.is_empty() { + continue; + } + + restore_remote_branch_for_pull_request_repair( + &parent_plan.remote_target, + &parent_plan.restore_source_ref, + )?; + + for repair in &pending_repairs { + gh::reopen_pull_request(repair.pull_request_number).map_err(|err| { + io::Error::other(format!( + "failed to reopen tracked pull request #{} for '{}': {err}", + repair.pull_request_number, repair.branch_name + )) + })?; + + if !repair.was_draft { + gh::mark_pull_request_as_draft(repair.pull_request_number).map_err(|err| { + io::Error::other(format!( + "failed to convert tracked pull request #{} for '{}' back to draft: {err}", + repair.pull_request_number, repair.branch_name + )) + })?; + } + + gh::retarget_pull_request_base( + repair.pull_request_number, + &repair.new_base_branch_name, + ) + .map_err(|err| { + io::Error::other(format!( + "failed to retarget tracked pull request #{} for '{}' onto '{}': {err}", + repair.pull_request_number, repair.branch_name, repair.new_base_branch_name + )) + })?; + } + + delete_restored_remote_branch_after_pull_request_repair(&parent_plan.remote_target)?; + + repaired_pull_requests.extend(pending_repairs.into_iter().map(|repair| { + PullRequestRepairOutcome { + branch_name: repair.branch_name, + pull_request_number: repair.pull_request_number, + old_base_branch_name: repair.old_base_branch_name, + new_base_branch_name: repair.new_base_branch_name, + } + })); + } + + Ok(repaired_pull_requests) +} + +fn 
delete_local_branches_merged_into_deleted_parent_branches( + session: &crate::core::store::StoreSession, + current_branch_name: &str, +) -> io::Result<()> { + let graph = BranchGraph::new(&session.state); + let mut candidates = session + .state + .nodes + .iter() + .filter(|node| !node.archived) + .cloned() + .collect::>(); + + candidates.sort_by(|left, right| { + graph + .branch_depth(right.id) + .cmp(&graph.branch_depth(left.id)) + .then_with(|| left.branch_name.cmp(&right.branch_name)) + }); + + for node in candidates { + if node.branch_name == current_branch_name || !git::branch_exists(&node.branch_name)? { + continue; + } + if !parent_branch_is_unavailable_for_sync_cleanup(&session.state, &node)? { + continue; + } + + let Some(remote_target) = git::branch_push_target(&node.branch_name)? else { + continue; + }; + if git::remote_tracking_branch_exists( + &remote_target.remote_name, + &remote_target.branch_name, + )? { + continue; + } + if merged_pull_request_restore_source(&node)?.is_none() { + continue; + } + + let delete_status = git::delete_branch_force(&node.branch_name)?; + if !delete_status.success() { + return Err(io::Error::other(format!( + "failed to remove merged local branch '{}' before sync cleanup", + node.branch_name + ))); + } + } + + Ok(()) +} + +fn parent_branch_is_unavailable_for_sync_cleanup( + state: &DigState, + node: &BranchNode, +) -> io::Result { + let ParentRef::Branch { node_id } = node.parent else { + return Ok(false); + }; + let Some(parent_node) = state.find_any_branch_by_id(node_id) else { + return Ok(false); + }; + + Ok(parent_node.archived || !git::branch_exists(&parent_node.branch_name)?) +} + +fn plan_parent_pull_request_repair( + session: &crate::core::store::StoreSession, + node: &BranchNode, +) -> io::Result> { + let Some(remote_target) = git::branch_push_target(&node.branch_name)? else { + return Ok(None); + }; + if git::remote_tracking_branch_exists(&remote_target.remote_name, &remote_target.branch_name)? 
{ + return Ok(None); + } + + if let Some(cleanup_candidate) = clean::cleanup_candidate_for_branch( + &session.state, + &session.config.trunk_branch, + node, + CleanPlanMode::RemoteAwareSync, + )? { + let restore_source_ref = if git::branch_exists(&node.branch_name)? { + node.branch_name.clone() + } else if let Some(source_ref) = merged_pull_request_restore_source(node)? { + source_ref + } else { + return Ok(None); + }; + + return Ok(Some(ParentPullRequestRepairPlan { + remote_target, + restore_source_ref, + new_base_branch_name: cleanup_candidate.parent_branch_name, + })); + } + + let Some(restore_source_ref) = merged_pull_request_restore_source(node)? else { + return Ok(None); + }; + let Ok((new_parent_base, _)) = deleted_local::resolve_replacement_parent( + &session.state, + &session.config.trunk_branch, + &node.parent, + ) else { + return Ok(None); + }; + + Ok(Some(ParentPullRequestRepairPlan { + remote_target, + restore_source_ref, + new_base_branch_name: new_parent_base.branch_name, + })) +} + +fn merged_pull_request_restore_source(node: &BranchNode) -> io::Result> { + let Some(pull_request) = node.pull_request.as_ref() else { + return Ok(None); + }; + let pull_request_status = gh::view_pull_request(pull_request.number).map_err(|err| { + io::Error::other(format!( + "failed to inspect tracked pull request #{} for '{}': {}", + pull_request.number, node.branch_name, err + )) + })?; + + if pull_request_status.state != PullRequestState::Merged + || pull_request_status.merged_at.is_none() + { + return Ok(None); + } + + Ok(pull_request_status.head_ref_oid) +} + +fn plan_pull_request_repairs_for_children( + session: &crate::core::store::StoreSession, + parent_node: &BranchNode, + new_base_branch_name: &str, +) -> io::Result> { + let graph = BranchGraph::new(&session.state); + let mut children = graph + .active_children_ids(parent_node.id) + .into_iter() + .filter_map(|child_id| session.state.find_branch_by_id(child_id).cloned()) + .collect::>(); + 
children.sort_by(|left, right| left.branch_name.cmp(&right.branch_name)); + + let mut pending_repairs = Vec::new(); + for child in children { + if !git::branch_exists(&child.branch_name)? { + continue; + } + + let Some(tracked_pull_request) = child.pull_request.as_ref() else { + continue; + }; + let pull_request_status = + gh::view_pull_request(tracked_pull_request.number).map_err(|err| { + io::Error::other(format!( + "failed to inspect tracked pull request #{} for '{}': {err}", + tracked_pull_request.number, child.branch_name + )) + })?; + + if pull_request_needs_repair( + &pull_request_status, + &child.branch_name, + &parent_node.branch_name, + ) { + pending_repairs.push(PendingPullRequestRepair { + branch_name: child.branch_name.clone(), + pull_request_number: tracked_pull_request.number, + old_base_branch_name: parent_node.branch_name.clone(), + new_base_branch_name: new_base_branch_name.to_string(), + was_draft: pull_request_status.is_draft, + }); + } + } + + Ok(pending_repairs) +} + +fn pull_request_needs_repair( + pull_request_status: &PullRequestStatus, + expected_head_branch_name: &str, + expected_base_branch_name: &str, +) -> bool { + pull_request_status.state == PullRequestState::Closed + && pull_request_status.merged_at.is_none() + && pull_request_status.head_ref_name == expected_head_branch_name + && pull_request_status.base_ref_name == expected_base_branch_name +} + +fn restore_remote_branch_for_pull_request_repair( + target: &git::BranchPushTarget, + restore_source_ref: &str, +) -> io::Result<()> { + let push_output = git::push_ref_to_remote_branch( + &target.remote_name, + restore_source_ref, + &target.branch_name, + )?; + if push_output.status.success() { + Ok(()) + } else { + let combined_output = push_output.combined_output(); + Err(io::Error::other(if combined_output.is_empty() { + format!( + "failed to temporarily restore remote branch '{}' on '{}'", + target.branch_name, target.remote_name + ) + } else { + format!( + "failed to temporarily 
restore remote branch '{}' on '{}': {}", + target.branch_name, target.remote_name, combined_output + ) + })) + } +} + +fn delete_restored_remote_branch_after_pull_request_repair( + target: &git::BranchPushTarget, +) -> io::Result<()> { + let delete_output = git::delete_branch_from_remote(target)?; + if delete_output.status.success() { + Ok(()) + } else { + let combined_output = delete_output.combined_output(); + Err(io::Error::other(if combined_output.is_empty() { + format!( + "failed to delete temporary remote branch '{}' on '{}'", + target.branch_name, target.remote_name + ) + } else { + format!( + "failed to delete temporary remote branch '{}' on '{}': {}", + target.branch_name, target.remote_name, combined_output + ) + })) + } +} + fn fetch_sync_remotes(session: &crate::core::store::StoreSession) -> io::Result { let mut remote_names = BTreeSet::new(); @@ -647,6 +1035,73 @@ pub fn execute_remote_push_plan(plan: &RemotePushPlan) -> io::Result io::Result { + let session = open_initialized("dig is not initialized; run 'dig init' first")?; + let candidate_branch_names = dedup_branch_names(restacked_branch_names); + let mut actions = Vec::new(); + + for branch_name in candidate_branch_names { + let Some(node) = session.state.find_branch_by_name(&branch_name) else { + continue; + }; + let Some(pull_request) = node.pull_request.as_ref() else { + continue; + }; + let Ok((parent_base, _)) = deleted_local::resolve_replacement_parent( + &session.state, + &session.config.trunk_branch, + &node.parent, + ) else { + continue; + }; + + let pull_request_status = gh::view_pull_request(pull_request.number).map_err(|err| { + io::Error::other(format!( + "failed to inspect tracked pull request #{} for '{}': {}", + pull_request.number, node.branch_name, err + )) + })?; + + if pull_request_status.state != PullRequestState::Open + || pull_request_status.base_ref_name == parent_base.branch_name + { + continue; + } + + actions.push(PullRequestUpdateAction { + branch_name: 
node.branch_name.clone(), + pull_request_number: pull_request.number, + new_base_branch_name: parent_base.branch_name, + }); + } + + Ok(PullRequestUpdatePlan { actions }) +} + +pub fn execute_pull_request_update_plan( + plan: &PullRequestUpdatePlan, +) -> io::Result> { + let mut updated_actions = Vec::new(); + + for action in &plan.actions { + gh::retarget_pull_request_base(action.pull_request_number, &action.new_base_branch_name) + .map_err(|err| { + io::Error::other(format!( + "failed to retarget tracked pull request #{} for '{}' onto '{}': {}", + action.pull_request_number, + action.branch_name, + action.new_base_branch_name, + err + )) + })?; + updated_actions.push(action.clone()); + } + + Ok(updated_actions) +} + fn dedup_branch_names(branch_names: &[String]) -> Vec { let mut seen = HashSet::new(); let mut deduped = Vec::new(); @@ -714,7 +1169,8 @@ fn plan_remote_push_action( #[cfg(test)] mod tests { - use super::{RemotePushActionKind, plan_remote_pushes}; + use super::{RemotePushActionKind, plan_remote_pushes, pull_request_needs_repair}; + use crate::core::gh::{PullRequestState, PullRequestStatus}; use crate::core::test_support::{ append_file, commit_file, create_tracked_branch, git_ok, initialize_main_repo, with_temp_repo, @@ -793,4 +1249,78 @@ mod tests { ); }); } + + #[test] + fn repairs_only_closed_unmerged_pull_requests_with_expected_head_and_base() { + assert!(pull_request_needs_repair( + &PullRequestStatus { + number: 42, + state: PullRequestState::Closed, + merged_at: None, + base_ref_name: "feat/auth".into(), + head_ref_name: "feat/auth-ui".into(), + head_ref_oid: None, + is_draft: false, + url: "https://github.com/acme/dig/pull/42".into(), + }, + "feat/auth-ui", + "feat/auth", + )); + assert!(!pull_request_needs_repair( + &PullRequestStatus { + number: 42, + state: PullRequestState::Open, + merged_at: None, + base_ref_name: "feat/auth".into(), + head_ref_name: "feat/auth-ui".into(), + head_ref_oid: None, + is_draft: false, + url: 
"https://github.com/acme/dig/pull/42".into(), + }, + "feat/auth-ui", + "feat/auth", + )); + assert!(!pull_request_needs_repair( + &PullRequestStatus { + number: 42, + state: PullRequestState::Closed, + merged_at: Some("2026-03-26T12:00:00Z".into()), + base_ref_name: "feat/auth".into(), + head_ref_name: "feat/auth-ui".into(), + head_ref_oid: None, + is_draft: false, + url: "https://github.com/acme/dig/pull/42".into(), + }, + "feat/auth-ui", + "feat/auth", + )); + assert!(!pull_request_needs_repair( + &PullRequestStatus { + number: 42, + state: PullRequestState::Closed, + merged_at: None, + base_ref_name: "main".into(), + head_ref_name: "feat/auth-ui".into(), + head_ref_oid: None, + is_draft: false, + url: "https://github.com/acme/dig/pull/42".into(), + }, + "feat/auth-ui", + "feat/auth", + )); + assert!(!pull_request_needs_repair( + &PullRequestStatus { + number: 42, + state: PullRequestState::Closed, + merged_at: None, + base_ref_name: "feat/auth".into(), + head_ref_name: "feat/auth-api".into(), + head_ref_oid: None, + is_draft: false, + url: "https://github.com/acme/dig/pull/42".into(), + }, + "feat/auth-ui", + "feat/auth", + )); + } } diff --git a/tests/pr.rs b/tests/pr.rs index 12e732c..e873d04 100644 --- a/tests/pr.rs +++ b/tests/pr.rs @@ -3,6 +3,7 @@ mod support; use std::fs; use std::path::{Path, PathBuf}; +use serde_json::json; use support::{ dig_ok, dig_ok_with_env, dig_with_input_and_env, find_node, git_binary_path, git_ok, git_stdout, initialize_main_repo, install_fake_executable, load_events_json, load_state_json, @@ -23,6 +24,21 @@ fn clear_log(path: &str) { fs::write(path, "").unwrap(); } +fn track_pull_request_number(repo: &Path, branch_name: &str, number: u64) { + let state_path = repo.join(".git/dig/state.json"); + let mut state = load_state_json(repo); + let nodes = state["nodes"].as_array_mut().unwrap(); + let node = nodes + .iter_mut() + .find(|node| { + node["branch_name"].as_str() == Some(branch_name) + && node["archived"].as_bool() == 
Some(false) + }) + .unwrap(); + node["pull_request"] = json!({ "number": number }); + fs::write(state_path, serde_json::to_string_pretty(&state).unwrap()).unwrap(); +} + fn initialize_origin_remote(repo: &Path) { git_ok(repo, &["init", "--bare", "origin.git"]); git_ok(repo, &["remote", "add", "origin", "origin.git"]); @@ -104,6 +120,64 @@ exit 1 }); } +#[test] +fn pr_merge_retargets_open_child_pull_request_before_merging_parent() { + with_temp_repo("dig-pr-cli", |repo| { + initialize_main_repo(repo); + dig_ok(repo, &["init"]); + dig_ok(repo, &["branch", "feat/auth"]); + track_pull_request_number(repo, "feat/auth", 123); + dig_ok(repo, &["branch", "feat/auth-ui"]); + track_pull_request_number(repo, "feat/auth-ui", 124); + git_ok(repo, &["checkout", "feat/auth"]); + + let (_, path, log_path) = install_fake_gh( + repo, + r#"#!/bin/sh +set -eu +printf '%s\n' "$*" >> "$DIG_TEST_GH_LOG" +if [ "$1" = "pr" ] && [ "$2" = "view" ] && [ "$3" = "124" ]; then + printf '{"number":124,"state":"OPEN","mergedAt":null,"baseRefName":"feat/auth","headRefName":"feat/auth-ui","headRefOid":"abc123","isDraft":false,"url":"https://github.com/acme/dig/pull/124"}\n' + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "edit" ] && [ "$3" = "124" ] && [ "$4" = "--base" ] && [ "$5" = "main" ]; then + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "merge" ] && [ "$3" = "123" ] && [ "$4" = "--squash" ] && [ "$5" = "--delete-branch" ]; then + exit 0 +fi +echo "unexpected gh args: $*" >&2 +exit 1 +"#, + ); + + let output = dig_ok_with_env( + repo, + &["pr", "merge"], + &[ + ("PATH", path.as_str()), + ("DIG_TEST_GH_LOG", log_path.as_str()), + ], + ); + let stdout = strip_ansi(&String::from_utf8(output.stdout).unwrap()); + let gh_log = fs::read_to_string(log_path).unwrap(); + + assert!(stdout.contains("Retargeted child pull requests:")); + assert!(stdout.contains("- #124 for feat/auth-ui to main")); + assert!(stdout.contains("Merged pull request #123 for 'feat/auth' into 'main'.")); + + let lines = 
gh_log.lines().collect::>(); + assert_eq!( + lines, + vec![ + "pr view 124 --json number,state,mergedAt,baseRefName,headRefName,headRefOid,isDraft,url", + "pr edit 124 --base main", + "pr merge 123 --squash --delete-branch", + ] + ); + }); +} + #[test] fn pr_creates_child_pull_request_against_tracked_parent() { with_temp_repo("dig-pr-cli", |repo| { diff --git a/tests/sync.rs b/tests/sync.rs index 6b0279f..b3628a2 100644 --- a/tests/sync.rs +++ b/tests/sync.rs @@ -5,9 +5,10 @@ use std::path::{Path, PathBuf}; use serde_json::json; use support::{ - active_rebase_head_name, commit_file, dig, dig_ok, dig_with_input, find_archived_node, - find_node, git_ok, git_stdout, initialize_main_repo, load_events_json, load_operation_json, - load_state_json, overwrite_file, strip_ansi, with_temp_repo, write_file, + active_rebase_head_name, commit_file, dig, dig_ok, dig_with_env, dig_with_input, + dig_with_input_and_env, find_archived_node, find_node, git_ok, git_stdout, + initialize_main_repo, install_fake_executable, load_events_json, load_operation_json, + load_state_json, overwrite_file, path_with_prepend, strip_ansi, with_temp_repo, write_file, }; fn initialize_origin_remote(repo: &Path) { @@ -35,6 +36,38 @@ fn clone_origin(repo: &Path, clone_name: &str) -> PathBuf { clone_dir } +fn install_fake_gh(repo: &Path, script: &str) -> (String, String) { + let bin_dir = repo.join(".git").join("fake-bin"); + install_fake_executable(&bin_dir, "gh", script); + + let path = path_with_prepend(&bin_dir); + let log_path = repo.join(".git").join("gh.log"); + fs::write(&log_path, "").unwrap(); + + (path, log_path.display().to_string()) +} + +fn install_remote_update_logger(repo: &Path) -> String { + let hooks_dir = repo.join(".git").join("origin.git").join("hooks"); + let log_path = repo.join(".git").join("origin-updates.log"); + let script = format!( + "#!/bin/sh\nset -eu\nprintf '%s %s %s\\n' \"$1\" \"$2\" \"$3\" >> \"{}\"\n", + log_path.display() + ); + install_fake_executable(&hooks_dir, 
"update", &script); + fs::write(&log_path, "").unwrap(); + + log_path.display().to_string() +} + +fn count_remote_ref_updates(log_path: &str, ref_name: &str) -> usize { + fs::read_to_string(log_path) + .unwrap() + .lines() + .filter(|line| line.split_whitespace().next() == Some(ref_name)) + .count() +} + fn track_pull_request_number(repo: &Path, branch_name: &str, number: u64) { let state_path = repo.join(".git/dig/state.json"); let mut state = load_state_json(repo); @@ -50,6 +83,19 @@ fn track_pull_request_number(repo: &Path, branch_name: &str, number: u64) { fs::write(state_path, serde_json::to_string_pretty(&state).unwrap()).unwrap(); } +fn set_branch_archived(repo: &Path, branch_name: &str, archived: bool) { + let state_path = repo.join(".git/dig/state.json"); + let mut state = load_state_json(repo); + let node = state["nodes"] + .as_array_mut() + .unwrap() + .iter_mut() + .find(|node| node["branch_name"].as_str() == Some(branch_name)) + .unwrap(); + node["archived"] = json!(archived); + fs::write(state_path, serde_json::to_string_pretty(&state).unwrap()).unwrap(); +} + fn setup_remotely_merged_root_branch_with_local_trunk_advance(repo: &Path) { initialize_main_repo(repo); initialize_origin_remote(repo); @@ -78,6 +124,46 @@ fn setup_remotely_merged_root_branch_with_local_trunk_advance(repo: &Path) { git_ok(&remote_repo, &["push", "origin", "--delete", "feat/auth"]); } +fn setup_remotely_merged_root_branch_with_children( + repo: &Path, + children: &[(&str, &str, &str, &str)], +) { + initialize_main_repo(repo); + initialize_origin_remote(repo); + dig_ok(repo, &["init"]); + dig_ok(repo, &["branch", "feat/auth"]); + overwrite_file(repo, "shared.txt", "feature\n", "feat: auth"); + git_ok(repo, &["push", "-u", "origin", "feat/auth"]); + + for (index, (branch_name, file_name, contents, message)) in children.iter().enumerate() { + if index > 0 { + git_ok(repo, &["checkout", "feat/auth"]); + } + + dig_ok(repo, &["branch", branch_name]); + commit_file(repo, file_name, 
contents, message); + git_ok(repo, &["push", "-u", "origin", branch_name]); + } + + git_ok(repo, &["checkout", "main"]); + overwrite_file( + repo, + "shared.txt", + "local trunk\n", + "feat: local trunk follow-up", + ); + + let remote_repo = clone_origin(repo, "origin-worktree"); + git_ok(&remote_repo, &["checkout", "main"]); + git_ok(&remote_repo, &["merge", "--squash", "origin/feat/auth"]); + git_ok( + &remote_repo, + &["commit", "--quiet", "-m", "feat: merge auth"], + ); + git_ok(&remote_repo, &["push", "origin", "main"]); + git_ok(&remote_repo, &["push", "origin", "--delete", "feat/auth"]); +} + #[test] fn sync_reports_noop_when_local_stacks_are_already_in_sync() { with_temp_repo("dig-sync-cli", |repo| { @@ -671,6 +757,509 @@ fn sync_skips_recreating_remotely_merged_root_branch_when_cleanup_is_declined() }); } +#[test] +fn sync_repairs_closed_child_pull_request_after_remote_parent_branch_deletion() { + with_temp_repo("dig-sync-cli", |repo| { + setup_remotely_merged_root_branch_with_children( + repo, + &[("feat/auth-ui", "ui.txt", "ui\n", "feat: ui")], + ); + track_pull_request_number(repo, "feat/auth-ui", 234); + + let (path, log_path) = install_fake_gh( + repo, + r#"#!/bin/sh +set -eu +printf '%s\n' "$*" >> "$DIG_TEST_GH_LOG" +if [ "$1" = "pr" ] && [ "$2" = "view" ] && [ "$3" = "234" ]; then + printf '{"number":234,"state":"CLOSED","mergedAt":null,"baseRefName":"feat/auth","headRefName":"feat/auth-ui","isDraft":false,"url":"https://github.com/acme/dig/pull/234"}\n' + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "reopen" ] && [ "$3" = "234" ]; then + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "ready" ] && [ "$3" = "234" ] && [ "$4" = "--undo" ]; then + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "edit" ] && [ "$3" = "234" ] && [ "$4" = "--base" ] && [ "$5" = "main" ]; then + exit 0 +fi +echo "unexpected gh args: $*" >&2 +exit 1 +"#, + ); + + let output = dig_with_input_and_env( + repo, + &["sync"], + "y\nn\n", + &[ + ("PATH", path.as_str()), + ("DIG_TEST_GH_LOG", 
log_path.as_str()), + ], + ); + let stdout = strip_ansi(&String::from_utf8(output.stdout).unwrap()); + let stderr = String::from_utf8(output.stderr).unwrap(); + + assert!( + output.status.success(), + "stdout:\n{stdout}\nstderr:\n{stderr}" + ); + assert!(stdout.contains("Recovered pull requests:")); + assert!(stdout.contains( + "- feat/auth-ui (#234): reopened as draft and retargeted from feat/auth to main" + )); + assert!(stdout.contains("Merged branches ready to clean:")); + assert!(stdout.contains("- feat/auth-ui onto main")); + + let gh_log = fs::read_to_string(log_path).unwrap(); + assert!(gh_log.contains( + "pr view 234 --json number,state,mergedAt,baseRefName,headRefName,headRefOid,isDraft,url" + )); + assert!(gh_log.contains("pr reopen 234")); + assert!(gh_log.contains("pr ready 234 --undo")); + assert!(gh_log.contains("pr edit 234 --base main")); + + assert_eq!( + git_stdout( + repo, + &["ls-remote", "--heads", "origin", "refs/heads/feat/auth"] + ), + "" + ); + + let state = load_state_json(repo); + let child = find_node(&state, "feat/auth-ui").unwrap(); + assert_eq!(child["base_ref"], "main"); + assert_eq!(child["parent"]["kind"], "trunk"); + assert!(find_node(&state, "feat/auth").is_none()); + assert!(find_archived_node(&state, "feat/auth").is_some()); + }); +} + +#[test] +fn sync_repairs_multiple_child_pull_requests_with_one_temporary_parent_restore() { + with_temp_repo("dig-sync-cli", |repo| { + setup_remotely_merged_root_branch_with_children( + repo, + &[ + ("feat/auth-api", "api.txt", "api\n", "feat: api"), + ("feat/auth-ui", "ui.txt", "ui\n", "feat: ui"), + ], + ); + track_pull_request_number(repo, "feat/auth-api", 111); + track_pull_request_number(repo, "feat/auth-ui", 222); + let remote_update_log = install_remote_update_logger(repo); + + let (path, gh_log_path) = install_fake_gh( + repo, + r#"#!/bin/sh +set -eu +printf '%s\n' "$*" >> "$DIG_TEST_GH_LOG" +if [ "$1" = "pr" ] && [ "$2" = "view" ] && [ "$3" = "111" ]; then + printf 
'{"number":111,"state":"CLOSED","mergedAt":null,"baseRefName":"feat/auth","headRefName":"feat/auth-api","isDraft":false,"url":"https://github.com/acme/dig/pull/111"}\n' + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "view" ] && [ "$3" = "222" ]; then + printf '{"number":222,"state":"CLOSED","mergedAt":null,"baseRefName":"feat/auth","headRefName":"feat/auth-ui","isDraft":false,"url":"https://github.com/acme/dig/pull/222"}\n' + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "reopen" ]; then + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "ready" ] && [ "$4" = "--undo" ]; then + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "edit" ] && [ "$4" = "--base" ] && [ "$5" = "main" ]; then + exit 0 +fi +echo "unexpected gh args: $*" >&2 +exit 1 +"#, + ); + + let output = dig_with_input_and_env( + repo, + &["sync"], + "y\nn\n", + &[ + ("PATH", path.as_str()), + ("DIG_TEST_GH_LOG", gh_log_path.as_str()), + ], + ); + let stdout = strip_ansi(&String::from_utf8(output.stdout).unwrap()); + let stderr = String::from_utf8(output.stderr).unwrap(); + + assert!( + output.status.success(), + "stdout:\n{stdout}\nstderr:\n{stderr}" + ); + assert!(stdout.contains("Recovered pull requests:")); + assert_eq!( + count_remote_ref_updates(&remote_update_log, "refs/heads/feat/auth"), + 2 + ); + + let gh_log = fs::read_to_string(gh_log_path).unwrap(); + assert_eq!(gh_log.matches("pr reopen ").count(), 2); + assert_eq!(gh_log.matches("pr ready ").count(), 2); + assert_eq!(gh_log.matches("pr edit ").count(), 2); + assert_eq!( + git_stdout( + repo, + &["ls-remote", "--heads", "origin", "refs/heads/feat/auth"] + ), + "" + ); + }); +} + +#[test] +fn sync_skips_pull_request_repair_for_open_merged_or_retargeted_children() { + with_temp_repo("dig-sync-cli", |repo| { + setup_remotely_merged_root_branch_with_children( + repo, + &[ + ("feat/auth-api", "api.txt", "api\n", "feat: api"), + ("feat/auth-ui", "ui.txt", "ui\n", "feat: ui"), + ("feat/auth-tests", "tests.txt", "tests\n", "feat: tests"), + ], + ); + 
track_pull_request_number(repo, "feat/auth-api", 301); + track_pull_request_number(repo, "feat/auth-ui", 302); + track_pull_request_number(repo, "feat/auth-tests", 303); + let remote_update_log = install_remote_update_logger(repo); + + let (path, gh_log_path) = install_fake_gh( + repo, + r#"#!/bin/sh +set -eu +printf '%s\n' "$*" >> "$DIG_TEST_GH_LOG" +if [ "$1" = "pr" ] && [ "$2" = "view" ] && [ "$3" = "301" ]; then + printf '{"number":301,"state":"OPEN","mergedAt":null,"baseRefName":"feat/auth","headRefName":"feat/auth-api","isDraft":false,"url":"https://github.com/acme/dig/pull/301"}\n' + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "view" ] && [ "$3" = "302" ]; then + printf '{"number":302,"state":"CLOSED","mergedAt":"2026-03-26T12:00:00Z","baseRefName":"feat/auth","headRefName":"feat/auth-ui","isDraft":false,"url":"https://github.com/acme/dig/pull/302"}\n' + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "view" ] && [ "$3" = "303" ]; then + printf '{"number":303,"state":"CLOSED","mergedAt":null,"baseRefName":"main","headRefName":"feat/auth-tests","isDraft":false,"url":"https://github.com/acme/dig/pull/303"}\n' + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "edit" ] && [ "$3" = "301" ] && [ "$4" = "--base" ] && [ "$5" = "main" ]; then + exit 0 +fi +echo "unexpected gh args: $*" >&2 +exit 1 +"#, + ); + + let output = dig_with_input_and_env( + repo, + &["sync"], + "y\nn\n", + &[ + ("PATH", path.as_str()), + ("DIG_TEST_GH_LOG", gh_log_path.as_str()), + ], + ); + let stdout = strip_ansi(&String::from_utf8(output.stdout).unwrap()); + let stderr = String::from_utf8(output.stderr).unwrap(); + + assert!( + output.status.success(), + "stdout:\n{stdout}\nstderr:\n{stderr}" + ); + assert!(!stdout.contains("Recovered pull requests:")); + + let gh_log = fs::read_to_string(gh_log_path).unwrap(); + assert!(gh_log.contains("pr view 301")); + assert!(gh_log.contains("pr view 302")); + assert!(gh_log.contains("pr view 303")); + assert!(!gh_log.contains("pr reopen")); + 
assert!(!gh_log.contains("pr ready")); + assert_eq!(gh_log.matches("pr edit ").count(), 1); + assert!(gh_log.contains("pr edit 301 --base main")); + assert_eq!( + count_remote_ref_updates(&remote_update_log, "refs/heads/feat/auth"), + 0 + ); + }); +} + +#[test] +fn sync_repairs_closed_child_pull_request_when_parent_branch_is_missing_locally() { + with_temp_repo("dig-sync-cli", |repo| { + initialize_main_repo(repo); + initialize_origin_remote(repo); + dig_ok(repo, &["init"]); + dig_ok(repo, &["branch", "feat/root"]); + commit_file(repo, "root.txt", "root\n", "feat: root"); + git_ok(repo, &["push", "-u", "origin", "feat/root"]); + track_pull_request_number(repo, "feat/root", 101); + dig_ok(repo, &["branch", "feat/auth"]); + commit_file(repo, "auth.txt", "auth\n", "feat: auth"); + git_ok(repo, &["push", "-u", "origin", "feat/auth"]); + track_pull_request_number(repo, "feat/auth", 102); + dig_ok(repo, &["branch", "feat/auth-ui"]); + commit_file(repo, "ui.txt", "ui\n", "feat: ui"); + git_ok(repo, &["push", "-u", "origin", "feat/auth-ui"]); + track_pull_request_number(repo, "feat/auth-ui", 103); + + let parent_head_oid = git_stdout(repo, &["rev-parse", "feat/auth"]); + let remote_repo = clone_origin(repo, "origin-worktree-missing-parent"); + git_ok(&remote_repo, &["checkout", "main"]); + git_ok(&remote_repo, &["merge", "--squash", "origin/feat/root"]); + git_ok( + &remote_repo, + &["commit", "--quiet", "-m", "feat: merge root"], + ); + git_ok(&remote_repo, &["push", "origin", "main"]); + git_ok(&remote_repo, &["push", "origin", "--delete", "feat/root"]); + git_ok(&remote_repo, &["push", "origin", "--delete", "feat/auth"]); + + git_ok(repo, &["checkout", "main"]); + git_ok(repo, &["branch", "-D", "feat/root"]); + git_ok(repo, &["branch", "-D", "feat/auth"]); + set_branch_archived(repo, "feat/root", true); + + let remote_update_log = install_remote_update_logger(repo); + let (path, gh_log_path) = install_fake_gh( + repo, + &format!( + r#"#!/bin/sh +set -eu +printf '%s\n' 
"$*" >> "$DIG_TEST_GH_LOG" +if [ "$1" = "pr" ] && [ "$2" = "view" ] && [ "$3" = "102" ]; then + printf '{{"number":102,"state":"MERGED","mergedAt":"2026-03-26T12:00:00Z","baseRefName":"feat/root","headRefName":"feat/auth","headRefOid":"{parent_head_oid}","isDraft":false,"url":"https://github.com/acme/dig/pull/102"}}\n' + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "view" ] && [ "$3" = "103" ]; then + printf '{{"number":103,"state":"CLOSED","mergedAt":null,"baseRefName":"feat/auth","headRefName":"feat/auth-ui","isDraft":false,"url":"https://github.com/acme/dig/pull/103"}}\n' + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "reopen" ] && [ "$3" = "103" ]; then + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "ready" ] && [ "$3" = "103" ] && [ "$4" = "--undo" ]; then + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "edit" ] && [ "$3" = "103" ] && [ "$4" = "--base" ] && [ "$5" = "main" ]; then + exit 0 +fi +echo "unexpected gh args: $*" >&2 +exit 1 +"# + ), + ); + + let output = dig_with_input_and_env( + repo, + &["sync"], + "n\n", + &[ + ("PATH", path.as_str()), + ("DIG_TEST_GH_LOG", gh_log_path.as_str()), + ], + ); + let stdout = strip_ansi(&String::from_utf8(output.stdout).unwrap()); + let stderr = String::from_utf8(output.stderr).unwrap(); + + assert!( + output.status.success(), + "stdout:\n{stdout}\nstderr:\n{stderr}" + ); + assert!(stdout.contains("Recovered pull requests:")); + assert!(stdout.contains( + "- feat/auth-ui (#103): reopened as draft and retargeted from feat/auth to main" + )); + assert!(stdout.contains("Deleted locally and no longer tracked by dig:")); + assert!(stdout.contains("- feat/auth")); + assert!(stdout.contains("Restacked:")); + assert!(stdout.contains("- feat/auth-ui onto main")); + + let gh_log = fs::read_to_string(gh_log_path).unwrap(); + assert!(gh_log.contains("pr view 102 --json")); + assert!(gh_log.contains("pr view 103 --json")); + assert!(gh_log.contains("pr reopen 103")); + assert!(gh_log.contains("pr ready 103 --undo")); + 
assert!(gh_log.contains("pr edit 103 --base main")); + assert_eq!( + count_remote_ref_updates(&remote_update_log, "refs/heads/feat/auth"), + 2 + ); + assert_eq!( + git_stdout( + repo, + &["ls-remote", "--heads", "origin", "refs/heads/feat/auth"] + ), + "" + ); + + let state = load_state_json(repo); + let child = find_node(&state, "feat/auth-ui").unwrap(); + assert_eq!(child["base_ref"], "main"); + assert_eq!(child["parent"]["kind"], "trunk"); + assert!(find_archived_node(&state, "feat/root").is_some()); + assert!(find_archived_node(&state, "feat/auth").is_some()); + }); +} + +#[test] +fn sync_removes_local_parent_branch_after_repair_when_parent_was_merged_upstream() { + with_temp_repo("dig-sync-cli", |repo| { + initialize_main_repo(repo); + initialize_origin_remote(repo); + dig_ok(repo, &["init"]); + dig_ok(repo, &["branch", "feat/root"]); + commit_file(repo, "root.txt", "root\n", "feat: root"); + git_ok(repo, &["push", "-u", "origin", "feat/root"]); + track_pull_request_number(repo, "feat/root", 101); + dig_ok(repo, &["branch", "feat/auth"]); + commit_file(repo, "auth.txt", "auth\n", "feat: auth"); + git_ok(repo, &["push", "-u", "origin", "feat/auth"]); + track_pull_request_number(repo, "feat/auth", 102); + dig_ok(repo, &["branch", "feat/auth-ui"]); + commit_file(repo, "ui.txt", "ui\n", "feat: ui"); + git_ok(repo, &["push", "-u", "origin", "feat/auth-ui"]); + track_pull_request_number(repo, "feat/auth-ui", 103); + + let parent_head_oid = git_stdout(repo, &["rev-parse", "feat/auth"]); + let remote_repo = clone_origin(repo, "origin-worktree-local-parent"); + git_ok(&remote_repo, &["checkout", "main"]); + git_ok(&remote_repo, &["merge", "--squash", "origin/feat/root"]); + git_ok( + &remote_repo, + &["commit", "--quiet", "-m", "feat: merge root"], + ); + git_ok(&remote_repo, &["push", "origin", "main"]); + git_ok(&remote_repo, &["push", "origin", "--delete", "feat/root"]); + git_ok(&remote_repo, &["push", "origin", "--delete", "feat/auth"]); + + git_ok(repo, 
&["checkout", "main"]); + git_ok(repo, &["branch", "-D", "feat/root"]); + set_branch_archived(repo, "feat/root", true); + + let remote_update_log = install_remote_update_logger(repo); + let (path, gh_log_path) = install_fake_gh( + repo, + &format!( + r#"#!/bin/sh +set -eu +printf '%s\n' "$*" >> "$DIG_TEST_GH_LOG" +if [ "$1" = "pr" ] && [ "$2" = "view" ] && [ "$3" = "102" ]; then + printf '{{"number":102,"state":"MERGED","mergedAt":"2026-03-26T12:00:00Z","baseRefName":"feat/root","headRefName":"feat/auth","headRefOid":"{parent_head_oid}","isDraft":false,"url":"https://github.com/acme/dig/pull/102"}}\n' + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "view" ] && [ "$3" = "103" ]; then + printf '{{"number":103,"state":"CLOSED","mergedAt":null,"baseRefName":"feat/auth","headRefName":"feat/auth-ui","isDraft":false,"url":"https://github.com/acme/dig/pull/103"}}\n' + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "reopen" ] && [ "$3" = "103" ]; then + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "ready" ] && [ "$3" = "103" ] && [ "$4" = "--undo" ]; then + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "edit" ] && [ "$3" = "103" ] && [ "$4" = "--base" ] && [ "$5" = "main" ]; then + exit 0 +fi +echo "unexpected gh args: $*" >&2 +exit 1 +"# + ), + ); + + let output = dig_with_input_and_env( + repo, + &["sync"], + "n\n", + &[ + ("PATH", path.as_str()), + ("DIG_TEST_GH_LOG", gh_log_path.as_str()), + ], + ); + let stdout = strip_ansi(&String::from_utf8(output.stdout).unwrap()); + let stderr = String::from_utf8(output.stderr).unwrap(); + + assert!( + output.status.success(), + "stdout:\n{stdout}\nstderr:\n{stderr}" + ); + assert!(stdout.contains("Recovered pull requests:")); + assert!(stdout.contains("Deleted locally and no longer tracked by dig:")); + assert!(stdout.contains("- feat/auth")); + assert_eq!(git_stdout(repo, &["branch", "--list", "feat/auth"]), ""); + + let state = load_state_json(repo); + let child = find_node(&state, "feat/auth-ui").unwrap(); + assert_eq!(child["base_ref"], 
"main"); + assert_eq!(child["parent"]["kind"], "trunk"); + assert!(find_archived_node(&state, "feat/auth").is_some()); + assert_eq!( + count_remote_ref_updates(&remote_update_log, "refs/heads/feat/auth"), + 2 + ); + }); +} + +#[test] +fn sync_aborts_before_local_cleanup_when_pull_request_repair_fails() { + with_temp_repo("dig-sync-cli", |repo| { + setup_remotely_merged_root_branch_with_children( + repo, + &[("feat/auth-ui", "ui.txt", "ui\n", "feat: ui")], + ); + track_pull_request_number(repo, "feat/auth-ui", 234); + + let (path, gh_log_path) = install_fake_gh( + repo, + r#"#!/bin/sh +set -eu +printf '%s\n' "$*" >> "$DIG_TEST_GH_LOG" +if [ "$1" = "pr" ] && [ "$2" = "view" ] && [ "$3" = "234" ]; then + printf '{"number":234,"state":"CLOSED","mergedAt":null,"baseRefName":"feat/auth","headRefName":"feat/auth-ui","isDraft":false,"url":"https://github.com/acme/dig/pull/234"}\n' + exit 0 +fi +if [ "$1" = "pr" ] && [ "$2" = "reopen" ] && [ "$3" = "234" ]; then + echo "boom" >&2 + exit 1 +fi +echo "unexpected gh args: $*" >&2 +exit 1 +"#, + ); + + let output = dig_with_env( + repo, + &["sync"], + &[ + ("PATH", path.as_str()), + ("DIG_TEST_GH_LOG", gh_log_path.as_str()), + ], + ); + let stdout = strip_ansi(&String::from_utf8(output.stdout).unwrap()); + let stderr = String::from_utf8(output.stderr).unwrap(); + + assert!(!output.status.success()); + assert!(stderr.contains("failed to reopen tracked pull request #234 for 'feat/auth-ui'")); + assert!(!stdout.contains("Merged branches ready to clean:")); + assert!(!stdout.contains("Restacked:")); + assert!(load_operation_json(repo).is_none()); + + let state = load_state_json(repo); + let parent = find_node(&state, "feat/auth").unwrap(); + let child = find_node(&state, "feat/auth-ui").unwrap(); + assert_eq!(child["parent"]["kind"], "branch"); + assert_eq!(child["parent"]["node_id"], parent["id"]); + assert_eq!( + git_stdout(repo, &["merge-base", "feat/auth", "feat/auth-ui"]), + git_stdout(repo, &["rev-parse", "feat/auth"]) + ); + 
}); +} + #[test] fn sync_cleans_middle_branch_merged_remotely_and_excludes_it_from_remote_pushes() { with_temp_repo("dig-sync-cli", |repo| {