diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..c8cb1a0 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,10 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +## Version: 0.2.120 + +### New + + + diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..7fea10c --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,2 @@ +[workspace] +members = [ 'ton_api', 'ton_tl_codegen' ] diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..f288702 --- /dev/null +++ b/LICENSE @@ -0,0 +1,674 @@ + GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU General Public License is a free, copyleft license for +software and other kinds of works. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +the GNU General Public License is intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. We, the Free Software Foundation, use the +GNU General Public License for most of our software; it applies also to +any other work released this way by its authors. You can apply it to +your programs, too. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + To protect your rights, we need to prevent others from denying you +these rights or asking you to surrender the rights. Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. 
If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. 
A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. + + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. 
+ + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. 
+ + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+ + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If the program does terminal interaction, make it output a short +notice like this when it starts in an interactive mode: + + Copyright (C) + This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. + This is free software, and you are welcome to redistribute it + under certain conditions; type `show c' for details. + +The hypothetical commands `show w' and `show c' should show the appropriate +parts of the General Public License. Of course, your program's commands +might be different; for a GUI interface, you would use an "about box". + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. But first, please read +. diff --git a/commit_hash.txt b/commit_hash.txt new file mode 100644 index 0000000..c3c16c6 --- /dev/null +++ b/commit_hash.txt @@ -0,0 +1 @@ +bf52d9045a566cd9cb089582081385c81d435072 diff --git a/deps_map.json b/deps_map.json new file mode 100644 index 0000000..d2c9313 --- /dev/null +++ b/deps_map.json @@ -0,0 +1 @@ +{"ton-block":"da0becab48cc99a1518e08c386ca58b18a82e54a","ton-types":"12ddd04a55d73be85f44ac44b579cea3172ab32b"} \ No newline at end of file diff --git a/ton_api/Cargo.toml b/ton_api/Cargo.toml new file mode 100644 index 0000000..224af18 --- /dev/null +++ b/ton_api/Cargo.toml @@ -0,0 +1,27 @@ +[package] +authors = [ 'Aaron Gallagher <_@habnab.it>', 'Connie Hilarides ', 'Denis K. 
', 'Alexey Vavilin >(); + + assert!(files.len() > 0); + files.sort(); + + let mut input = String::new(); + for file in files { + if input.len() > 0 { + input += "---types---\n"; + } + fs::File::open(&file) + .expect(format!("Unable to open file for reading: {}", file.to_string_lossy()).as_str()) + .read_to_string(&mut input) + .expect(format!("Unable to read file contents: {}", file.to_string_lossy()).as_str()); + println!("cargo:rerun-if-changed={}", file.to_string_lossy()); + } + + let config_path = Path::new(TL_DIR).join("codegen.json"); + let config: Option = if config_path.exists() && config_path.is_file() { + let mut config_string = String::new(); + fs::File::open(&config_path) + .expect(format!("Unable to open file for reading: {}", config_path.to_string_lossy()).as_str()) + .read_to_string(&mut config_string) + .expect(format!("Unable to read file contents: {}", config_path.to_string_lossy()).as_str()); + Some(serde_json::from_str(config_string.as_str()) + .expect(format!("Unable to parse file as JSON: {}", config_path.to_string_lossy()).as_str())) + } else { + None + }; + + ton_tl_codegen::generate_code_for(config, &input, Path::new(OUTPUT_DIR)); +} + diff --git a/ton_api/src/lib.rs b/ton_api/src/lib.rs new file mode 100644 index 0000000..7a7a6d8 --- /dev/null +++ b/ton_api/src/lib.rs @@ -0,0 +1,417 @@ +/* +* Copyright (C) 2019-2021 TON Labs. All Rights Reserved. +* +* Licensed under the SOFTWARE EVALUATION License (the "License"); you may not use +* this file except in compliance with the License. +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific TON DEV software governing permissions and +* limitations under the License. +*/ + +#![allow(clippy::unreadable_literal)] +#![deny(private_in_public)] + +use crate::ton_prelude::TLObject; +use failure::Fail; +use std::{any::Any, fmt, hash::Hash, io::{self, Read, Write}}; + +use ton_block::{BlockIdExt, ShardIdent}; +use ton_types::Result; +use ton_types::{fail, UInt256}; + +macro_rules! 
_invalid_id { + ($id:ident) => { + Err(crate::InvalidConstructor { expected: Self::possible_constructors(), received: $id }.into()) + }; +} + +#[allow(non_camel_case_types)] +pub mod ton; +pub mod secure; +mod ton_prelude; + +/// Struct representing TL constructor number (CRC32 calculated from constructor definition string) +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct ConstructorNumber(pub u32); + +impl fmt::Debug for ConstructorNumber { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "#{:08x}", self.0) + } +} + +/// Struct for handling mismatched constructor number +#[derive(Debug, Fail)] +#[fail(display = "expected a constructor in {:?}; got {:?}", expected, received)] +pub struct InvalidConstructor { + pub expected: Vec, + pub received: ConstructorNumber, +} + +/// Struct for deserializing TL-scheme objects from any `io::Read` +pub struct Deserializer<'r> { + reader: &'r mut dyn Read, +} + +impl<'r> Deserializer<'r> { + /// Create `Deserializer` with given `io::Read` trait object + pub fn new(reader: &'r mut dyn Read) -> Self { + Deserializer { reader } + } + + /// Read `ConstructorNumber` from reader + pub fn read_constructor(&mut self) -> Result { + use byteorder::{LittleEndian, ReadBytesExt}; + Ok(ConstructorNumber(self.read_u32::()?)) + } + + /// Read bare-serialized TL-object + #[inline(always)] + pub fn read_bare(&mut self) -> Result { + D::deserialize_bare(self) + } + + /// Read boxed-serialized TL-object + #[inline(always)] + pub fn read_boxed(&mut self) -> Result { + let constructor = self.read_constructor()?; + D::deserialize_boxed(constructor, self) + } + + /// Returns default value for type + #[inline(always)] + pub fn just_default(&self) -> Result { + Ok(Default::default()) + } +} + +impl<'r> Read for Deserializer<'r> { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + self.reader.read(buf) + } +} + +/// Trait for bare type deserialization +pub trait BareDeserialize +where Self: Sized, +{ + /// Read bare-serialized value using `Deserializer` + fn deserialize_bare(de: &mut Deserializer) -> Result; + + /// Read bare-serialized value from `u8` array + fn bare_deserialized_from_bytes(mut bytes: &[u8]) -> Result { + Deserializer::new(&mut bytes).read_bare() + } +} + +/// Trait for boxed type deserialization +pub trait BoxedDeserialize +where Self: Sized, +{ + /// Returns all possible constructors of boxed type + fn possible_constructors() -> Vec; + + /// Read boxed-serialized value using `Deserializer` + fn deserialize_boxed(id: ConstructorNumber, de: &mut Deserializer) -> Result; + + /// Read boxed-serialized value from `u8` array + fn boxed_deserialized_from_bytes(mut bytes: &[u8]) -> Result { + Deserializer::new(&mut bytes).read_boxed() + } +} + +/// Trait for deserializing any value represented `Object` TL type +pub trait BoxedDeserializeDynamic: BoxedDeserialize { + /// Read boxed type value with given `ConstructorNumber` using `Deserializer` + fn boxed_deserialize_to_box(id: ConstructorNumber, de: &mut Deserializer) -> Result; +} + +impl BoxedDeserializeDynamic for D +where D: BoxedDeserialize + AnyBoxedSerialize, +{ + fn boxed_deserialize_to_box(id: ConstructorNumber, de: &mut Deserializer) -> Result { + Ok(ton::TLObject::new(D::deserialize_boxed(id, de)?)) + } +} + +/// Struct representing every boxed type for deserializing `Object` TL type +#[derive(Clone, Copy)] +pub struct DynamicDeserializer { + id: ConstructorNumber, + type_name: &'static str, + ton: fn(ConstructorNumber, &mut Deserializer) -> Result, +} + +impl 
DynamicDeserializer { + #[inline(always)] + pub fn from(id: ConstructorNumber, type_name: &'static str) -> Self { + DynamicDeserializer { + id, + type_name, + ton: D::boxed_deserialize_to_box, + } + } +} + +/// Struct for serializing TL-scheme objects into any `io::Write` +pub struct Serializer<'w> { + writer: &'w mut dyn Write, +} + +impl<'w> Serializer<'w> { + /// Create `Serializer` with given `io::Write` trait object + pub fn new(writer: &'w mut dyn Write) -> Self { + Serializer { writer } + } + + /// Read `ConstructorNumber` into writer + pub fn write_constructor(&mut self, id: ConstructorNumber) -> Result<()> { + use byteorder::{LittleEndian, WriteBytesExt}; + self.write_u32::(id.0)?; + Ok(()) + } + + /// Serialize TL-object as bare value + #[inline(always)] + pub fn write_bare(&mut self, obj: &S) -> Result<()> { + obj.serialize_bare(self) + } + + /// Serialize TL-object as boxed value + #[inline(always)] + pub fn write_boxed(&mut self, obj: &S) -> Result<()> { + let (constructor, bare) = obj.serialize_boxed(); + self.write_constructor(constructor)?; + self.write_bare(bare)?; + Ok(()) + } + + #[inline(always)] + pub fn write_into_boxed(&mut self, obj: &S) -> Result<()> { + let constructor = obj.constructor(); + self.write_constructor(constructor)?; + self.write_bare(obj)?; + Ok(()) + } +} + +impl<'w> Write for Serializer<'w> { + fn write(&mut self, buf: &[u8]) -> io::Result { + self.writer.write(buf) + } + + fn flush(&mut self) -> io::Result<()> { + self.writer.flush() + } +} + +/// Trait for bare type serialization +pub trait BareSerialize { + /// Get constructor id for object (tl_id) + fn constructor(&self) -> crate::ConstructorNumber; + + /// Write object as bare-serialized value using `Serializer` + fn serialize_bare(&self, ser: &mut Serializer) -> Result<()>; + + /// Write object as bare-serialized value into `Vec` + fn bare_serialized_bytes(&self) -> Result> { + let mut buf: Vec = vec![]; + Serializer::new(&mut buf).write_bare(self)?; + Ok(buf) + } +} + +/// Trait for boxed type serialization +pub trait BoxedSerialize { + /// Represent boxed type value as `ConstructorNumber` and `BareSerialize` tuple + fn serialize_boxed(&self) -> (ConstructorNumber, &dyn BareSerialize); + + /// Serialize boxed type value into `Vec` + fn boxed_serialized_bytes(&self) -> Result> { + let mut buf: Vec = vec![]; + Serializer::new(&mut buf).write_boxed(self)?; + Ok(buf) + } +} + +/// Trait for representing bare types as boxed type +pub trait IntoBoxed: BareSerialize { + type Boxed: BoxedSerialize; + fn into_boxed(self) -> Self::Boxed; +} + +/// Trait for representing any boxed type used in `Object` TL type processing +pub trait AnyBoxedSerialize: Any + Send + Sync + BoxedSerialize { + fn as_any(&self) -> &dyn Any; + fn into_boxed_any(self: Box) -> Box; +} + +impl AnyBoxedSerialize for T { + fn as_any(&self) -> &dyn Any { self } + fn into_boxed_any(self: Box) -> Box { self } +} + +/// Trait for functional TL types +pub trait Function: AnyBoxedSerialize { + type Reply: BoxedDeserialize + AnyBoxedSerialize; +} + +impl BareDeserialize for BlockIdExt { + fn deserialize_bare(de: &mut Deserializer) -> Result { + let shard = ShardIdent::with_tagged_prefix( + de.read_bare::()?, + de.read_bare::()? as u64 + )?; + let ret = Self::with_params( + shard, + de.read_bare::()? as u32, + de.read_bare::()?, + de.read_bare::()? 
+ ); + Ok(ret) + } +} + +impl BareSerialize for BlockIdExt { + fn constructor(&self) -> ConstructorNumber { + crate::ton::ton_node::blockidext::TL_TAG + } + fn serialize_bare(&self, se: &mut Serializer) -> Result<()> { + let shard = self.shard(); + se.write_bare::(&shard.workchain_id())?; + se.write_bare::(&(shard.shard_prefix_with_tag() as i64))?; + se.write_bare::(&(self.seq_no() as i32))?; + se.write_bare::(self.root_hash())?; + se.write_bare::(self.file_hash())?; + Ok(()) + } +} + +impl BoxedDeserialize for BlockIdExt { + fn possible_constructors() -> Vec { + vec![crate::ton::ton_node::blockidext::TL_TAG] + } + fn deserialize_boxed(id: ConstructorNumber, de: &mut Deserializer) -> Result { + if id == crate::ton::ton_node::blockidext::TL_TAG { + de.read_bare() + } else { + _invalid_id!(id) + } + } +} + +impl BoxedSerialize for BlockIdExt { + fn serialize_boxed(&self) -> (ConstructorNumber, &dyn BareSerialize) { + (crate::ton::ton_node::blockidext::TL_TAG, self) + } +} + +impl BareDeserialize for UInt256 { + fn deserialize_bare(de: &mut Deserializer) -> Result { + let mut data = [0u8; 32]; + de.read_exact(&mut data)?; + Ok(Self::with_array(data)) + } +} + +impl BareSerialize for UInt256 { + fn constructor(&self) -> ConstructorNumber { + unreachable!() + } + fn serialize_bare(&self, se: &mut Serializer) -> Result<()> { + se.write_all(self.as_slice())?; + Ok(()) + } +} + +// Deserialize boxed TL object from bytes +pub fn deserialize_boxed(bytes: &[u8]) -> Result { + let mut reader = bytes; + Deserializer::new(&mut reader).read_boxed::() +} + +/// Deserialize bundle of boxed TL objects from bytes +pub fn deserialize_boxed_bundle(bytes: &[u8]) -> Result> { + let mut reader = bytes; + let mut de = Deserializer::new(&mut reader); + let mut ret = Vec::new(); + loop { + match de.read_boxed::() { + Ok(object) => ret.push(object), + Err(_) => if ret.is_empty() { + fail!("Deserialization error") + } else { + break + } + } + } + Ok(ret) +} + +/// Serialize boxed TL object into bytes +pub fn serialize_boxed(object: &T) -> Result> { + let mut ret = Vec::new(); + Serializer::new(&mut ret).write_boxed(object)?; + Ok(ret) +} + +/// Serialize boxed TL object into bytes with appending +pub fn serialize_boxed_append(buf: &mut Vec, object: &T) -> Result<()> { + Serializer::new(buf).write_boxed(object)?; + Ok(()) +} + +/// Serialize boxed TL object into bytes in-place +pub fn serialize_boxed_inplace(buf: &mut Vec, object: &T) -> Result<()> { + buf.truncate(0); + serialize_boxed_append(buf, object) +} + +/// Serialize non-boxed TL object into bytes +pub fn serialize_bare(object: &T) -> Result> { + let mut buf = Vec::new(); + Serializer::new(&mut buf).write_into_boxed(object)?; + Ok(buf) +} + +/// Serialize non-boxed TL object into bytes in-place +pub fn serialize_bare_inplace(buf: &mut Vec, object: &T) -> Result<()> { + buf.truncate(0); + Serializer::new(buf).write_into_boxed(object) +} + +/// Get TL tag from non-boxed object +pub fn tag_from_bare_object(object: &T) -> u32 { + let ConstructorNumber(tag) = object.constructor(); + tag +} + +/// Get TL tag from non-boxed type +pub fn tag_from_bare_type() -> u32 { + let (ConstructorNumber(tag), _) = T::default().into_boxed().serialize_boxed(); + tag +} + +/// Get TL tag from boxed object +pub fn tag_from_boxed_object(object: &T) -> u32 { + let (ConstructorNumber(tag), _) = object.serialize_boxed(); + tag +} + +/// Get TL tag from boxed type +pub fn tag_from_boxed_type() -> u32 { + let (ConstructorNumber(tag), _) = T::default().serialize_boxed(); + tag +} + +/// Get 
TL tag from data bytes +pub fn tag_from_data(data: &[u8]) -> u32 { + if data.len() < 4 { + 0 + } else { + u32::from_le_bytes([data[0], data[1], data[2], data[3]]) + } +} diff --git a/ton_api/src/secure.rs b/ton_api/src/secure.rs new file mode 100644 index 0000000..00705d1 --- /dev/null +++ b/ton_api/src/secure.rs @@ -0,0 +1,85 @@ +/* +* Copyright (C) 2019-2021 TON Labs. All Rights Reserved. +* +* Licensed under the SOFTWARE EVALUATION License (the "License"); you may not use +* this file except in compliance with the License. +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific TON DEV software governing permissions and +* limitations under the License. +*/ + +use std::fmt::{Debug, Display, Formatter}; +use std::hash::{Hash, Hasher}; + +use ::secstr::*; + +use crate::{BareDeserialize, BareSerialize}; + +/// SecureBytes built-in type. +#[derive(Clone, PartialEq)] +pub struct SecureBytes(SecVec); + +impl SecureBytes { + pub fn new(cont: Vec) -> Self { + Self(SecVec::::new(cont)) + } + + /// Borrow the contents of the string. + pub fn unsecure(&self) -> &[u8] { + self.0.unsecure() + } + + /// Mutably borrow the contents of the string. + pub fn unsecure_mut(&mut self) -> &mut [u8] { + self.0.unsecure_mut() + } + + /// Overwrite the string with zeros. This is automatically called in the destructor. + pub fn zero_out(&mut self) { + self.0.zero_out() + } +} + +impl Default for SecureBytes { + fn default() -> Self { + Self::new(Vec::default()) + } +} + +impl Debug for SecureBytes { + fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { + Debug::fmt(&self.0, f) + } +} + +impl Display for SecureBytes { + fn fmt(&self, f: &mut Formatter) -> std::fmt::Result { + Display::fmt(&self.0, f) + } +} + +impl Hash for SecureBytes { + fn hash(&self, state: &mut H) { + self.unsecure().hash(state); + } +} + +impl BareSerialize for SecureBytes { + fn constructor(&self) -> crate::ConstructorNumber { unreachable!() } + fn serialize_bare(&self, ser: &mut crate::Serializer) -> crate::Result<()> { + self.0.unsecure().serialize_bare(ser) + } +} + +impl BareDeserialize for SecureBytes { + fn deserialize_bare(de: &mut crate::Deserializer) -> crate::Result { + Vec::::deserialize_bare(de) + .map(|vec| SecureBytes(SecVec::::new(vec))) + } +} + +/// SecureString built-in type. +pub type SecureString = SecureBytes; diff --git a/ton_api/src/ton_prelude.rs b/ton_api/src/ton_prelude.rs new file mode 100644 index 0000000..a967094 --- /dev/null +++ b/ton_api/src/ton_prelude.rs @@ -0,0 +1,537 @@ +/* +* Copyright (C) 2019-2021 TON Labs. All Rights Reserved. +* +* Licensed under the SOFTWARE EVALUATION License (the "License"); you may not use +* this file except in compliance with the License. +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific TON DEV software governing permissions and +* limitations under the License. 
+*/ + +#![allow(non_camel_case_types)] + +use crate::{ + AnyBoxedSerialize, BareDeserialize, BareSerialize, BoxedDeserialize, BoxedSerialize, + ConstructorNumber, Deserializer, Result, Serializer, ton::Bool +}; + +use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt}; +use extfmt::Hexlify; +use ordered_float::OrderedFloat; +use serde_derive::{Deserialize, Serialize}; +use std::{any::type_name, fmt, hash::{Hash, Hasher}, io::{Read, Write}, marker::PhantomData}; +use ton_types::error; + +const MAX_BYTES_DEBUG_LEN: usize = 4; + +macro_rules! impl_byteslike { + (@common $ty:ident) => { + + impl fmt::Debug for $ty { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if self.len() <= MAX_BYTES_DEBUG_LEN { + write!(f, "<{}>", Hexlify(&self.0)) + } else { + write!(f, "<{}... {} bytes>", Hexlify(&self.0[..MAX_BYTES_DEBUG_LEN]), self.0.len()) + } + } + } + + impl ::std::ops::Deref for $ty { + type Target = [u8]; + fn deref(&self) -> &[u8] { &self.0 } + } + + impl ::std::ops::DerefMut for $ty { + fn deref_mut(&mut self) -> &mut [u8] { &mut self.0 } + } + + }; + + (@arraylike $ty:ident) => { + + impl_byteslike!(@common $ty); + + impl BareDeserialize for $ty { + fn deserialize_bare(de: &mut Deserializer) -> Result { + let mut ret: Self = Default::default(); + de.read_exact(&mut ret.0)?; + Ok(ret) + } + } + + impl BareSerialize for $ty { + fn constructor(&self) -> crate::ConstructorNumber { unreachable!() } + fn serialize_bare(&self, ser: &mut Serializer) -> Result<()> { + ser.write_all(&self.0)?; + Ok(()) + } + } + + }; +} + +/// Represents bytes vector. +#[derive(Clone, Default, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] +pub struct bytes(pub Vec); + +impl BareDeserialize for bytes { + fn deserialize_bare(de: &mut Deserializer) -> Result { + let vec = de.read_bare::>()?; + Ok(bytes(vec)) + } +} + +impl BareSerialize for bytes { + fn constructor(&self) -> crate::ConstructorNumber { unreachable!() } + fn serialize_bare(&self, ser: &mut Serializer) -> Result<()> { + ser.write_bare::<[u8]>(&self.0) + } +} + +impl From> for bytes { + fn from(v: Vec) -> Self { + bytes(v) + } +} + +/// Represents 128-bit unsigned integer. +#[derive(Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] +pub struct int128(pub [u8; 16]); + +/// Represents 256-bit unsigned integer. +//#[derive(Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Hash)] +//pub struct int256(pub [u8; 32]); +pub(crate) type int256 = ton_types::UInt256; + +impl_byteslike!(@common bytes); +impl_byteslike!(@arraylike int128); +//impl_byteslike!(@arraylike int256); + +/// Represents base TL-object type. 
+pub struct TLObject(Box); + +impl TLObject { + pub fn new(inner: I) -> Self { + TLObject(Box::new(inner)) + } + + pub fn is(&self) -> bool { + self.0.as_any().is::() + } + + pub fn downcast(self) -> ::std::result::Result { + if self.is::() { + Ok(*self.0.into_boxed_any().downcast::().unwrap()) + } else { + Err(self) + } + } +} + +impl Clone for TLObject { + fn clone(&self) -> Self { + unimplemented!() + } +} + +impl Default for TLObject { + fn default() -> Self { + unimplemented!() + } +} + +impl PartialEq for TLObject { + fn eq(&self, _other: &Self) -> bool { + unimplemented!() + } +} + +impl Hash for TLObject { + fn hash(&self, _state: &mut H) { + unimplemented!() + } +} + +impl fmt::Debug for TLObject { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let (type_id, _) = self.0.serialize_boxed(); + write!(f, "(TLObject tl_id:{:?})", type_id) + } +} + +impl BoxedDeserialize for TLObject { + fn possible_constructors() -> Vec { + crate::ton::dynamic::BY_NUMBER.keys().cloned().collect() + } + + fn deserialize_boxed(id: ConstructorNumber, de: &mut Deserializer) -> Result { + match crate::ton::dynamic::BY_NUMBER.get(&id) { + Some(dynamic) => (dynamic.ton)(id, de), + None => _invalid_id!(id), + } + } +} + +impl BoxedSerialize for TLObject { + fn serialize_boxed(&self) -> (ConstructorNumber, &dyn BareSerialize) { + self.0.serialize_boxed() + } +} + +#[derive(Debug, Clone, PartialEq, Hash)] +pub struct LengthPrefixed(pub T); + +impl From for LengthPrefixed { + fn from(x: T) -> Self { + LengthPrefixed(x) + } +} + +impl BareDeserialize for LengthPrefixed + where T: BoxedDeserialize, +{ + fn deserialize_bare(de: &mut Deserializer) -> Result { + let len = de.read_i32::()? as usize; + let mut buf = vec![0u8; len]; + de.read_exact(&mut buf)?; + Ok(LengthPrefixed(T::boxed_deserialized_from_bytes(&buf)?)) + } +} + +impl BareSerialize for LengthPrefixed + where T: BoxedSerialize, +{ + fn constructor(&self) -> crate::ConstructorNumber { + let inner = self.0.boxed_serialized_bytes().unwrap_or_default(); + crate::ConstructorNumber(inner.len() as u32) + } + fn serialize_bare(&self, ser: &mut Serializer) -> Result<()> { + let inner = self.0.boxed_serialized_bytes()?; + ser.write_i32::(inner.len() as i32)?; + ser.write_all(&inner)?; + Ok(()) + } +} + +impl BareSerialize for () { + fn constructor(&self) -> crate::ConstructorNumber { unreachable!() } + fn serialize_bare(&self, _ser: &mut Serializer) -> Result<()> { + Ok(()) + } +} + +impl From for &'static Bool { + fn from(b: bool) -> Self { + if b { &Bool::BoolTrue } else { &Bool::BoolFalse } + } +} + +impl From for Bool { + fn from(b: bool) -> Self { + let b: &'static Bool = b.into(); + b.clone() + } +} + +impl Into for Bool { + fn into(self) -> bool { + match self { + Bool::BoolTrue => true, + Bool::BoolFalse => false, + } + } +} + +impl BoxedDeserialize for bool { + fn possible_constructors() -> Vec { + Bool::possible_constructors() + } + + fn deserialize_boxed(id: ConstructorNumber, de: &mut Deserializer) -> Result { + Ok(Bool::deserialize_boxed(id, de)?.into()) + } +} + +impl BoxedSerialize for bool { + fn serialize_boxed(&self) -> (ConstructorNumber, &dyn BareSerialize) { + let b: &'static Bool = (*self).into(); + Bool::serialize_boxed(b) + } +} + +impl BareDeserialize for String { + fn deserialize_bare(de: &mut Deserializer) -> Result { + let vec = de.read_bare::>()?; + Ok(String::from_utf8(vec)?) 
+    }
+}
+
+impl BareSerialize for String {
+    fn constructor(&self) -> crate::ConstructorNumber { unreachable!() }
+    fn serialize_bare(&self, ser: &mut Serializer) -> Result<()> {
+        ser.write_bare::<[u8]>(self.as_bytes())?;
+        Ok(())
+    }
+}
+
+impl<T> BoxedDeserialize for Box<T>
+    where T: BoxedDeserialize,
+{
+    fn possible_constructors() -> Vec<ConstructorNumber> {
+        T::possible_constructors()
+    }
+
+    fn deserialize_boxed(id: ConstructorNumber, de: &mut Deserializer) -> Result<Self> {
+        Ok(Box::new(T::deserialize_boxed(id, de)?))
+    }
+}
+
+impl<T> BoxedSerialize for Box<T>
+    where T: BoxedSerialize,
+{
+    fn serialize_boxed(&self) -> (ConstructorNumber, &dyn BareSerialize) {
+        T::serialize_boxed(self)
+    }
+}
+
+/// Base enumeration for any bare type. Used as vectors type parameter.
+#[derive(PartialEq, Hash)]
+pub enum Bare {
+    None
+}
+
+impl Default for Bare {
+    fn default() -> Self {
+        Bare::None
+    }
+}
+
+/// Base enumeration for any boxed type. Used as vectors type parameter.
+#[derive(PartialEq, Hash)]
+pub enum Boxed {
+    None
+}
+
+impl Default for Boxed {
+    fn default() -> Self {
+        Boxed::None
+    }
+}
+
+#[derive(PartialEq, Hash, Default)]
+pub struct Vector<Det, T>(pub Vec<T>, PhantomData<fn() -> Det>);
+pub type vector<Det, T> = Vector<Det, T>;
+
+impl<Det, T> Clone for Vector<Det, T>
+    where T: Clone,
+{
+    fn clone(&self) -> Self {
+        Vector(self.0.clone(), PhantomData)
+    }
+}
+
+impl<Det, T> fmt::Debug for Vector<Det, T>
+    where T: fmt::Debug,
+{
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        f.debug_tuple("Vector")
+            .field(&self.0)
+            .finish()
+    }
+}
+
+const VECTOR_CONSTRUCTOR: ConstructorNumber = ConstructorNumber(0x1cb5c415);
+
+macro_rules! impl_vector {
+    ($det:ident, $det_de:ident, $det_ser:ident, $read_method:ident, $write_method:ident) => {
+
+        impl<T> From<Vec<T>> for Vector<$det, T> {
+            fn from(obj: Vec<T>) -> Self {
+                Vector(obj, PhantomData)
+            }
+        }
+
+        impl<T> ::std::ops::Deref for Vector<$det, T> {
+            type Target = [T];
+            fn deref(&self) -> &[T] { &self.0 }
+        }
+
+        impl<T> ::std::ops::DerefMut for Vector<$det, T> {
+            fn deref_mut(&mut self) -> &mut [T] { &mut self.0 }
+        }
+
+        impl<T> BareDeserialize for Vector<$det, T>
+            where T: $det_de,
+        {
+            fn deserialize_bare(de: &mut Deserializer) -> Result<Self> {
+                let count = de.read_i32::<LittleEndian>()?;
+                let mut ret = Vec::new();
+                ret.try_reserve_exact(count as usize)
+                    .map_err(
+                        |e| error!("count {} is too big for {}: {}", count, type_name::<Self>(), e)
+                    )?;
+                for _ in 0..count {
+                    ret.push(de.$read_method()?);
+                }
+                Ok(ret.into())
+            }
+        }
+
+        impl<T> BoxedDeserialize for Vector<$det, T>
+            where Self: BareDeserialize,
+        {
+            fn possible_constructors() -> Vec<ConstructorNumber> { vec![VECTOR_CONSTRUCTOR] }
+
+            fn deserialize_boxed(id: ConstructorNumber, de: &mut Deserializer) -> Result<Self> {
+                assert_eq!(id, VECTOR_CONSTRUCTOR);
+                Self::deserialize_bare(de)
+            }
+        }
+
+        impl<T> BareSerialize for Vector<$det, T>
+            where T: $det_ser,
+        {
+            fn constructor(&self) -> crate::ConstructorNumber { VECTOR_CONSTRUCTOR }
+            fn serialize_bare(&self, ser: &mut Serializer) -> Result<()> {
+                ser.write_i32::<LittleEndian>(self.0.len() as i32)?;
+                for item in &self.0 {
+                    ser.$write_method(item)?;
+                }
+                Ok(())
+            }
+        }
+
+        impl<T> BoxedSerialize for Vector<$det, T>
+            where Self: BareSerialize,
+        {
+            fn serialize_boxed(&self) -> (ConstructorNumber, &dyn BareSerialize) {
+                (VECTOR_CONSTRUCTOR, self)
+            }
+        }
+
+    }
+}
+
+impl_vector! { Bare, BareDeserialize, BareSerialize, read_bare, write_bare }
+impl_vector! { Boxed, BoxedDeserialize, BoxedSerialize, read_boxed, write_boxed }
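+
+// Illustrative note on the wire format produced by the vector impls above
+// (a sketch, not part of the original sources): a boxed vector is written as
+// the TL vector constructor 0x1cb5c415, a little-endian i32 element count and
+// then the elements; with the `Bare` determiner each element is written with
+// `write_bare` (no per-element constructor number), while `Boxed` writes each
+// element with `write_boxed` (its own 4-byte constructor number comes first).
+// For example, assuming `i32` elements serialized bare:
+//     Vector::<Bare, i32>::from(vec![1, 2]) as a boxed value =>
+//     15 c4 b5 1c | 02 00 00 00 | 01 00 00 00 | 02 00 00 00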
+
+// TL "bytes"/"string" wire encoding handled below: a one-byte length (or the
+// 0xfe marker followed by a 3-byte little-endian length for payloads of 254
+// bytes or more), then the payload, then zero padding up to a 4-byte boundary.
+impl BareDeserialize for Vec<u8> {
+    fn deserialize_bare(de: &mut Deserializer) -> Result<Self> {
+        let len = de.read_u8()?;
+        let (len, mut have_read) = if len != 254 {
+            (len as usize, 1)
+        } else {
+            (de.read_u24::<LittleEndian>()? as usize, 4)
+        };
+
+        let mut buf = Vec::new();
+        buf.try_reserve_exact(len)
+            .map_err(
+                |e| error!("count {} is too big for {}: {}", len, type_name::<Self>(), e)
+            )?;
+        buf.resize(len, 0);
+        de.read_exact(&mut buf)?;
+        have_read += len;
+        let remainder = have_read % 4;
+        if remainder != 0 {
+            let mut buf = [0u8; 4];
+            de.read_exact(&mut buf[remainder..])?;
+        }
+        Ok(buf)
+    }
+}
+
+impl BareSerialize for [u8] {
+    fn constructor(&self) -> crate::ConstructorNumber { unreachable!() }
+    fn serialize_bare(&self, ser: &mut Serializer) -> Result<()> {
+        let len = self.len();
+        let mut have_written = if len < 254 {
+            ser.write_u8(len as u8)?;
+            1
+        } else {
+            ser.write_u8(254)?;
+            ser.write_u24::<LittleEndian>(len as u32)?;
+            4
+        };
+
+        ser.write_all(self)?;
+        have_written += len;
+        let remainder = have_written % 4;
+        if remainder != 0 {
+            let buf = [0u8; 4];
+            ser.write_all(&buf[remainder..])?;
+        }
+        Ok(())
+    }
+}
+
+macro_rules! impl_tl_primitive {
+    ($tltype:ident, $ptype:ty, $read:ident, $write:ident) => {
+        pub type $tltype = $ptype;
+
+        impl BareDeserialize for $ptype {
+            fn deserialize_bare(de: &mut Deserializer) -> Result<Self> {
+                Ok(de.$read::<LittleEndian>()?)
+            }
+        }
+
+        impl BareSerialize for $ptype {
+            fn constructor(&self) -> crate::ConstructorNumber { unreachable!() }
+            fn serialize_bare(&self, ser: &mut Serializer) -> Result<()> {
+                ser.$write::<LittleEndian>(*self)?;
+                Ok(())
+            }
+        }
+    }
+}
+
+impl_tl_primitive! { int, i32, read_i32, write_i32 }
+impl_tl_primitive! { uint, u32, read_u32, write_u32 }
+impl_tl_primitive! { long, i64, read_i64, write_i64 }
+impl_tl_primitive! { ulong, u64, read_u64, write_u64 }
+
+pub type double = OrderedFloat<f64>;
+
+impl BareDeserialize for double {
+    fn deserialize_bare(de: &mut Deserializer) -> Result<Self> {
+        Ok(de.read_f64::<LittleEndian>()?.into())
+    }
+}
+
+impl BareSerialize for double {
+    fn constructor(&self) -> crate::ConstructorNumber { unreachable!() }
+    fn serialize_bare(&self, ser: &mut Serializer) -> Result<()> {
+        ser.write_f64::<LittleEndian>(self.0)?;
+        Ok(())
+    }
+}
+
+// Built-in types:
+pub type Int32 = i32;
+pub type Int53 = i64;
+pub type Int64 = i64;
+
+pub type int32 = Int32;
+pub type int53 = Int53;
+pub type int64 = Int64;
+
+/// Flags built-in type.
+pub type Flags = u32;
+pub type lengthPrefixedTypedObject = LengthPrefixed<TypedObject>;
+pub type True = bool;
+/// String built-in type.
+pub type string = String;
+/// Alias of TLObject built-in type.
+pub type TypedObject = TLObject;
+/// Alias of TLObject built-in type.
+pub type Object = TLObject;
+/// Function. Alias of TLObject built-in type.
+pub type Function = TLObject;
+/// Alias of SecureBytes built-in type.
+pub type secureBytes = crate::secure::SecureBytes;
+/// Alias of SecureString built-in type.
+pub type secureString = crate::secure::SecureString; diff --git a/ton_api/tl/codegen.json b/ton_api/tl/codegen.json new file mode 100644 index 0000000..bc99259 --- /dev/null +++ b/ton_api/tl/codegen.json @@ -0,0 +1,9 @@ +{ + "exclude_types": ["TestObject", "testObject", "testInt", "testString", "testVectorBytes", "getTestObject"], + "need_box": ["ConfirmValidation"], + "need_determiner": [], + "replace_with_bytes": [], + "additional_derives": { + "BlockIdExt": ["Serialize", "Deserialize"] + } +} \ No newline at end of file diff --git a/ton_api/tl/lite_api.tl b/ton_api/tl/lite_api.tl new file mode 100644 index 0000000..7d4495e --- /dev/null +++ b/ton_api/tl/lite_api.tl @@ -0,0 +1,81 @@ +int ? = Int; +long ? = Long; +double ? = Double; +string ? = String; +object ? = Object; +function ? = Function; +bytes data:string = Bytes; +true = True; +boolTrue = Bool; +boolFalse = Bool; + + +vector {t:Type} # [ t ] = Vector t; + +int128 4*[ int ] = Int128; +int256 8*[ int ] = Int256; + +tonNode.blockId workchain:int shard:long seqno:int = tonNode.BlockId; +tonNode.blockIdExt workchain:int shard:long seqno:int root_hash:int256 file_hash:int256 = tonNode.BlockIdExt; +tonNode.zeroStateIdExt workchain:int root_hash:int256 file_hash:int256 = tonNode.ZeroStateIdExt; + +adnl.message.query query_id:int256 query:bytes = adnl.Message; +adnl.message.answer query_id:int256 answer:bytes = adnl.Message; + +liteServer.error code:int message:string = liteServer.Error; + +liteServer.accountId workchain:int id:int256 = liteServer.AccountId; + +liteServer.masterchainInfo last:tonNode.blockIdExt state_root_hash:int256 init:tonNode.zeroStateIdExt = liteServer.MasterchainInfo; +liteServer.masterchainInfoExt mode:# version:int capabilities:long last:tonNode.blockIdExt last_utime:int now:int state_root_hash:int256 init:tonNode.zeroStateIdExt = liteServer.MasterchainInfoExt; +liteServer.currentTime now:int = liteServer.CurrentTime; +liteServer.version mode:# version:int capabilities:long now:int = liteServer.Version; +liteServer.blockData id:tonNode.blockIdExt data:bytes = liteServer.BlockData; +liteServer.blockState id:tonNode.blockIdExt root_hash:int256 file_hash:int256 data:bytes = liteServer.BlockState; +liteServer.blockHeader id:tonNode.blockIdExt mode:# header_proof:bytes = liteServer.BlockHeader; +liteServer.sendMsgStatus status:int = liteServer.SendMsgStatus; +liteServer.accountState id:tonNode.blockIdExt shardblk:tonNode.blockIdExt shard_proof:bytes proof:bytes state:bytes = liteServer.AccountState; +liteServer.runMethodResult mode:# id:tonNode.blockIdExt shardblk:tonNode.blockIdExt shard_proof:mode.0?bytes proof:mode.0?bytes state_proof:mode.1?bytes init_c7:mode.3?bytes lib_extras:mode.4?bytes exit_code:int result:mode.2?bytes = liteServer.RunMethodResult; +liteServer.shardInfo id:tonNode.blockIdExt shardblk:tonNode.blockIdExt shard_proof:bytes shard_descr:bytes = liteServer.ShardInfo; +liteServer.allShardsInfo id:tonNode.blockIdExt proof:bytes data:bytes = liteServer.AllShardsInfo; +liteServer.transactionInfo id:tonNode.blockIdExt proof:bytes transaction:bytes = liteServer.TransactionInfo; +liteServer.transactionList ids:(vector tonNode.blockIdExt) transactions:bytes = liteServer.TransactionList; +liteServer.transactionId mode:# account:mode.0?int256 lt:mode.1?long hash:mode.2?int256 = liteServer.TransactionId; +liteServer.transactionId3 account:int256 lt:long = liteServer.TransactionId3; +liteServer.blockTransactions id:tonNode.blockIdExt req_count:# incomplete:Bool ids:(vector liteServer.transactionId) 
proof:bytes = liteServer.BlockTransactions; +liteServer.signature node_id_short:int256 signature:bytes = liteServer.Signature; +liteServer.signatureSet validator_set_hash:int catchain_seqno:int signatures:(vector liteServer.signature) = liteServer.SignatureSet; +liteServer.blockLinkBack to_key_block:Bool from:tonNode.blockIdExt to:tonNode.blockIdExt dest_proof:bytes proof:bytes state_proof:bytes = liteServer.BlockLink; +liteServer.blockLinkForward to_key_block:Bool from:tonNode.blockIdExt to:tonNode.blockIdExt dest_proof:bytes config_proof:bytes signatures:liteServer.SignatureSet = liteServer.BlockLink; +liteServer.partialBlockProof complete:Bool from:tonNode.blockIdExt to:tonNode.blockIdExt steps:(vector liteServer.BlockLink) = liteServer.PartialBlockProof; +liteServer.configInfo mode:# id:tonNode.blockIdExt state_proof:bytes config_proof:bytes = liteServer.ConfigInfo; +liteServer.validatorStats mode:# id:tonNode.blockIdExt count:int complete:Bool state_proof:bytes data_proof:bytes = liteServer.ValidatorStats; + +liteServer.debug.verbosity value:int = liteServer.debug.Verbosity; + +---functions--- + +liteServer.getMasterchainInfo = liteServer.MasterchainInfo; +liteServer.getMasterchainInfoExt mode:# = liteServer.MasterchainInfoExt; +liteServer.getTime = liteServer.CurrentTime; +liteServer.getVersion = liteServer.Version; +liteServer.getBlock id:tonNode.blockIdExt = liteServer.BlockData; +liteServer.getState id:tonNode.blockIdExt = liteServer.BlockState; +liteServer.getBlockHeader id:tonNode.blockIdExt mode:# = liteServer.BlockHeader; +liteServer.sendMessage body:bytes = liteServer.SendMsgStatus; +liteServer.getAccountState id:tonNode.blockIdExt account:liteServer.accountId = liteServer.AccountState; +liteServer.runSmcMethod mode:# id:tonNode.blockIdExt account:liteServer.accountId method_id:long params:bytes = liteServer.RunMethodResult; +liteServer.getShardInfo id:tonNode.blockIdExt workchain:int shard:long exact:Bool = liteServer.ShardInfo; +liteServer.getAllShardsInfo id:tonNode.blockIdExt = liteServer.AllShardsInfo; +liteServer.getOneTransaction id:tonNode.blockIdExt account:liteServer.accountId lt:long = liteServer.TransactionInfo; +liteServer.getTransactions count:# account:liteServer.accountId lt:long hash:int256 = liteServer.TransactionList; +liteServer.lookupBlock mode:# id:tonNode.blockId lt:mode.1?long utime:mode.2?int = liteServer.BlockHeader; +liteServer.listBlockTransactions id:tonNode.blockIdExt mode:# count:# after:mode.7?liteServer.transactionId3 reverse_order:mode.6?true want_proof:mode.5?true = liteServer.BlockTransactions; +liteServer.getBlockProof mode:# known_block:tonNode.blockIdExt target_block:mode.0?tonNode.blockIdExt = liteServer.PartialBlockProof; +liteServer.getConfigAll mode:# id:tonNode.blockIdExt = liteServer.ConfigInfo; +liteServer.getConfigParams mode:# id:tonNode.blockIdExt param_list:(vector int) = liteServer.ConfigInfo; +liteServer.getValidatorStats#091a58bc mode:# id:tonNode.blockIdExt limit:int start_after:mode.0?int256 modified_after:mode.2?int = liteServer.ValidatorStats; + +liteServer.queryPrefix = Object; +liteServer.query data:bytes = Object; +liteServer.waitMasterchainSeqno seqno:int timeout_ms:int = Object; // query prefix diff --git a/ton_api/tl/ton_api.tl b/ton_api/tl/ton_api.tl new file mode 100644 index 0000000..176666e --- /dev/null +++ b/ton_api/tl/ton_api.tl @@ -0,0 +1,693 @@ +int ? = Int; +long ? = Long; +double ? = Double; +string ? = String; +object ? = Object; +function ? 
= Function; +bytes data:string = Bytes; +true = True; +boolTrue = Bool; +boolFalse = Bool; + + +vector {t:Type} # [ t ] = Vector t; + +int128 4*[ int ] = Int128; +int256 8*[ int ] = Int256; + +testObject value:int o:object f:function = TestObject; +testString value:string = TestObject; +testInt value:int = TestObject; +testVectorBytes value:(vector bytes) = TestObject; + +tcp.pong random_id:long = tcp.Pong; + +tcp.authentificate nonce:bytes = tcp.Message; +tcp.authentificationNonce nonce:bytes = tcp.Message; +tcp.authentificationComplete key:PublicKey signature:bytes = tcp.Message; + +fec.raptorQ data_size:int symbol_size:int symbols_count:int = fec.Type; +fec.roundRobin data_size:int symbol_size:int symbols_count:int = fec.Type; +fec.online data_size:int symbol_size:int symbols_count:int = fec.Type; + +---functions--- + +tcp.ping random_id:long = tcp.Pong; + +getTestObject = TestObject; + +---types--- + +pk.unenc data:bytes = PrivateKey; +pk.ed25519 key:int256 = PrivateKey; +pk.aes key:int256 = PrivateKey; +pk.overlay name:bytes = PrivateKey; + +pub.unenc data:bytes = PublicKey; +pub.ed25519 key:int256 = PublicKey; +pub.aes key:int256 = PublicKey; +pub.overlay name:bytes = PublicKey; + + +---functions--- + +---types--- + +adnl.id.short id:int256 = adnl.id.Short; + +adnl.proxyToFastHash ip:int port:int date:int data_hash:int256 shared_secret:int256 = adnl.ProxyTo; +adnl.proxyToFast ip:int port:int date:int signature:int256 = adnl.ProxyToSign; + +adnl.proxy.none id:int256 = adnl.Proxy; +adnl.proxy.fast id:int256 shared_secret:bytes = adnl.Proxy; + + +adnl.address.udp ip:int port:int = adnl.Address; +adnl.address.udp6 ip:int128 port:int = adnl.Address; +//adnl.address.tcp ip:int port:int = adnl.Address; +//adnl.address.tcp6 ip:int128 port:int = adnl.Address; + +adnl.address.tunnel to:int256 pubkey:PublicKey = adnl.Address; + +adnl.addressList addrs:(vector adnl.Address) version:int reinit_date:int priority:int expire_at:int = adnl.AddressList; + +adnl.node id:PublicKey addr_list:adnl.addressList = adnl.Node; +adnl.nodes nodes:(vector adnl.node) = adnl.Nodes; + +---functions--- + +---types--- + +adnl.packetContents + rand1:bytes + flags:# + from:flags.0?PublicKey + from_short:flags.1?adnl.id.short + message:flags.2?adnl.Message + messages:flags.3?(vector adnl.Message) + address:flags.4?adnl.addressList + priority_address:flags.5?adnl.addressList + seqno:flags.6?long + confirm_seqno:flags.7?long + recv_addr_list_version:flags.8?int + recv_priority_addr_list_version:flags.9?int + reinit_date:flags.10?int + dst_reinit_date:flags.10?int + signature:flags.11?bytes + rand2:bytes + = adnl.PacketContents; + +adnl.tunnelPacketContents + rand1:bytes + flags:# + from_ip:flags.0?int + from_port:flags.0?int + message:flags.1?bytes + statistics:flags.2?bytes + payment:flags.3?bytes + rand2:bytes + = adnl.TunnelPacketContents; + + +// flag 16 - packet is outbound +// flag 17 - control packet +adnl.proxyPacketHeader + proxy_id:int256 + flags:# + ip:flags.0?int + port:flags.0?int + adnl_start_time:flags.1?int + seqno:flags.2?long + date:flags.3?int + signature:int256 = adnl.ProxyPacketHeader; + +adnl.proxyControlPacketPing id:int256 = adnl.ProxyControlPacket; +adnl.proxyControlPacketPong id:int256 = adnl.ProxyControlPacket; +adnl.proxyControlPacketRegister ip:int port:int = adnl.ProxyControlPacket; + + +adnl.message.createChannel key:int256 date:int = adnl.Message; +adnl.message.confirmChannel key:int256 peer_key:int256 date:int = adnl.Message; + +adnl.message.custom data:bytes = adnl.Message; + 
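+
+// Note on the conditional fields used above (e.g. in adnl.packetContents): a
+// field declared as `name:flags.N?Type` is present on the wire only when bit N
+// of the preceding `flags:#` value is set. Illustrative example (not part of
+// the original schema): a packet carrying only `from` (bit 0) and `messages`
+// (bit 3) is encoded with flags = (1 << 0) | (1 << 3) = 9, and every other
+// optional field is simply omitted.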
+adnl.message.nop = adnl.Message; +adnl.message.reinit date:int = adnl.Message; + +adnl.message.query query_id:int256 query:bytes = adnl.Message; +adnl.message.answer query_id:int256 answer:bytes = adnl.Message; + +adnl.message.part hash:int256 total_size:int offset:int data:bytes = adnl.Message; + +---functions--- +---types--- + +adnl.db.node.key local_id:int256 peer_id:int256 = adnl.db.Key; +adnl.db.node.value date:int id:PublicKey addr_list:adnl.addressList priority_addr_list:adnl.addressList = adnl.db.node.Value; + +---functions--- + + +---types--- + +rldp.messagePart transfer_id:int256 fec_type:fec.Type part:int total_size:long seqno:int data:bytes = rldp.MessagePart; +rldp.confirm transfer_id:int256 part:int seqno:int = rldp.MessagePart; +rldp.complete transfer_id:int256 part:int = rldp.MessagePart; + +rldp.message id:int256 data:bytes = rldp.Message; +rldp.query query_id:int256 max_answer_size:long timeout:int data:bytes = rldp.Message; +rldp.answer query_id:int256 data:bytes = rldp.Message; + + +---functions--- +---types--- +dht.node id:PublicKey addr_list:adnl.addressList version:int signature:bytes = dht.Node; +dht.nodes nodes:(vector dht.node) = dht.Nodes; + +dht.key id:int256 name:bytes idx:int = dht.Key; + +dht.updateRule.signature = dht.UpdateRule; +dht.updateRule.anybody = dht.UpdateRule; +dht.updateRule.overlayNodes = dht.UpdateRule; + +dht.keyDescription key:dht.key id:PublicKey update_rule:dht.UpdateRule signature:bytes = dht.KeyDescription; + +dht.value key:dht.keyDescription value:bytes ttl:int signature:bytes = dht.Value; + +dht.pong random_id:long = dht.Pong; + +dht.valueNotFound nodes:dht.nodes = dht.ValueResult; +dht.valueFound value:dht.Value = dht.ValueResult; + +dht.stored = dht.Stored; +dht.message node:dht.node = dht.Message; + +dht.db.bucket nodes:dht.nodes = dht.db.Bucket; +dht.db.key.bucket id:int = dht.db.Key; + +---functions--- + +dht.ping random_id:long = dht.Pong; +dht.store value:dht.value = dht.Stored; +dht.findNode key:int256 k:int = dht.Nodes; +dht.findValue key:int256 k:int = dht.ValueResult; +dht.getSignedAddressList = dht.Node; + +dht.query node:dht.node = True; + +---types--- + +overlay.node.toSign id:adnl.id.short overlay:int256 version:int = overlay.node.ToSign; +overlay.node id:PublicKey overlay:int256 version:int signature:bytes = overlay.Node; +overlay.nodes nodes:(vector overlay.node) = overlay.Nodes; + +overlay.message overlay:int256 = overlay.Message; +//overlay.randomPeers peers:(vector adnl.node) = overlay.RandomPeers; +overlay.broadcastList hashes:(vector int256) = overlay.BroadcastList; + +overlay.fec.received hash:int256 = overlay.Broadcast; +overlay.fec.completed hash:int256 = overlay.Broadcast; + +overlay.broadcast.id src:int256 data_hash:int256 flags:int = overlay.broadcast.Id; +overlay.broadcastFec.id src:int256 type:int256 data_hash:int256 size:int flags:int = overlay.broadcastFec.Id; +overlay.broadcastFec.partId broadcast_hash:int256 data_hash:int256 seqno:int = overlay.broadcastFec.PartId; + +overlay.broadcast.toSign hash:int256 date:int = overlay.broadcast.ToSign; + +overlay.certificate issued_by:PublicKey expire_at:int max_size:int signature:bytes = overlay.Certificate; +overlay.emptyCertificate = overlay.Certificate; + +overlay.certificateId overlay_id:int256 node:int256 expire_at:int max_size:int = overlay.CertificateId; + +overlay.unicast data:bytes = overlay.Broadcast; +overlay.broadcast src:PublicKey certificate:overlay.Certificate flags:int data:bytes date:int signature:bytes = overlay.Broadcast; +overlay.broadcastFec 
src:PublicKey certificate:overlay.Certificate data_hash:int256 data_size:int flags:int + data:bytes seqno:int fec:fec.Type date:int signature:bytes = overlay.Broadcast; +overlay.broadcastFecShort src:PublicKey certificate:overlay.Certificate broadcast_hash:int256 part_data_hash:int256 seqno:int signature:bytes = overlay.Broadcast; +overlay.broadcastNotFound = overlay.Broadcast; + +---functions--- + +overlay.getRandomPeers peers:overlay.nodes = overlay.Nodes; + +overlay.query overlay:int256 = True; +overlay.getBroadcast hash:int256 = overlay.Broadcast; +overlay.getBroadcastList list:overlay.broadcastList = overlay.BroadcastList; + +---types--- + +overlay.db.nodes nodes:overlay.nodes = overlay.db.Nodes; +overlay.db.key.nodes local_id:int256 overlay:int256 = overlay.db.Key; + +---functions--- + +---types--- + +catchain.block.id incarnation:int256 src:int256 height:int data_hash:int256 = catchain.block.Id; +catchain.block.dep src:int height:int data_hash:int256 signature:bytes = catchain.block.Dep; +catchain.block.data prev:catchain.block.dep deps:(vector catchain.block.dep) = catchain.block.Data; +catchain.block incarnation:int256 src:int height:int data:catchain.block.data signature:bytes = catchain.Block; +catchain.blocks blocks:(vector catchain.block) = catchain.Blocks; +catchain.blockUpdate block:catchain.block = catchain.Update; + +catchain.block.data.badBlock block:catchain.block = catchain.block.inner.Data; +catchain.block.data.fork left:catchain.block.Dep right:catchain.block.Dep = catchain.block.inner.Data; +catchain.block.data.nop = catchain.block.inner.Data; +catchain.block.data.vector msgs:(vector bytes) = catchain.block.inner.Data; +//catchain.block.data.custom = catchain.block.inner.Data; + +catchain.firstblock unique_hash:int256 nodes:(vector int256) = catchain.FirstBlock; + +catchain.difference sent_upto:(vector int) = catchain.Difference; +catchain.differenceFork left:catchain.block.dep right:catchain.block.dep = catchain.Difference; + +catchain.blockNotFound = catchain.BlockResult; +catchain.blockResult block:catchain.block = catchain.BlockResult; + +catchain.sent cnt:int = catchain.Sent; + +---functions--- + +catchain.getBlock block:int256 = catchain.BlockResult; +catchain.getBlocks blocks:(vector int256) = catchain.Sent; +catchain.getDifference rt:(vector int) = catchain.Difference; +catchain.getBlockHistory block:int256 height:long stop_if:(vector int256) = catchain.Sent; +//catchain.getForkDifference src:int fork:catchain.fork = catchain.ForkDifference; + +---types--- + +validatorSession.round.id session:int256 height:long prev_block:int256 seqno:int = validatorSession.round.Id; + +validatorSession.candidate.id round:int256 block_hash:int256 = validatorSession.tempBlock.Id; + +validatorSession.message.startSession = validatorSession.Message; +validatorSession.message.finishSession = validatorSession.Message; + +validatorSession.message.submittedBlock round:int root_hash:int256 file_hash:int256 + collated_data_file_hash:int256 = validatorSession.round.Message; +validatorSession.message.approvedBlock round:int candidate:int256 signature:bytes = validatorSession.round.Message; +validatorSession.message.rejectedBlock round:int candidate:int256 reason:bytes = validatorSession.round.Message; +validatorSession.message.commit round:int candidate:int256 signature:bytes = validatorSession.round.Message; + +validatorSession.message.vote round:int attempt:int candidate:int256 = validatorSession.round.Message; +validatorSession.message.voteFor round:int attempt:int candidate:int256 = 
validatorSession.round.Message; +validatorSession.message.precommit round:int attempt:int candidate:int256 = validatorSession.round.Message; +validatorSession.message.empty round:int attempt:int = validatorSession.round.Message; + +validatorSession.pong hash:long = validatorSession.Pong; + +validatorSession.candidateId src:int256 root_hash:int256 file_hash:int256 collated_data_file_hash:int256 = validatorSession.CandidateId; + +validatorSession.blockUpdate ts:long actions:(vector validatorSession.round.Message) state:int = validatorSession.BlockUpdate; +validatorSession.candidate src:int256 round:int root_hash:int256 data:bytes collated_data:bytes = validatorSession.Candidate; + +validatorSession.config catchain_idle_timeout:double catchain_max_deps:int round_candidates:int next_candidate_delay:double round_attempt_duration:int + max_round_attempts:int max_block_size:int max_collated_data_size:int = validatorSession.Config; +validatorSession.configNew catchain_idle_timeout:double catchain_max_deps:int round_candidates:int next_candidate_delay:double round_attempt_duration:int + max_round_attempts:int max_block_size:int max_collated_data_size:int new_catchain_ids:Bool = validatorSession.Config; + +---functions--- + +validatorSession.ping hash:long = validatorSession.Pong; +validatorSession.downloadCandidate round:int id:validatorSession.candidateId = validatorSession.Candidate; + +---types--- + +hashable.bool value:Bool = Hashable; +hashable.int32 value:int = Hashable; +hashable.int64 value:long = Hashable; +hashable.int256 value:int256 = Hashable; +hashable.bytes value:bytes = Hashable; +hashable.pair left:int right:int = Hashable; +hashable.vector value:(vector int) = Hashable; +hashable.validatorSessionOldRound seqno:int block:int signatures:int approve_signatures:int = Hashable; +hashable.validatorSessionRoundAttempt seqno:int votes:int precommitted:int vote_for_inited:int vote_for:int = Hashable; +hashable.validatorSessionRound locked_round:int locked_block:int seqno:int precommitted:Bool + first_attempt:int approved_blocks:int signatures:int attempts:int = Hashable; +hashable.blockSignature signature:int = Hashable; +hashable.sentBlock src:int root_hash:int file_hash:int collated_data_file_hash:int = Hashable; +hashable.sentBlockEmpty = Hashable; +hashable.vote block:int node:int = Hashable; +hashable.blockCandidate block:int approved:int = Hashable; +hashable.blockVoteCandidate block:int approved:int = Hashable; +hashable.blockCandidateAttempt block:int votes:int = Hashable; + +hashable.cntVector data:int = Hashable; +hashable.cntSortedVector data:int = Hashable; + +hashable.validatorSession ts:int old_rounds:int cur_round:int = Hashable; + +---functions--- +---types--- + + +tonNode.sessionId workchain:int shard:long cc_seqno:int opts_hash:int256 = tonNode.SessionId; + + +tonNode.blockSignature who:int256 signature:bytes = tonNode.BlockSignature; + +tonNode.blockId workchain:int shard:long seqno:int = tonNode.BlockId; +tonNode.blockIdExt workchain:int shard:long seqno:int root_hash:int256 file_hash:int256 = tonNode.BlockIdExt; +tonNode.zeroStateIdExt workchain:int root_hash:int256 file_hash:int256 = tonNode.ZeroStateIdExt; + +tonNode.blockDescriptionEmpty = tonNode.BlockDescription; +tonNode.blockDescription id:tonNode.blockIdExt = tonNode.BlockDescription; +tonNode.blocksDescription ids:(vector tonNode.blockIdExt) incomplete:Bool = tonNode.BlocksDescription; +tonNode.preparedProofEmpty = tonNode.PreparedProof; +tonNode.preparedProof = tonNode.PreparedProof; 
+tonNode.preparedProofLink = tonNode.PreparedProof; +tonNode.preparedState = tonNode.PreparedState; +tonNode.notFoundState = tonNode.PreparedState; +tonNode.prepared = tonNode.Prepared; +tonNode.notFound = tonNode.Prepared; +tonNode.data data:bytes = tonNode.Data; +//tonNode.preparedKeyBlockProofEmpty = tonNode.PreparedKeyBlockProof; +//tonNode.preparedKeyBlockProof block_id:tonNode.blockIdExt = tonNode.PreparedKeyBlockProof; + +tonNode.ihrMessage data:bytes = tonNode.IhrMessage; +tonNode.externalMessage data:bytes = tonNode.ExternalMessage; + +tonNode.newShardBlock block:tonNode.blockIdExt cc_seqno:int data:bytes = tonNode.NewShardBlock; + +tonNode.blockBroadcast id:tonNode.blockIdExt catchain_seqno:int validator_set_hash:int + signatures:(vector tonNode.blockSignature) + proof:bytes data:bytes = tonNode.Broadcast; +tonNode.ihrMessageBroadcast message:tonNode.ihrMessage = tonNode.Broadcast; +tonNode.externalMessageBroadcast message:tonNode.externalMessage = tonNode.Broadcast; +tonNode.newShardBlockBroadcast block:tonNode.newShardBlock = tonNode.Broadcast; +tonNode.connectivityCheckBroadcast pub_key:int256 padding:bytes = tonNode.Broadcast; + +tonNode.shardPublicOverlayId workchain:int shard:long zero_state_file_hash:int256 = tonNode.ShardPublicOverlayId; + +tonNode.keyBlocks blocks:(vector tonNode.blockIdExt) incomplete:Bool error:Bool = tonNode.KeyBlocks; + +ton.blockId root_cell_hash:int256 file_hash:int256 = ton.BlockId; +ton.blockIdApprove root_cell_hash:int256 file_hash:int256 = ton.BlockId; + +tonNode.dataList data:(vector bytes) = tonNode.DataList; + +tonNode.dataFull id:tonNode.blockIdExt proof:bytes block:bytes is_link:Bool = tonNode.DataFull; +tonNode.dataFullEmpty = tonNode.DataFull; + +tonNode.capabilities version:int capabilities:long = tonNode.Capabilities; + +tonNode.success = tonNode.Success; + +tonNode.archiveNotFound = tonNode.ArchiveInfo; +tonNode.archiveInfo id:long = tonNode.ArchiveInfo; + +---functions--- + +tonNode.getNextBlockDescription prev_block:tonNode.blockIdExt = tonNode.BlockDescription; +tonNode.getNextBlocksDescription prev_block:tonNode.blockIdExt limit:int = tonNode.BlocksDescription; +tonNode.getPrevBlocksDescription next_block:tonNode.blockIdExt limit:int cutoff_seqno:int = tonNode.BlocksDescription; +tonNode.prepareBlockProof block:tonNode.blockIdExt allow_partial:Bool = tonNode.PreparedProof; +tonNode.prepareKeyBlockProof block:tonNode.blockIdExt allow_partial:Bool = tonNode.PreparedProof; +tonNode.prepareBlockProofs blocks:(vector tonNode.blockIdExt) allow_partial:Bool = tonNode.PreparedProof; +tonNode.prepareKeyBlockProofs blocks:(vector tonNode.blockIdExt) allow_partial:Bool = tonNode.PreparedProof; +tonNode.prepareBlock block:tonNode.blockIdExt = tonNode.Prepared; +tonNode.prepareBlocks blocks:(vector tonNode.blockIdExt) = tonNode.Prepared; +tonNode.preparePersistentState block:tonNode.blockIdExt masterchain_block:tonNode.blockIdExt = tonNode.PreparedState; +tonNode.prepareZeroState block:tonNode.blockIdExt = tonNode.PreparedState; +tonNode.getNextKeyBlockIds block:tonNode.blockIdExt max_size:int = tonNode.KeyBlocks; +tonNode.downloadNextBlockFull prev_block:tonNode.blockIdExt = tonNode.DataFull; +tonNode.downloadBlockFull block:tonNode.blockIdExt = tonNode.DataFull; +tonNode.downloadBlock block:tonNode.blockIdExt = tonNode.Data; +tonNode.downloadBlocks blocks:(vector tonNode.blockIdExt) = tonNode.DataList; +tonNode.downloadPersistentState block:tonNode.blockIdExt masterchain_block:tonNode.blockIdExt = tonNode.Data; 
+tonNode.downloadPersistentStateSlice block:tonNode.blockIdExt masterchain_block:tonNode.blockIdExt offset:long max_size:long = tonNode.Data; +tonNode.downloadZeroState block:tonNode.blockIdExt = tonNode.Data; +tonNode.downloadBlockProof block:tonNode.blockIdExt = tonNode.Data; +tonNode.downloadKeyBlockProof block:tonNode.blockIdExt = tonNode.Data; +tonNode.downloadBlockProofs blocks:(vector tonNode.blockIdExt) = tonNode.DataList; +tonNode.downloadKeyBlockProofs blocks:(vector tonNode.blockIdExt) = tonNode.DataList; +tonNode.downloadBlockProofLink block:tonNode.blockIdExt = tonNode.Data; +tonNode.downloadKeyBlockProofLink block:tonNode.blockIdExt = tonNode.Data; +tonNode.downloadBlockProofLinks blocks:(vector tonNode.blockIdExt) = tonNode.DataList; +tonNode.downloadKeyBlockProofLinks blocks:(vector tonNode.blockIdExt) = tonNode.DataList; +tonNode.getArchiveInfo masterchain_seqno:int = tonNode.ArchiveInfo; +tonNode.getArchiveSlice archive_id:long offset:long max_size:int = tonNode.Data; + +tonNode.getCapabilities = tonNode.Capabilities; + +tonNode.slave.sendExtMessage message:tonNode.externalMessage = tonNode.Success; + +tonNode.query = Object; + +// MBPP +---types--- +mbpp.newMcBlockSome block:tonNode.blockBroadcast = mbpp.NewMcBlock; +mbpp.newMcBlockNone = mbpp.NewMcBlock; +mbpp.newShardBlock id:tonNode.blockIdExt cc_seqno:int tbd:bytes block:bytes = mbpp.NewShardBlock; +---functions--- +mbpp.submitNewShardBlock block:mbpp.newShardBlock last_known_mc_block:int = mbpp.NewMcBlock; + + +---types--- + +// bit 0 - started +// bit 1 - ready to switch +// bit 2 - switched from +// bit 3 - archived +// bit 4 - disabled + +db.root.dbDescription version:int first_masterchain_block_id:tonNode.blockIdExt flags:int = db.root.DbDescription; + +db.root.key.cellDb version:int = db.root.Key; +db.root.key.blockDb version:int = db.root.Key; + +db.root.config celldb_version:int blockdb_version:int = db.root.Config; +db.root.key.config = db.root.Key; + +db.celldb.value block_id:tonNode.blockIdExt prev:int256 next:int256 root_hash:int256 = db.celldb.Value; +db.celldb.key.value hash:int256 = db.celldb.key.Value; + +db.block.info#4ac6e727 id:tonNode.blockIdExt flags:# prev_left:flags.1?tonNode.blockIdExt + prev_right:flags.2?tonNode.blockIdExt + next_left:flags.3?tonNode.blockIdExt + next_right:flags.4?tonNode.blockIdExt + lt:flags.13?long + ts:flags.14?int + state:flags.17?int256 + masterchain_ref_seqno:flags.23?int = db.block.Info; +db.block.packedInfo id:tonNode.blockIdExt unixtime:int offset:long = db.block.Info; +db.block.archivedInfo id:tonNode.blockIdExt flags:# next:flags.0?tonNode.blockIdExt = db.block.Info; + +db.blockdb.value next:tonNode.blockIdExt data:bytes = db.blockdb.Value; +db.blockdb.lru id:tonNode.blockIdExt prev:int256 next:int256 = db.blockdb.Lru; +db.blockdb.key.lru id:tonNode.blockIdExt = db.blockdb.Key; +db.blockdb.key.value id:tonNode.blockIdExt = db.blockdb.Key; + +db.candidate source:PublicKey id:tonNode.blockIdExt data:bytes collated_data:bytes = db.Candidate; +db.candidate.id source:PublicKey id:tonNode.blockIdExt collated_data_file_hash:int256 = db.candidate.Id; + +db.filedb.key.empty = db.filedb.Key; +db.filedb.key.blockFile block_id:tonNode.blockIdExt = db.filedb.Key; +db.filedb.key.zeroStateFile block_id:tonNode.blockIdExt = db.filedb.Key; +db.filedb.key.persistentStateFile block_id:tonNode.blockIdExt masterchain_block_id:tonNode.blockIdExt = db.filedb.Key; +db.filedb.key.proof block_id:tonNode.blockIdExt = db.filedb.Key; +db.filedb.key.proofLink block_id:tonNode.blockIdExt = 
db.filedb.Key; +db.filedb.key.signatures block_id:tonNode.blockIdExt = db.filedb.Key; +db.filedb.key.candidate id:db.candidate.id = db.filedb.Key; +db.filedb.key.blockInfo block_id:tonNode.blockIdExt = db.filedb.Key; + +db.filedb.value key:db.filedb.Key prev:int256 next:int256 file_hash:int256 = db.filedb.Value; + +db.state.destroyedSessions sessions:(vector int256) = db.state.DestroyedSessions; +db.state.initBlockId block:tonNode.blockIdExt = db.state.InitBlockId; +db.state.gcBlockId block:tonNode.blockIdExt = db.state.GcBlockId; +db.state.shardClient block:tonNode.blockIdExt = db.state.ShardClient; +db.state.asyncSerializer block:tonNode.blockIdExt last:tonNode.blockIdExt last_ts:int = db.state.AsyncSerializer; +db.state.hardforks blocks:(vector tonNode.blockIdExt) = db.state.Hardforks; +db.state.dbVersion version:int = db.state.DbVersion; + +db.state.key.destroyedSessions = db.state.Key; +db.state.key.initBlockId = db.state.Key; +db.state.key.gcBlockId = db.state.Key; +db.state.key.shardClient = db.state.Key; +db.state.key.asyncSerializer = db.state.Key; +db.state.key.hardforks = db.state.Key; +db.state.key.dbVersion = db.state.Key; + +db.lt.el.key workchain:int shard:long idx:int = db.lt.Key; +db.lt.desc.key workchain:int shard:long = db.lt.Key; +db.lt.shard.key idx:int = db.lt.Key; +db.lt.status.key = db.lt.Key; +db.lt.el.value id:tonNode.blockIdExt lt:long ts:int = db.lt.el.Value; +db.lt.desc.value first_idx:int last_idx:int last_seqno:int last_lt:long last_ts:int = db.lt.desc.Value; +db.lt.shard.value workchain:int shard:long = db.lt.shard.Value; +db.lt.status.value total_shards:int = db.lt.status.Value; + +db.files.index.key = db.files.Key; +db.files.package.key package_id:int key:Bool temp:Bool = db.files.Key; + +db.files.index.value packages:(vector int) key_packages:(vector int) temp_packages:(vector int) = db.files.index.Value; +db.files.package.firstBlock workchain:int shard:long seqno:int unixtime:int lt:long = db.files.package.FirstBlock; +db.files.package.value package_id:int key:Bool temp:Bool firstblocks:(vector db.files.package.firstBlock) deleted:Bool + = db.files.package.Value; + +---functions--- + +---types--- + +validator.groupMember public_key_hash:int256 adnl:int256 weight:long = engine.validator.GroupMember; +validator.group workchain:int shard:long catchain_seqno:int config_hash:int256 members:(vector validator.groupMember) = validator.Group; +validator.groupEx workchain:int shard:long vertical_seqno:int catchain_seqno:int config_hash:int256 members:(vector validator.groupMember) = validator.Group; +validator.groupNew workchain:int shard:long vertical_seqno:int last_key_block_seqno:int catchain_seqno:int config_hash:int256 members:(vector validator.groupMember) = validator.Group; + +---functions--- + + +---types--- + + +id.config.local id:PrivateKey = id.config.Local; +dht.config.local id:adnl.id.short = dht.config.Local; +dht.config.random.local cnt:int = dht.config.Local; +liteserver.config.local id:PrivateKey port:int = liteserver.config.Local; +liteserver.config.random.local port:int = liteserver.config.Local; +validator.config.local id:adnl.id.short = validator.config.Local; +validator.config.random.local addr_list:adnl.addressList = validator.config.Local; +control.config.local priv:PrivateKey pub:int256 port:int = control.config.Local; +config.local local_ids:(vector id.config.local) dht:(vector dht.config.Local) validators:(vector validator.config.Local) liteservers:(vector liteserver.config.Local) control:(vector control.config.local) = config.Local; + 
+dht.config.global static_nodes:dht.nodes k:int a:int = dht.config.Global; +adnl.config.global static_nodes:adnl.nodes = adnl.config.Global; +catchain.config.global tag:int256 nodes:(vector PublicKey) = catchain.config.Global; +dummyworkchain0.config.global zero_state_hash:int256 = dummyworkchain0.config.Global; +validator.config.global zero_state:tonNode.blockIdExt init_block:tonNode.blockIdExt hardforks:(vector tonNode.blockIdExt) = validator.config.Global; +config.global adnl:adnl.config.global dht:dht.config.global validator:validator.config.global = config.Global; + +liteserver.desc id:PublicKey ip:int port:int = liteserver.Desc; +liteclient.config.global liteservers:(vector liteserver.desc) validator:validator.config.global = liteclient.config.Global; + +engine.adnl id:int256 category:int = engine.Adnl; +engine.addr ip:int port:int categories:(vector int) priority_categories:(vector int) = engine.Addr; +engine.addrProxy in_ip:int in_port:int out_ip:int out_port:int + proxy_type:adnl.Proxy categories:(vector int) priority_categories:(vector int) = engine.Addr; +engine.dht id:int256 = engine.Dht; +engine.validatorTempKey key:int256 expire_at:int = engine.ValidatorTempKey; +engine.validatorAdnlAddress id:int256 expire_at:int = engine.ValidatorAdnlAddress; +engine.validator id:int256 temp_keys:(vector engine.validatorTempKey) adnl_addrs:(vector engine.validatorAdnlAddress) election_date:int expire_at:int = engine.Validator; +engine.liteServer id:int256 port:int = engine.LiteServer; +engine.controlProcess id:int256 permissions:int = engine.ControlProcess; +engine.controlInterface id:int256 port:int allowed:(vector engine.controlProcess) = engine.ControlInterface; +engine.gc ids:(vector int256) = engine.Gc; + +engine.dht.config dht:(vector engine.dht) gc:engine.gc = engine.dht.Config; +engine.validator.fullNodeMaster port:int adnl:int256 = engine.validator.FullNodeMaster; +engine.validator.fullNodeSlave ip:int port:int adnl:PublicKey = engine.validator.FullNodeSlave; +engine.validator.config out_port:int addrs:(vector engine.Addr) adnl:(vector engine.adnl) + dht:(vector engine.dht) + validators:(vector engine.validator) fullnode:int256 fullnodeslaves:(vector engine.validator.fullNodeSlave) + fullnodemasters:(vector engine.validator.fullNodeMaster) + liteservers:(vector engine.liteServer) control:(vector engine.controlInterface) + gc:engine.gc = engine.validator.Config; + +---functions--- +---types--- + +engine.adnlProxy.port in_port:int out_port:int dst_ip:int dst_port:int proxy_type:adnl.Proxy = engine.adnlProxy.Port; + +engine.adnlProxy.config ports:(vector engine.adnlProxy.port) = engine.adnlProxy.Config; + +---functions--- + +---types--- + +adnl.pong value:long = adnl.Pong; + +---functions--- + +adnl.ping value:long = adnl.Pong; + +---types--- + +engine.validator.keyHash key_hash:int256 = engine.validator.KeyHash; +engine.validator.signature signature:bytes = engine.validator.Signature; + +engine.validator.oneStat key:string value:string = engine.validator.OneStat; +engine.validator.stats stats:(vector engine.validator.oneStat) = engine.validator.Stats; + +engine.validator.oneSessionStat session_id:string stats:(vector engine.validator.oneStat) = engine.OneSessionStat; +engine.validator.sessionStats stats:(vector engine.validator.oneSessionStat) = engine.validator.SessionStats; + +engine.validator.controlQueryError code:int message:string = engine.validator.ControlQueryError; + +engine.validator.time time:int = engine.validator.Time; +engine.validator.success = 
engine.validator.Success; + +engine.validator.jsonConfig data:string = engine.validator.JsonConfig; + +engine.validator.electionBid election_date:int perm_key:int256 adnl_addr:int256 to_send_payload:bytes = engine.validator.ElectionBid; +engine.validator.proposalVote perm_key:int256 to_send:bytes = engine.validator.ProposalVote; + +engine.validator.dhtServerStatus id:int256 status:int = engine.validator.DhtServerStatus; +engine.validator.dhtServersStatus servers:(vector engine.validator.dhtServerStatus) = engine.validator.DhtServersStatus; + +---functions--- + +engine.validator.getTime = engine.validator.Time; +engine.validator.importPrivateKey key:PrivateKey = engine.validator.KeyHash; +engine.validator.exportPrivateKey key_hash:int256 = PrivateKey; +engine.validator.exportPublicKey key_hash:int256 = PublicKey; +engine.validator.generateKeyPair = engine.validator.KeyHash; +engine.validator.addAdnlId key_hash:int256 category:int = engine.validator.Success; +engine.validator.addDhtId key_hash:int256 = engine.validator.Success; +engine.validator.addValidatorPermanentKey key_hash:int256 election_date:int ttl:int = engine.validator.Success; +engine.validator.addValidatorTempKey permanent_key_hash:int256 key_hash:int256 ttl:int = engine.validator.Success; +engine.validator.addValidatorAdnlAddress permanent_key_hash:int256 key_hash:int256 ttl:int = engine.validator.Success; +engine.validator.changeFullNodeAdnlAddress adnl_id:int256 = engine.validator.Success; +engine.validator.addLiteserver key_hash:int256 port:int = engine.validator.Success; +engine.validator.addControlInterface key_hash:int256 port:int = engine.validator.Success; +engine.validator.addControlProcess key_hash:int256 port:int peer_key:int256 permissions:int = engine.validator.Success; +engine.validator.setStatesGcInterval interval_ms:int = engine.validator.Success; + +engine.validator.delAdnlId key_hash:int256 = engine.validator.Success; +engine.validator.delDhtId key_hash:int256 = engine.validator.Success; +engine.validator.delValidatorPermanentKey key_hash:int256 = engine.validator.Success; +engine.validator.delValidatorTempKey permanent_key_hash:int256 key_hash:int256 = engine.validator.Success; +engine.validator.delValidatorAdnlAddress permanent_key_hash:int256 key_hash:int256 = engine.validator.Success; + +engine.validator.addListeningPort ip:int port:int categories:(vector int) priority_categories:(vector int) = engine.validator.Success; +engine.validator.addProxy in_ip:int in_port:int out_ip:int out_port:int proxy:adnl.Proxy categories:(vector int) priority_categories:(vector int) = engine.validator.Success; +engine.validator.delListeningPort ip:int port:int categories:(vector int) priority_categories:(vector int) = engine.validator.Success; +engine.validator.delProxy out_ip:int out_port:int categories:(vector int) priority_categories:(vector int) = engine.validator.Success; + +engine.validator.sign key_hash:int256 data:bytes = engine.validator.Signature; + +engine.validator.getStats = engine.validator.Stats; +engine.validator.getSessionStats = engine.validator.SessionStats; +engine.validator.getConfig = engine.validator.JsonConfig; + +engine.validator.setVerbosity verbosity:int = engine.validator.Success; + +engine.validator.createElectionBid election_date:int election_addr:string wallet:string = engine.validator.ElectionBid; +engine.validator.createProposalVote vote:bytes = engine.validator.ProposalVote; +engine.validator.createComplaintVote election_id:int vote:bytes = engine.validator.ProposalVote; + 
+engine.validator.checkDhtServers id:int256 = engine.validator.DhtServersStatus; + +engine.validator.controlQuery data:bytes = Object; + + +---types--- + +http.header name:string value:string = http.Header; +http.payloadPart data:bytes trailer:(vector http.header) last:Bool = http.PayloadPart; +http.response http_version:string status_code:int reason:string headers:(vector http.header) = http.Response; + +---functions--- + +http.request id:int256 method:string url:string http_version:string headers:(vector http.header) = http.Response; +http.getNextPayloadPart id:int256 seqno:int max_chunk_size:int = http.PayloadPart; + +---types--- + + +http.server.dnsEntry domain:string addr:adnl.id.short = http.server.DnsEntry; +http.server.host domains:(vector string) ip:int port:int adnl_id:adnl.id.short = http.server.Host; + +http.server.config dhs:(vector http.server.dnsEntry) local_hosts:(vector http.server.host) = http.server.Config; + +---functions--- diff --git a/ton_api/tl/ton_net.tl b/ton_api/tl/ton_net.tl new file mode 100644 index 0000000..1ff4927 --- /dev/null +++ b/ton_api/tl/ton_net.tl @@ -0,0 +1,94 @@ + +---types--- + +tonEngine.networkProtocol.error + err_code:int + msg:string += tonEngine.NetworkProtocol; + +tonEngine.networkProtocol.validationRequest + id:long + signed_block:bytes += tonEngine.NetworkProtocol; + +tonEngine.networkProtocol.emptyStepRequest + id:long + empty_step:bytes += tonEngine.NetworkProtocol; + + +tonEngine.networkProtocol.confirmValidation + id:long + peer:int + result:long + block_seq_no:int + block_start_lt:long + block_end_lt:long + block_gen_utime:int += tonEngine.NetworkProtocol; + +tonEngine.networkProtocol.requestBlockByNumber + id:long + seq_no:int + vert_seq_no:int += tonEngine.NetworkProtocol; + +tonEngine.networkProtocol.responceBlock + id:long + signed_block:bytes += tonEngine.NetworkProtocol; + +tonEngine.networkProtocol.requestLastEqualShard + id:long + shard_hash:int256 += tonEngine.NetworkProtocol; + +tonEngine.networkProtocol.responseLastEqualShard + id:long + seq_no:int + vert_seq_no:int += tonEngine.NetworkProtocol; + + +tonEngine.networkProtocol.requestNodeInfo + id:long += tonEngine.NetworkProtocol; + +tonEngine.networkProtocol.responseNodeInfo + id:long + validator_no:int + workchain:int + shard_prefix:long + shard_pfx_len:int += tonEngine.NetworkProtocol; + +tonEngine.networkProtocol.sendMessageRequest + id:long + message:bytes += tonEngine.NetworkProtocol; + +tonEngine.networkProtocol.sendMessageResponse + id:long + result:long += tonEngine.NetworkProtocol; + +tonEngine.networkProtocol.reflectToDbRequest + id:long + transaction:bytes + account:bytes += tonEngine.NetworkProtocol; + +tonEngine.networkProtocol.reflectToDbResponse + id:long + result:long += tonEngine.NetworkProtocol; + +tonEngine.networkProtocol.rawData id:long data:bytes = tonEngine.NetworkProtocol; +tonEngine.networkProtocol.test1 id:long hash:int256 = tonEngine.NetworkProtocol; +tonEngine.networkProtocol.test2 id:long flag:Bool = tonEngine.NetworkProtocol; +tonEngine.networkProtocol.test3 id:long port:int = tonEngine.NetworkProtocol; + +---functions--- + +engine.validator.getBundle block_id:tonNode.blockIdExt = engine.validator.Success; +engine.validator.getFutureBundle prev_block_ids:(vector tonNode.blockIdExt) = engine.validator.Success; diff --git a/ton_api/tl/tonlib_api.tl b/ton_api/tl/tonlib_api.tl new file mode 100644 index 0000000..c277b09 --- /dev/null +++ b/ton_api/tl/tonlib_api.tl @@ -0,0 +1,323 @@ +double ? = Double; +string ? 
= String;
+
+int32 = Int32;
+int53 = Int53;
+int64 = Int64;
+bytes = Bytes;
+secureString = SecureString;
+secureBytes = SecureBytes;
+
+object ? = Object;
+function ? = Function;
+
+boolFalse = Bool;
+boolTrue = Bool;
+
+vector {t:Type} # [ t ] = Vector t;
+
+error code:int32 message:string = Error;
+ok = Ok;
+
+keyStoreTypeDirectory directory:string = KeyStoreType;
+keyStoreTypeInMemory = KeyStoreType;
+
+config config:string blockchain_name:string use_callbacks_for_network:Bool ignore_cache:Bool = Config;
+
+options config:config keystore_type:KeyStoreType = Options;
+options.configInfo default_wallet_id:int64 default_rwallet_init_public_key:string = options.ConfigInfo;
+options.info config_info:options.configInfo = options.Info;
+
+key public_key:string secret:secureBytes = Key;
+inputKeyRegular key:key local_password:secureBytes = InputKey;
+inputKeyFake = InputKey;
+exportedKey word_list:vector<secureString> = ExportedKey;
+exportedPemKey pem:secureString = ExportedPemKey;
+exportedEncryptedKey data:secureBytes = ExportedEncryptedKey;
+exportedUnencryptedKey data:secureBytes = ExportedUnencryptedKey;
+
+bip39Hints words:vector<string> = Bip39Hints;
+
+adnlAddress adnl_address:string = AdnlAddress;
+
+accountAddress account_address:string = AccountAddress;
+
+unpackedAccountAddress workchain_id:int32 bounceable:Bool testnet:Bool addr:bytes = UnpackedAccountAddress;
+
+internal.transactionId lt:int64 hash:bytes = internal.TransactionId;
+
+ton.blockId workchain:int32 shard:int64 seqno:int32 = internal.BlockId;
+
+raw.shardAccountState shard_account:bytes = raw.ShardAccountState;
+raw.shardAccountNone = raw.ShardAccountState;
+raw.fullAccountState balance:int64 code:bytes data:bytes last_transaction_id:internal.transactionId block_id:tonNode.blockIdExt frozen_hash:bytes sync_utime:int53 = raw.FullAccountState;
+raw.message source:accountAddress destination:accountAddress value:int64 fwd_fee:int64 ihr_fee:int64 created_lt:int64 body_hash:bytes msg_data:msg.Data = raw.Message;
+raw.transaction utime:int53 data:bytes transaction_id:internal.transactionId fee:int64 storage_fee:int64 other_fee:int64 in_msg:raw.message out_msgs:vector<raw.message> = raw.Transaction;
+raw.transactions transactions:vector<raw.transaction> previous_transaction_id:internal.transactionId = raw.Transactions;
+
+pchan.config alice_public_key:string alice_address:accountAddress bob_public_key:string bob_address:accountAddress init_timeout:int32 close_timeout:int32 channel_id:int64 = pchan.Config;
+
+raw.initialAccountState code:bytes data:bytes = InitialAccountState;
+testGiver.initialAccountState = InitialAccountState;
+testWallet.initialAccountState public_key:string = InitialAccountState;
+wallet.initialAccountState public_key:string = InitialAccountState;
+wallet.v3.initialAccountState public_key:string wallet_id:int64 = InitialAccountState;
+wallet.highload.v1.initialAccountState public_key:string wallet_id:int64 = InitialAccountState;
+wallet.highload.v2.initialAccountState public_key:string wallet_id:int64 = InitialAccountState;
+
+rwallet.limit seconds:int32 value:int64 = rwallet.Limit;
+rwallet.config start_at:int53 limits:vector<rwallet.limit> = rwallet.Config;
+rwallet.initialAccountState init_public_key:string public_key:string wallet_id:int64 = InitialAccountState;
+
+dns.initialAccountState public_key:string wallet_id:int64 = InitialAccountState;
+pchan.initialAccountState config:pchan.config = InitialAccountState;
+
+raw.accountState code:bytes data:bytes frozen_hash:bytes = AccountState;
+testWallet.accountState seqno:int32 = AccountState;
+wallet.accountState seqno:int32 = AccountState;
+wallet.v3.accountState wallet_id:int64 seqno:int32 = AccountState;
+wallet.highload.v1.accountState wallet_id:int64 seqno:int32 = AccountState;
+wallet.highload.v2.accountState wallet_id:int64 = AccountState;
+testGiver.accountState seqno:int32 = AccountState;
+dns.accountState wallet_id:int64 = AccountState;
+rwallet.accountState wallet_id:int64 seqno:int32 unlocked_balance:int64 config:rwallet.config = AccountState;
+
+pchan.stateInit signed_A:Bool signed_B:Bool min_A:int64 min_B:int64 expire_at:int53 A:int64 B:int64 = pchan.State;
+pchan.stateClose signed_A:Bool signed_B:Bool min_A:int64 min_B:int64 expire_at:int53 A:int64 B:int64 = pchan.State;
+pchan.statePayout A:int64 B:int64 = pchan.State;
+
+pchan.accountState config:pchan.config state:pchan.State description:string = AccountState;
+uninited.accountState frozen_hash:bytes = AccountState;
+
+fullAccountState address:accountAddress balance:int64 last_transaction_id:internal.transactionId block_id:tonNode.blockIdExt sync_utime:int53 account_state:AccountState revision:int32 = FullAccountState;
+
+accountRevisionList revisions:vector<fullAccountState> = AccountRevisionList;
+accountList accounts:vector<fullAccountState> = AccountList;
+
+syncStateDone = SyncState;
+syncStateInProgress from_seqno:int32 to_seqno:int32 current_seqno:int32 = SyncState;
+
+//
+// MSG
+//
+
+msg.dataRaw body:bytes init_state:bytes = msg.Data;
+msg.dataText text:bytes = msg.Data;
+msg.dataDecryptedText text:bytes = msg.Data;
+msg.dataEncryptedText text:bytes = msg.Data;
+
+msg.dataEncrypted source:accountAddress data:msg.Data = msg.DataEncrypted;
+msg.dataDecrypted proof:bytes data:msg.Data = msg.DataDecrypted;
+
+msg.dataEncryptedArray elements:vector<msg.dataEncrypted> = msg.DataEncryptedArray;
+msg.dataDecryptedArray elements:vector<msg.dataDecrypted> = msg.DataDecryptedArray;
+
+msg.message destination:accountAddress public_key:string amount:int64 data:msg.Data = msg.Message;
+
+//
+// DNS
+//
+
+dns.entryDataUnknown bytes:bytes = dns.EntryData;
+dns.entryDataText text:string = dns.EntryData;
+dns.entryDataNextResolver resolver:AccountAddress = dns.EntryData;
+dns.entryDataSmcAddress smc_address:AccountAddress = dns.EntryData;
+dns.entryDataAdnlAddress adnl_address:AdnlAddress = dns.EntryData;
+
+dns.entry name:string category:int32 entry:dns.EntryData = dns.Entry;
+
+dns.actionDeleteAll = dns.Action;
+// use category = 0 to delete all entries
+dns.actionDelete name:string category:int32 = dns.Action;
+dns.actionSet entry:dns.entry = dns.Action;
+
+dns.resolved entries:vector<dns.entry> = dns.Resolved;
+
+
+//
+// Payment channel
+//
+pchan.promise signature:bytes promise_A:int64 promise_B:int64 channel_id:int64 = pchan.Promise;
+
+pchan.actionInit inc_A:int64 inc_B:int64 min_A:int64 min_B:int64 = pchan.Action;
+pchan.actionClose extra_A:int64 extra_B:int64 promise:pchan.promise = pchan.Action;
+pchan.actionTimeout = pchan.Action;
+
+//
+// Restricted wallet initialization
+//
+rwallet.actionInit config:rwallet.config = rwallet.Action;
+
+//
+// Actions
+//
+
+actionNoop = Action;
+actionMsg messages:vector<msg.message> allow_send_to_uninited:Bool = Action;
+actionDns actions:vector<dns.Action> = Action;
+actionPchan action:pchan.Action = Action;
+actionRwallet action:rwallet.actionInit = Action;
+//actionMultisig actions:vector = Action;
+
+fees in_fwd_fee:int53 storage_fee:int53 gas_fee:int53 fwd_fee:int53 = Fees;
+query.fees source_fees:fees destination_fees:vector<fees> = query.Fees;
+// query.emulationResult exit_code:int32 fees:fees = query.EmulationResult;
+query.info id:int53 valid_until:int53 body_hash:bytes body:bytes init_state:bytes = query.Info;
+
+tvm.slice bytes:bytes = tvm.Slice;
+tvm.cell bytes:bytes = tvm.Cell;
+tvm.numberDecimal number:string = tvm.Number;
+tvm.tuple elements:vector<tvm.StackEntry> = tvm.Tuple;
+tvm.list elements:vector<tvm.StackEntry> = tvm.List;
+
+tvm.stackEntrySlice slice:tvm.slice = tvm.StackEntry;
+tvm.stackEntryCell cell:tvm.cell = tvm.StackEntry;
+tvm.stackEntryNumber number:tvm.Number = tvm.StackEntry;
+tvm.stackEntryTuple tuple:tvm.Tuple = tvm.StackEntry;
+tvm.stackEntryList list:tvm.List = tvm.StackEntry;
+tvm.stackEntryUnsupported = tvm.StackEntry;
+
+smc.info id:int53 = smc.Info;
+
+smc.methodIdNumber number:int32 = smc.MethodId;
+smc.methodIdName name:string = smc.MethodId;
+
+smc.runResult gas_used:int53 stack:vector<tvm.StackEntry> exit_code:int32 = smc.RunResult;
+
+updateSendLiteServerQuery id:int64 data:bytes = Update;
+updateSyncState sync_state:SyncState = Update;
+
+//@class LogStream @description Describes a stream to which tonlib internal log is written
+
+//@description The log is written to stderr or an OS specific log
+logStreamDefault = LogStream;
+
+//@description The log is written to a file @path Path to the file to where the internal tonlib log will be written @max_file_size Maximum size of the file to where the internal tonlib log is written before the file will be auto-rotated
+logStreamFile path:string max_file_size:int53 = LogStream;
+
+//@description The log is written nowhere
+logStreamEmpty = LogStream;
+
+
+//@description Contains a tonlib internal log verbosity level @verbosity_level Log verbosity level
+logVerbosityLevel verbosity_level:int32 = LogVerbosityLevel;
+
+//@description Contains a list of available tonlib internal log tags @tags List of log tags
+logTags tags:vector<string> = LogTags;
+
+data bytes:secureBytes = Data;
+
+liteServer.info now:int53 version:int32 capabilities:int64 = liteServer.Info;
+
+---functions---
+
+init options:options = options.Info;
+close = Ok;
+
+options.setConfig config:config = options.ConfigInfo;
+options.validateConfig config:config = options.ConfigInfo;
+
+createNewKey local_password:secureBytes mnemonic_password:secureBytes random_extra_seed:secureBytes = Key;
+deleteKey key:key = Ok;
+deleteAllKeys = Ok;
+exportKey input_key:InputKey = ExportedKey;
+exportPemKey input_key:InputKey key_password:secureBytes = ExportedPemKey;
+exportEncryptedKey input_key:InputKey key_password:secureBytes = ExportedEncryptedKey;
+exportUnencryptedKey input_key:InputKey = ExportedUnencryptedKey;
+importKey local_password:secureBytes mnemonic_password:secureBytes exported_key:exportedKey = Key;
+importPemKey local_password:secureBytes key_password:secureBytes exported_key:exportedPemKey = Key;
+importEncryptedKey local_password:secureBytes key_password:secureBytes exported_encrypted_key:exportedEncryptedKey = Key;
+importUnencryptedKey local_password:secureBytes exported_unencrypted_key:exportedUnencryptedKey = Key;
+changeLocalPassword input_key:InputKey new_local_password:secureBytes = Key;
+
+encrypt decrypted_data:secureBytes secret:secureBytes = Data;
+decrypt encrypted_data:secureBytes secret:secureBytes = Data;
+kdf password:secureBytes salt:secureBytes iterations:int32 = Data;
+
+unpackAccountAddress account_address:string = UnpackedAccountAddress;
+packAccountAddress account_address:unpackedAccountAddress = AccountAddress;
+getBip39Hints prefix:string = Bip39Hints;
+
+//raw.init initial_account_state:raw.initialAccountState = Ok;
+raw.getShardAccountState account_address:accountAddress = raw.ShardAccountState;
workchain:int32 = Data;
+raw.getTransactions private_key:InputKey account_address:accountAddress from_transaction_id:internal.transactionId = raw.Transactions;
+raw.sendMessage body:bytes = Ok;
+raw.createAndSendMessage destination:accountAddress initial_account_state:bytes data:bytes = Ok;
+raw.createQuery destination:accountAddress init_code:bytes init_data:bytes body:bytes = query.Info;
+
+sync = tonNode.BlockIdExt;
+
+// revision = 0 -- use default revision
+// revision = x (x > 0) -- use revision x
+// revision = -1 -- use experimental (newest) revision. Only for debug purpose
+//
+// workchain_id = -1 or 0. -1 for masterchain, 0 for basechain
+// NB: use wallet_id = default_wallet_id + workchain_id
+getAccountAddress initial_account_state:InitialAccountState revision:int32 workchain_id:int32 = AccountAddress;
+guessAccountRevision initial_account_state:InitialAccountState workchain_id:int32 = AccountRevisionList;
+
+guessAccount public_key:string rwallet_init_public_key:string = AccountRevisionList;
+
+getAccountState account_address:accountAddress = FullAccountState;
+createQuery private_key:InputKey address:accountAddress timeout:int32 action:Action initial_account_state:InitialAccountState = query.Info;
+
+msg.decrypt input_key:InputKey data:msg.dataEncryptedArray = msg.DataDecryptedArray;
+msg.decryptWithProof proof:bytes data:msg.dataEncrypted = msg.Data;
+
+query.send id:int53 = Ok;
+query.forget id:int53 = Ok;
+query.estimateFees id:int53 ignore_chksig:Bool = query.Fees;
+// query.emulate id:int53 ignore_chksig:Bool = query.EmulationResult;
+query.getInfo id:int53 = query.Info;
+
+smc.load account_address:accountAddress = smc.Info;
+//smc.forget id:int53 = Ok;
+smc.getCode id:int53 = tvm.Cell;
+smc.getData id:int53 = tvm.Cell;
+smc.getState id:int53 = tvm.Cell;
+smc.runGetMethod id:int53 method:smc.MethodId stack:vector<tvm.StackEntry> = smc.RunResult;
+
+dns.resolve account_address:accountAddress name:string category:int32 ttl:int32 = dns.Resolved;
+
+pchan.signPromise input_key:InputKey promise:pchan.promise = pchan.Promise;
+pchan.validatePromise public_key:bytes promise:pchan.promise = Ok;
+
+pchan.packPromise promise:pchan.promise = Data;
+pchan.unpackPromise data:secureBytes = pchan.Promise;
+
+onLiteServerQueryResult id:int64 bytes:bytes = Ok;
+onLiteServerQueryError id:int64 error:error = Ok;
+
+//withBlock id:tonNode.blockIdExt function:Function = Object;
+
+runTests dir:string = Ok;
+
+liteServer.getInfo = liteServer.Info;
+
+//@description Sets new log stream for internal logging of tonlib. This is an offline method. Can be called before authorization. Can be called synchronously @log_stream New log stream
+setLogStream log_stream:LogStream = Ok;
+
+//@description Returns information about currently used log stream for internal logging of tonlib. This is an offline method. Can be called before authorization. Can be called synchronously
+getLogStream = LogStream;
+
+//@description Sets the verbosity level of the internal logging of tonlib. This is an offline method. Can be called before authorization. Can be called synchronously
+//@new_verbosity_level New value of the verbosity level for logging.
Value 0 corresponds to fatal errors, value 1 corresponds to errors, value 2 corresponds to warnings and debug warnings, value 3 corresponds to informational, value 4 corresponds to debug, value 5 corresponds to verbose debug, value greater than 5 and up to 1023 can be used to enable even more logging +setLogVerbosityLevel new_verbosity_level:int32 = Ok; + +//@description Returns current verbosity level of the internal logging of tonlib. This is an offline method. Can be called before authorization. Can be called synchronously +getLogVerbosityLevel = LogVerbosityLevel; + +//@description Returns list of available tonlib internal log tags, for example, ["actor", "binlog", "connections", "notifications", "proxy"]. This is an offline method. Can be called before authorization. Can be called synchronously +getLogTags = LogTags; + +//@description Sets the verbosity level for a specified tonlib internal log tag. This is an offline method. Can be called before authorization. Can be called synchronously +//@tag Logging tag to change verbosity level @new_verbosity_level New verbosity level; 1-1024 +setLogTagVerbosityLevel tag:string new_verbosity_level:int32 = Ok; + +//@description Returns current verbosity level for a specified tonlib internal log tag. This is an offline method. Can be called before authorization. Can be called synchronously @tag Logging tag to change verbosity level +getLogTagVerbosityLevel tag:string = LogVerbosityLevel; + +//@description Adds a message to tonlib internal log. This is an offline method. Can be called before authorization. Can be called synchronously +//@verbosity_level Minimum verbosity level needed for the message to be logged, 0-1023 @text Text of a message to log +addLogMessage verbosity_level:int32 text:string = Ok; diff --git a/ton_tl_codegen/Cargo.toml b/ton_tl_codegen/Cargo.toml new file mode 100644 index 0000000..21b7ffd --- /dev/null +++ b/ton_tl_codegen/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = 'ton_tl_codegen' +version = '0.0.2' +authors = [ 'Aaron Gallagher <_@habnab.it>', 'Connie Hilarides ', 'Denis K. ', 'Alexey Vavilin >(); + + assert!(files.len() > 0); + files.sort(); + + let mut input = String::new(); + for file in files { + if input.len() > 0 { + input += "---types---\n"; + } + fs::File::open(&file).unwrap() + .read_to_string(&mut input).unwrap(); + } + + ton_tl_codegen::generate_code_for(None, &input, Path::new(OUTPUT_DIR)); +} + diff --git a/ton_tl_codegen/src/lib.rs b/ton_tl_codegen/src/lib.rs new file mode 100644 index 0000000..2b5913c --- /dev/null +++ b/ton_tl_codegen/src/lib.rs @@ -0,0 +1,2115 @@ +/* +* Copyright (C) 2019-2021 TON Labs. All Rights Reserved. +* +* Licensed under the SOFTWARE EVALUATION License (the "License"); you may not use +* this file except in compliance with the License. +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific TON DEV software governing permissions and +* limitations under the License. 
+*/ + +#![deny(private_in_public, unused_extern_crates)] +#![recursion_limit = "128"] + +use std::borrow::Cow; +use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet}; +use std::fs::OpenOptions; +use std::io::Write; +use std::path::{Path, PathBuf}; +use std::process::Command; +use std::sync::atomic::{AtomicBool, Ordering}; + +use proc_macro2::{TokenStream as Tokens, Ident, Span, TokenStream}; + +use parser::{Constructor, Delimiter, Field, Item, Matched, NameChunks, Type}; +use quote::{quote, TokenStreamExt}; +use serde_derive::Deserialize; +use proc_macro2::{Spacing, Punct}; + + +pub mod parser { + use std::cmp::Ordering; + + use pom::{self, Parser}; + use pom::char_class::{alphanum, digit, hex_digit}; + use pom::parser::*; + + #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] + pub enum Type { + Int, + Flags, + Named(Vec), + TypeParameter(String), + Generic(Vec, Box), + Flagged(String, u32, Box), + Repeated(Option, Vec), + } + + impl Type { + fn get_dotted_name(dotted_ident: &[String]) -> String { + dotted_ident.join(".") + } + + fn get_generic_name(dotted_ident: &[String], ty: &Type) -> String { + Self::get_dotted_name(dotted_ident) + " " + ty.get_name().as_str() + } + + fn get_flagged_name(name: &str, bit: u32, ty: &Type) -> String { + name.to_owned() + "." + bit.to_string().as_str() + "?" + ty.get_name().as_str() + } + + fn get_repeated_name(repeat_count: Option, fields: &[Field]) -> String { + let mut result = String::new(); + if let Some(value) = repeat_count { + result = format!("{} * ", value); + } + result += "[ "; + for field in fields { + result += &(field.get_str() + " "); + } + result + "]" + } + + pub fn get_name(&self) -> String { + use self::Type::*; + match *self { + Int => "#".to_owned(), + Flags => "flags".to_owned(), + Named(ref v) => Self::get_dotted_name(&v), + TypeParameter(ref ty) => ty.clone(), + Generic(ref v, ref ty) => Self::get_generic_name(&v, &ty), + Flagged(ref name, ref bit, ref ty) => Self::get_flagged_name(&name, *bit, &ty), + Repeated(repeat_count, ref fields) => Self::get_repeated_name(repeat_count, &fields), + } + } + + pub fn names_vec(&self) -> Option<&Vec> { + use self::Type::*; + match *self { + Int | + Flags | + TypeParameter(..) | + Flagged(..) | + Repeated(..) => None, + Named(ref v) | + Generic(ref v, ..) => Some(v), + } + } + + pub fn names_vec_mut(&mut self) -> Option<&mut Vec> { + use self::Type::*; + match *self { + Int | + Flags | + TypeParameter(..) | + Flagged(..) | + Repeated(..) => None, + Named(ref mut v) | + Generic(ref mut v, ..) => Some(v), + } + } + + pub fn owned_names_vec(&self) -> Vec { + self.names_vec().cloned().unwrap_or_else(Vec::new) + } + + pub fn namespaces(&self) -> &[String] { + self.names_vec() + .map(|v| &v[..(v.len() - 1).max(0)]) + .unwrap_or(&[]) + } + + pub fn name(&self) -> Option<&str> { + self.names_vec().and_then(|v| v.last().map(String::as_str)) + } + + pub fn flag_field(&self) -> Option<(String, u32)> { + use self::Type::*; + match self { + &Flagged(ref f, b, _) => Some((f.clone(), b)), + _ => None, + } + } + + pub fn is_type_parameter(&self) -> bool { + use self::Type::*; + match self { + &TypeParameter(..) 
=> true, + _ => false, + } + } + } + + #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] + pub struct Field { + pub name: Option, + pub ty: Type, + } + + impl Field { + pub fn get_str(&self) -> String { + match self.name { + Some(ref name) => name.clone() + ":" + self.ty.get_name().as_str(), + None => self.ty.get_name(), + } + } + } + + #[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] + pub struct Constructor { + pub variant: Ty, + pub tl_id: Option, + pub type_parameters: Vec, + pub fields: Vec, + pub output: Ty, + pub original_variant: String, + pub original_output: String, + pub is_function: bool, + } + + #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] + pub enum Delimiter { + Types, + Functions, + } + + #[derive(Debug, Clone)] + pub enum Item { + Delimiter(Delimiter), + Constructor(Constructor), + Layer(u32), + } + + #[derive(Debug, Clone)] + pub struct Matched(pub T, pub String); + + impl Eq for Matched> { + } + + impl PartialEq for Matched> { + fn eq(&self, other: &Self) -> bool { + PartialEq::eq(&self.1, &other.1) + } + } + + impl Ord for Matched> { + fn cmp(&self, other: &Self) -> Ordering { + Ord::cmp(&self.1, &other.1) + } + } + + impl PartialOrd for Matched> { + fn partial_cmp(&self, other: &Self) -> Option { + PartialOrd::partial_cmp(&self.1, &other.1) + } + } + + fn utf8(v: Vec) -> String { + String::from_utf8(v).unwrap() + } + + fn simple_space() -> Parser { + one_of(b" \t\r\n").repeat(0..).discard() + } + + fn ident() -> Parser { + (is_a(alphanum) | sym(b'_')).repeat(1..).map(utf8) + } + + fn dotted_ident() -> Parser> { + ((ident() - sym(b'.')).repeat(0..) + ident()) + .map(|(mut v, i)| { + v.push(i); + v + }) + } + + fn tl_id() -> Parser { + sym(b'#') * is_a(hex_digit).repeat(0..9).convert(|s| u32::from_str_radix(&utf8(s), 16)) + } + + fn decimal() -> Parser { + is_a(digit).repeat(0..).convert(|s| utf8(s).parse()) + } + + fn ty_flag() -> Parser { + (ident() - sym(b'.') + decimal() - sym(b'?') + call(ty)) + .map(|((name, bit), ty)| Type::Flagged(name, bit, Box::new(ty))) + } + + fn ty_generic() -> Parser { + (sym(b'(') * dotted_ident() - simple_space() + call(ty) - sym(b')')).map(|(name, ty)| Type::Generic(name, Box::new(ty))) | + (dotted_ident() - sym(b'<') + call(ty) - sym(b'>')).map(|(name, ty)| Type::Generic(name, Box::new(ty))) + } + + fn ty() -> Parser { + sym(b'#').map(|_| Type::Int) | + sym(b'!') * ident().map(Type::TypeParameter) | + ty_flag() | + ty_generic() | + dotted_ident().map(Type::Named) + } + + fn ty_space_generic() -> Parser { + let space_generic = dotted_ident() - simple_space() + ty(); + space_generic.map(|(name, ty)| Type::Generic(name, Box::new(ty))) | + ty() + } + + fn base_field() -> Parser { + (ident() - sym(b':') + ty()) + .map(|(name, ty)| Field { name: Some(name), ty }) + .name("field") + } + + fn repeated_field() -> Parser { + ((decimal() - simple_space() * sym(b'*') * simple_space()).opt() + + sym(b'[') + * call(base_fields) + - seq(b" ]")) + .map(|(repeat_count, fv)| + Field { name: None, ty: Type::Repeated(repeat_count, fv) } + ) + } + + fn base_field_anonymous_or_repeated() -> Parser { + repeated_field() | + base_field() | + ty().map(|ty| Field { name: None, ty }) + } + + fn base_fields() -> Parser> { + (simple_space() * base_field_anonymous_or_repeated()).repeat(0..) + } + + fn ty_param_field() -> Parser { + sym(b'{') * base_field() - sym(b'}') + } + + fn fields() -> Parser, Vec)> { + (simple_space().opt() * sym(b'?')).map(|_| (vec![], vec![])) | + (simple_space() * ty_param_field()).repeat(0..) 
+ base_fields() + } + + fn output_and_matched(inner: Parser) -> Parser> { + Parser::new(move |input| { + let start = input.position(); + let output = inner.parse(input)?; + let end = input.position(); + Ok(Matched(output, utf8(input.segment(start, end)))) + }) + } + + fn get_tl_id( + parsed_id: Option, + variant: String, + type_parameters: Vec, + fields: Vec, + output: String) -> Option + { + match parsed_id { + Some(id) => Some(id), + None => { + let mut string = variant; + for ty in type_parameters { + string += &(" {".to_owned() + ty.get_str().as_str() + "}"); + } + for field in fields { + string += " "; + string += field.get_str().as_str(); + } + + string += " = "; + string += &output; + + Some(crc::crc32::checksum_ieee(string.as_bytes())) + } + } + } + + fn constructor() -> Parser> { + (output_and_matched(dotted_ident()) + tl_id().opt() + fields() - simple_space() - sym(b'=') - simple_space() + output_and_matched(ty_space_generic()) - sym(b';')) + .map(|(((variant, tl_id), (type_parameters, fields)), output)| Constructor { + tl_id: get_tl_id(tl_id, variant.1.clone(), type_parameters.clone(), fields.clone(), output.1.clone()), + type_parameters, fields, + original_variant: variant.1, + variant: Type::Named(variant.0), + original_output: output.1, + output: output.0, + is_function: false, + }) + .name("constructor") + } + + fn delimiter() -> Parser { + seq(b"---types---").map(|_| Delimiter::Types) | + seq(b"---functions---").map(|_| Delimiter::Functions) + } + + fn layer() -> Parser { + seq(b"// LAYER ") * decimal() + } + + fn space() -> Parser { + let end_comment = || seq(b"*/"); + ( one_of(b" \t\r\n").discard() | + (seq(b"//") - !(seq(b" LAYER ")) - none_of(b"\r\n").repeat(0..)).discard() | + (seq(b"/*") * (!end_comment() * take(1)).repeat(0..) * end_comment()).discard() + ).repeat(0..).discard() + } + + fn item() -> Parser> { + output_and_matched({ + delimiter().map(Item::Delimiter) | + constructor().map(Item::Constructor) | + layer().map(Item::Layer) + }) - space() + } + + fn lines() -> Parser>> { + space() * item().repeat(0..) 
- end() + } + + pub fn parse_string(input: &str) -> Result>, pom::Error> { + let mut input = pom::DataInput::new(input.as_bytes()); + lines().parse(&mut input) + } + + fn name_ident() -> Parser { + ident().map(|s| NameChunks(vec![s])) + } + + #[derive(Debug, Clone)] + pub struct NameChunks(pub Vec); + + impl NameChunks { + pub fn from_name(name: &str) -> Result { + let mut input = pom::DataInput::new(name.as_bytes()); + (name_ident() - end()).parse(&mut input) + } + + pub fn as_snake_case(&self) -> String { + let names: Vec = self.0.iter() + .map(|s| s.to_ascii_lowercase()) + .collect(); + names.join("_") + } + + pub fn as_upper_camel_case(&self) -> String { + self.0.iter() + .cloned() + .map(|mut s| { + s[..1].make_ascii_uppercase(); + s + }) + .collect() + } + + pub fn common_prefix_of(&self, other: &Self) -> Self { + let index = self.0.iter() + .zip(&other.0) + .enumerate() + .skip_while(|&(_, (a, b))| a == b) + .next() + .map(|(e, _)| e) + .unwrap_or(self.0.len().min(other.0.len())); + NameChunks((&self.0[..index]).to_vec()) + } + + pub fn trim_common_prefix(&mut self, other: &Self) { + assert!(self.0.starts_with(&other.0), "{:?} not a prefix of {:?}", self, other); + self.0.drain(0..(other.0.len())); + } + } +} + +fn fail_hard() -> Tokens { + quote!(FAIL_LOUDLY_AT_COMPILE_TIME!()) +} + +#[derive(Debug)] +struct Constructors(Vec>>); + +impl Default for Constructors { + fn default() -> Self { + Constructors(Vec::new()) + } +} + +type TypeResolutionMap = BTreeMap, TypeIR>; + +#[derive(Debug)] +enum NamespaceItem { + AsEnum(Constructors), + AsVariant(Matched>), + AsFunction(Matched>), + AnotherNamespace(Namespace), +} + +fn write_to_file(contents: impl ToString, filename: &Path, append: bool) { + let mut options = OpenOptions::new(); + options.create(true) + .write(true) + .truncate(!append) + .append(append); + + let mut file = options.open(filename) + .unwrap_or_else(|err| panic!( + "Unable to open file <{filename}> with the given parameters: {options:?}: {err}", + filename = filename.to_string_lossy(), + options = options, + err = err + )); + + file.write_all(contents.to_string().as_bytes()) + .unwrap_or_else(|err| panic!( + "Unable to write contents into the file: {}: {}", filename.to_string_lossy(), err + )); +} + +fn reformat(filename: &Path) { + static WARNING_PRINTED: AtomicBool = AtomicBool::new(false); + if !cfg!(feature = "reformat") { + if !WARNING_PRINTED.swap(true, Ordering::Relaxed) { + println!("use feature \"reformat\" in ton_api cargo.toml for ton_tl_codegen \ + to get formatted rs files, but it slows down generation process. \ + You can reformat manualy desired files (Shift + Alt + F in VSCode)."); + } + return + } + const INSTALL_INSTRUCTIONS: &str = "It's not an issue, the building will proceed. \ + If you wish to develop using ton_api, you can install rustfmt by running command: \ + `rustup component add rustfmt`"; + let status = match Command::new("rustfmt") + .arg("--edition") + .arg("2018") + .arg(filename) + .status() + { + Ok(status) => status, + Err(err) => { + if !WARNING_PRINTED.swap(true, Ordering::Relaxed) { + println!("cargo:warning=rustfmt failed to start: {:?}. {}", err, INSTALL_INSTRUCTIONS); + } + return + } + }; + if !status.success() { + if !WARNING_PRINTED.swap(true, Ordering::Relaxed) { + println!("cargo:warning=rustfmt command returned code: {}. 
{}", status.code().unwrap(), INSTALL_INSTRUCTIONS); + } + } +} + +#[derive(Debug, Default)] +struct Namespace(BTreeMap); + +impl Namespace { + fn descend_tree(&mut self, names: &[syn::Ident]) -> &mut Self { + use self::NamespaceItem::*; + names.iter() + .fold(self, |ns, name| { + match ns.0.entry(name.clone()).or_insert_with(|| AnotherNamespace(Default::default())) { + &mut AnotherNamespace(ref mut ns) => ns, + other => panic!("descend_tree: duplicate namespace item {} {:?} {:?}", name, other, names), + } + }) + } + + fn insert(&mut self, mut names: Vec, item: NamespaceItem) { + let leaf = names.pop().unwrap(); + let namespace = self.descend_tree(&names); + if namespace.0.contains_key(&leaf) { + println!("cargo:warning=insert: duplicate namespace item {:?}", names); + return; + } + namespace.0.insert(leaf, item); + } + + fn print_rust(&self, config: &Option, prelude: impl ToString, path: &Path, append: bool) -> (PathBuf, PathBuf) { + let mut has_submodules = false; + let items = self.0.iter() + .map(|(name, item)| { + match item { + NamespaceItem::AsEnum(ref cs) => cs.as_enum(config), + NamespaceItem::AsVariant(ref cm) => cm.0.as_variant_type_struct(config, &cm.1), + NamespaceItem::AsFunction(ref cm) => cm.0.as_function_struct(config, &cm.1), + NamespaceItem::AnotherNamespace(ref ns) => { + let prelude = quote! { + use serde_derive::{Serialize, Deserialize}; + }; + let (filename, _dir) = ns.print_rust(config, prelude, path.join(name.to_string()).as_path(), false); + reformat(&filename); + has_submodules = true; + + quote! { pub mod #name; } + } + } + }); + + let contents = quote!(#( #items )*); + + let filename = if has_submodules { + path.join("mod.rs") + } else { + path.with_extension("rs") + }; + + let dir = filename.parent().unwrap_or_else(|| + panic!("Unable to get parent directory for: {}", filename.to_string_lossy()) + ).to_path_buf(); + + std::fs::create_dir_all(&dir).unwrap_or_else(|err| + panic!("Unable to create directory: {} err: {}", filename.to_string_lossy(), err) + ); + + write_to_file(prelude, &filename, append); + write_to_file(contents, &filename, true); + (filename, dir) + } + + fn populate_all_constructors<'this>(&'this self, to_populate: &mut Vec<&'this Constructor>) { + use self::NamespaceItem::*; + for item in self.0.values() { + match *item { + AsEnum(ref cs) => { + to_populate.extend(cs.0.iter().map(|cm| &cm.0)); + } + AsFunction(ref c) => { + to_populate.push(&c.0); + } + AnotherNamespace(ref ns) => { + ns.populate_all_constructors(to_populate); + } + _ => {} + }; + } + } +} + +#[derive(Deserialize)] +pub struct Config { + exclude_types: HashSet, + need_box: HashSet, + need_determiner: HashSet, + replace_with_bytes: HashSet, + additional_derives: HashMap>, +} + +#[derive(Debug)] +struct AllConstructors { + items: Namespace, + layer: u32, +} + +fn filter_items(config: &Option, iv: &mut Vec>) { + let built_in_types: HashSet<&'static str> = [ + "int", "int32", "int53", "int64", "int128", "int256", + "long", "double", "bytes", "vector", + "string", "object", "function", "Object", "Function", + "secureString", "secureBytes", "true", "false", + ].iter().cloned().collect(); + + iv.retain(|&Matched(ref i, _)| { + let c = match i { + &Item::Constructor(ref c) => c, + _ => return true, + }; + // Blacklist some annoying inconsistencies. 
+ if built_in_types.contains(&c.original_variant.as_str()) { + return false; + } + if let Some(config) = config { + return !config.exclude_types.contains(&c.original_variant); + } + true + }); +} + +impl AllConstructors { + fn from_matched_items(config: &Option, iv: Vec>) -> Self { + use self::NamespaceItem::*; + + let mut current = Delimiter::Types; + let mut ret = AllConstructors { + items: Default::default(), + layer: 0, + }; + let mut constructors_tree: BTreeMap, Constructors> = BTreeMap::new(); + let mut functions: Vec>> = Vec::new(); + for Matched(item, text) in iv { + match item { + Item::Delimiter(delimiter) => current = delimiter, + Item::Constructor(mut constructor) => { + match current { + Delimiter::Types => { + let vec = &mut constructors_tree.entry(constructor.output.owned_names_vec()) + .or_insert_with(Default::default) + .0; + let cons = Matched(constructor, text); + if let Err(index) = vec.binary_search(&cons) { + vec.insert(index, cons); + } + }, + Delimiter::Functions => { + constructor.is_function = true; + functions.push(Matched(constructor, text)); + }, + } + }, + Item::Layer(layer_index) => ret.layer = layer_index, + } + } + let mut resolve_map: TypeResolutionMap = Default::default(); + for (_, cs) in &mut constructors_tree { + let base_ns = cs.first_constructor().output.namespaces().to_vec(); + cs.fix_names(config, &base_ns, &mut resolve_map); + } + for &mut Matched(ref mut c, _) in &mut functions { + camelize(config, &mut resolve_map, &mut c.variant, |_, ns| { + ns.insert(0, "rpc".to_string()); + false + }); + } + for (_, cs) in constructors_tree { + let cs = cs.resolve(config, &resolve_map); + for &Matched(ref c, ref m) in &cs.0 { + ret.items.insert( + c.variant.owned_names_vec(), + AsVariant(Matched(c.clone(), m.clone()))); + } + ret.items.insert(cs.first_constructor().output.owned_names_vec(), AsEnum(cs)); + } + for Matched(c, m) in functions { + let c = c.resolve(config, Delimiter::Functions, &resolve_map); + ret.items.insert(c.variant.owned_names_vec(), AsFunction(Matched(c, m))); + } + ret + } + + fn as_lazy_statics(&self) -> Tokens { + let mut all_constructors = Default::default(); + self.items.populate_all_constructors(&mut all_constructors); + let dynamic_deserializers = all_constructors.iter() + .filter_map(|c| c.as_dynamic_deserializer()); + + quote! { + fn make_deserializers() -> ::std::vec::Vec { + vec![ #( #dynamic_deserializers ),* ] + } + + lazy_static::lazy_static! { + static ref ALL_DESERIALIZERS: ::std::vec::Vec = make_deserializers(); + + pub static ref BY_NUMBER: + ::std::collections::BTreeMap = + ALL_DESERIALIZERS.iter() + .map(|d| (d.id, d)) + .collect(); + + pub static ref BY_NAME: + ::std::collections::BTreeMap<&'static str, &'static crate::DynamicDeserializer> = + ALL_DESERIALIZERS.iter() + .map(|d| (d.type_name, d)) + .collect(); + } + } + } + + fn print_tokens(&self, config: &Option, prelude: impl ToString, path: &Path) { + let (filename, dir) = self.items.print_rust(config, prelude, path, false); + let dynamic_deserializers = self.as_lazy_statics(); + write_to_file(dynamic_deserializers, &dir.join("dynamic.rs"), false); + write_to_file(quote! 
{ pub mod dynamic; }, &filename, true); + reformat(&filename) + } +} + +fn no_conflict_ident(s: &str) -> syn::Ident { + let mut candidate: String = s.into(); + loop { + match syn::parse_str(&candidate) { + Ok(i) => return i, + Err(_) => candidate.push('_'), + } + } +} + +fn no_conflict_local_ident(s: &str) -> Option { + match s { + "bytes" => Some(syn::parse_str("bytes_").unwrap()), + _ => None, + } +} + +fn wrap_option_type(wrap: bool, ty: Tokens) -> Tokens { + if wrap { + quote! { Option<#ty> } + } else { + ty + } +} + +fn wrap_option_value(wrap: bool, ty: Tokens) -> Tokens { + if wrap { + quote! { Some(#ty) } + } else { + ty + } +} + +#[derive(Debug, Clone)] +struct TypeName { + tokens: Tokens, + tokens_canon: String, + idents: Option>, +} + +impl PartialEq for TypeName { + fn eq(&self, other: &Self) -> bool { + self.tokens_canon == other.tokens_canon + } +} + +impl Eq for TypeName {} + +impl PartialOrd for TypeName { + fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> { + self.tokens_canon.partial_cmp(&other.tokens_canon) + } +} + +impl Ord for TypeName { + fn cmp(&self, other: &Self) -> ::std::cmp::Ordering { + self.tokens_canon.cmp(&other.tokens_canon) + } +} + +impl TypeName { + fn transformed_tokens(&self, func: F) -> Tokens + where F: FnOnce(&Tokens) -> Tokens, + { + func(&self.tokens) + } + + fn transformed(&self, func: F) -> Self + where F: FnOnce(&Tokens) -> Tokens, + { + let tokens = self.transformed_tokens(func); + let tokens_canon = format!("{}", tokens); + TypeName { tokens, tokens_canon, idents: None } + } +} + +fn to_snake_case(ident: &str) -> String { + let mut result = String::new(); + for c in ident.chars() { + if c.is_ascii_uppercase() && result.len() > 0 { + result.push_str(format!("_{}", c.to_ascii_lowercase()).as_str()); + } else { + result.push(c.to_ascii_lowercase()); + } + } + result +} + +impl ::std::iter::FromIterator for TypeName + where S: AsRef +{ + fn from_iter(iter: T) -> Self + where T: IntoIterator, + { + let mut tokens = quote!(crate::ton); + let mut idents = vec![]; + let mut iter = iter.into_iter(); + if let Some(mut last_segment) = iter.next() { + while let Some(segment) = iter.next() { + let ident = no_conflict_ident(to_snake_case(last_segment.as_ref()).as_str()); + idents.push(ident); + last_segment = segment; + } + idents.push(no_conflict_ident(last_segment.as_ref())); + } + + for ident in &idents { + tokens = quote!(#tokens::#ident); + } + let tokens_canon = format!("{}", tokens); + + TypeName { tokens, tokens_canon, idents: Some(idents) } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +enum WireKind { + Bare(TypeName), + Boxed(TypeName), + TypeParameter(syn::Ident), + FlaggedTrue, + Flags, + ExtraDefault(TypeName), +} + +fn is_first_char_lowercase(s: &str) -> bool { + s.chars().next() + .map(char::is_lowercase) + .unwrap_or(false) +} + +impl WireKind { + fn from_names_and_hint(names: &[String], force_bare: bool) -> Self { + use self::WireKind::*; + match names.last().map(String::as_str) { + Some("true") => FlaggedTrue, + Some(s) if force_bare || is_first_char_lowercase(s) => + Bare(names.iter().map(String::as_str).collect()), + Some(_) => + Boxed(names.iter().map(String::as_str).collect()), + None => unimplemented!(), + } + } + + fn type_parameter(id: syn::Ident) -> Self { + WireKind::TypeParameter(id) + } + + fn become_container_for(&mut self, include_determiner: bool, contained: Self) { + use self::WireKind::*; + let ty_loc = match *self { + Bare(ref mut t) | + Boxed(ref mut t) => t, + _ => 
unimplemented!(), + }; + let contained = if include_determiner { + match contained { + Bare(ty) => ty.transformed_tokens(|t| quote!(crate::ton::Bare, #t)), + Boxed(ty) => ty.transformed_tokens(|t| quote!(crate::ton::Boxed, #t)), + TypeParameter(t) => quote!(crate::ton::Boxed, #t), + _ => unimplemented!(), + } + } else { + match contained { + Bare(t) | Boxed(t) => t.tokens, + TypeParameter(t) => quote!(#t), + _ => unimplemented!(), + } + }; + *ty_loc = ty_loc.transformed(|ty| quote!(#ty<#contained>)); + } + + fn as_read_method(&self) -> Tokens { + use self::WireKind::*; + match *self { + Bare(..) | Flags => quote!(read_bare), + Boxed(..) | TypeParameter(..) => quote!(read_boxed), + ExtraDefault(..) => quote!(just_default), + FlaggedTrue => fail_hard(), + } + } + + fn as_write_method(&self) -> Option { + use self::WireKind::*; + match *self { + Bare(..) | Flags => Some(quote!(write_bare)), + Boxed(..) | TypeParameter(..) => Some(quote!(write_boxed)), + ExtraDefault(..) => None, + FlaggedTrue => Some(fail_hard()), + } + } + + fn is_unit(&self) -> bool { + use self::WireKind::*; + match *self { + FlaggedTrue => true, + _ => false, + } + } + + fn is_flags(&self) -> bool { + use self::WireKind::*; + match *self { + Flags => true, + _ => false, + } + } + + fn is_type_parameter(&self) -> bool { + use self::WireKind::*; + match *self { + TypeParameter(..) => true, + _ => false, + } + } + + fn is_extra(&self) -> bool { + use self::WireKind::*; + match *self { + ExtraDefault(..) => true, + _ => false, + } + } + + fn opt_names_slice(&self) -> Option<&[syn::Ident]> { + use self::WireKind::*; + match *self { + Bare(ref t) | + Boxed(ref t) | + ExtraDefault(ref t) => t.idents.as_ref().map(|v| v.as_slice()), + _ => None, + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +struct TypeIR { + wire_kind: WireKind, + needs_box: bool, + needs_determiner: bool, + with_option: bool, +} + +impl TypeIR { + fn from_names(config: &Option, names: &[String]) -> Self { + Self::from_names_and_hint(config, names, false) + } + + fn from_names_and_hint(config: &Option, names: &[String], force_bare: bool) -> Self { + let wire_kind = WireKind::from_names_and_hint(names, force_bare); + let (needs_box, needs_determiner) = if let Some(name) = names.last() { + let needs_determiner = name == "vector" || name == "Vector"; + if let Some(config) = config { + (if config.need_box.contains(name) { + true + } else { + let len = names.len(); + if len >= 2 { + let name = format!("{}.{}", names[len - 2], names[len - 1]); + config.need_box.contains(&name) + } else { + false + } + }, + needs_determiner || config.need_determiner.contains(name) + ) + } else { + (false, needs_determiner) + } + } else { + (false, false) + }; + + TypeIR { + wire_kind, + with_option: false, + needs_box, + needs_determiner, + } + } + + fn bytes() -> Self { + TypeIR { + wire_kind: WireKind::Bare(["bytes"].iter().collect()), + needs_box: false, + needs_determiner: false, + with_option: false, + } + } + + fn int() -> Self { + TypeIR { + wire_kind: WireKind::Bare(["int"].iter().collect()), + needs_box: false, + needs_determiner: false, + with_option: false, + } + } + + fn type_parameter(id: syn::Ident) -> Self { + TypeIR { + wire_kind: WireKind::type_parameter(id), + needs_box: false, + needs_determiner: false, + with_option: false, + } + } + + fn flags() -> Self { + TypeIR { + wire_kind: WireKind::Flags, + needs_box: false, + needs_determiner: false, + with_option: false, + } + } + + fn repeated() -> Self { + unimplemented!() + } + + fn 
with_container(self, mut container: TypeIR) -> Self { + container.wire_kind.become_container_for(container.needs_determiner, self.wire_kind); + container + } + + fn with_option_wrapper(mut self) -> Self { + if !self.wire_kind.is_unit() { + self.with_option = true; + } + self + } + + fn io_turbofish(&self, with_tlobject: bool) -> Tokens { + use self::WireKind::*; + let ty = match self.wire_kind { + Flags => quote!(crate::ton::Flags), + TypeParameter(_) if with_tlobject => quote!(crate::ton::TLObject), + _ => self.non_field_type(), + }; + quote!(::<#ty>) + } + + fn assemble_method(&self, with_tlobject: bool, method: Tokens) -> Tokens { + let turbofish = self.io_turbofish(with_tlobject); + quote!(#method #turbofish) + } + + fn as_read_method(&self) -> Tokens { + self.assemble_method(true, self.wire_kind.as_read_method()) + } + + fn as_write_method(&self) -> Option { + self.wire_kind.as_write_method().map(|m| self.assemble_method(false, m)) + } + + fn non_field_type(&self) -> Tokens { + use self::WireKind::*; + match self.wire_kind { + Bare(ref t) | + Boxed(ref t) | + ExtraDefault(ref t) => t.tokens.clone(), + TypeParameter(ref t) => quote!(#t), + _ => fail_hard(), + } + } + + fn unboxed(&self) -> Tokens { + wrap_option_type(self.with_option, self.non_field_type()) + } + + fn boxed(&self) -> Tokens { + let mut ty = self.non_field_type(); + if self.needs_box { + ty = quote!(Box<#ty>); + } + wrap_option_type(self.with_option, ty) + } + + fn field_type(&self) -> Tokens { + if self.is_unit() { + quote!(bool) + } else { + self.boxed() + } + } + + fn ref_prefix(&self) -> Tokens { + if self.is_unit() {quote!()} else {quote!(ref)} + } + + fn reference_prefix(&self) -> Tokens { + if self.is_unit() {quote!()} else {quote!(&)} + } + + fn local_reference_prefix(&self) -> Tokens { + if self.is_unit() {quote!(&)} else {quote!()} + } + + fn field_reference_type(&self) -> Tokens { + let ref_ = self.reference_prefix(); + let mut ty = if self.is_unit() { + quote!(bool) + } else { + self.non_field_type() + }; + if self.needs_box { + ty = quote!(Box<#ty>); + } + wrap_option_type(self.with_option, quote!(#ref_ #ty)) + } + + fn as_field_reference(&self, on: Tokens) -> Tokens { + if self.is_unit() { + wrap_option_value(self.with_option, quote!(#on)) + } else if self.with_option { + quote!(#on.as_ref()) + } else { + quote!(&#on) + } + } + + fn is_defined_trailer(&self) -> Tokens { + use self::WireKind::*; + match self.wire_kind { + _ if self.with_option => quote!(.is_some()), + FlaggedTrue => quote!(), + _ => fail_hard(), + } + } + + fn is_unit(&self) -> bool { + self.wire_kind.is_unit() + } + + fn is_flags(&self) -> bool { + self.wire_kind.is_flags() + } + + fn is_type_parameter(&self) -> bool { + self.wire_kind.is_type_parameter() + } + + fn is_extra(&self) -> bool { + self.wire_kind.is_extra() + } + + fn owned_names_vec(&self) -> Vec { + self.wire_kind.opt_names_slice() + .unwrap() + .iter() + .cloned() + .collect() + } + + fn name(&self) -> syn::Ident { + self.wire_kind.opt_names_slice() + .and_then(|s| s.last()) + .unwrap() + .clone() + } +} + +#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)] +struct FieldIR { + name: String, + ty: TypeIR, + flag_bit: Option<(String, u32)>, +} + +impl Field { + fn resolved(&self, config: &Option, resolve_map: &TypeResolutionMap, replace_string_with_bytes: bool) -> FieldIR { + let ty = if replace_string_with_bytes && self.ty.name() == Some("string") { + TypeIR::bytes() + } else { + let is_flag_field = if let Some(ref value) = self.name { + value == "flags" + } else { + 
false + }; + self.ty.resolved(config, resolve_map, is_flag_field) + }; + + let name = if let Some(ref value) = self.name { + value.clone() + } else { + String::new() + }; + FieldIR { ty, name, flag_bit: self.ty.flag_field() } + } +} + +impl FieldIR { + fn name(&self) -> syn::Ident { + no_conflict_ident(&self.name) + } + + fn local_name(&self) -> Option { + no_conflict_local_ident(&self.name) + } + + fn as_field(&self) -> Tokens { + let name = self.name(); + let ty = self.ty.field_type(); + + quote! { + #name: #ty + } + } + + fn extra_default(field_name: &str, wire_kind_type: TypeName) -> Self { + FieldIR { + name: field_name.to_string(), + ty: TypeIR { + wire_kind: WireKind::ExtraDefault(wire_kind_type), + needs_box: false, + needs_determiner: false, + with_option: false, + }, + flag_bit: None, + } + } +} + +impl Type { + fn resolved(&self, config: &Option, resolve_map: &TypeResolutionMap, is_flag_field: bool) -> TypeIR { + use Type::*; + match *self { + Named(ref names) => { + match resolve_map.get(names) { + Some(ir) => return ir.clone(), + None => TypeIR::from_names(config, names), + } + }, + TypeParameter(ref name) => TypeIR::type_parameter(no_conflict_ident(name)), + Generic(ref container, ref ty) => { + let ty = ty.resolved(config, resolve_map, false); + let container = match resolve_map.get(container) { + Some(ir) => ir.clone(), + None => TypeIR::from_names(config, container), + }; + ty.with_container(container) + }, + Flagged(_, _, ref ty) => { + ty.resolved(config, resolve_map, false).with_option_wrapper() + }, + Flags => TypeIR::flags(), + Int if is_flag_field => TypeIR::flags(), + Int => TypeIR::int(), + Repeated(..) => TypeIR::repeated(), + } + } +} + +impl Constructor { + fn resolve(self, config: &Option, which: Delimiter, resolve_map: &TypeResolutionMap) -> Constructor { + Constructor { + variant: self.variant.resolved(config, resolve_map, false), + fields: self.resolved_fields(config, resolve_map), + output: self.resolved_output(config, which, resolve_map), + type_parameters: self.type_parameters.iter() + .map(|t| t.resolved(config, resolve_map, false)) + .collect(), + tl_id: self.tl_id, + original_variant: self.original_variant, + original_output: self.original_output, + is_function: self.is_function, + } + } + + fn resolved_output(&self, config: &Option, which: Delimiter, resolve_map: &TypeResolutionMap) -> TypeIR { + if which == Delimiter::Functions && self.is_output_a_type_parameter() { + TypeIR::type_parameter(no_conflict_ident(self.output.name().unwrap())) + } else { + self.output.resolved(config, resolve_map, false) + } + } + + fn is_output_a_type_parameter(&self) -> bool { + let output_name = match &self.output { + &Type::Named(ref v) if v.len() == 1 => v[0].as_str(), + _ => return false, + }; + for p in &self.type_parameters { + if p.name.as_ref().map(String::as_str) == Some(output_name) { + return true; + } + } + false + } + + fn resolved_fields(&self, config: &Option, resolve_map: &TypeResolutionMap) -> Vec { + let replace_string_with_bytes = if let Some(config) = config { + config.replace_with_bytes.contains(&self.original_variant) + } else { + false + }; + + let mut ret: Vec<_> = self.fields.iter() + .map(|f| f.resolved(config, resolve_map, replace_string_with_bytes)) + .collect(); + if &self.original_variant == "manual.gzip_packed" { + ret.push(FieldIR::extra_default( + "unpacked", + ["TLObject"].iter().collect::().transformed(|t| quote!(Option<#t>)))); + } + ret + } +} + +impl Constructor { + fn fields_tokens(&self, pub_: Tokens, trailer: Tokens) -> Tokens { + 
let pub_ = std::iter::repeat(pub_); + if self.fields.is_empty() { + quote! { #trailer } + } else { + let fields = self.fields.iter() + .filter(|f| !f.ty.is_flags()) + .map(FieldIR::as_field); + quote! { + { #( #pub_ #fields , )* } + } + } + } + + fn generics(&self, mut param_cb: F) -> Tokens + where F: FnMut(syn::Ident) -> Tokens, + { + if self.type_parameters.is_empty() { + return quote!(); + } + let bounds = self.type_parameters.iter().map(move |p| param_cb(p.name())); + quote! { <#(#bounds),*> } + } + + fn impl_generics(&self) -> Tokens { + self.generics(|ty| quote!(#ty)) + } + + fn rpc_generics(&self) -> Tokens { + self.generics(|ty| quote!(#ty: crate::Function)) + } + + fn deserialize_tlobject_generics(&self) -> Tokens { + self.generics(|_| quote!(crate::ton::TLObject)) + } + + fn serialize_generics(&self) -> Tokens { + self.generics(|ty| quote!(#ty: crate::AnyBoxedSerialize)) + } + + fn as_struct_determine_flags(&self, field_prefix: Tokens) -> Option { + match self.fields.iter().filter(|f| f.ty.is_flags()).count() { + 0 => return None, + 1 => (), + n => panic!("{} flags fields found on {:?}", n, self), + } + let determination = { + let fields = self.fields.iter() + .filter_map(|f| { + let name = f.name(); + f.flag_bit.as_ref().map(|(_name, bit)| { + let is_defined = f.ty.is_defined_trailer(); + quote! { + if #field_prefix #name #is_defined { + _flags |= 1 << #bit; + } + } + }) + }); + quote! { + let mut _flags = 0u32; + #( #fields )* + } + }; + Some(determination) + } + + fn as_struct_doc(&self, matched: &str) -> String { + format!("TL-derived from `{}`\n\n```text\n{}\n```\n", self.original_variant, matched) + } + + fn as_struct_base(&self, config: &Option, name: &syn::Ident, matched: &str) -> Tokens { + let doc = self.as_struct_doc(matched); + let derives = gen_derives(config, quote! { Debug, Default, Clone, PartialEq }, name.to_string()); + let impl_generics = self.impl_generics(); + let fields = self.fields_tokens(quote! {pub}, quote! {;}); + + quote! { + #[derive(#derives)] + #[doc = #doc] + pub struct #name #impl_generics #fields + impl Eq for #name {} + } + } + + fn as_struct_deserialize(&self) -> Tokens { + if self.fields.is_empty() { + return quote!(Ok(Self {})); + } + let mut names = Vec::::new(); + let mut reads = Vec::::new(); + for f in &self.fields { + let read_method = f.ty.as_read_method(); + let mut read_op = quote!(_de. #read_method ()?); + if f.ty.needs_box { + read_op = quote!(Box::new(#read_op)); + } + let expr = if let Some((ref flag_name, ref flag_bit)) = f.flag_bit { + let flag_ident = no_conflict_ident(flag_name); + let predicate = quote!(#flag_ident & (1 << #flag_bit) != 0); + if f.ty.is_unit() { + predicate + } else { + quote! { + if #predicate { + Some(#read_op) + } else { + None + } + } + } + } else { + quote!(#read_op) + }; + let name = f.name(); + if !f.ty.is_flags() { + names.push(f.name()); + } + reads.push(quote!(let #name = #expr;)) + } + + quote!({ + #( #reads )* + Ok(Self { #( #names, )* }) + }) + } + + fn as_into_boxed(&self, name: &syn::Ident) -> Option { + if self.tl_id().is_none() || self.is_function { + return None; + } + let constructor = self.output.unboxed(); + let variant_name = self.full_variant_name(); + let operation = if self.variant.needs_box { + quote!(#constructor::#variant_name(Box::new(self))) + } else { + quote!(#constructor::#variant_name(self)) + }; + Some(quote! 
{ + impl crate::IntoBoxed for #name { + type Boxed = #constructor; + fn into_boxed(self) -> #constructor { + #operation + } + } + }) + } + + fn as_type_struct_base(&self, config: &Option, name: syn::Ident, matched: &str) -> Tokens { + let serialize_destructure = self.as_variant_ref_destructure(&name) + .map(|d| quote! { let &#d = self; }) + .unwrap_or_else(|| quote!()); + let serialize_stmts = self.as_variant_serialize(); + let deserialize = self.as_struct_deserialize(); + let type_impl = self.as_type_impl( + &name, + quote!(#serialize_destructure #serialize_stmts Ok(())), + Some(quote!(#deserialize)) + ); + let struct_block = self.as_struct_base(config, &name, matched); + let into_boxed = self.as_into_boxed(&name); + quote! { + #struct_block + #type_impl + #into_boxed + } + } + + fn variant_name(&self) -> syn::Ident { + self.variant.name() + } + + fn full_variant_name(&self) -> syn::Ident { + let str_name = self.original_variant.clone(); + let name: String = str_name + .split('.') + .map(|s| { + let mut s = s.to_string(); + s[..1].make_ascii_uppercase(); + s + }) + .collect::>() + .join("_"); + + no_conflict_ident(&name) + } + + fn as_variant_type_struct(&self, config: &Option, matched: &str) -> Tokens { + if self.variant_name() == "BlockIdExt" { + let tl_id = self.tl_id().unwrap(); + return quote! { + pub(crate) type BlockIdExt = ton_block::BlockIdExt; + pub(crate) const TL_TAG: crate::ConstructorNumber = #tl_id; + } + } + if self.fields.is_empty() { + quote!() + } else { + self.as_type_struct_base(config, self.variant_name(), matched) + } + } + + fn as_variant_ref_destructure(&self, name: &syn::Ident) -> Option { + if self.fields.is_empty() { + return None; + } + let fields = self.fields.iter() + .filter(|f| !f.ty.is_flags()) + .map(|f| { + let field_name = f.name(); + if f.ty.is_extra() { + return quote! { #field_name: _ } + } + let prefix = f.ty.ref_prefix(); + if let Some(local_name) = f.local_name() { + quote! { #field_name: #prefix #local_name } + } else { + quote! { #prefix #field_name } + } + }); + Some(quote! { + #name { #( #fields ),* } + }) + } + + fn as_variant_serialize(&self) -> Tokens { + let determine_flags = self.as_struct_determine_flags(quote!()) + .unwrap_or_else(|| quote!()); + let fields = self.fields.iter() + .map(|f| { + if f.ty.is_unit() { + return quote!(); + } + let write_method = match f.ty.as_write_method() { + Some(m) => m, + None => return quote!(), + }; + let field_name = f.name(); + let local_name = f.local_name().unwrap_or_else(|| field_name.clone()); + if f.ty.is_flags() { + quote! { _ser. #write_method (&_flags)?; } + } else if f.flag_bit.is_some() { + let outer_ref = f.ty.reference_prefix(); + let inner_ref = f.ty.ref_prefix(); + let local_ref = f.ty.local_reference_prefix(); + quote! { + if let #outer_ref Some(#inner_ref inner) = #local_name { + _ser. #write_method (#local_ref inner)?; + } + } + } else { + if f.ty.needs_box { + quote!(_ser.#write_method(#local_name.as_ref())?;) + } else { + let prefix = f.ty.local_reference_prefix(); + quote!(_ser. #write_method(#prefix #local_name)?;) + } + } + }); + quote! 
{ + #determine_flags + #( #fields )* + } + } + + fn as_function_struct(&self, config: &Option, matched: &str) -> Tokens { + let name = self.variant_name(); + let tl_id = self.tl_id().unwrap(); + let rpc_generics = self.rpc_generics(); + let deserialize_generics = self.deserialize_tlobject_generics(); + let serialize_generics = self.serialize_generics(); + let impl_generics = self.impl_generics(); + let mut output_ty = self.output.boxed(); + if self.output.is_type_parameter() { + output_ty = quote! {#output_ty::Reply}; + } + let base = self.as_type_struct_base(config, self.variant_name(), matched); + + quote! { + #base + + impl crate::BoxedDeserialize for #name #deserialize_generics { + fn possible_constructors() -> Vec { vec![#tl_id] } + fn deserialize_boxed(id: crate::ConstructorNumber, de: &mut crate::Deserializer) -> crate::Result { + if id == #tl_id { + de.read_bare() + } else { + _invalid_id!(id) + } + } + } + + impl #serialize_generics crate::BoxedSerialize for #name #impl_generics { + fn serialize_boxed(&self) -> (crate::ConstructorNumber, &dyn crate::BareSerialize) { + (#tl_id, self) + } + } + + impl #rpc_generics crate::Function for #name #impl_generics { + type Reply = #output_ty; + } + } + } + + fn as_variant(&self) -> Tokens { + let name = self.full_variant_name(); + if self.fields.is_empty() { + quote!(#name) + } else { + let type_name = self.variant.unboxed(); + if self.variant.needs_box { + quote!(#name(Box<#type_name>)) + } else { + quote!(#name(#type_name)) + } + } + } + + fn as_variant_serialize_arm(&self) -> Tokens { + let tl_id = self.tl_id().unwrap(); + if self.fields.is_empty() { + quote!(=> (#tl_id, &())) + } else { + if self.variant.needs_box { + quote!((ref x) => (#tl_id, x.as_ref())) + } else { + quote!((ref x) => (#tl_id, x)) + } + } + } + + fn as_variant_deserialize(&self, wrap_to_box: bool) -> Tokens { + if self.fields.is_empty() { + quote!() + } else { + let read_method = self.variant.as_read_method(); + if wrap_to_box { + quote!((Box::new(_de. #read_method ()?))) + } else { + quote!((_de. #read_method ()?)) + } + } + } + + fn tl_id(&self) -> Option { + self.tl_id.as_ref().map(|tl_id| { + let tl_id: syn::LitInt = syn::parse_str(&format!("0x{:08x}", tl_id)).unwrap(); + quote!(crate::ConstructorNumber(#tl_id)) + }) + } + + fn as_type_impl(&self, name: &syn::Ident, serialize: Tokens, deserialize: Option) -> Tokens { + let serialize_generics = self.serialize_generics(); + let impl_generics = self.impl_generics(); + + let deserialize = match deserialize { + Some(body) => { + let deserialize_generics = self.deserialize_tlobject_generics(); + quote! { + impl crate::BareDeserialize for #name #deserialize_generics { + fn deserialize_bare(_de: &mut crate::Deserializer) -> crate::Result { + #body + } + } + } + } + None => quote!() + }; + let constructor = self.tl_id().unwrap_or_else(|| quote!( unreachable!() )); + + quote! 
{ + impl #serialize_generics crate::BareSerialize for #name #impl_generics { + fn constructor(&self) -> crate::ConstructorNumber { + #constructor + } + fn serialize_bare(&self, _ser: &mut crate::Serializer) -> crate::Result<()> { + #serialize + } + } + + #deserialize + } + } + + fn as_dynamic_deserializer(&self) -> Option { + let tl_id = self.tl_id()?; + let type_name = if self.is_function { + Cow::Owned(format!("rpc.{}", self.original_variant)) + } else { + Cow::Borrowed(&self.original_variant) + }; + let mut ty = if &self.original_variant == "manual.gzip_packed" { + quote!(crate::ton::TransparentGunzip) + } else if self.is_function { + self.variant.unboxed() + } else { + self.output.unboxed() + }; + if !self.type_parameters.is_empty() { + let generics = self.type_parameters.iter().map(|_| quote!(crate::ton::TLObject)); + ty = quote!(#ty<#(#generics),*>); + } + Some(quote! { + crate::DynamicDeserializer::from::<#ty>(#tl_id, #type_name) + }) + } +} + +fn camelize(config: &Option, resolve_map: &mut TypeResolutionMap, ty: &mut Type, additional_correction: F) -> NameChunks + where F: FnOnce(&mut NameChunks, &mut Vec) -> bool +{ + let names = ty.names_vec_mut().unwrap(); + let fixup_key = names.clone(); + let name = names.pop().unwrap(); + let mut new_name = NameChunks::from_name(&name).unwrap(); + let force_bare = additional_correction(&mut new_name, names); + names.push(new_name.as_upper_camel_case()); + let type_ir = TypeIR::from_names_and_hint(config, &names, force_bare); + resolve_map.insert(fixup_key, type_ir.clone()); + resolve_map.insert(names.clone(), type_ir); + new_name +} + +fn gen_derives(config: &Option, mut derives: TokenStream, name: String) -> TokenStream { + if let Some(config) = config { + if let Some(add_derives) = config.additional_derives.get(&name) { + for item in add_derives { + derives.append(Punct::new(',', Spacing::Alone)); + derives.append(Ident::new(item.as_str(), Span::call_site())); + } + } + } + + derives +} + +impl Constructors { + fn first_constructor(&self) -> &Constructor { + &self.0[0].0 + } +} + +impl Constructors { + fn resolve(self, config: &Option, resolve_map: &TypeResolutionMap) -> Constructors { + Constructors({ + self.0.into_iter() + .map(|Matched(c, m)| Matched(c.resolve(config, Delimiter::Types, resolve_map), m)) + .collect() + }) + } + + fn fix_names(&mut self, config: &Option, base_ns: &[String], resolve_map: &mut TypeResolutionMap) { + let output_name = camelize(config, resolve_map, &mut self.0[0].0.output, |_, _| false); + + let common_prefix = self.0.iter() + .filter_map(|m| m.0.variant.name()) + .map(|n| NameChunks::from_name(n).unwrap()) + .fold(None, |a_opt: Option, b| { + Some(a_opt.map(|a| a.common_prefix_of(&b)).unwrap_or(b)) + }) + .unwrap_or_else(|| NameChunks(vec![])); + let common_module = if common_prefix.0.is_empty() { + output_name.as_snake_case() + } else { + common_prefix.as_snake_case() + }; + for &mut Matched(ref mut c, _) in &mut self.0 { + camelize(config, resolve_map, &mut c.variant, |name, names| { + if !names.starts_with(base_ns) { + names.splice(..0, base_ns.iter().cloned()); + } + names.push(common_module.clone()); + let was_bare = is_first_char_lowercase(&name.0[0]); + name.trim_common_prefix(&common_prefix); + if name.0.is_empty() { + name.clone_from(&common_prefix); + } + was_bare + }); + } + } + +} + +impl Constructors { + fn coalesce_methods(&self) -> BTreeMap<&str, BTreeMap<&TypeIR, BTreeSet<&Constructor>>> { + let mut map: BTreeMap<&str, BTreeMap<&TypeIR, BTreeSet<&Constructor>>> = BTreeMap::new(); + for 
&Matched(ref cons, _) in &self.0 { + for field in &cons.fields { + if field.ty.is_flags() { + continue + } + map.entry(&field.name) + .or_insert_with(Default::default) + .entry(&field.ty) + .or_insert_with(Default::default) + .insert(cons); + } + } + map + } + + fn as_only_unwrap(&self, enum_name: &syn::Ident) -> Option { + let &Matched(ref cons, _) = self.0.first()?; + if self.0.len() != 1 || cons.fields.is_empty() { + return None + } + let cons_name = cons.full_variant_name(); + let ty = cons.variant.unboxed(); + let deref = if cons.variant.needs_box { + Some(Punct::new('*', Spacing::Joint)) + } else { + None + }; + Some(quote! { + pub fn only(self) -> #ty { + match self { + #enum_name::#cons_name(x) => #deref x + } + } + }) + } + + fn determine_methods(&self, enum_name: &syn::Ident) -> Tokens { + let all_constructors = self.0.len(); + let mut methods = vec![]; + for (name, typemap) in self.coalesce_methods() { + if typemap.len() != 1 { + continue; + } + let name = no_conflict_ident(name); + let (ty_ir, constructors) = typemap.into_iter().next().unwrap(); + let mut return_ir = ty_ir.clone(); + let exhaustive = constructors.len() == all_constructors; + if !exhaustive { + return_ir.with_option = true; + } + let value = wrap_option_value(!exhaustive && !ty_ir.with_option, ty_ir.as_field_reference(quote!(x.#name))); + let constructors = constructors.into_iter() + .map(|c| { + let cons_name = c.full_variant_name(); + quote!(&#enum_name::#cons_name(ref x) => #value) + }); + let trailer = if exhaustive { + quote!() + } else { + quote!(_ => None) + }; + let ty = return_ir.field_reference_type(); + methods.push(quote! { + pub fn #name(&self) -> #ty { + match self { + #( #constructors, )* + #trailer + } + } + }); + } + + methods.extend(self.as_only_unwrap(enum_name)); + + if methods.is_empty() { + quote!() + } else { + quote! { + impl #enum_name { + #( #methods )* + } + } + } + } + + fn as_type_impl(&self, name: &syn::Ident, serialize: Tokens, deserialize: Tokens) -> Tokens { + let tl_ids = self.as_tl_ids(); + + quote! { + + impl crate::BoxedSerialize for #name { + fn serialize_boxed(&self) -> (crate::ConstructorNumber, &dyn crate::BareSerialize) { + #serialize + } + } + + impl crate::BoxedDeserialize for #name { + fn possible_constructors() -> Vec { vec![#tl_ids] } + fn deserialize_boxed(_id: crate::ConstructorNumber, _de: &mut crate::Deserializer) -> crate::Result { + #deserialize + } + } + + } + } + + fn as_option_type_impl(&self) -> Tokens { + if self.0.len() != 2 { + return quote!(); + } + let tl_ids = self.constructors_and_tl_ids().collect::>(); + let empty = tl_ids.iter().find(|&&(_, c)| c.fields.is_empty()); + let nonempty = tl_ids.iter().find(|&&(_, c)| !c.fields.is_empty()); + let (empty_id, nonempty_id, nonempty_cons) = match (empty, nonempty) { + (Some(&(ref i_e, _)), Some(&(ref i_n, c_n))) => (i_e, i_n, c_n), + _ => return quote!(), + }; + let nonempty_variant = nonempty_cons.variant.unboxed(); + let nonempty_deserialize = nonempty_cons.as_variant_deserialize(false); + + quote! 
{ + + impl crate::BoxedSerialize for Option<#nonempty_variant> { + fn serialize_boxed(&self) -> (crate::ConstructorNumber, &dyn crate::BareSerialize) { + match *self { + None => (#empty_id, &()), + Some(ref x) => (#nonempty_id, x), + } + } + } + + impl crate::BoxedDeserialize for Option<#nonempty_variant> { + fn possible_constructors() -> Vec { vec![#empty_id, #nonempty_id] } + fn deserialize_boxed(_id: crate::ConstructorNumber, _de: &mut crate::Deserializer) -> crate::Result { + match _id { + #empty_id => Ok(None), + #nonempty_id => Ok(Some #nonempty_deserialize), + id => _invalid_id!(id), + } + } + } + + } + } + + fn constructors_and_tl_ids<'this>(&'this self) -> Box)>> { + Box::new(self.0.iter().filter_map(|cm| { + cm.0.tl_id().map(|id| (id, &cm.0)) + })) + } + + fn as_serialize_match(&self, enum_name: &syn::Ident) -> Tokens { + let constructors = self.0.iter() + .map(|&Matched(ref c, _)| { + let variant_name = c.full_variant_name(); + let serialize = c.as_variant_serialize_arm(); + quote!(&#enum_name::#variant_name #serialize) + }); + quote! { + match self { + #( #constructors, )* + } + } + } + + fn as_tl_ids(&self) -> Tokens { + let tl_ids = self.0.iter() + .filter_map(|cm| cm.0.tl_id()); + quote!(#( #tl_ids, )*) + } + + fn as_deserialize_match(&self, enum_name: &syn::Ident) -> Tokens { + let constructors = self.constructors_and_tl_ids() + .map(|(tl_id, c)| { + let variant_name = c.full_variant_name(); + let deserialize = c.as_variant_deserialize(c.variant.needs_box); + quote!(#tl_id => Ok(#enum_name::#variant_name #deserialize)) + }); + quote! { + match _id { + #( #constructors, )* + id => _invalid_id!(id), + } + } + } + + fn as_enum_doc(&self) -> String { + use std::fmt::Write; + let mut ret = format!("TL-derived from `{}`\n\n```text\n", &self.first_constructor().original_output); + for (e, cm) in self.0.iter().enumerate() { + if e != 0 { + ret.write_str("\n\n").unwrap(); + } + ret.write_str(&cm.1).unwrap(); + } + write!(ret, "\n```\n").unwrap(); + ret + } + + fn enum_default_impl(&self) -> Option { + + let &Matched(ref cons, _) = self.0.first()?; + let name = self.first_constructor().output.name(); + let variant = cons.full_variant_name(); + + if cons.fields.is_empty() { + return Some(quote! { + impl Default for #name { + fn default() -> Self { + #name::#variant + } + } + }) + } + let name = self.first_constructor().output.name(); + let variant = cons.full_variant_name(); + + let ty = cons.variant.unboxed(); + + let mut default = quote!(#ty::default()); + if cons.variant.needs_box { + default = quote!(Box::new(#default)); + } + + Some(quote! { + impl Default for #name { + fn default() -> Self { + #name::#variant(#default) + } + } + }) + } + + fn as_enum(&self, config: &Option) -> Tokens { + if self.0.iter().all(|cm| cm.0.tl_id().is_none()) { + return quote!(); + } + let name = self.first_constructor().output.name(); + let doc = self.as_enum_doc(); + if name == "BlockIdExt" { + return quote! { + pub(crate) type BlockIdExt = ton_block::BlockIdExt; + } + } + let derives = gen_derives(config, quote! { Debug, Clone, PartialEq }, name.to_string()); + let variants = self.0.iter() + .map(|cm| cm.0.as_variant()); + let methods = self.determine_methods(&name); + let type_impl = self.as_type_impl( + &name, + self.as_serialize_match(&name), + self.as_deserialize_match(&name)); + let option_type_impl = self.as_option_type_impl(); + let default_impl = self.enum_default_impl(); + + quote! 
{ + #[derive(#derives)] + #[doc = #doc] + pub enum #name { + #( #variants , )* + } + #methods + impl Eq for #name {} + #default_impl + #type_impl + #option_type_impl + } + } +} + +pub fn generate_code_for(config: Option, input: &str, path: &Path) { + let constructors = { + let mut items = parser::parse_string(input).unwrap(); + filter_items(&config, &mut items); + AllConstructors::from_matched_items(&config, items) + }; + + let layer = constructors.layer as i32; + let prelude = quote! { + #![allow(bare_trait_objects, unused_variables, unused_imports, non_snake_case)] + pub use crate::ton_prelude::*; + + pub const LAYER: i32 = #layer; + }; + + constructors.print_tokens(&config, prelude, path); +} diff --git a/ton_tl_codegen/src/tests.rs b/ton_tl_codegen/src/tests.rs new file mode 100644 index 0000000..47b86cd --- /dev/null +++ b/ton_tl_codegen/src/tests.rs @@ -0,0 +1,27 @@ +/* +* Copyright (C) 2019-2021 TON Labs. All Rights Reserved. +* +* Licensed under the SOFTWARE EVALUATION License (the "License"); you may not use +* this file except in compliance with the License. +* +* Unless required by applicable law or agreed to in writing, software +* distributed under the License is distributed on an "AS IS" BASIS, +* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +* See the License for the specific TON DEV software governing permissions and +* limitations under the License. +*/ + +use super::*; + +#[test] +fn test_to_snake_case() { + assert_eq!(to_snake_case("test"), "test"); + assert_eq!(to_snake_case("foo_bar"), "foo_bar"); + assert_eq!(to_snake_case("test_"), "test_"); + assert_eq!(to_snake_case("_test_"), "_test_"); + assert_eq!(to_snake_case("_test"), "_test"); + assert_eq!(to_snake_case("Test"), "test"); + assert_eq!(to_snake_case("TestMsg"), "test_msg"); + assert_eq!(to_snake_case("TestA"), "test_a"); + assert_eq!(to_snake_case("TTest"), "t_test"); +}
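+
+// Illustrative sketch (an assumed extra test, not part of the original patch): run the
+// pom-based TL grammar end-to-end on a tiny invented schema snippet and check that each
+// declaration is returned as its own Matched<Item>.
+#[test]
+fn test_parse_small_schema() {
+    let input = "boolTrue = Bool;\nboolFalse = Bool;\n";
+    let items = parser::parse_string(input).expect("small TL snippet should parse");
+    // One item per constructor declaration.
+    assert_eq!(items.len(), 2);
+}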