diff --git a/Cargo.lock b/Cargo.lock index f58fb50797..f3897bf9de 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -152,6 +152,12 @@ version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76a2e8124351fda1ef8aaaa3bbd7ebbcb486bbcd4225aca0aa0d84bb2db8fecb" +[[package]] +name = "arrayvec" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50" + [[package]] name = "assert-json-diff" version = "2.0.2" @@ -551,6 +557,21 @@ dependencies = [ "which", ] +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + [[package]] name = "bitflags" version = "2.6.0" @@ -825,7 +846,7 @@ checksum = "975982cdb7ad6a142be15bdf84aea7ec6a9e5d4d797c004d43185b24cfe4e684" dependencies = [ "clap", "heck 0.5.0", - "indexmap 2.6.0", + "indexmap 2.11.4", "log", "proc-macro2", "quote", @@ -1543,37 +1564,46 @@ dependencies = [ name = "datadog-profiling" version = "21.0.0" dependencies = [ + "allocator-api2", "anyhow", + "arrayvec", "bitmaps", "bolero", "byteorder", "bytes", "chrono", - "criterion", + "crossbeam-utils", "datadog-alloc", "datadog-profiling-protobuf", "ddcommon", "futures", + "hashbrown 0.15.1", "http", "http-body-util", "hyper", "hyper-multipart-rfc7578", - "indexmap 2.6.0", + "hyper-util", + "indexmap 2.11.4", "lz4_flex", "mime", + "parking_lot", + "proptest", "prost", "rustc-hash", "serde", "serde_json", "target-triple", + "thiserror", "tokio", "tokio-util", + "uuid", ] [[package]] name = "datadog-profiling-ffi" version = "21.0.0" dependencies = [ + "allocator-api2", "anyhow", 
"build_common", "data-pipeline-ffi", @@ -1581,6 +1611,7 @@ dependencies = [ "datadog-library-config-ffi", "datadog-log-ffi", "datadog-profiling", + "datadog-profiling-protobuf", "ddcommon", "ddcommon-ffi", "ddsketch-ffi", @@ -1590,8 +1621,11 @@ dependencies = [ "http-body-util", "hyper", "libc", + "proptest", + "rustc-hash", "serde_json", "symbolizer-ffi", + "thiserror", "tokio-util", ] @@ -1605,15 +1639,12 @@ dependencies = [ ] [[package]] -name = "datadog-profiling-replayer" +name = "datadog-profiling-validator" version = "21.0.0" dependencies = [ - "anyhow", "clap", - "datadog-profiling", "datadog-profiling-protobuf", - "prost", - "sysinfo", + "thiserror", ] [[package]] @@ -1842,7 +1873,7 @@ dependencies = [ "hyper", "hyper-rustls", "hyper-util", - "indexmap 2.6.0", + "indexmap 2.11.4", "libc", "maplit", "nix", @@ -1864,6 +1895,7 @@ dependencies = [ name = "ddcommon-ffi" version = "21.0.0" dependencies = [ + "allocator-api2", "anyhow", "bolero", "build_common", @@ -2360,7 +2392,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93563d740bc9ef04104f9ed6f86f1e3275c2cdafb95664e26584b9ca807a8ffe" dependencies = [ "fallible-iterator", - "indexmap 2.6.0", + "indexmap 2.11.4", "stable_deref_trait", ] @@ -2414,7 +2446,7 @@ dependencies = [ "futures-core", "futures-sink", "http", - "indexmap 2.6.0", + "indexmap 2.11.4", "slab", "tokio", "tokio-util", @@ -2927,13 +2959,14 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.6.0" +version = "2.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707907fe3c25f5424cce2cb7e1cbcafee6bdbe735ca90ef77c29e84591e5b9da" +checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" dependencies = [ "equivalent", "hashbrown 0.15.1", "serde", + "serde_core", ] [[package]] @@ -3315,15 +3348,6 @@ dependencies = [ "minimal-lexical", ] -[[package]] -name = "ntapi" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "e8a3895c6391c39d7fe7ebc444a87eb2991b2a0bc718fdabd071eec617fc68e4" -dependencies = [ - "winapi 0.3.9", -] - [[package]] name = "nu-ansi-term" version = "0.46.0" @@ -3575,7 +3599,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.6.0", + "indexmap 2.11.4", ] [[package]] @@ -3750,7 +3774,7 @@ checksum = "714c75db297bc88a63783ffc6ab9f830698a6705aa0201416931759ef4c8183d" dependencies = [ "autocfg", "equivalent", - "indexmap 2.6.0", + "indexmap 2.11.4", ] [[package]] @@ -3800,6 +3824,8 @@ name = "proptest" version = "1.5.0" source = "git+https://github.com/bantonsson/proptest.git?branch=ban/avoid-libm-in-std#9f623fbab7a1a4da487551128c2bffeee2ed6b87" dependencies = [ + "bit-set", + "bit-vec", "bitflags", "lazy_static", "num-traits", @@ -3807,6 +3833,8 @@ dependencies = [ "rand_chacha 0.3.1", "rand_xorshift", "regex-syntax 0.8.5", + "rusty-fork", + "tempfile", "unarray", ] @@ -3919,6 +3947,12 @@ version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7dc55d7dec32ecaf61e0bd90b3d2392d721a28b95cfd23c3e176eccefbeab2f2" +[[package]] +name = "quick-error" +version = "1.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" + [[package]] name = "quote" version = "1.0.37" @@ -4236,6 +4270,18 @@ version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" +[[package]] +name = "rusty-fork" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f" +dependencies = [ + "fnv", + "quick-error", + "tempfile", + "wait-timeout", +] + [[package]] name = "ruzstd" version = "0.3.1" @@ -4365,10 +4411,11 @@ 
dependencies = [ [[package]] name = "serde" -version = "1.0.219" +version = "1.0.226" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "0dca6411025b24b60bfa7ec1fe1f8e710ac09782dca409ee8237ba74b51295fd" dependencies = [ + "serde_core", "serde_derive", ] @@ -4381,11 +4428,20 @@ dependencies = [ "serde", ] +[[package]] +name = "serde_core" +version = "1.0.226" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba2ba63999edb9dac981fb34b3e5c0d111a69b0924e253ed29d83f7c99e966a4" +dependencies = [ + "serde_derive", +] + [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.226" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "8db53ae22f34573731bafa1db20f04027b2d25e02d8205921b569171699cdb33" dependencies = [ "proc-macro2", "quote", @@ -4444,7 +4500,7 @@ dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.6.0", + "indexmap 2.11.4", "serde", "serde_derive", "serde_json", @@ -4470,7 +4526,7 @@ version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.11.4", "itoa", "ryu", "serde", @@ -4738,20 +4794,6 @@ dependencies = [ "libc", ] -[[package]] -name = "sysinfo" -version = "0.29.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd727fc423c2060f6c92d9534cef765c65a6ed3f428a03d7def74a8c4348e666" -dependencies = [ - "cfg-if", - "core-foundation-sys", - "libc", - "ntapi", - "once_cell", - "winapi 0.3.9", -] - [[package]] name = "tabwriter" version = "1.4.1" @@ -5106,7 +5148,7 @@ version = "0.20.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.11.4", "toml_datetime", "winnow 0.5.40", ] @@ -5117,7 +5159,7 @@ version = "0.22.26" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "310068873db2c5b3e7659d2cc35d21855dbafa50d1ce336397c666e3cb08137e" dependencies = [ - "indexmap 2.6.0", + "indexmap 2.11.4", "serde", "serde_spanned", "toml_datetime", @@ -5468,6 +5510,15 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "wait-timeout" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09ac3b126d3914f9849036f826e054cbabdc8519970b8998ddaf3b5bd3c65f11" +dependencies = [ + "libc", +] + [[package]] name = "walkdir" version = "2.5.0" @@ -6205,7 +6256,7 @@ dependencies = [ "flate2", "getrandom 0.3.2", "hmac", - "indexmap 2.6.0", + "indexmap 2.11.4", "liblzma", "memchr", "pbkdf2", diff --git a/Cargo.toml b/Cargo.toml index 68ee6a5711..3c3a75a0e9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,7 +16,8 @@ members = [ "datadog-profiling", "datadog-profiling-ffi", "datadog-profiling-protobuf", - "datadog-profiling-replayer", + #"datadog-profiling-replayer", # todo: add back + "datadog-profiling-validator", "datadog-remote-config", "datadog-sidecar", "datadog-sidecar-ffi", diff --git a/LICENSE-3rdparty.yml b/LICENSE-3rdparty.yml index fbe60d1d51..ecfe41d56c 100644 --- a/LICENSE-3rdparty.yml +++ b/LICENSE-3rdparty.yml @@ -1,4 +1,4 @@ -root_name: builder, build_common, tools, datadog-alloc, datadog-crashtracker, ddcommon, ddtelemetry, datadog-ddsketch, cc_utils, datadog-crashtracker-ffi, ddcommon-ffi, datadog-ipc, datadog-ipc-macros, tarpc, tarpc-plugins, tinybytes, spawn_worker, datadog-library-config, datadog-library-config-ffi, datadog-live-debugger, datadog-live-debugger-ffi, datadog-profiling, 
datadog-profiling-protobuf, datadog-profiling-ffi, data-pipeline-ffi, data-pipeline, datadog-trace-protobuf, datadog-trace-utils, datadog-trace-normalization, dogstatsd-client, datadog-log-ffi, datadog-log, ddsketch-ffi, ddtelemetry-ffi, symbolizer-ffi, datadog-profiling-replayer, datadog-remote-config, datadog-sidecar, datadog-sidecar-macros, datadog-sidecar-ffi, datadog-trace-obfuscation, datadog-tracer-flare, sidecar_mockgen, test_spawn_from_lib +root_name: builder, build_common, tools, datadog-alloc, datadog-crashtracker, ddcommon, ddtelemetry, datadog-ddsketch, cc_utils, datadog-crashtracker-ffi, ddcommon-ffi, datadog-ipc, datadog-ipc-macros, tarpc, tarpc-plugins, tinybytes, spawn_worker, datadog-library-config, datadog-library-config-ffi, datadog-live-debugger, datadog-live-debugger-ffi, datadog-profiling, datadog-profiling-protobuf, datadog-profiling-ffi, data-pipeline-ffi, data-pipeline, datadog-trace-protobuf, datadog-trace-utils, datadog-trace-normalization, dogstatsd-client, datadog-log-ffi, datadog-log, ddsketch-ffi, ddtelemetry-ffi, symbolizer-ffi, datadog-profiling-validator, datadog-remote-config, datadog-sidecar, datadog-sidecar-macros, datadog-sidecar-ffi, datadog-trace-obfuscation, datadog-tracer-flare, sidecar_mockgen, test_spawn_from_lib third_party_libraries: - package_name: addr2line package_version: 0.24.2 @@ -2151,6 +2151,40 @@ third_party_libraries: THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+- package_name: arrayvec + package_version: 0.7.6 + repository: https://github.com/bluss/arrayvec + license: MIT OR Apache-2.0 + licenses: + - license: MIT + text: | + Copyright (c) Ulrik Sverdrup "bluss" 2015-2023 + + Permission is hereby granted, free of charge, to any + person obtaining a copy of this software and associated + documentation files (the "Software"), to deal in the + Software without restriction, including without + limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software + is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice + shall be included in all copies or substantial portions + of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF + ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED + TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A + PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT + SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + - license: Apache-2.0 + text: " Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\nTERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n1. Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. 
For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. 
For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n3. Grant of Patent License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. 
If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. 
You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n7. Disclaimer of Warranty. Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n8. 
Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\nEND OF TERMS AND CONDITIONS\n\nAPPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. 
We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\nCopyright [yyyy] [name of copyright owner]\n\nLicensed under the Apache License, Version 2.0 (the \"License\");\nyou may not use this file except in compliance with the License.\nYou may obtain a copy of the License at\n\n\thttp://www.apache.org/licenses/LICENSE-2.0\n\nUnless required by applicable law or agreed to in writing, software\ndistributed under the License is distributed on an \"AS IS\" BASIS,\nWITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\nSee the License for the specific language governing permissions and\nlimitations under the License.\n" - package_name: assert-json-diff package_version: 2.0.2 repository: https://github.com/davidpdrsn/assert-json-diff.git @@ -13606,7 +13640,7 @@ third_party_libraries: IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - package_name: indexmap - package_version: 2.6.0 + package_version: 2.11.4 repository: https://github.com/indexmap-rs/indexmap license: Apache-2.0 OR MIT licenses: @@ -16399,15 +16433,6 @@ third_party_libraries: LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -- package_name: ntapi - package_version: 0.4.1 - repository: https://github.com/MSxDOS/ntapi - license: Apache-2.0 OR MIT - licenses: - - license: Apache-2.0 - text: " Apache License\n Version 2.0, January 2004\n http://www.apache.org/licenses/\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n 1. 
Definitions.\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n and distribution as defined by Sections 1 through 9 of this document.\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n the copyright owner that is granting the License.\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n other entities that control, are controlled by, or are under common\n control with that entity. For the purposes of this definition,\n \"control\" means (i) the power, direct or indirect, to cause the\n direction or management of such entity, whether by contract or\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n exercising permissions granted by this License.\n\n \"Source\" form shall mean the preferred form for making modifications,\n including but not limited to software source code, documentation\n source, and configuration files.\n\n \"Object\" form shall mean any form resulting from mechanical\n transformation or translation of a Source form, including but\n not limited to compiled object code, generated documentation,\n and conversions to other media types.\n\n \"Work\" shall mean the work of authorship, whether in Source or\n Object form, made available under the License, as indicated by a\n copyright notice that is included in or attached to the work\n (an example is provided in the Appendix below).\n\n \"Derivative Works\" shall mean any work, whether in Source or Object\n form, that is based on (or derived from) the Work and for which the\n editorial revisions, annotations, elaborations, or other modifications\n represent, as a whole, an original work of authorship. 
For the purposes\n of this License, Derivative Works shall not include works that remain\n separable from, or merely link (or bind by name) to the interfaces of,\n the Work and Derivative Works thereof.\n\n \"Contribution\" shall mean any work of authorship, including\n the original version of the Work and any modifications or additions\n to that Work or Derivative Works thereof, that is intentionally\n submitted to Licensor for inclusion in the Work by the copyright owner\n or by an individual or Legal Entity authorized to submit on behalf of\n the copyright owner. For the purposes of this definition, \"submitted\"\n means any form of electronic, verbal, or written communication sent\n to the Licensor or its representatives, including but not limited to\n communication on electronic mailing lists, source code control systems,\n and issue tracking systems that are managed by, or on behalf of, the\n Licensor for the purpose of discussing and improving the Work, but\n excluding communication that is conspicuously marked or otherwise\n designated in writing by the copyright owner as \"Not a Contribution.\"\n\n \"Contributor\" shall mean Licensor and any individual or Legal Entity\n on behalf of whom a Contribution has been received by Licensor and\n subsequently incorporated within the Work.\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n copyright license to reproduce, prepare Derivative Works of,\n publicly display, publicly perform, sublicense, and distribute the\n Work and such Derivative Works in Source or Object form.\n\n 3. Grant of Patent License. 
Subject to the terms and conditions of\n this License, each Contributor hereby grants to You a perpetual,\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n (except as stated in this section) patent license to make, have made,\n use, offer to sell, sell, import, and otherwise transfer the Work,\n where such license applies only to those patent claims licensable\n by such Contributor that are necessarily infringed by their\n Contribution(s) alone or by combination of their Contribution(s)\n with the Work to which such Contribution(s) was submitted. If You\n institute patent litigation against any entity (including a\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n or a Contribution incorporated within the Work constitutes direct\n or contributory patent infringement, then any patent licenses\n granted to You under this License for that Work shall terminate\n as of the date such litigation is filed.\n\n 4. Redistribution. You may reproduce and distribute copies of the\n Work or Derivative Works thereof in any medium, with or without\n modifications, and in Source or Object form, provided that You\n meet the following conditions:\n\n (a) You must give any other recipients of the Work or\n Derivative Works a copy of this License; and\n\n (b) You must cause any modified files to carry prominent notices\n stating that You changed the files; and\n\n (c) You must retain, in the Source form of any Derivative Works\n that You distribute, all copyright, patent, trademark, and\n attribution notices from the Source form of the Work,\n excluding those notices that do not pertain to any part of\n the Derivative Works; and\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n distribution, then any Derivative Works that You distribute must\n include a readable copy of the attribution notices contained\n within such NOTICE file, excluding those notices that do not\n pertain to any part of the Derivative Works, in at least one\n of 
the following places: within a NOTICE text file distributed\n as part of the Derivative Works; within the Source form or\n documentation, if provided along with the Derivative Works; or,\n within a display generated by the Derivative Works, if and\n wherever such third-party notices normally appear. The contents\n of the NOTICE file are for informational purposes only and\n do not modify the License. You may add Your own attribution\n notices within Derivative Works that You distribute, alongside\n or as an addendum to the NOTICE text from the Work, provided\n that such additional attribution notices cannot be construed\n as modifying the License.\n\n You may add Your own copyright statement to Your modifications and\n may provide additional or different license terms and conditions\n for use, reproduction, or distribution of Your modifications, or\n for any such Derivative Works as a whole, provided Your use,\n reproduction, and distribution of the Work otherwise complies with\n the conditions stated in this License.\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n any Contribution intentionally submitted for inclusion in the Work\n by You to the Licensor shall be under the terms and conditions of\n this License, without any additional terms or conditions.\n Notwithstanding the above, nothing herein shall supersede or modify\n the terms of any separate license agreement you may have executed\n with Licensor regarding such Contributions.\n\n 6. Trademarks. This License does not grant permission to use the trade\n names, trademarks, service marks, or product names of the Licensor,\n except as required for reasonable and customary use in describing the\n origin of the Work and reproducing the content of the NOTICE file.\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n agreed to in writing, Licensor provides the Work (and each\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n implied, including, without limitation, any warranties or conditions\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n PARTICULAR PURPOSE. You are solely responsible for determining the\n appropriateness of using or redistributing the Work and assume any\n risks associated with Your exercise of permissions under this License.\n\n 8. Limitation of Liability. In no event and under no legal theory,\n whether in tort (including negligence), contract, or otherwise,\n unless required by applicable law (such as deliberate and grossly\n negligent acts) or agreed to in writing, shall any Contributor be\n liable to You for damages, including any direct, indirect, special,\n incidental, or consequential damages of any character arising as a\n result of this License or out of the use or inability to use the\n Work (including but not limited to damages for loss of goodwill,\n work stoppage, computer failure or malfunction, or any and all\n other commercial damages or losses), even if such Contributor\n has been advised of the possibility of such damages.\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n the Work or Derivative Works thereof, You may choose to offer,\n and charge a fee for, acceptance of support, warranty, indemnity,\n or other liability obligations and/or rights consistent with this\n License. 
However, in accepting such obligations, You may act only\n on Your own behalf and on Your sole responsibility, not on behalf\n of any other Contributor, and only if You agree to indemnify,\n defend, and hold each Contributor harmless for any liability\n incurred by, or claims asserted against, such Contributor by reason\n of your accepting any such warranty or additional liability.\n\n END OF TERMS AND CONDITIONS\n\n APPENDIX: How to apply the Apache License to your work.\n\n To apply the Apache License to your work, attach the following\n boilerplate notice, with the fields enclosed by brackets \"[]\"\n replaced with your own identifying information. (Don't include\n the brackets!) The text should be enclosed in the appropriate\n comment syntax for the file format. We also recommend that a\n file or class name and description of purpose be included on the\n same \"printed page\" as the copyright notice for easier\n identification within third-party archives.\n\n Copyright [yyyy] [name of copyright owner]\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n " - - license: MIT - text: "Permission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"),to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, 
subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL\nTHE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE \nSOFTWARE.\n" - package_name: nu-ansi-term package_version: 0.46.0 repository: https://github.com/nushell/nu-ansi-term @@ -23418,7 +23443,7 @@ third_party_libraries: OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - package_name: serde - package_version: 1.0.219 + package_version: 1.0.226 repository: https://github.com/serde-rs/serde license: MIT OR Apache-2.0 licenses: @@ -23832,9 +23857,217 @@ third_party_libraries: incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + END OF TERMS AND CONDITIONS +- package_name: serde_core + package_version: 1.0.226 + repository: https://github.com/serde-rs/serde + license: MIT OR Apache-2.0 + licenses: + - license: MIT + text: | + Permission is hereby granted, free of charge, to any + person obtaining a copy of this software and associated + documentation files (the "Software"), to deal in the + Software without restriction, including without + limitation the rights to use, copy, modify, merge, + publish, distribute, sublicense, and/or sell copies of + the Software, and to permit persons to whom the Software + is furnished to do so, subject to the following + conditions: + + The above copyright notice and this permission notice + shall be included in all copies or substantial portions + of the Software. 
+ + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF + ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED + TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A + PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT + SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY + CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR + IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + DEALINGS IN THE SOFTWARE. + - license: Apache-2.0 + text: |2 + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ END OF TERMS AND CONDITIONS - package_name: serde_derive - package_version: 1.0.219 + package_version: 1.0.226 repository: https://github.com/serde-rs/serde license: MIT OR Apache-2.0 licenses: @@ -26639,9 +26872,9 @@ third_party_libraries: - package_name: stringmetrics package_version: 2.2.2 repository: https://github.com/pluots/stringmetrics - license: License specified in file ($CARGO_HOME/registry/src/github.com-25cdd57fae9f0462/stringmetrics-2.2.2/LICENSE) + license: License specified in file ($CARGO_HOME/registry/src/index.crates.io-1949cf8c6b5b557f/stringmetrics-2.2.2/LICENSE) licenses: - - license: License specified in file ($CARGO_HOME/registry/src/github.com-25cdd57fae9f0462/stringmetrics-2.2.2/LICENSE) + - license: License specified in file ($CARGO_HOME/registry/src/index.crates.io-1949cf8c6b5b557f/stringmetrics-2.2.2/LICENSE) text: | Copyright 2022 Trevor Gross @@ -27046,35 +27279,6 @@ third_party_libraries: OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -- package_name: sysinfo - package_version: 0.29.11 - repository: https://github.com/GuillaumeGomez/sysinfo - license: MIT - licenses: - - license: MIT - text: |+ - The MIT License (MIT) - - Copyright (c) 2015 Guillaume Gomez - - Permission is hereby granted, free of charge, to any person obtaining a copy - of this software and associated documentation files (the "Software"), to deal - in the Software without restriction, including without limitation the rights - to use, copy, modify, merge, publish, distribute, sublicense, and/or sell - copies of the Software, and to permit persons to whom the Software is - furnished to do so, subject to the following conditions: - - The above copyright notice and this permission notice shall be included in all - copies or substantial portions of the Software. 
- - THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, - OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE - SOFTWARE. - - package_name: tabwriter package_version: 1.4.1 repository: https://github.com/BurntSushi/tabwriter diff --git a/data-pipeline/src/trace_exporter/mod.rs b/data-pipeline/src/trace_exporter/mod.rs index 6162656643..d4883c01e9 100644 --- a/data-pipeline/src/trace_exporter/mod.rs +++ b/data-pipeline/src/trace_exporter/mod.rs @@ -1458,6 +1458,9 @@ mod tests { // This is expected - no metrics should be sent when disabled // WouldBlock on Unix, TimedOut on Windows } + + // EINTR count in CI: 1; aka Interrupted system call (os error 4) + // If this happens again, we should attempt to handle interrupts. Err(e) => panic!("Unexpected error reading from socket: {e}"), } } diff --git a/datadog-alloc/src/virtual_alloc.rs b/datadog-alloc/src/virtual_alloc.rs index 965f4d2d1a..c7461dd7a5 100644 --- a/datadog-alloc/src/virtual_alloc.rs +++ b/datadog-alloc/src/virtual_alloc.rs @@ -8,7 +8,7 @@ use core::alloc::Layout; /// intended for large allocations only, such as working with other allocators /// to provide a large chunk for them. 
#[derive(Clone, Copy, Debug)] -pub struct VirtualAllocator {} +pub struct VirtualAllocator; #[cfg_attr(debug_assertions, track_caller)] #[inline] @@ -205,7 +205,7 @@ mod tests { bolero::check!() .with_generator(allocs) .for_each(|size_align_vec| { - let allocator = VirtualAllocator {}; + let allocator = VirtualAllocator; for (size, align_bits, idx, val) in size_align_vec { fuzzer_inner_loop(&allocator, *size, *align_bits, *idx, *val, MAX_SIZE) @@ -215,10 +215,9 @@ mod tests { #[test] fn test_zero_sized() { - let alloc = VirtualAllocator {}; assert_eq!(0, core::mem::size_of::()); let zero_sized_layout = Layout::new::(); - _ = alloc.allocate(zero_sized_layout).unwrap_err(); + _ = VirtualAllocator.allocate(zero_sized_layout).unwrap_err(); } #[test] @@ -228,14 +227,13 @@ mod tests { let too_large_layout = Layout::from_size_align(1, too_large) .unwrap() .pad_to_align(); - let alloc = VirtualAllocator {}; - _ = alloc.allocate(too_large_layout).unwrap_err(); + _ = VirtualAllocator.allocate(too_large_layout).unwrap_err(); } #[test] fn test_small_cases() { let page_size = os::page_size().unwrap(); - let alloc = VirtualAllocator {}; + let alloc = VirtualAllocator; // Allocations get rounded up to page size. let small_cases = [1, page_size - 1]; @@ -266,9 +264,8 @@ mod tests { #[track_caller] fn realistic_size(size: usize) { let page_size = os::page_size().unwrap(); - let alloc = VirtualAllocator {}; let layout = Layout::from_size_align(size, page_size).unwrap(); - let wide_ptr = alloc.allocate(layout).unwrap(); + let wide_ptr = VirtualAllocator.allocate(layout).unwrap(); let actual_size = wide_ptr.len(); // Should be a multiple of page size. @@ -277,7 +274,7 @@ mod tests { // Shouldn't ever be smaller than what was asked for. 
assert!(actual_size >= size); - unsafe { alloc.deallocate(wide_ptr.cast(), layout) }; + unsafe { VirtualAllocator.deallocate(wide_ptr.cast(), layout) }; } #[test] diff --git a/datadog-profiling-ffi/Cargo.toml b/datadog-profiling-ffi/Cargo.toml index 061f110111..b835cdbbae 100644 --- a/datadog-profiling-ffi/Cargo.toml +++ b/datadog-profiling-ffi/Cargo.toml @@ -35,11 +35,13 @@ ddsketch-ffi = ["dep:ddsketch-ffi"] build_common = { path = "../build-common" } [dependencies] +allocator-api2 = { version = "0.2", features = ["alloc"] } anyhow = "1.0" data-pipeline-ffi = { path = "../data-pipeline-ffi", default-features = false, optional = true } datadog-crashtracker-ffi = { path = "../datadog-crashtracker-ffi", default-features = false, optional = true} datadog-library-config-ffi = { path = "../datadog-library-config-ffi", default-features = false, optional = true } datadog-profiling = { path = "../datadog-profiling" } +datadog-profiling-protobuf = { path = "../datadog-profiling-protobuf" } ddcommon = { path = "../ddcommon" } ddcommon-ffi = { path = "../ddcommon-ffi", default-features = false, optional = true } ddtelemetry-ffi = { path = "../ddtelemetry-ffi", default-features = false, optional = true, features = ["expanded_builder_macros"] } @@ -50,6 +52,12 @@ futures = { version = "0.3", default-features = false } http-body-util = "0.1" hyper = { workspace = true} libc = "0.2" +rustc-hash = { version = "1", default-features = false } serde_json = { version = "1.0" } symbolizer-ffi = { path = "../symbolizer-ffi", optional = true, default-features = false } +thiserror = "1" tokio-util = "0.7.1" + +[dev-dependencies] +datadog-profiling-protobuf = { path = "../datadog-profiling-protobuf", features = ["bolero", "prost_impls"] } +proptest = "1" diff --git a/datadog-profiling-ffi/cbindgen.toml b/datadog-profiling-ffi/cbindgen.toml index 8e78939f04..892a53bde0 100644 --- a/datadog-profiling-ffi/cbindgen.toml +++ b/datadog-profiling-ffi/cbindgen.toml @@ -24,72 +24,63 @@ prefix = 
"ddog_prof_" renaming_overrides_prefixing = true [export.rename] +# Common "ByteSlice" = "ddog_ByteSlice" "CharSlice" = "ddog_CharSlice" "Endpoint" = "ddog_Endpoint" "Error" = "ddog_Error" "HttpStatus" = "ddog_HttpStatus" +"Option_Slice_Label" = "ddog_Option_Slice_Label" "Slice_CChar" = "ddog_Slice_CChar" +"Slice_CharSlice" = "ddog_Slice_CharSlice" "Slice_I64" = "ddog_Slice_I64" +"Slice_U64" = "ddog_Slice_U64" "Slice_U8" = "ddog_Slice_U8" +"StringWrapper" = "ddog_StringWrapper" +"StringWrapperResult" = "ddog_StringWrapperResult" "Tag" = "ddog_Tag" "Timespec" = "ddog_Timespec" "Vec_Tag" = "ddog_Vec_Tag" "Vec_U8" = "ddog_Vec_U8" +"VoidResult" = "ddog_VoidResult" -"ProfilingEndpoint" = "ddog_prof_Endpoint" +# Profiles +"ArcHandle_ProfilesDictionary" = "ddog_prof_ProfilesDictionaryHandle" +"ArcHandle_ScratchPad" = "ddog_prof_ScratchPadHandle" +"ProfileHandle_PprofBuilder" = "ddog_prof_PprofBuilderHandle" +"ProfileHandle_Profile" = "ddog_prof_ProfileHandle" +"ProfileHandle_SampleBuilder" = "ddog_prof_SampleBuilderHandle" +"ProfileStatus" = "ddog_prof_Status" +"Vec_ProfileHandleProfile" = "ddog_prof_Vec_ProfileHandle" +"Vec_Usize" = "ddog_Vec_Usize" + +# Exporter / Legacy +"CompressorFinishResult" = "ddog_prof_Compressor_FinishResult" "ExporterNewResult" = "ddog_prof_Exporter_NewResult" "File" = "ddog_prof_Exporter_File" +"LabelsSetLookupResult" = "ddog_prof_LabelsSet_LookupResult" +"ManagedStringId" = "ddog_prof_ManagedStringId" +"ManagedStringStorage" = "ddog_prof_ManagedStringStorage" +"ManagedStringStorageInternResult" = "ddog_prof_ManagedStringStorage_InternResult" +"ManagedStringStorageNewResult" = "ddog_prof_ManagedStringStorage_NewResult" +"ProfileBuilderBuildResult" = "ddog_prof_ProfileBuilder_BuildResult" +"ProfileBuilderNewResult" = "ddog_prof_ProfileBuilder_NewResult" +"ProfileError" = "ddog_prof_Profile_Error" "ProfileExporter" = "ddog_prof_Exporter" "ProfileNewResult" = "ddog_prof_Profile_NewResult" "ProfileResult" = "ddog_prof_Profile_Result" 
+"ProfileVoidResult" = "ddog_prof_Profile_VoidResult" +"ProfilingEndpoint" = "ddog_prof_Endpoint" "Request" = "ddog_prof_Exporter_Request" "RequestBuildResult" = "ddog_prof_Exporter_Request_BuildResult" "SendResult" = "ddog_prof_Exporter_SendResult" "SerializeResult" = "ddog_prof_Profile_SerializeResult" -"Slice_File" = "ddog_prof_Exporter_Slice_File" -"ManagedStringStorage" = "ddog_prof_ManagedStringStorage" -"ManagedStringId" = "ddog_prof_ManagedStringId" -"StringWrapper" = "ddog_StringWrapper" -"StringWrapperResult" = "ddog_StringWrapperResult" -"VoidResult" = "ddog_VoidResult" - -"CbindgenIsDumbStringId" = "ddog_prof_StringId" - -"Slice_GenerationalIdLabelId" = "ddog_prof_Slice_LabelId" -"Slice_GenerationalIdLocationId" = "ddog_prof_Slice_LocationId" - -"GenerationalId_FunctionId" = "ddog_prof_FunctionId" -"Result_GenerationalIdFunctionId" = "ddog_prof_FunctionId_Result" -"FunctionId" = "OpaqueFunctionId" - -"GenerationalId_LabelId" = "ddog_prof_LabelId" -"Result_GenerationalIdLabelId" = "ddog_prof_LabelId_Result" -"LabelId" = "OpaqueLabelId" - -"GenerationalId_LabelSetId" = "ddog_prof_LabelSetId" -"Result_GenerationalIdLabelSetId" = "ddog_prof_LabelSetId_Result" -"LabelSetId" = "OpaqueLabelSetId" - -"GenerationalId_LocationId" = "ddog_prof_LocationId" -"Result_GenerationalIdLocationId" = "ddog_prof_LocationId_Result" -"LocationId" = "OpaqueLocationId" - -"GenerationalId_MappingId" = "ddog_prof_MappingId" -"Result_GenerationalIdMappingId" = "ddog_prof_MappingId_Result" -"MappingId" = "OpaqueMappingId" - -"GenerationalId_StackTraceId" = "ddog_prof_StackTraceId" -"Result_GenerationalIdStackTraceId" = "ddog_prof_StackTraceId_Result" -"StackTraceId" = "OpaqueStackTraceId" - -"GenerationalId_StringId" = "ddog_prof_StringId" -"Result_GenerationalIdStringId" = "ddog_prof_StringId_Result" - -# StringId is an alias of StringOffset, we need both to be `OpaqueStringId` -# for the current interning API. 
-"StringOffset" = "OpaqueStringId" -"StringId" = "OpaqueStringId" +"Slice_File" = "ddog_prof_Slice_Exporter_File" +"SliceSetInsertResult" = "ddog_prof_LabelsSet_InsertResult" +"StoreInsertResult" = "ddog_prof_Store_InsertResult" +"StringTableInternResult" = "ddog_prof_StringTable_InternResult" +"StringTableLookupResult" = "ddog_prof_StringTable_LookupResult" +"StringTableNewResult" = "ddog_prof_StringTable_NewResult" "HandleProfileExporter" = "ddog_prof_ProfileExporter" "Handle_ProfileExporter" = "ddog_prof_ProfileExporter" @@ -106,10 +97,26 @@ renaming_overrides_prefixing = true "CancellationToken" = "struct ddog_OpaqueCancellationToken" "Handle_TokioCancellationToken" = "ddog_CancellationToken" +# Horrible cbindgen output on these >.< +"Record_I64__2__OptZero" = "int64_t" +"Record_I64__3__OptZero" = "int64_t" +"Record_Line__4__OptZero" = "ddog_prof_Line" +"Record_StringOffset__2__OptZero" = "ddog_pprof_StringOffset" +"Record_StringOffset__3__OptZero" = "ddog_pprof_StringOffset" +"Record_StringOffset__4__OptZero" = "ddog_pprof_StringOffset" +"Record_StringOffset__5__OptZero" = "ddog_pprof_StringOffset" +"Record_StringOffset__6__OptZero" = "ddog_pprof_StringOffset" +"Record_U64__1__NoOptZero" = "uint64_t" +"Record_U64__1__OptZero" = "uint64_t" +"Record_U64__2__OptZero" = "uint64_t" +"Record_U64__3__OptZero" = "uint64_t" +"Record_U64__4__OptZero" = "uint64_t" + [export.mangle] rename_types = "PascalCase" [enum] +#merge_generic_tags = true prefix_with_name = true rename_variants = "ScreamingSnakeCase" diff --git a/datadog-profiling-ffi/rustfmt.toml b/datadog-profiling-ffi/rustfmt.toml new file mode 100644 index 0000000000..3504df4d30 --- /dev/null +++ b/datadog-profiling-ffi/rustfmt.toml @@ -0,0 +1,7 @@ +# Copyright 2025-Present Datadog, Inc. 
https://www.datadoghq.com/ +# SPDX-License-Identifier: Apache-2.0 + +max_width = 80 +doc_comment_code_block_width = 80 +comment_width = 80 +use_small_heuristics = "Max" diff --git a/datadog-profiling-ffi/src/arc_handle.rs b/datadog-profiling-ffi/src/arc_handle.rs new file mode 100644 index 0000000000..bc248587f8 --- /dev/null +++ b/datadog-profiling-ffi/src/arc_handle.rs @@ -0,0 +1,67 @@ +// Copyright 2025-Present Datadog, Inc. https://www.datadoghq.com/ +// SPDX-License-Identifier: Apache-2.0 + +use crate::EmptyHandleError; +use datadog_profiling::profiles::collections::Arc; +use datadog_profiling::profiles::ProfileError; +use std::ptr::{null_mut, NonNull}; + +/// Opaque FFI handle to an `Arc`'s inner `T`. +/// +/// Safety rules for implementors/callers: +/// - Do not create multiple owning `Arc`s from the same raw pointer. +/// - Always restore the original `Arc` with `into_raw` after any `from_raw`. +/// - Use `as_inner()` to validate non-null before performing raw round-trips. +#[repr(transparent)] +#[derive(Debug)] +pub struct ArcHandle(*mut T); + +impl Copy for ArcHandle {} +impl Clone for ArcHandle { + fn clone(&self) -> Self { + *self + } +} +impl Default for ArcHandle { + fn default() -> Self { + Self(null_mut()) + } +} + +impl ArcHandle { + /// Constructs a new handle by allocating an `Arc` and returning its + /// inner pointer as a handle. Returns OutOfMemory on allocation failure. + pub fn new(value: T) -> Result { + let arc = Arc::try_new(value)?; + let ptr = Arc::into_raw(arc).as_ptr(); + Ok(Self(ptr)) + } + + #[inline] + pub fn as_inner(&self) -> Result<&T, EmptyHandleError> { + unsafe { self.0.as_ref() }.ok_or(EmptyHandleError) + } + + /// Tries to clone the resource this handle points to, and returns a new + /// handle to it. 
+ pub fn try_clone(&self) -> Result { + let nn = NonNull::new(self.0).ok_or(EmptyHandleError)?; + // SAFETY: ArcHandle uses a pointer to T as its repr, and as long as + // callers have upheld safety requirements elsewhere, including the + // FFI, then there will be a valid object with refcount > 0. + unsafe { Arc::try_increment_count(nn.as_ptr())? }; + Ok(Self(self.0)) + } + + /// Drops the resource that this handle refers to. It will remain alive if + /// there are other handles to the resource which were created by + /// successful calls to try_clone. This handle will now be empty and + /// operations on it will fail. + pub fn drop_resource(&mut self) { + // pointers aren't default until Rust 1.88. + let ptr = core::mem::replace(&mut self.0, null_mut()); + if let Some(nn) = NonNull::new(ptr) { + drop(unsafe { Arc::from_raw(nn) }); + } + } +} diff --git a/datadog-profiling-ffi/src/exporter.rs b/datadog-profiling-ffi/src/exporter.rs index f1ff7ce464..e29111d427 100644 --- a/datadog-profiling-ffi/src/exporter.rs +++ b/datadog-profiling-ffi/src/exporter.rs @@ -4,13 +4,14 @@ #![allow(renamed_and_removed_lints)] #![allow(clippy::box_vec)] -use datadog_profiling::exporter; -use datadog_profiling::exporter::{ProfileExporter, Request}; -use datadog_profiling::internal::EncodedProfile; +use datadog_profiling::exporter::{ + self, EncodedProfile, ProfileExporter, Request, +}; use ddcommon::tag::Tag; use ddcommon_ffi::slice::{AsBytes, ByteSlice, CharSlice, Slice}; use ddcommon_ffi::{ - wrap_with_ffi_result, wrap_with_void_ffi_result, Handle, Result, ToInner, VoidResult, + wrap_with_ffi_result, wrap_with_void_ffi_result, Handle, Result, ToInner, + VoidResult, }; use function_name::named; use std::borrow::Cow; @@ -35,7 +36,8 @@ pub struct File<'a> { #[must_use] #[no_mangle] -pub extern "C" fn ddog_prof_Exporter_Slice_File_empty() -> Slice<'static, File<'static>> { +pub extern "C" fn ddog_prof_Exporter_Slice_File_empty( +) -> Slice<'static, File<'static>> { Slice::empty() } @@ 
-48,7 +50,9 @@ pub struct HttpStatus(u16); /// # Arguments /// * `base_url` - Contains a URL with scheme, host, and port e.g. "https://agent:8126/". #[no_mangle] -pub extern "C" fn ddog_prof_Endpoint_agent(base_url: CharSlice) -> ProfilingEndpoint { +pub extern "C" fn ddog_prof_Endpoint_agent( + base_url: CharSlice, +) -> ProfilingEndpoint { ProfilingEndpoint::Agent(base_url) } @@ -86,7 +90,9 @@ unsafe fn try_to_url(slice: CharSlice) -> anyhow::Result { Ok(hyper::Uri::from_str(str)?) } -pub unsafe fn try_to_endpoint(endpoint: ProfilingEndpoint) -> anyhow::Result { +pub unsafe fn try_to_endpoint( + endpoint: ProfilingEndpoint, +) -> anyhow::Result { // convert to utf8 losslessly -- URLs and API keys should all be ASCII, so // a failed result is likely to be an error. match endpoint { @@ -136,7 +142,8 @@ pub unsafe extern "C" fn ddog_prof_Exporter_new( ) -> Result> { wrap_with_ffi_result!({ let library_name = profiling_library_name.to_utf8_lossy().into_owned(); - let library_version = profiling_library_version.to_utf8_lossy().into_owned(); + let library_version = + profiling_library_version.to_utf8_lossy().into_owned(); let family = family.to_utf8_lossy().into_owned(); let converted_endpoint = unsafe { try_to_endpoint(endpoint)? }; let tags = tags.map(|tags| tags.iter().cloned().collect()); @@ -174,13 +181,17 @@ pub unsafe extern "C" fn ddog_prof_Exporter_set_timeout( /// valid `ddog_prof_Exporter_Request` object made by the Rust Global /// allocator that has not already been dropped. #[no_mangle] -pub unsafe extern "C" fn ddog_prof_Exporter_drop(mut exporter: *mut Handle) { +pub unsafe extern "C" fn ddog_prof_Exporter_drop( + mut exporter: *mut Handle, +) { // Technically, this function has been designed so if it's double-dropped // then it's okay, but it's not something that should be relied on. 
drop(exporter.take()) } -unsafe fn into_vec_files<'a>(slice: Slice<'a, File>) -> Vec> { +unsafe fn into_vec_files<'a>( + slice: Slice<'a, File>, +) -> Vec> { slice .into_slice() .iter() @@ -224,11 +235,15 @@ pub unsafe extern "C" fn ddog_prof_Exporter_Request_build( wrap_with_ffi_result!({ let exporter = exporter.to_inner_mut()?; let profile = *profile.take()?; - let files_to_compress_and_export = into_vec_files(files_to_compress_and_export); - let files_to_export_unmodified = into_vec_files(files_to_export_unmodified); - let tags = optional_additional_tags.map(|tags| tags.iter().cloned().collect()); - - let internal_metadata = parse_json("internal_metadata", optional_internal_metadata_json)?; + let files_to_compress_and_export = + into_vec_files(files_to_compress_and_export); + let files_to_export_unmodified = + into_vec_files(files_to_export_unmodified); + let tags = + optional_additional_tags.map(|tags| tags.iter().cloned().collect()); + + let internal_metadata = + parse_json("internal_metadata", optional_internal_metadata_json)?; let info = parse_json("info", optional_info_json)?; let request = exporter.build( @@ -269,7 +284,9 @@ unsafe fn parse_json( /// pointer must point to a valid `ddog_prof_Exporter_Request` object made by /// the Rust Global allocator. #[no_mangle] -pub unsafe extern "C" fn ddog_prof_Exporter_Request_drop(mut request: *mut Handle) { +pub unsafe extern "C" fn ddog_prof_Exporter_Request_drop( + mut request: *mut Handle, +) { // Technically, this function has been designed so if it's double-dropped // then it's okay, but it's not something that should be relied on. drop(request.take()) @@ -307,7 +324,8 @@ pub unsafe extern "C" fn ddog_prof_Exporter_send( /// different thread. 
#[no_mangle] #[must_use] -pub extern "C" fn ddog_CancellationToken_new() -> Handle { +pub extern "C" fn ddog_CancellationToken_new() -> Handle +{ TokioCancellationToken::new().into() } @@ -403,7 +421,9 @@ mod tests { CharSlice::from(base_url()) } - fn parsed_event_json(request: ddcommon_ffi::Result>) -> serde_json::Value { + fn parsed_event_json( + request: ddcommon_ffi::Result>, + ) -> serde_json::Value { // Safety: This is a test let request = unsafe { request.unwrap().take().unwrap() }; // Really hacky way of getting the event.json file contents, because I didn't want to @@ -412,9 +432,7 @@ mod tests { // in the profiling tests, please update there too :) let body = request.body(); let body_bytes: String = String::from_utf8_lossy( - &futures::executor::block_on(body.collect()) - .unwrap() - .to_bytes(), + &futures::executor::block_on(body.collect()).unwrap().to_bytes(), ) .to_string(); let event_json = body_bytes @@ -444,7 +462,9 @@ mod tests { }; match result { - Result::Ok(mut exporter) => unsafe { ddog_prof_Exporter_drop(&mut exporter) }, + Result::Ok(mut exporter) => unsafe { + ddog_prof_Exporter_drop(&mut exporter) + }, Result::Err(message) => { drop(message); panic!("Should not occur!") @@ -472,7 +492,8 @@ mod tests { let profile = &mut EncodedProfile::test_instance().unwrap().into(); let timeout_milliseconds = 90; unsafe { - ddog_prof_Exporter_set_timeout(&mut exporter, timeout_milliseconds).unwrap(); + ddog_prof_Exporter_set_timeout(&mut exporter, timeout_milliseconds) + .unwrap(); } let build_result = unsafe { @@ -546,7 +567,8 @@ mod tests { let profile = &mut EncodedProfile::test_instance().unwrap().into(); let timeout_milliseconds = 90; unsafe { - ddog_prof_Exporter_set_timeout(&mut exporter, timeout_milliseconds).unwrap(); + ddog_prof_Exporter_set_timeout(&mut exporter, timeout_milliseconds) + .unwrap(); } let raw_internal_metadata = CharSlice::from( @@ -605,10 +627,12 @@ mod tests { let timeout_milliseconds = 90; unsafe { - 
ddog_prof_Exporter_set_timeout(&mut exporter, timeout_milliseconds).unwrap(); + ddog_prof_Exporter_set_timeout(&mut exporter, timeout_milliseconds) + .unwrap(); } - let raw_internal_metadata = CharSlice::from("this is not a valid json string"); + let raw_internal_metadata = + CharSlice::from("this is not a valid json string"); let build_result = unsafe { ddog_prof_Exporter_Request_build( @@ -648,7 +672,8 @@ mod tests { let profile = &mut EncodedProfile::test_instance().unwrap().into(); let timeout_milliseconds = 90; unsafe { - ddog_prof_Exporter_set_timeout(&mut exporter, timeout_milliseconds).unwrap(); + ddog_prof_Exporter_set_timeout(&mut exporter, timeout_milliseconds) + .unwrap(); } let raw_info = CharSlice::from( @@ -747,7 +772,8 @@ mod tests { let profile = &mut EncodedProfile::test_instance().unwrap().into(); let timeout_milliseconds = 90; unsafe { - ddog_prof_Exporter_set_timeout(exporter, timeout_milliseconds).unwrap(); + ddog_prof_Exporter_set_timeout(exporter, timeout_milliseconds) + .unwrap(); } let raw_info = CharSlice::from("this is not a valid json string"); @@ -799,9 +825,9 @@ mod tests { .unwrap_err() .to_string(); assert_eq!( - "ddog_prof_Exporter_send failed: request: inner pointer was null, indicates use after free", - error - ); + "ddog_prof_Exporter_send failed: request: handle's interior pointer is null, indicates use-after-free", + error + ); } } } diff --git a/datadog-profiling-ffi/src/lib.rs b/datadog-profiling-ffi/src/lib.rs index bd8302c1a6..302fbf15f3 100644 --- a/datadog-profiling-ffi/src/lib.rs +++ b/datadog-profiling-ffi/src/lib.rs @@ -7,13 +7,21 @@ #![cfg_attr(not(test), deny(clippy::todo))] #![cfg_attr(not(test), deny(clippy::unimplemented))] +mod arc_handle; +mod exporter; +mod profile_handle; +mod profile_result; +pub mod profiles; +mod status; + +pub use arc_handle::*; +pub use profile_handle::*; +pub use profile_result::*; +pub use status::*; + #[cfg(all(feature = "symbolizer", not(target_os = "windows")))] pub use 
symbolizer_ffi::*; -mod exporter; -mod profiles; -mod string_storage; - // re-export crashtracker ffi #[cfg(feature = "crashtracker-ffi")] pub use datadog_crashtracker_ffi::*; diff --git a/datadog-profiling-ffi/src/profile_handle.rs b/datadog-profiling-ffi/src/profile_handle.rs new file mode 100644 index 0000000000..6b3cf1287b --- /dev/null +++ b/datadog-profiling-ffi/src/profile_handle.rs @@ -0,0 +1,173 @@ +// Copyright 2025-Present Datadog, Inc. https://www.datadoghq.com/ +// SPDX-License-Identifier: Apache-2.0 + +//! A profile handle is similar to the ddcommon_ffi::Handle, but its repr +//! is transparent, and it does not implement Drop. It also does not offer APIs +//! which panic, such as From because it has to Box, which may fail. +//! This is an experiment to see how it works comparatively. +//! +//! To dispose of it, call [`ProfileHandle::take`] and drop the box. + +use allocator_api2::alloc::AllocError; +use allocator_api2::boxed::Box; +use datadog_profiling::profiles::ProfileError; +use ddcommon::error::FfiSafeErrorMessage; +use std::ffi::CStr; +use std::fmt; +use std::ptr::NonNull; + +// Represents an object that should only be referred to by its handle. +#[repr(transparent)] +pub struct ProfileHandle { + /// A null pointer is a valid but almost useless handle as all operations + /// will error or return None. It's still good for initialization and + /// detecting some misuse. The pointer is only valid until it's dropped + /// through any handle to the same resource. If a handle is copied, then + /// it may be invalid even if it's non-null! + ptr: *mut T, +} + +/// Note that this type is Copy because it's an FFI type; we cannot stop C code +/// from copying it, so we are reflecting that fact. It is not recommended to +/// copy a handle. 
+impl Copy for ProfileHandle {} + +impl Default for ProfileHandle { + fn default() -> Self { + Self { ptr: std::ptr::null_mut() } + } +} + +impl fmt::Debug for ProfileHandle { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + f.debug_struct("ProfileHandle") + .field("ptr", &NonNull::new(self.ptr)) + .finish() + } +} + +impl Clone for ProfileHandle { + fn clone(&self) -> Self { + *self + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub struct EmptyHandleError; + +/// # Safety +/// +/// Uses c-str literal to ensure valid UTF-8 and null termination. +unsafe impl FfiSafeErrorMessage for EmptyHandleError { + fn as_ffi_str(&self) -> &'static CStr { + c"handle used with an interior null pointer" + } +} + +impl fmt::Display for EmptyHandleError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_rust_str().fmt(f) + } +} + +impl core::error::Error for EmptyHandleError {} + +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub struct AllocHandleError(AllocError); + +impl From for AllocHandleError { + fn from(e: AllocError) -> Self { + Self(e) + } +} + +impl AllocHandleError { + /// Returns the error message as a static reference to a CStr, which means + /// it is null terminated. + /// This is also guaranteed to valid UTF-8. + pub const fn message() -> &'static CStr { + c"memory allocation failed: profile handle couldn't be made" + } + + pub const fn message_str() -> &'static str { + // str::from_utf8_unchecked isn't stable until 1.87, so duplicate it. 
+ "memory allocation failed: profile handle couldn't be made" + } +} + +impl fmt::Display for AllocHandleError { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + Self::message_str().fmt(f) + } +} + +impl core::error::Error for AllocHandleError {} + +impl From for ProfileError { + fn from(err: EmptyHandleError) -> ProfileError { + ProfileError::other(err.as_rust_str()) + } +} + +impl From for ProfileError { + fn from(_: AllocHandleError) -> ProfileError { + ProfileError::other(AllocHandleError::message_str()) + } +} + +impl ProfileHandle { + /// Tries to heap-allocate the provided value and provide a handle to it. + /// Fails if the allocator fails. + pub fn try_new(t: T) -> Result { + let ptr = Box::into_raw(Box::try_new(t)?).cast(); + Ok(Self { ptr }) + } + + /// Returns the underlying boxed value if the handle is not empty. + /// + /// # Safety + /// + /// This function should not be called from different handles to the same + /// underlying resource. Example of issue: + /// 1. A handle is copied. + /// 2. Take is called on the original handle. + /// 3. Take is called on the copied handle, which isn't aware of the take + /// from step 2, and so you get two Box to the same value. + /// + /// Taking from the same handle multiple times is supported and safe. + pub unsafe fn take(&mut self) -> Option> { + // todo: MSRV 1.88 replace with core::mem::take. + let ptr = core::mem::replace(&mut self.ptr, std::ptr::null_mut()); + (!ptr.is_null()).then(|| unsafe { Box::from_raw(ptr.cast()) }) + } + + /// Tries to return a reference to the underlying value. + /// + /// # Safety + /// + /// 1. The handle's underlying resource must still be alive. + /// 2. No mutable references to the same underlying resource must exist. + /// This includes references from other handles to the same underlying + /// resource. 
+ pub unsafe fn as_inner(&self) -> Result<&T, EmptyHandleError> { + unsafe { self.ptr.cast::().as_ref() }.ok_or(EmptyHandleError) + } + + /// Tries to return a mutable reference to the underlying value. + /// + /// # Safety + /// + /// 1. The handle's underlying resource must still be alive. + /// 2. No references to the same underlying resource must exist, + /// even if it comes from a different handle to the same resource. + pub unsafe fn as_inner_mut(&mut self) -> Result<&mut T, EmptyHandleError> { + unsafe { self.ptr.cast::().as_mut() }.ok_or(EmptyHandleError) + } +} + +impl From> for ProfileHandle { + fn from(ptr: Box) -> Self { + let ptr = Box::into_raw(ptr).cast(); + Self { ptr } + } +} diff --git a/datadog-profiling-ffi/src/profile_result.rs b/datadog-profiling-ffi/src/profile_result.rs new file mode 100644 index 0000000000..d658d051f0 --- /dev/null +++ b/datadog-profiling-ffi/src/profile_result.rs @@ -0,0 +1,24 @@ +// Copyright 2025-Present Datadog, Inc. https://www.datadoghq.com/ +// SPDX-License-Identifier: Apache-2.0 + +use crate::ProfileStatus; + +#[repr(C)] +pub struct ProfileResult { + status: ProfileStatus, + ok: T, +} + +impl From> + for ProfileResult +{ + fn from(result: Result) -> Self { + match result { + Ok(ok) => ProfileResult { status: ProfileStatus::OK, ok }, + Err(err) => ProfileResult { + status: ProfileStatus::from_error(err), + ok: Default::default(), + }, + } + } +} diff --git a/datadog-profiling-ffi/src/profiles/datatypes.rs b/datadog-profiling-ffi/src/profiles/datatypes.rs deleted file mode 100644 index ba3fb9f22b..0000000000 --- a/datadog-profiling-ffi/src/profiles/datatypes.rs +++ /dev/null @@ -1,991 +0,0 @@ -// Copyright 2021-Present Datadog, Inc. 
https://www.datadoghq.com/ -// SPDX-License-Identifier: Apache-2.0 - -use crate::string_storage::{get_inner_string_storage, ManagedStringStorage}; -use anyhow::Context; -use datadog_profiling::api; -use datadog_profiling::api::ManagedStringId; -use datadog_profiling::internal; -use ddcommon_ffi::slice::{AsBytes, ByteSlice, CharSlice, Slice}; -use ddcommon_ffi::{wrap_with_ffi_result, Error, Handle, Timespec, ToInner}; -use function_name::named; -use std::num::NonZeroI64; -use std::str::Utf8Error; -use std::time::SystemTime; - -/// Represents a profile. Do not access its member for any reason, only use -/// the C API functions on this struct. -#[repr(C)] -pub struct Profile { - // This may be null, but if not it will point to a valid Profile. - inner: *mut internal::Profile, -} - -impl Profile { - fn new(profile: internal::Profile) -> Self { - Profile { - inner: Box::into_raw(Box::new(profile)), - } - } - - fn take(&mut self) -> Option> { - // Leaving a null will help with double-free issues that can - // arise in C. Of course, it's best to never get there in the - // first place! - let raw = std::mem::replace(&mut self.inner, std::ptr::null_mut()); - - if raw.is_null() { - None - } else { - Some(unsafe { Box::from_raw(raw) }) - } - } -} - -impl Drop for Profile { - fn drop(&mut self) { - drop(self.take()) - } -} - -/// A generic result type for when a profiling operation may fail, but there's -/// nothing to return in the case of success. -#[allow(dead_code)] -#[repr(C)] -pub enum ProfileResult { - Ok( - /// Do not use the value of Ok. This value only exists to overcome - /// Rust -> C code generation. - bool, - ), - Err(Error), -} - -impl From> for ProfileResult { - fn from(value: anyhow::Result<()>) -> Self { - match value { - Ok(_) => Self::Ok(true), - Err(err) => Self::Err(err.into()), - } - } -} - -/// Returned by [ddog_prof_Profile_new]. 
-#[allow(dead_code)] -#[repr(C)] -pub enum ProfileNewResult { - Ok(Profile), - #[allow(dead_code)] - Err(Error), -} - -#[allow(dead_code)] -#[repr(C)] -pub enum SerializeResult { - Ok(Handle), - Err(Error), -} - -impl From> for SerializeResult { - fn from(value: anyhow::Result) -> Self { - match value { - Ok(e) => Self::Ok(e.into()), - Err(err) => Self::Err(err.into()), - } - } -} - -#[repr(C)] -#[derive(Copy, Clone)] -pub struct ValueType<'a> { - pub type_: CharSlice<'a>, - pub unit: CharSlice<'a>, -} - -impl<'a> ValueType<'a> { - pub fn new(type_: &'a str, unit: &'a str) -> Self { - Self { - type_: type_.into(), - unit: unit.into(), - } - } -} - -#[repr(C)] -pub struct Period<'a> { - pub type_: ValueType<'a>, - pub value: i64, -} - -#[repr(C)] -#[derive(Copy, Clone, Default)] -pub struct Label<'a> { - pub key: CharSlice<'a>, - pub key_id: ManagedStringId, - - /// At most one of the following must be present - pub str: CharSlice<'a>, - pub str_id: ManagedStringId, - pub num: i64, - - /// Should only be present when num is present. - /// Specifies the units of num. - /// Use arbitrary string (for example, "requests") as a custom count unit. - /// If no unit is specified, consumer may apply heuristic to deduce the unit. - /// Consumers may also interpret units like "bytes" and "kilobytes" as memory - /// units and units like "seconds" and "nanoseconds" as time units, - /// and apply appropriate unit conversions to these. - pub num_unit: CharSlice<'a>, - pub num_unit_id: ManagedStringId, -} - -#[repr(C)] -#[derive(Copy, Clone, Default)] -pub struct Function<'a> { - /// Name of the function, in human-readable form if available. - pub name: CharSlice<'a>, - pub name_id: ManagedStringId, - - /// Name of the function, as identified by the system. - /// For instance, it can be a C++ mangled name. - pub system_name: CharSlice<'a>, - pub system_name_id: ManagedStringId, - - /// Source file containing the function. 
- pub filename: CharSlice<'a>, - pub filename_id: ManagedStringId, -} - -#[repr(C)] -#[derive(Copy, Clone, Default)] -pub struct Location<'a> { - /// todo: how to handle unknown mapping? - pub mapping: Mapping<'a>, - pub function: Function<'a>, - - /// The instruction address for this location, if available. It - /// should be within [Mapping.memory_start...Mapping.memory_limit] - /// for the corresponding mapping. A non-leaf address may be in the - /// middle of a call instruction. It is up to display tools to find - /// the beginning of the instruction if necessary. - pub address: u64, - pub line: i64, -} - -#[repr(C)] -#[derive(Copy, Clone, Default)] -pub struct Mapping<'a> { - /// Address at which the binary (or DLL) is loaded into memory. - pub memory_start: u64, - - /// The limit of the address range occupied by this mapping. - pub memory_limit: u64, - - /// Offset in the binary that corresponds to the first mapped address. - pub file_offset: u64, - - /// The object this entry is loaded from. This can be a filename on - /// disk for the main binary and shared libraries, or virtual - /// abstractions like "[vdso]". - pub filename: CharSlice<'a>, - pub filename_id: ManagedStringId, - - /// A string that uniquely identifies a particular program version - /// with high probability. E.g., for binaries generated by GNU tools, - /// it could be the contents of the .note.gnu.build-id field. - pub build_id: CharSlice<'a>, - pub build_id_id: ManagedStringId, -} - -#[repr(C)] -#[derive(Copy, Clone)] -pub struct Sample<'a> { - /// The leaf is at locations[0]. - pub locations: Slice<'a, Location<'a>>, - - /// The type and unit of each value is defined by the corresponding - /// entry in Profile.sample_type. All samples must have the same - /// number of values, the same as the length of Profile.sample_type. - /// When aggregating multiple samples into a single sample, the - /// result has a list of values that is the element-wise sum of the - /// lists of the originals. 
- pub values: Slice<'a, i64>, - - /// label includes additional context for this sample. It can include - /// things like a thread id, allocation size, etc - pub labels: Slice<'a, Label<'a>>, -} - -impl<'a> TryFrom<&'a Mapping<'a>> for api::Mapping<'a> { - type Error = Utf8Error; - - fn try_from(mapping: &'a Mapping<'a>) -> Result { - let filename = mapping.filename.try_to_utf8()?; - let build_id = mapping.build_id.try_to_utf8()?; - Ok(Self { - memory_start: mapping.memory_start, - memory_limit: mapping.memory_limit, - file_offset: mapping.file_offset, - filename, - build_id, - }) - } -} - -impl<'a> From<&'a Mapping<'a>> for api::StringIdMapping { - fn from(mapping: &'a Mapping<'a>) -> Self { - Self { - memory_start: mapping.memory_start, - memory_limit: mapping.memory_limit, - file_offset: mapping.file_offset, - filename: mapping.filename_id, - build_id: mapping.build_id_id, - } - } -} - -impl<'a> From<&'a ValueType<'a>> for api::ValueType<'a> { - fn from(vt: &'a ValueType<'a>) -> Self { - Self::new( - vt.type_.try_to_utf8().unwrap_or(""), - vt.unit.try_to_utf8().unwrap_or(""), - ) - } -} - -impl<'a> From<&'a Period<'a>> for api::Period<'a> { - fn from(period: &'a Period<'a>) -> Self { - Self { - r#type: api::ValueType::from(&period.type_), - value: period.value, - } - } -} - -impl<'a> TryFrom<&'a Function<'a>> for api::Function<'a> { - type Error = Utf8Error; - - fn try_from(function: &'a Function<'a>) -> Result { - let name = function.name.try_to_utf8()?; - let system_name = function.system_name.try_to_utf8()?; - let filename = function.filename.try_to_utf8()?; - Ok(Self { - name, - system_name, - filename, - }) - } -} - -impl<'a> From<&'a Function<'a>> for api::StringIdFunction { - fn from(function: &'a Function<'a>) -> Self { - Self { - name: function.name_id, - system_name: function.system_name_id, - filename: function.filename_id, - } - } -} - -impl<'a> TryFrom<&'a Location<'a>> for api::Location<'a> { - type Error = Utf8Error; - - fn try_from(location: &'a 
Location<'a>) -> Result { - let mapping = api::Mapping::try_from(&location.mapping)?; - let function = api::Function::try_from(&location.function)?; - Ok(Self { - mapping, - function, - address: location.address, - line: location.line, - }) - } -} - -impl<'a> From<&'a Location<'a>> for api::StringIdLocation { - fn from(location: &'a Location<'a>) -> Self { - Self { - mapping: api::StringIdMapping::from(&location.mapping), - function: api::StringIdFunction::from(&location.function), - address: location.address, - line: location.line, - } - } -} - -impl<'a> TryFrom<&'a Label<'a>> for api::Label<'a> { - type Error = Utf8Error; - - fn try_from(label: &'a Label<'a>) -> Result { - let key = label.key.try_to_utf8()?; - let str = label.str.try_to_utf8()?; - let num_unit = label.num_unit.try_to_utf8()?; - - Ok(Self { - key, - str, - num: label.num, - num_unit, - }) - } -} - -impl<'a> From<&'a Label<'a>> for api::StringIdLabel { - fn from(label: &'a Label<'a>) -> Self { - let key = label.key_id; - let str = label.str_id; - let num_unit = label.num_unit_id; - - Self { - key, - str, - num: label.num, - num_unit, - } - } -} - -impl<'a> TryFrom> for api::Sample<'a> { - type Error = Utf8Error; - - fn try_from(sample: Sample<'a>) -> Result { - let mut locations: Vec = Vec::with_capacity(sample.locations.len()); - - for location in sample.locations.as_slice().iter() { - locations.push(location.try_into()?) 
- } - - let values = sample.values.into_slice(); - - let mut labels: Vec = Vec::with_capacity(sample.labels.len()); - for label in sample.labels.as_slice().iter() { - labels.push(label.try_into()?); - } - - Ok(Self { - locations, - values, - labels, - }) - } -} - -impl<'a> From> for api::StringIdSample<'a> { - fn from(sample: Sample<'a>) -> Self { - Self { - locations: sample.locations.as_slice().iter().map(Into::into).collect(), - values: sample.values.into_slice(), - labels: sample.labels.as_slice().iter().map(Into::into).collect(), - } - } -} - -/// Create a new profile with the given sample types. Must call -/// `ddog_prof_Profile_drop` when you are done with the profile. -/// -/// # Arguments -/// * `sample_types` -/// * `period` - Optional period of the profile. Passing None/null translates to zero values. -/// * `start_time` - Optional time the profile started at. Passing None/null will use the current -/// time. -/// -/// # Safety -/// All slices must be have pointers that are suitably aligned for their type -/// and must have the correct number of elements for the slice. -#[no_mangle] -#[must_use] -pub unsafe extern "C" fn ddog_prof_Profile_new( - sample_types: Slice, - period: Option<&Period>, -) -> ProfileNewResult { - profile_new(sample_types, period, None) -} - -/// Same as `ddog_profile_new` but also configures a `string_storage` for the profile. -#[no_mangle] -#[must_use] -/// TODO: @ivoanjo Should this take a `*mut ManagedStringStorage` like Profile APIs do? 
-pub unsafe extern "C" fn ddog_prof_Profile_with_string_storage( - sample_types: Slice, - period: Option<&Period>, - string_storage: ManagedStringStorage, -) -> ProfileNewResult { - profile_new(sample_types, period, Some(string_storage)) -} - -unsafe fn profile_new( - sample_types: Slice, - period: Option<&Period>, - string_storage: Option, -) -> ProfileNewResult { - let types: Vec = sample_types.into_slice().iter().map(Into::into).collect(); - let period = period.map(Into::into); - - let internal_profile = match string_storage { - None => internal::Profile::new(&types, period), - Some(s) => { - let string_storage = match get_inner_string_storage(s, true) { - Ok(string_storage) => string_storage, - Err(err) => return ProfileNewResult::Err(err.into()), - }; - internal::Profile::with_string_storage(&types, period, string_storage) - } - }; - let ffi_profile = Profile::new(internal_profile); - ProfileNewResult::Ok(ffi_profile) -} - -/// # Safety -/// The `profile` can be null, but if non-null it must point to a Profile -/// made by this module, which has not previously been dropped. -#[no_mangle] -pub unsafe extern "C" fn ddog_prof_Profile_drop(profile: *mut Profile) { - // Technically, this function has been designed so if it's double-dropped - // then it's okay, but it's not something that should be relied on. - if !profile.is_null() { - drop((*profile).take()) - } -} - -#[cfg(test)] -impl From for Result<(), Error> { - fn from(result: ProfileResult) -> Self { - match result { - ProfileResult::Ok(_) => Ok(()), - ProfileResult::Err(err) => Err(err), - } - } -} - -#[cfg(test)] -impl From for Result { - fn from(result: ProfileNewResult) -> Self { - match result { - ProfileNewResult::Ok(p) => Ok(p), - ProfileNewResult::Err(err) => Err(err), - } - } -} - -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. All pointers inside the `sample` need to be valid for the duration -/// of this call. 
-/// -/// If successful, it returns the Ok variant. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// This call is _NOT_ thread-safe. -#[must_use] -#[no_mangle] -pub unsafe extern "C" fn ddog_prof_Profile_add( - profile: *mut Profile, - sample: Sample, - timestamp: Option, -) -> ProfileResult { - (|| { - let profile = profile_ptr_to_inner(profile)?; - let uses_string_ids = sample - .labels - .first() - .is_some_and(|label| label.key.is_empty() && label.key_id.value > 0); - - if uses_string_ids { - profile.add_string_id_sample(sample.into(), timestamp) - } else { - profile.try_add_sample(sample.try_into()?, timestamp) - } - })() - .context("ddog_prof_Profile_add failed") - .into() -} - -pub(crate) unsafe fn profile_ptr_to_inner<'a>( - profile_ptr: *mut Profile, -) -> anyhow::Result<&'a mut internal::Profile> { - match profile_ptr.as_mut() { - None => anyhow::bail!("profile pointer was null"), - Some(inner_ptr) => match inner_ptr.inner.as_mut() { - Some(profile) => Ok(profile), - None => anyhow::bail!("profile's inner pointer was null (indicates use-after-free)"), - }, - } -} - -/// Associate an endpoint to a given local root span id. -/// During the serialization of the profile, an endpoint label will be added -/// to all samples that contain a matching local root span id label. -/// -/// Note: calling this API causes the "trace endpoint" and "local root span id" strings -/// to be interned, even if no matching sample is found. -/// -/// # Arguments -/// * `profile` - a reference to the profile that will contain the samples. -/// * `local_root_span_id` -/// * `endpoint` - the value of the endpoint label to add for matching samples. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// This call is _NOT_ thread-safe. 
-#[no_mangle] -#[must_use] -pub unsafe extern "C" fn ddog_prof_Profile_set_endpoint( - profile: *mut Profile, - local_root_span_id: u64, - endpoint: CharSlice, -) -> ProfileResult { - (|| { - let profile = profile_ptr_to_inner(profile)?; - let endpoint = endpoint.to_utf8_lossy(); - profile.add_endpoint(local_root_span_id, endpoint) - })() - .context("ddog_prof_Profile_set_endpoint failed") - .into() -} - -/// Count the number of times an endpoint has been seen. -/// -/// # Arguments -/// * `profile` - a reference to the profile that will contain the samples. -/// * `endpoint` - the endpoint label for which the count will be incremented -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// This call is _NOT_ thread-safe. -#[no_mangle] -#[must_use] -pub unsafe extern "C" fn ddog_prof_Profile_add_endpoint_count( - profile: *mut Profile, - endpoint: CharSlice, - value: i64, -) -> ProfileResult { - (|| { - let profile = profile_ptr_to_inner(profile)?; - let endpoint = endpoint.to_utf8_lossy(); - profile.add_endpoint_count(endpoint, value) - })() - .context("ddog_prof_Profile_set_endpoint failed") - .into() -} - -/// Add a poisson-based upscaling rule which will be use to adjust values and make them -/// closer to reality. -/// -/// # Arguments -/// * `profile` - a reference to the profile that will contain the samples. -/// * `offset_values` - offset of the values -/// * `label_name` - name of the label used to identify sample(s) -/// * `label_value` - value of the label used to identify sample(s) -/// * `sum_value_offset` - offset of the value used as a sum (compute the average with -/// `count_value_offset`) -/// * `count_value_offset` - offset of the value used as a count (compute the average with -/// `sum_value_offset`) -/// * `sampling_distance` - this is the threshold for this sampling window. 
This value must not be -/// equal to 0 -/// -/// # Safety -/// This function must be called before serialize and must not be called after. -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// This call is _NOT_ thread-safe. -#[must_use] -#[no_mangle] -pub unsafe extern "C" fn ddog_prof_Profile_add_upscaling_rule_poisson( - profile: *mut Profile, - offset_values: Slice, - label_name: CharSlice, - label_value: CharSlice, - sum_value_offset: usize, - count_value_offset: usize, - sampling_distance: u64, -) -> ProfileResult { - (|| { - let profile = profile_ptr_to_inner(profile)?; - anyhow::ensure!(sampling_distance != 0, "sampling_distance must not be 0"); - let upscaling_info = api::UpscalingInfo::Poisson { - sum_value_offset, - count_value_offset, - sampling_distance, - }; - add_upscaling_rule( - profile, - offset_values, - label_name, - label_value, - upscaling_info, - ) - })() - .context("ddog_prof_Profile_add_upscaling_rule_proportional failed") - .into() -} - -/// Add a proportional-based upscaling rule which will be use to adjust values and make them -/// closer to reality. -/// -/// # Arguments -/// * `profile` - a reference to the profile that will contain the samples. -/// * `offset_values` - offset of the values -/// * `label_name` - name of the label used to identify sample(s) -/// * `label_value` - value of the label used to identify sample(s) -/// * `total_sampled` - number of sampled event (found in the pprof). This value must not be equal -/// to 0 -/// * `total_real` - number of events the profiler actually witnessed. This value must not be equal -/// to 0 -/// -/// # Safety -/// This function must be called before serialize and must not be called after. -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// This call is _NOT_ thread-safe. 
-#[must_use] -#[no_mangle] -pub unsafe extern "C" fn ddog_prof_Profile_add_upscaling_rule_proportional( - profile: *mut Profile, - offset_values: Slice, - label_name: CharSlice, - label_value: CharSlice, - total_sampled: u64, - total_real: u64, -) -> ProfileResult { - (|| { - let profile = profile_ptr_to_inner(profile)?; - anyhow::ensure!(total_sampled != 0, "total_sampled must not be 0"); - anyhow::ensure!(total_real != 0, "total_real must not be 0"); - let upscaling_info = api::UpscalingInfo::Proportional { - scale: total_real as f64 / total_sampled as f64, - }; - add_upscaling_rule( - profile, - offset_values, - label_name, - label_value, - upscaling_info, - ) - })() - .context("ddog_prof_Profile_add_upscaling_rule_proportional failed") - .into() -} - -unsafe fn add_upscaling_rule( - profile: &mut internal::Profile, - offset_values: Slice, - label_name: CharSlice, - label_value: CharSlice, - upscaling_info: api::UpscalingInfo, -) -> anyhow::Result<()> { - let label_name_n = label_name.to_utf8_lossy(); - let label_value_n = label_value.to_utf8_lossy(); - profile.add_upscaling_rule( - offset_values.as_slice(), - label_name_n.as_ref(), - label_value_n.as_ref(), - upscaling_info, - ) -} - -/// # Safety -/// Only pass a reference to a valid `ddog_prof_EncodedProfile`, or null. A -/// valid reference also means that it hasn't already been dropped or exported (do not -/// call this twice on the same object). -#[no_mangle] -pub unsafe extern "C" fn ddog_prof_EncodedProfile_drop( - profile: *mut Handle, -) { - // Technically, this function has been designed so if it's double-dropped - // then it's okay, but it's not something that should be relied on. - if !profile.is_null() { - drop((*profile).take()) - } -} - -/// Given an EncodedProfile, get a slice representing the bytes in the pprof. -/// This slice is valid for use until the encoded_profile is modified in any way (e.g. dropped or -/// consumed). 
-/// # Safety -/// Only pass a reference to a valid `ddog_prof_EncodedProfile`. -#[no_mangle] -#[must_use] -#[named] -pub unsafe extern "C" fn ddog_prof_EncodedProfile_bytes<'a>( - mut encoded_profile: *mut Handle, -) -> ddcommon_ffi::Result> { - wrap_with_ffi_result!({ - let slice = encoded_profile.to_inner_mut()?.buffer.as_slice(); - // Rountdtrip through raw pointers to avoid Rust complaining about lifetimes. - let byte_slice = ByteSlice::from_raw_parts(slice.as_ptr(), slice.len()); - anyhow::Ok(byte_slice) - }) -} - -/// Serialize the aggregated profile. -/// Drains the data, and then resets the profile for future use. -/// -/// Don't forget to clean up the ok with `ddog_prof_EncodedProfile_drop` or -/// the error variant with `ddog_Error_drop` when you are done with them. -/// -/// # Arguments -/// * `profile` - a reference to the profile being serialized. -/// * `start_time` - optional start time for the serialized profile. If None/null is passed, the -/// time of profile creation will be used. -/// * `end_time` - optional end time of the profile. If None/null is passed, the current time will -/// be used. -/// -/// # Safety -/// The `profile` must point to a valid profile object. -/// The `start_time` and `end_time` must be null or otherwise point to a valid TimeSpec object. 
-#[must_use] -#[no_mangle] -pub unsafe extern "C" fn ddog_prof_Profile_serialize( - profile: *mut Profile, - start_time: Option<&Timespec>, - end_time: Option<&Timespec>, -) -> SerializeResult { - (|| { - let profile = profile_ptr_to_inner(profile)?; - - let mut old_profile = profile.reset_and_return_previous()?; - if let Some(start_time) = start_time { - old_profile.set_start_time(start_time.into())?; - } - - let end_time = end_time.map(SystemTime::from); - old_profile.serialize_into_compressed_pprof(end_time, None) - })() - .context("ddog_prof_Profile_serialize failed") - .into() -} - -#[must_use] -#[no_mangle] -pub unsafe extern "C" fn ddog_Vec_U8_as_slice(vec: &ddcommon_ffi::Vec) -> Slice<'_, u8> { - vec.as_slice() -} - -/// Resets all data in `profile` except the sample types and period. Returns -/// true if it successfully reset the profile and false otherwise. The profile -/// remains valid if false is returned. -/// -/// # Arguments -/// * `profile` - A mutable reference to the profile to be reset. -/// * `start_time` - The time of the profile (after reset). Pass None/null to use the current time. -/// -/// # Safety -/// The `profile` must meet all the requirements of a mutable reference to the profile. Given this -/// can be called across an FFI boundary, the compiler cannot enforce this. -/// If `time` is not null, it must point to a valid Timespec object. 
-#[no_mangle] -#[must_use] -pub unsafe extern "C" fn ddog_prof_Profile_reset(profile: *mut Profile) -> ProfileResult { - (|| { - let profile = profile_ptr_to_inner(profile)?; - profile.reset_and_return_previous()?; - anyhow::Ok(()) - })() - .context("ddog_prof_Profile_reset failed") - .into() -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn ctor_and_dtor() -> Result<(), Error> { - unsafe { - let sample_type: *const ValueType = &ValueType::new("samples", "count"); - let mut profile = Result::from(ddog_prof_Profile_new( - Slice::from_raw_parts(sample_type, 1), - None, - ))?; - ddog_prof_Profile_drop(&mut profile); - Ok(()) - } - } - - #[test] - fn add_failure() -> Result<(), Error> { - unsafe { - let sample_type: *const ValueType = &ValueType::new("samples", "count"); - let mut profile = Result::from(ddog_prof_Profile_new( - Slice::from_raw_parts(sample_type, 1), - None, - ))?; - - // wrong number of values (doesn't match sample types) - let values: &[i64] = &[]; - - let sample = Sample { - locations: Slice::empty(), - values: Slice::from(values), - labels: Slice::empty(), - }; - - let result = Result::from(ddog_prof_Profile_add(&mut profile, sample, None)); - result.unwrap_err(); - ddog_prof_Profile_drop(&mut profile); - Ok(()) - } - } - - #[test] - // TODO FIX - #[cfg_attr(miri, ignore)] - fn aggregate_samples() -> anyhow::Result<()> { - unsafe { - let sample_type: *const ValueType = &ValueType::new("samples", "count"); - let mut profile = Result::from(ddog_prof_Profile_new( - Slice::from_raw_parts(sample_type, 1), - None, - ))?; - - let mapping = Mapping { - filename: "php".into(), - ..Default::default() - }; - - let locations = vec![Location { - mapping, - function: Function { - name: "{main}".into(), - name_id: ManagedStringId { value: 0 }, - system_name: "{main}".into(), - system_name_id: ManagedStringId { value: 0 }, - filename: "index.php".into(), - filename_id: ManagedStringId { value: 0 }, - }, - ..Default::default() - }]; - let values: Vec 
= vec![1]; - let labels = vec![Label { - key: Slice::from("pid"), - num: 101, - ..Default::default() - }]; - - let sample = Sample { - locations: Slice::from(&locations), - values: Slice::from(&values), - labels: Slice::from(&labels), - }; - - Result::from(ddog_prof_Profile_add(&mut profile, sample, None))?; - assert_eq!( - profile - .inner - .as_ref() - .unwrap() - .only_for_testing_num_aggregated_samples(), - 1 - ); - - Result::from(ddog_prof_Profile_add(&mut profile, sample, None))?; - assert_eq!( - profile - .inner - .as_ref() - .unwrap() - .only_for_testing_num_aggregated_samples(), - 1 - ); - - ddog_prof_Profile_drop(&mut profile); - Ok(()) - } - } - - unsafe fn provide_distinct_locations_ffi() -> Profile { - let sample_type: *const ValueType = &ValueType::new("samples", "count"); - let mut profile = Result::from(ddog_prof_Profile_new( - Slice::from_raw_parts(sample_type, 1), - None, - )) - .unwrap(); - - let mapping = Mapping { - filename: "php".into(), - ..Default::default() - }; - - let main_locations = vec![Location { - mapping, - function: Function { - name: "{main}".into(), - name_id: ManagedStringId { value: 0 }, - system_name: "{main}".into(), - system_name_id: ManagedStringId { value: 0 }, - filename: "index.php".into(), - filename_id: ManagedStringId { value: 0 }, - }, - ..Default::default() - }]; - let test_locations = vec![Location { - mapping, - function: Function { - name: "test".into(), - name_id: ManagedStringId { value: 0 }, - system_name: "test".into(), - system_name_id: ManagedStringId { value: 0 }, - filename: "index.php".into(), - filename_id: ManagedStringId { value: 0 }, - }, - line: 4, - ..Default::default() - }]; - let values: Vec = vec![1]; - let labels = vec![Label { - key: Slice::from("pid"), - key_id: ManagedStringId { value: 0 }, - str: Slice::from(""), - str_id: ManagedStringId { value: 0 }, - num: 101, - num_unit: Slice::from(""), - num_unit_id: ManagedStringId { value: 0 }, - }]; - - let main_sample = Sample { - locations: 
Slice::from(main_locations.as_slice()), - values: Slice::from(values.as_slice()), - labels: Slice::from(labels.as_slice()), - }; - - let test_sample = Sample { - locations: Slice::from(test_locations.as_slice()), - values: Slice::from(values.as_slice()), - labels: Slice::from(labels.as_slice()), - }; - - Result::from(ddog_prof_Profile_add(&mut profile, main_sample, None)).unwrap(); - assert_eq!( - profile - .inner - .as_ref() - .unwrap() - .only_for_testing_num_aggregated_samples(), - 1 - ); - - Result::from(ddog_prof_Profile_add(&mut profile, test_sample, None)).unwrap(); - assert_eq!( - profile - .inner - .as_ref() - .unwrap() - .only_for_testing_num_aggregated_samples(), - 2 - ); - - profile - } - - #[test] - fn distinct_locations_ffi() { - unsafe { - ddog_prof_Profile_drop(&mut provide_distinct_locations_ffi()); - } - } -} diff --git a/datadog-profiling-ffi/src/profiles/interning_api.rs b/datadog-profiling-ffi/src/profiles/interning_api.rs deleted file mode 100644 index 88f2a9f69d..0000000000 --- a/datadog-profiling-ffi/src/profiles/interning_api.rs +++ /dev/null @@ -1,452 +0,0 @@ -// Copyright 2025-Present Datadog, Inc. https://www.datadoghq.com/ -// SPDX-License-Identifier: Apache-2.0 - -use std::num::NonZeroI64; - -use super::datatypes::{profile_ptr_to_inner, Profile}; -use datadog_profiling::{ - api::ManagedStringId, - collections::identifiable::StringId, - internal::{ - self, - interning_api::{Generation, GenerationalId}, - FunctionId, LabelId, LabelSetId, LocationId, MappingId, StackTraceId, - }, -}; -use ddcommon_ffi::{ - slice::AsBytes, wrap_with_ffi_result, wrap_with_void_ffi_result, CharSlice, MutSlice, Result, - Slice, VoidResult, -}; -use function_name::named; - -// Cbindgen was putting invalid C types on the static, this workaround seems to fix it. 
-type CbindgenIsDumbStringId = GenerationalId; - -#[no_mangle] -#[used] -pub static ddog_INTERNED_EMPTY_STRING: CbindgenIsDumbStringId = - internal::Profile::INTERNED_EMPTY_STRING; - -/// This function interns its argument into the profiler. -/// If successful, it returns an opaque interning ID. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. -#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_function( - profile: *mut Profile, - name: GenerationalId, - system_name: GenerationalId, - filename: GenerationalId, -) -> Result> { - wrap_with_ffi_result!({ - profile_ptr_to_inner(profile)?.intern_function(name, system_name, filename) - }) -} - -/// This function interns its argument into the profiler. -/// If successful, it returns an opaque interning ID. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. 
-#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_label_num( - profile: *mut Profile, - key: GenerationalId, - val: i64, -) -> Result> { - wrap_with_ffi_result!({ - profile_ptr_to_inner(profile)?.intern_label_num( - key, - val, - GenerationalId::new_immortal(StringId::ZERO), - ) - }) -} - -/// This function interns its argument into the profiler. -/// If successful, it returns an opaque interning ID. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. -#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_label_num_with_unit( - profile: *mut Profile, - key: GenerationalId, - val: i64, - unit: GenerationalId, -) -> Result> { - wrap_with_ffi_result!({ profile_ptr_to_inner(profile)?.intern_label_num(key, val, unit) }) -} - -/// This function interns its argument into the profiler. -/// If successful, it returns an opaque interning ID. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. 
-#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_label_str( - profile: *mut Profile, - key: GenerationalId, - val: GenerationalId, -) -> Result> { - wrap_with_ffi_result!({ profile_ptr_to_inner(profile)?.intern_label_str(key, val) }) -} - -/// This function interns its argument into the profiler. -/// If successful, it returns an opaque interning ID. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. -#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_labelset( - profile: *mut Profile, - labels: Slice>, -) -> Result> { - wrap_with_ffi_result!({ profile_ptr_to_inner(profile)?.intern_labelset(labels.as_slice()) }) -} - -/// This function interns its argument into the profiler. -/// If successful, it returns an opaque interning ID. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. 
-#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_location( - profile: *mut Profile, - function_id: GenerationalId, - address: u64, - line: i64, -) -> Result> { - wrap_with_ffi_result!({ - profile_ptr_to_inner(profile)?.intern_location(None, function_id, address, line) - }) -} - -/// This function interns its argument into the profiler. -/// If successful, it returns an opaque interning ID. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. -#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_location_with_mapping_id( - profile: *mut Profile, - mapping_id: GenerationalId, - function_id: GenerationalId, - address: u64, - line: i64, -) -> Result> { - wrap_with_ffi_result!({ - profile_ptr_to_inner(profile)?.intern_location(Some(mapping_id), function_id, address, line) - }) -} - -/// This function interns its argument into the profiler. -/// If successful, it returns an opaque interning ID. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. 
-#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_managed_string( - profile: *mut Profile, - s: ManagedStringId, -) -> Result> { - wrap_with_ffi_result!({ profile_ptr_to_inner(profile)?.intern_managed_string(s) }) -} - -/// This function interns its argument into the profiler. -/// If successful, it returns an opaque interning ID. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. -#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_managed_strings( - profile: *mut Profile, - strings: Slice, - mut out: MutSlice>, -) -> VoidResult { - wrap_with_void_ffi_result!({ - anyhow::ensure!(strings.len() == out.len()); - profile_ptr_to_inner(profile)? - .intern_managed_strings(strings.as_slice(), out.as_mut_slice())?; - }) -} - -/// This function interns its argument into the profiler. -/// If successful, it returns an opaque interning ID. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. 
-#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_mapping( - profile: *mut Profile, - memory_start: u64, - memory_limit: u64, - file_offset: u64, - filename: GenerationalId, - build_id: GenerationalId, -) -> Result> { - wrap_with_ffi_result!({ - profile_ptr_to_inner(profile)?.intern_mapping( - memory_start, - memory_limit, - file_offset, - filename, - build_id, - ) - }) -} - -/// This function interns its argument into the profiler. -/// If successful, it returns void. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. -#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_sample( - profile: *mut Profile, - stacktrace: GenerationalId, - values: Slice, - labels: GenerationalId, - timestamp: Option, -) -> VoidResult { - wrap_with_void_ffi_result!({ - // TODO, this to_vec might not be necessary. - profile_ptr_to_inner(profile)?.intern_sample( - stacktrace, - values.as_slice(), - labels, - timestamp, - )?; - }) -} - -/// This function interns its argument into the profiler. -/// If successful, it returns an opaque interning ID. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. 
-#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_stacktrace( - profile: *mut Profile, - locations: Slice>, -) -> Result> { - wrap_with_ffi_result!({ - profile_ptr_to_inner(profile)?.intern_stacktrace(locations.as_slice()) - }) -} - -/// This function interns its argument into the profiler. -/// If successful, it returns an opaque interning ID. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. -#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_string( - profile: *mut Profile, - s: CharSlice, -) -> Result> { - wrap_with_ffi_result!({ profile_ptr_to_inner(profile)?.intern_string(s.try_to_utf8()?) }) -} - -/// This functions returns an interned id for an empty string -/// -/// # Safety -/// No preconditions -#[no_mangle] -pub unsafe extern "C" fn ddog_prof_Profile_interned_empty_string() -> GenerationalId { - internal::Profile::INTERNED_EMPTY_STRING -} - -/// This function interns its argument into the profiler. -/// If successful, it returns an opaque interning ID. -/// This ID is valid for use on this profiler, until the profiler is reset. -/// It is an error to use this id after the profiler has been reset, or on a different profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// All other arguments must remain valid for the length of this call. -/// This call is _NOT_ thread-safe. 
-#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_intern_strings( - profile: *mut Profile, - strings: Slice, - mut out: MutSlice>, -) -> VoidResult { - wrap_with_void_ffi_result!({ - anyhow::ensure!(strings.len() == out.len()); - let mut v = Vec::with_capacity(strings.len()); - for s in strings.iter() { - v.push(s.try_to_utf8()?); - } - profile_ptr_to_inner(profile)?.intern_strings(&v, out.as_mut_slice())?; - }) -} - -/// This functions returns the current generation of the profiler. -/// On error, it holds an error message in the error variant. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// This call is _NOT_ thread-safe. -#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_get_generation( - profile: *mut Profile, -) -> Result { - wrap_with_ffi_result!({ profile_ptr_to_inner(profile)?.get_generation() }) -} - -/// This functions returns whether the given generations are equal. -/// -/// # Safety: No safety requirements -#[must_use] -#[no_mangle] -pub unsafe extern "C" fn ddog_prof_Profile_generations_are_equal( - a: Generation, - b: Generation, -) -> bool { - a == b -} - -/// This functions ends the current sample and allows the profiler exporter to continue, if it was -/// blocked. -/// It must have been paired with exactly one `sample_start`. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. -/// This call is probably thread-safe, but I haven't confirmed this. -#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_sample_end(profile: *mut Profile) -> VoidResult { - wrap_with_void_ffi_result!({ - profile_ptr_to_inner(profile)?.sample_end()?; - }) -} - -/// This functions starts a sample and blocks the exporter from continuing. -/// -/// # Safety -/// The `profile` ptr must point to a valid Profile object created by this -/// module. 
-/// This call is probably thread-safe, but I haven't confirmed this. -#[must_use] -#[no_mangle] -#[named] -pub unsafe extern "C" fn ddog_prof_Profile_sample_start(profile: *mut Profile) -> VoidResult { - wrap_with_void_ffi_result!({ - profile_ptr_to_inner(profile)?.sample_start()?; - }) -} diff --git a/datadog-profiling-ffi/src/profiles/mod.rs b/datadog-profiling-ffi/src/profiles/mod.rs index 86136a6fbf..21299c23db 100644 --- a/datadog-profiling-ffi/src/profiles/mod.rs +++ b/datadog-profiling-ffi/src/profiles/mod.rs @@ -1,5 +1,56 @@ // Copyright 2021-Present Datadog, Inc. https://www.datadoghq.com/ // SPDX-License-Identifier: Apache-2.0 -mod datatypes; -mod interning_api; +// todo: do we need to expose compressor? Does FFI have any desire to expose +// a non-compressed serialized profile? +// mod compressor; +// mod parallel_set; + +mod pprof_builder; +mod profile; +mod profiling_dictionary; +mod sample_builder; +mod scratchpad; +mod utf8; + +pub use pprof_builder::*; +pub use profile::*; +pub use profiling_dictionary::*; +pub use sample_builder::*; +pub use scratchpad::*; +pub use utf8::*; + +use std::ffi::CStr; + +// Shared error message helpers and null-check macros reused by FFI modules. +pub const fn null_out_param_err() -> &'static CStr { + c"null pointer used as out parameter" +} + +pub const fn null_insert_err() -> &'static CStr { + c"tried to insert a null pointer" +} + +#[macro_export] +macro_rules! ensure_non_null_out_parameter { + ($expr:expr) => { + if $expr.is_null() { + return $crate::ProfileStatus::from( + $crate::profiles::null_out_param_err(), + ); + } + }; +} + +#[macro_export] +macro_rules! 
ensure_non_null_insert { + ($expr:expr) => { + if $expr.is_null() { + return $crate::ProfileStatus::from( + $crate::profiles::null_insert_err(), + ); + } + }; +} + +pub(crate) use {ensure_non_null_insert, ensure_non_null_out_parameter}; diff --git a/datadog-profiling-ffi/src/profiles/pprof_builder/adapter.rs b/datadog-profiling-ffi/src/profiles/pprof_builder/adapter.rs new file mode 100644 index 0000000000..5f31bb619e --- /dev/null +++ b/datadog-profiling-ffi/src/profiles/pprof_builder/adapter.rs @@ -0,0 +1,522 @@ +// Copyright 2025-Present Datadog, Inc. https://www.datadoghq.com/ +// SPDX-License-Identifier: Apache-2.0 + +use crate::profiles::{ + ddog_prof_PprofBuilder_add_profile_with_poisson_upscaling, + ddog_prof_PprofBuilder_add_profile_with_proportional_upscaling, + ddog_prof_PprofBuilder_build_compressed, ddog_prof_PprofBuilder_new, + ddog_prof_SampleBuilder_drop, ddog_prof_SampleBuilder_new, + ddog_prof_SampleBuilder_value, PoissonUpscalingRule, + ProportionalUpscalingRule, SampleBuilder, Utf8Option, +}; +use crate::{ + ensure_non_null_out_parameter, profiles, ArcHandle, ProfileHandle, + ProfileStatus, +}; +use datadog_profiling::exporter::EncodedProfile; +use datadog_profiling::profiles::datatypes::{ + Profile, ProfilesDictionary, ScratchPad, ValueType, MAX_SAMPLE_TYPES, +}; +use ddcommon_ffi::{Handle, Slice, Timespec}; +use std::mem; +use std::ops::Range; +use std::time::SystemTime; + +/// An adapter from the offset-based pprof format to the separate profiles +/// format that sort of mirrors the otel format. If you use this type, you are +/// expected to make a new one each profiling interval e.g. 60 seconds. +/// +/// Don't mutate this directly. Its definition is available for FFI layout +/// reasons only. +#[repr(C)] +pub struct ProfileAdapter<'a> { + started_at: Timespec, + dictionary: ArcHandle, + scratchpad: ArcHandle, + mappings: ddcommon_ffi::vec::Vec, + // A vec of slice of proportional rules. 
Uses an empty slice if the + // profile doesn't have a registered upscaling rule. + proportional_upscaling_rules: + ddcommon_ffi::vec::Vec>>, + // A vec of poisson rules. Exclusive with proportional rules. If the + // profile doesn't have a poisson rule, then it uses a sampling distance + // of 0, which isn't a legal value internally. + poisson_upscaling_rules: ddcommon_ffi::Vec, +} + +impl Default for ProfileAdapter<'_> { + fn default() -> Self { + Self { + started_at: Timespec::from(SystemTime::now()), + dictionary: Default::default(), + scratchpad: Default::default(), + mappings: Default::default(), + proportional_upscaling_rules: Default::default(), + poisson_upscaling_rules: Default::default(), + } + } +} + +#[repr(C)] +pub struct ProfileAdapterMapping { + profile: ProfileHandle, + /// This is the range in the sample types/values array in the legacy API + /// that corresponds to this mapping. + range: Range, +} + +impl Drop for ProfileAdapter<'_> { + fn drop(&mut self) { + let mut mappings = mem::take(&mut self.mappings).into_std(); + for mut mapping in mappings.drain(..) { + drop(unsafe { mapping.profile.take() }) + } + + self.dictionary.drop_resource(); + self.scratchpad.drop_resource(); + } +} + +/// Creates an adapter that maps the legacy offset-based sample model +/// (one flat list of sample types/values) into multiple Profiles, each with +/// 1–2 sample types. +/// +/// Inputs must satisfy: +/// - `value_types.len() == groupings.len()` +/// - `value_types.len() > 0 && groupings.len() > 0` +/// - `groupings` is a sequence of contiguous "runs". Each run defines one +/// Profile and must have length 1 or 2. These groupings all define the same +/// runs: +/// - `[ 0, 0, 1, 0, 0]` +/// - `[ 0, 0, 1, 2, 2]` +/// - `[13, 13, 0, 5, 5]` +/// +/// On success, a handle to the new `ProfileAdapter` is written to `out`. Drop +/// it with `ddog_prof_ProfileAdapter_drop`. +/// +/// Here is a partial C example using some PHP profiles. 
+/// +/// ```c +/// ddog_prof_ProfilesDictionaryHandle dictionary = // ... ; +/// +/// // Assume these ValueType entries were populated using your string table +/// // (type/unit ids). Order corresponds to the legacy offsets: +/// // [wall-time, wall-samples, cpu-time, alloc-bytes, alloc-count] +/// ddog_prof_ValueType value_types[5] = { +/// wall_time, wall_samples, cpu_time, alloc_bytes, alloc_count +/// }; +/// int64_t groupings[5] = { 0, 0, 1, 2, 2 }; +/// +/// ddog_prof_ScratchPadHandle scratchpad = // ... ; +/// ddog_prof_ProfileAdapter adapter; +/// ddog_prof_ProfileStatus st = ddog_prof_ProfileAdapter_new( +/// &adapter, +/// dictionary, +/// scratchpad, +/// (ddog_Slice_ValueType){ .ptr = value_types, .len = 5 }, +/// (ddog_Slice_I64){ .ptr = groupings, .len = 5 } +/// ); +/// if (st.flags != 0) { +/// // handle error, then: +/// ddog_prof_Status_drop(&st) +/// } +/// +/// // ...later... +/// +/// // Allocation sample was taken. +/// int64_t values[5] = { 0, 0, 0, 128, 1 }; +/// ddog_Slice_I64 ffi_slice = { .ptr = values, len = 5 }; +/// +/// ddog_prof_SampleBuilderHandle sample_builder_handle; +/// +/// st = ddog_prof_ProfileAdapter_add_sample( +/// &sample_builder_handle, +/// adapter, +/// 2, // profile grouping 2 +/// ffi_slice, +/// scratchpad, +/// ); +/// +/// // check st, then you can use SampleBuilder methods +/// // to add timestamps, links, etc. +/// +/// // then add it to the profile: +/// st = ddog_prof_SampleBuilder_finish( +/// &sample_builder_handle, +/// ); +/// +/// // add upscalings per profile grouping with one of: +/// // ddog_prof_ProfileAdapter_add_poisson_upscaling +/// // ddog_prof_ProfileAdapter_add_proportional_upscaling +/// +/// +/// // When the interval is up e.g. 
60 seconds, then: +/// ddog_prof_EndcodedProfile encoded_profile; +/// status = ddog_prof_ProfileAdapter_build_compressed( +/// &encoded_profile, +/// &adapter, // this clears the adapter +/// NULL, // start time, if you want to provide one manually +/// NULL, // stop time, if you want to provide one manually +/// ); +/// +/// +/// // order of these doesn't matter, the adapter keeps a refcount +/// // alive on the dictionary and scratchpad. +/// ddog_prof_ProfilesDictionary_drop(&dictionary); +/// ddog_prof_ScratchPad_drop(&scratchpad); +/// +/// ddog_prof_ProfileAdapter_drop(&adapter); +/// ``` +#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ProfileAdapter_new( + out: *mut ProfileAdapter<'_>, + dictionary: ArcHandle, + scratchpad: ArcHandle, + value_types: Slice<'_, ValueType>, + groupings: Slice<'_, i64>, +) -> ProfileStatus { + // Ensure slices and inputs are valid. + if out.is_null() { + return ProfileStatus::from(c"invalid input: argument out to ddog_prof_ProfileAdapter_new was null"); + } + let Ok(value_types) = value_types.try_as_slice() else { + return ProfileStatus::from(c"invalid input: argument value_types to ddog_prof_ProfileAdapter_new failed to convert to a Rust slice"); + }; + let Ok(groupings) = groupings.try_as_slice() else { + return ProfileStatus::from(c"invalid input: argument groupings to ddog_prof_ProfileAdapter_new failed to convert to a Rust slice"); + }; + + // Ensure the value_types and groupings have the same length. + if value_types.len() != groupings.len() { + return ProfileStatus::from(c"invalid input: arguments value_types and groupings to ddog_prof_ProfileAdapter_new had mismatched lengths"); + } + // Ensure the slices have at least 1 element. + if value_types.is_empty() { + return ProfileStatus::from(c"invalid input: arguments value_types and groupings to ddog_prof_ProfileAdapter_new must not be empty"); + } + + // Count runs and validate max run length. 
+ let (n_runs, longest_run) = count_runs_and_longest_run(groupings); + if longest_run > MAX_SAMPLE_TYPES { + return ProfileStatus::from( + c"invalid input: groupings must appear in runs of length at most 2", + ); + } + + // Build mapping of profiles (one per contiguous run). + let mut mappings = ddcommon_ffi::vec::Vec::new(); + let mut proportional_upscaling_rules = ddcommon_ffi::vec::Vec::new(); + let mut poisson_upscaling_rules = ddcommon_ffi::vec::Vec::new(); + mappings.try_reserve_exact(n_runs).unwrap(); + proportional_upscaling_rules.try_reserve_exact(n_runs).unwrap(); + poisson_upscaling_rules.try_reserve_exact(n_runs).unwrap(); + + for run in RunsIter::new(groupings) { + // Create a profile for this run + let mut mapping = ProfileAdapterMapping { + profile: Default::default(), + range: Default::default(), + }; + let result = profiles::ddog_prof_Profile_new(&mut mapping.profile); + if result.flags != 0 { + return result; + } + mapping.range = run.clone(); + let profile = mapping.profile; + mappings.push(mapping); + proportional_upscaling_rules.push(Slice::default()); + poisson_upscaling_rules.push(PoissonUpscalingRule { + sum_offset: 0, + count_offset: 0, + sampling_distance: 0, + }); + // Add sample types for the run. The run length was previously + // validated to be <= MAX_SAMPLE_TYPES. 
+ for value_idx in run { + let status = profiles::ddog_prof_Profile_add_sample_type( + profile, + value_types[value_idx], + ); + if status.flags != 0 { + return status; + } + } + } + + let Ok(mut dictionary) = dictionary.try_clone() else { + return ProfileStatus::from(c"reference count overflow: profile adapter could not clone the profiles dictionary"); + }; + + let Ok(scratchpad) = scratchpad.try_clone() else { + dictionary.drop_resource(); + return ProfileStatus::from(c"reference count overflow: profile adapter could not clone the scratchpad"); + }; + + unsafe { + out.write(ProfileAdapter { + started_at: Timespec::from(SystemTime::now()), + dictionary, + scratchpad, + mappings, + proportional_upscaling_rules, + poisson_upscaling_rules, + }) + }; + ProfileStatus::OK +} + +fn count_runs_and_longest_run(groupings: &[i64]) -> (usize, usize) { + // Do it all in one pass. + RunsIter::new(groupings).fold((0, 0), |(n_runs, longest), run| { + (n_runs + 1, longest.max(run.len())) + }) +} + +/// Maps the non-zero values to a profile, and returns using out parameters +/// the profile handle it matches, and a sample builder handle. The values +/// have already been added to the sample builder; the caller still needs to +/// add stack, timestamp, link, etc to the sample builder and then build it +/// into the profile. 
+#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ProfileAdapter_add_sample( + sample_builder: *mut ProfileHandle, + adapter: &ProfileAdapter<'_>, + profile_grouping: usize, + values: Slice<'_, i64>, +) -> ProfileStatus { + assert!(!sample_builder.is_null()); + assert!(profile_grouping < adapter.mappings.len()); + if adapter.mappings.is_empty() { + return ProfileStatus::from(c"invalid input: ddog_prof_ProfileAdapter_add_sample was called on an empty adapter"); + } + let values = values.try_as_slice().unwrap(); + + let Some(mapping) = adapter.mappings.get(profile_grouping) else { + return ProfileStatus::from(c"invalid input: grouping passed to ddog_prof_ProfileAdapter_add_sample was out of range"); + }; + + let mut builder = ProfileHandle::default(); + let status = ddog_prof_SampleBuilder_new( + &mut builder, + mapping.profile, + adapter.scratchpad, + ); + if status.flags != 0 { + return status; + } + for val in values[mapping.range.clone()].iter().copied() { + let status = ddog_prof_SampleBuilder_value(builder, val); + if status.flags != 0 { + ddog_prof_SampleBuilder_drop(&mut builder); + return status; + } + } + + sample_builder.write(builder); + + ProfileStatus::OK +} + +#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ProfileAdapter_add_proportional_upscaling< + 'a, +>( + adapter: Option<&mut ProfileAdapter<'a>>, + grouping_index: usize, + upscaling_rules: Slice<'a, ProportionalUpscalingRule<'a>>, + // utf8_option: Utf8Option, // todo: store this too +) -> ProfileStatus { + let Some(adapter) = adapter else { + return ProfileStatus::from(c"invalid input: null adapter passed to ddog_prof_ProfileAdapter_add_proportional_upscaling"); + }; + let Some(rules) = + adapter.proportional_upscaling_rules.get_mut(grouping_index) + else { + return ProfileStatus::from(c"invalid input: grouping index passed to ddog_prof_ProfileAdapter_add_proportional_upscaling was out of range"); + }; + if !rules.is_empty() { + return ProfileStatus::from(c"invalid 
input: ddog_prof_ProfileAdapter_add_proportional_upscaling was called for the same grouping more than once"); + } + if let Some(rule) = adapter.poisson_upscaling_rules.get(grouping_index) { + if rule.sampling_distance != 0 { + return ProfileStatus::from(c"invalid input: ddog_prof_ProfileAdapter_add_proportional_upscaling was called on a grouping that already had a poisson rule"); + } + } + *rules = upscaling_rules; + + ProfileStatus::OK +} + +#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ProfileAdapter_add_poisson_upscaling( + adapter: Option<&mut ProfileAdapter<'_>>, + grouping_index: usize, + upscaling_rule: PoissonUpscalingRule, +) -> ProfileStatus { + let Some(adapter) = adapter else { + return ProfileStatus::from(c"invalid input: null adapter passed to ddog_prof_ProfileAdapter_add_poisson_upscaling"); + }; + + if upscaling_rule.sampling_distance == 0 { + return ProfileStatus::from(c"invalid input: ddog_prof_ProfileAdapter_add_poisson_upscaling cannot have a sampling distance of zero"); + } + + let Some(rule) = adapter.poisson_upscaling_rules.get_mut(grouping_index) + else { + return ProfileStatus::from(c"invalid input: grouping index passed to ddog_prof_ProfileAdapter_add_poisson_upscaling was out of range"); + }; + + if rule.sampling_distance != 0 { + return ProfileStatus::from(c"invalid input: ddog_prof_ProfileAdapter_add_poisson_upscaling was called for the same grouping more than once"); + } + if let Some(rules) = + adapter.proportional_upscaling_rules.get(grouping_index) + { + if !rules.is_empty() { + return ProfileStatus::from(c"invalid input: ddog_prof_ProfileAdapter_add_poisson_upscaling was called on a grouping that already had proportional rules"); + } + } + + *rule = upscaling_rule; + + ProfileStatus::OK +} + +/// Builds and compresses a pprof using the data in the profile adapter. +/// +/// Afterward, you probably want to drop the adapter and make a new one. 
+/// +/// # Parameters +/// * `out_profile`: a pointer safe for `core::ptr::write`ing the handle for +/// the encoded profile. +/// * `adapter`: a mutable reference to the profile adapter. +/// * `start`: an optional reference to the start time of the Pprof profile. +/// Defaults to the time the adapter was made. +/// * `end`: an optional reference to the stop time of the Pprof profile. +/// Defaults to the time this call was made. +#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ProfileAdapter_build_compressed( + out_profile: *mut Handle, + adapter: Option<&mut ProfileAdapter<'_>>, + start: Option<&Timespec>, + end: Option<&Timespec>, +) -> ProfileStatus { + ensure_non_null_out_parameter!(out_profile); + let Some(adapter) = adapter else { + return ProfileStatus::from(c"invalid input: null adapter passed to ddog_prof_ProfileAdapter_build_compressed"); + }; + let start = start.unwrap_or(&adapter.started_at).clone(); + let end = end.cloned().unwrap_or_else(|| Timespec::from(SystemTime::now())); + + let mut pprof_builder = ProfileHandle::default(); + let Ok(dictionary) = adapter.dictionary.try_clone() else { + return ProfileStatus::from(c"reference count overflow: failed to increase refcount of profiles dictionary for ddog_prof_ProfileAdapter_build_compressed"); + }; + let Ok(scratchpad) = adapter.scratchpad.try_clone() else { + return ProfileStatus::from(c"reference count overflow: failed to increase refcount of scratchpad for ddog_prof_ProfileAdapter_build_compressed"); + }; + let status = + ddog_prof_PprofBuilder_new(&mut pprof_builder, dictionary, scratchpad); + if status.flags != 0 { + return status; + } + + for grouping_index in 0..adapter.mappings.len() { + let mapping = &adapter.mappings[grouping_index]; + let proportional = adapter.proportional_upscaling_rules[grouping_index]; + if !proportional.is_empty() { + let status = + ddog_prof_PprofBuilder_add_profile_with_proportional_upscaling( + pprof_builder, + mapping.profile, + proportional, + 
Utf8Option::Assume, + ); + if status.flags != 0 { + return status; + } + } else { + let poisson = adapter.poisson_upscaling_rules[grouping_index]; + let status = + ddog_prof_PprofBuilder_add_profile_with_poisson_upscaling( + pprof_builder, + mapping.profile, + poisson, + ); + if status.flags != 0 { + return status; + } + } + } + + ddog_prof_PprofBuilder_build_compressed( + out_profile, + pprof_builder, + 4096, + start, + end, + ) +} + +/// Frees the resources associated to the profile adapter handle, leaving an +/// empty adapter in its place. This is safe to call with null, and it's also +/// safe to call with an empty adapter. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ProfileAdapter_drop( + adapter: *mut ProfileAdapter, +) { + if adapter.is_null() { + return; + } + drop(mem::take(&mut *adapter)); +} + +/// Iterator over contiguous runs, returning the range for the run rather than +/// a slice of the data. This allows it to be used for element-wise arrays +/// like groupings and values. +/// +/// # Examples +/// +/// ``` +/// let groupings = &[0, 0, 1, 2, 2, 3, 4]; +/// let iter = datadog_profiling_ffi::profiles::RunsIter::new(groupings); +/// let runs = iter.collect::>(); +/// assert_eq!(runs.as_slice(), &[0..2, 2..3, 3..5, 5..6, 6..7]); +/// ``` +pub struct RunsIter<'a> { + slice: &'a [i64], + start: usize, +} + +impl<'a> RunsIter<'a> { + #[inline] + pub fn new(slice: &'a [i64]) -> Self { + Self { slice, start: 0 } + } + + #[inline] + fn run_len(&self, start: usize) -> usize { + let id = self.slice[start]; + self.slice[start..].iter().copied().take_while(|&i| i == id).count() + } +} + +impl<'a> Iterator for RunsIter<'a> { + type Item = Range; + + fn next(&mut self) -> Option { + if self.start < self.slice.len() { + let start = self.start; + let end = start + self.run_len(start); + self.start = end; // The new run starts at the end of the previous. 
+ Some(start..end) + } else { + None + } + } +} diff --git a/datadog-profiling-ffi/src/profiles/pprof_builder/mod.rs b/datadog-profiling-ffi/src/profiles/pprof_builder/mod.rs new file mode 100644 index 0000000000..dffbd395c3 --- /dev/null +++ b/datadog-profiling-ffi/src/profiles/pprof_builder/mod.rs @@ -0,0 +1,306 @@ +// Copyright 2025-Present Datadog, Inc. https://www.datadoghq.com/ +// SPDX-License-Identifier: Apache-2.0 + +mod adapter; +mod upscaling; + +pub use adapter::*; +pub use upscaling::*; + +use crate::profile_handle::ProfileHandle; +use crate::profiles::{ + ensure_non_null_out_parameter, Utf8ConversionError, Utf8Option, +}; +use crate::{ArcHandle, ProfileStatus}; +use datadog_profiling::exporter::EncodedProfile; +use datadog_profiling::profiles::datatypes::{ + Profile, ProfilesDictionary, ScratchPad, +}; +use datadog_profiling::profiles::{ + Compressor, PprofBuilder, ProfileError, SizeRestrictedBuffer, +}; +use ddcommon_ffi::slice::Slice; +use ddcommon_ffi::{Handle, Timespec}; + +/// Creates a `PprofBuilder` handle. +/// +/// # Safety +/// +/// - `out` must be non-null and valid for writes of `ProfileHandle<_>`. +/// - `dictionary` and `scratchpad` must be live handles whose resources +/// outlive all uses of the returned builder handle. +/// - Callers must uphold aliasing rules across FFI: while the builder is +/// mutated through this handle, no other references to the same builder +/// may be used. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_PprofBuilder_new<'a>( + out: *mut ProfileHandle>, + dictionary: ArcHandle, + scratchpad: ArcHandle, +) -> ProfileStatus { + ensure_non_null_out_parameter!(out); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let dict = dictionary.as_inner()? as *const ProfilesDictionary; + let pad = scratchpad.as_inner()? as *const ScratchPad; + // SAFETY: Tie lifetime to 'static for FFI; caller must ensure handles outlive builder usage. 
+ let builder = PprofBuilder::new(unsafe { &*dict }, unsafe { &*pad }); + let h = ProfileHandle::try_new(builder)?; + unsafe { out.write(h) }; + Ok(()) + }()) +} + +/// Adds a profile to the builder without upscaling rules. +/// +/// # Safety +/// +/// - `handle` must refer to a live builder, and no other mutable +/// references to that builder may be active for the duration of the call. +/// - `profile` must be non-null and point to a valid `Profile` that +/// remains alive until the pprof builder is done. +/// +/// TODO: finish safety +#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_PprofBuilder_add_profile( + mut handle: ProfileHandle, + profile: *const Profile, +) -> ProfileStatus { + crate::profiles::ensure_non_null_insert!(profile); + let result = || -> Result<(), ProfileStatus> { + let builder = unsafe { + handle + .as_inner_mut() + .map_err(ProfileStatus::from_ffi_safe_error_message)? + }; + let prof_ref = unsafe { &*profile }; + builder + .try_add_profile(prof_ref) + .map_err(ProfileStatus::from_ffi_safe_error_message) + }(); + match result { + Ok(_) => ProfileStatus::OK, + Err(err) => err, + } +} + +/// Adds a profile to the builder with the attached poisson upscaling rule. +/// +/// # Safety +/// +/// - `handle` must refer to a live builder, and no other mutable +/// references to that builder may be active for the duration of the call. +/// - `profile` must be non-null and point to a valid `Profile` that +/// remains alive until the pprof builder is done. 
+/// +/// TODO: finish safety +#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_PprofBuilder_add_profile_with_poisson_upscaling( + mut handle: ProfileHandle, + profile: ProfileHandle, + upscaling_rule: PoissonUpscalingRule, +) -> ProfileStatus { + let profile = match profile.as_inner() { + Ok(profile) => profile, + Err(err) => return ProfileStatus::from_ffi_safe_error_message(err), + }; + let result = || -> Result<(), ProfileStatus> { + let builder = unsafe { + handle + .as_inner_mut() + .map_err(ProfileStatus::from_ffi_safe_error_message)? + }; + + let upscaling_rule = upscaling_rule + .try_into() + .map_err(ProfileStatus::from_ffi_safe_error_message)?; + builder + .try_add_profile_with_poisson_upscaling( + // SAFETY: todo lifetime extension + unsafe { core::mem::transmute(profile) }, + upscaling_rule, + ) + .map_err(ProfileStatus::from_ffi_safe_error_message) + }(); + match result { + Ok(_) => ProfileStatus::OK, + Err(status) => status, + } +} + +/// Adds a profile to the builder with the attached proportional rule. +/// +/// # Safety +/// +/// - `handle` must refer to a live builder, and no other mutable +/// references to that builder may be active for the duration of the call. +/// - `profile` must be non-null and point to a valid `Profile` that +/// remains alive until the pprof builder is done. 
+/// +/// TODO: finish safety +#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_PprofBuilder_add_profile_with_proportional_upscaling< + 'a, +>( + mut handle: ProfileHandle>, + profile: ProfileHandle, + upscaling_rules: Slice>, + utf8_option: Utf8Option, +) -> ProfileStatus { + let profile = match profile.as_inner() { + Ok(profile) => profile, + Err(err) => return ProfileStatus::from_error(err), + }; + let result = || -> Result<(), ProfileStatus> { + let builder = unsafe { handle.as_inner_mut() } + .map_err(ProfileStatus::from_ffi_safe_error_message)?; + + let upscaling_rules = upscaling_rules + .try_as_slice() + .map_err(ProfileStatus::from_ffi_safe_error_message)?; + + builder + .try_add_profile_with_proportional_upscaling( + // SAFETY: todo lifetime extension + unsafe { core::mem::transmute(profile) }, + upscaling_rules.iter().map( + |rule| -> Result<_, Utf8ConversionError> { + let key = rule.group_by_label.key; + let value = utf8_option + .try_as_bytes_convert(rule.group_by_label.value)?; + Ok(( + (key, value), + rule.sampled as f64 / rule.real as f64, + )) + }, + ), + ) + .map_err(ProfileStatus::from_ffi_safe_error_message) + }(); + match result { + Ok(_) => ProfileStatus::OK, + Err(status) => status, + } +} + +/// Builds and returns a compressed `EncodedProfile` via `out_profile`. +/// +/// # Safety +/// +/// - `out_profile` must be non-null and valid for writes of `Handle<_>`. +/// - `handle` must refer to a live builder whose dependencies (dictionary, +/// scratchpad) are still alive. +/// - No other references may concurrently mutate the same builder. +/// - `start` and `end` must denote a non-decreasing time range. 
+#[no_mangle] +pub unsafe extern "C" fn ddog_prof_PprofBuilder_build_compressed( + out_profile: *mut Handle, + handle: ProfileHandle>, + size_hint: u32, + start: Timespec, + end: Timespec, +) -> ProfileStatus { + build_with_sink::( + out_profile, + handle, + size_hint, + start, + end, + |cap| Ok(Compressor::with_max_capacity(cap)), + |mut c| c.finish(), + ) +} + +/// Builds and returns an uncompressed `EncodedProfile` via `out_profile`. +/// +/// # Safety +/// +/// Same requirements as [`ddog_prof_PprofBuilder_build_compressed`]. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_PprofBuilder_build_uncompressed( + out_profile: *mut Handle, + handle: ProfileHandle>, + size_hint: u32, + start: Timespec, + end: Timespec, +) -> ProfileStatus { + build_with_sink::( + out_profile, + handle, + size_hint, + start, + end, + |cap| Ok(SizeRestrictedBuffer::new(cap)), + |b| Ok(b.into()), + ) +} + +fn build_with_sink( + out_profile: *mut Handle, + mut handle: ProfileHandle>, + size_hint: u32, + start: Timespec, + end: Timespec, + make_sink: Make, + finalize: Finalize, +) -> ProfileStatus +where + Sink: std::io::Write, + Make: FnOnce(usize) -> Result, + Finalize: FnOnce(Sink) -> Result, ProfileError>, +{ + ensure_non_null_out_parameter!(out_profile); + ProfileStatus::from(|| -> Result<(), ProfileError> { + if start.seconds > end.seconds + || (start.seconds == end.seconds + && start.nanoseconds > end.nanoseconds) + { + return Err(ProfileError::other( + "end time cannot be before start time", + )); + } + let builder = unsafe { handle.as_inner_mut()? }; + const MIB: usize = 1024 * 1024; + // This is decoupled from the intake limit somewhat so that if the + // limit is raised a little, clients don't need to be rebuilt. Of + // course, if the limit is raised a lot then we'll need to rebuild + // with a new max. 
+ let max_cap = (size_hint as usize).min(64 * MIB); + let mut sink = make_sink(max_cap)?; + builder.build(&mut sink)?; + let buffer = finalize(sink)?; + let start: std::time::SystemTime = start.into(); + let end: std::time::SystemTime = end.into(); + let encoded = EncodedProfile { + start, + end, + buffer, + endpoints_stats: Default::default(), + }; + let h = Handle::try_new(encoded).ok_or(ProfileError::other( + "out of memory: failed to allocate handle for the EncodedProfile", + ))?; + unsafe { out_profile.write(h) }; + Ok(()) + }()) +} + +/// Drops the builder resource held by `handle` and leaves an empty handle. +/// +/// # Safety +/// +/// - If non-null, `handle` must point to a valid +/// `ProfileHandle>`. +/// - The underlying resource must be dropped at most once across all copies +/// of the handle. Calling this on the same handle multiple times is ok. +/// - Do not use other copies of the handle after the resource is dropped. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_PprofBuilder_drop( + handle: *mut ProfileHandle>, +) { + if let Some(h) = handle.as_mut() { + drop(h.take()); + } +} diff --git a/datadog-profiling-ffi/src/profiles/pprof_builder/upscaling.rs b/datadog-profiling-ffi/src/profiles/pprof_builder/upscaling.rs new file mode 100644 index 0000000000..be934e6fd4 --- /dev/null +++ b/datadog-profiling-ffi/src/profiles/pprof_builder/upscaling.rs @@ -0,0 +1,87 @@ +// Copyright 2023-Present Datadog, Inc. 
https://www.datadoghq.com/ +// SPDX-License-Identifier: Apache-2.0 + +use datadog_profiling::profiles::collections::StringId; +use datadog_profiling::profiles::datatypes::MAX_SAMPLE_TYPES; +use ddcommon::error::FfiSafeErrorMessage; +use ddcommon_ffi::slice::CharSlice; +use std::ffi::CStr; +use std::num::NonZeroU64; + +#[repr(C)] +#[derive(Clone, Copy, Debug)] +pub struct GroupByLabel<'a> { + pub key: StringId, + pub value: CharSlice<'a>, +} + +#[repr(C)] +#[derive(Clone, Copy, Debug)] +pub struct ProportionalUpscalingRule<'a> { + /// The labels to group the sample values by. If it should apply to all + /// samples and not group by label, then use the empty StringId and empty + /// CharSlice. + pub group_by_label: GroupByLabel<'a>, + pub sampled: u64, + pub real: u64, +} + +#[repr(C)] +#[derive(Clone, Copy, Debug)] +pub struct PoissonUpscalingRule { + /// Which offset in the profile's sample is the sum. Must be disjoint from + /// `count_offset`. + pub sum_offset: u32, + /// Which offset in the profile's sample is the count. Must be disjoint + /// from `sum_offset`. + pub count_offset: u32, + pub sampling_distance: u64, +} + +#[derive(Debug)] +pub enum PoissonUpscalingConversionError { + SamplingDistance, + SumOffset, + CountOffset, +} + +// SAFETY: all cases use Rust c-str literals. 
+unsafe impl FfiSafeErrorMessage for PoissonUpscalingConversionError {
+    fn as_ffi_str(&self) -> &'static CStr {
+        match self {
+            PoissonUpscalingConversionError::SamplingDistance => c"PoissonUpscalingRule.sampling_distance cannot be zero",
+            PoissonUpscalingConversionError::SumOffset => c"PoissonUpscalingRule.sum_offset must be less than MAX_SAMPLE_TYPES",
+            PoissonUpscalingConversionError::CountOffset => c"PoissonUpscalingRule.count_offset must be less than MAX_SAMPLE_TYPES",
+        }
+    }
+}
+
+impl core::fmt::Display for PoissonUpscalingConversionError {
+    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
+        self.as_rust_str().fmt(f)
+    }
+}
+
+impl core::error::Error for PoissonUpscalingConversionError {}
+
+impl TryFrom<PoissonUpscalingRule>
+    for datadog_profiling::profiles::PoissonUpscalingRule
+{
+    type Error = PoissonUpscalingConversionError;
+
+    fn try_from(value: PoissonUpscalingRule) -> Result<Self, Self::Error> {
+        let Some(sampling_distance) = NonZeroU64::new(value.sampling_distance)
+        else {
+            return Err(PoissonUpscalingConversionError::SamplingDistance);
+        };
+        // Bug fix: `sum_offset` must read `value.sum_offset`. It previously
+        // copied `value.count_offset`, so the converted rule silently used
+        // the count offset for the sum whenever the two differed, and the
+        // SumOffset range check below validated the wrong field.
+        let sum_offset = value.sum_offset as usize;
+        let count_offset = value.count_offset as usize;
+        if sum_offset >= MAX_SAMPLE_TYPES {
+            return Err(PoissonUpscalingConversionError::SumOffset);
+        }
+        if count_offset >= MAX_SAMPLE_TYPES {
+            return Err(PoissonUpscalingConversionError::CountOffset);
+        }
+        Ok(Self { sum_offset, count_offset, sampling_distance })
+    }
+}
diff --git a/datadog-profiling-ffi/src/profiles/profile.rs b/datadog-profiling-ffi/src/profiles/profile.rs
new file mode 100644
index 0000000000..529b80408d
--- /dev/null
+++ b/datadog-profiling-ffi/src/profiles/profile.rs
@@ -0,0 +1,236 @@
+// Copyright 2025-Present Datadog, Inc. 
https://www.datadoghq.com/ +// SPDX-License-Identifier: Apache-2.0 + +use crate::profile_handle::ProfileHandle; +use crate::{ensure_non_null_out_parameter, ProfileStatus}; +use datadog_profiling::profiles::datatypes::{Profile, ValueType}; +use datadog_profiling::profiles::ProfileError; + +/// Allocates a new `Profile` and writes a handle to `handle`. +/// +/// # Safety +/// +/// - `handle` must be non-null and valid for writes of `ProfileHandle<_>`. +/// - The written handle must be dropped via the matching drop function; +/// see [`ddog_prof_Profile_drop`] for more details. +#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_Profile_new( + handle: *mut ProfileHandle, +) -> ProfileStatus { + ensure_non_null_out_parameter!(handle); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let h = ProfileHandle::try_new(Profile::default())?; + unsafe { handle.write(h) }; + Ok(()) + }()) +} + +/// Adds a sample type to a profile. +/// +/// # Safety +/// +/// - `handle` must refer to a live `Profile` and is treated as a unique +/// mutable reference for the duration of the call (no aliasing mutations). +#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_Profile_add_sample_type( + mut handle: ProfileHandle, + vt: ValueType, +) -> ProfileStatus { + ProfileStatus::from(|| -> Result<(), ProfileError> { + let prof = unsafe { handle.as_inner_mut()? }; + prof.try_add_sample_type(vt) + }()) +} + +/// Sets the period and adds its `ValueType` to the profile. +/// +/// # Safety +/// +/// - `handle` must refer to a live `Profile` and is treated as a unique +/// mutable reference for the duration of the call (no aliasing mutations). +#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_Profile_add_period( + mut handle: ProfileHandle, + period: i64, + vt: ValueType, +) -> ProfileStatus { + ProfileStatus::from(|| -> Result<(), ProfileError> { + let prof = unsafe { handle.as_inner_mut()? 
}; + prof.add_period(period, vt); + Ok(()) + }()) +} + +/// Drops the contents of the profile handle, leaving an empty handle behind. +/// +/// # Safety +/// +/// Pointer must point to a valid profile handle if not null. +/// +/// The underlying resource must only be dropped through a single handle, and +/// once the underlying profile has been dropped, all other handles are invalid +/// and should be discarded without dropping them. +/// +/// However, this function is safe to call multiple times on the _same handle_. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_Profile_drop( + handle: *mut ProfileHandle, +) { + if let Some(h) = handle.as_mut() { + drop(h.take()); + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::profiles::{ + ddog_prof_ProfileAdapter_drop, ddog_prof_ProfileAdapter_new, + ddog_prof_ProfilesDictionary_drop, ddog_prof_ScratchPad_drop, + ProfileAdapter, + }; + use crate::{ddog_prof_Status_drop, ArcHandle}; + use datadog_profiling::profiles::collections::StringId; + use datadog_profiling::profiles::datatypes::{ + ProfilesDictionary, ScratchPad, + }; + use proptest::prelude::*; + use proptest::test_runner::Config as ProptestConfig; + use std::ffi::CStr; + + // Tighter limits under Miri + #[cfg(miri)] + const PROPTEST_CASES: u32 = 32; + #[cfg(not(miri))] + const PROPTEST_CASES: u32 = 64; + + const MAX_SHRINK_ITERS: u32 = 100; + + // Bound the number of runs to keep input small under Miri + #[cfg(miri)] + const MAX_RUNS: usize = 4; + #[cfg(not(miri))] + const MAX_RUNS: usize = 8; + + // Strategy: build groupings as runs of length 1-2. + // Only adjacent runs must differ (run N's id != run N-1's id); non-contiguous reuse is allowed. + fn groupings_strategy() -> impl Strategy> { + // Generate 1..=8 runs, each run length in {1,2}, ensuring only adjacent runs differ. 
+ (1usize..=MAX_RUNS) + .prop_flat_map(|num_runs| { + let run_lens = prop::collection::vec( + prop_oneof![Just(1usize), Just(2usize)], + num_runs, + ); + let ids = prop::collection::vec(any::(), num_runs) + .prop_map(|mut v| { + // Ensure adjacent different by tweaking duplicates + for i in 1..v.len() { + if v[i] == v[i - 1] { + v[i] = v[i].wrapping_add(1); + } + } + v.into_iter().map(|x| x as i64).collect::>() + }); + (run_lens, ids) + }) + .prop_map(|(run_lens, ids)| { + let mut g = Vec::new(); + for (len, id) in run_lens.into_iter().zip(ids.into_iter()) { + g.extend(std::iter::repeat(id).take(len)); + } + g + }) + .prop_filter("non-empty", |g| !g.is_empty()) + } + + // Strategy: (groupings, value_types) with aligned lengths + fn groupings_and_value_types( + ) -> impl Strategy, Vec)> { + groupings_strategy().prop_flat_map(|groupings| { + let len = groupings.len(); + let vt = ValueType::new(StringId::EMPTY, StringId::EMPTY); + prop::collection::vec(proptest::strategy::Just(vt), len) + .prop_map(move |vts| (groupings.clone(), vts)) + }) + } + + proptest! { + #![proptest_config(ProptestConfig { cases: PROPTEST_CASES, max_shrink_iters: MAX_SHRINK_ITERS, .. ProptestConfig::default() })] + #[test] + fn adapter_new_ok_on_valid_inputs((groupings, value_types) in groupings_and_value_types()) { + let mut dict = ArcHandle::new(ProfilesDictionary::try_new().unwrap()).unwrap(); + let mut scratchpad = ArcHandle::new(ScratchPad::try_new().unwrap()).unwrap(); + // Construct adapter + let mut adapter = ProfileAdapter::default(); + let mut status = unsafe { + ddog_prof_ProfileAdapter_new( + &mut adapter, + dict, + scratchpad, + ddcommon_ffi::Slice::from(value_types.as_slice()), + ddcommon_ffi::Slice::from(groupings.as_slice()), + ) + }; + + if status.flags != 0 { + let cstr = unsafe { CStr::from_ptr(status.err) }; + let str = cstr.to_str().unwrap(); + eprintln!("profile adapter failed: {str}"); + } + + // Safe to call on OK too. 
+ unsafe { ddog_prof_Status_drop(&mut status)}; + + // Drop is safe + unsafe { ddog_prof_ProfileAdapter_drop(&mut adapter) }; + // Double-drop is a no-op + unsafe { ddog_prof_ProfileAdapter_drop(&mut adapter) }; + + unsafe { ddog_prof_ScratchPad_drop(&mut scratchpad) }; + unsafe { ddog_prof_ProfilesDictionary_drop(&mut dict) }; + } + + #[test] + fn adapter_new_rejects_runs_gt_two(mut groupings in groupings_strategy()) { + // Force an invalid run of length 3 by inserting an extra element equal to its neighbor + if groupings.len() >= 2 { + let idx = 0usize; + groupings.insert(idx, groupings[idx]); + // Now first run is length >= 2; insert again to make it 3 + groupings.insert(idx, groupings[idx]); + let len = groupings.len(); + let vt = ValueType::new(StringId::EMPTY, StringId::EMPTY); + let value_types = vec![vt; len]; + + let mut adapter = ProfileAdapter::default(); + let mut dict = ArcHandle::new(ProfilesDictionary::try_new().unwrap()).unwrap(); + let mut scratchpad = ArcHandle::new(ScratchPad::try_new().unwrap()).unwrap(); + let mut status = unsafe { + ddog_prof_ProfileAdapter_new( + &mut adapter, + dict, + scratchpad, + ddcommon_ffi::Slice::from(value_types.as_slice()), + ddcommon_ffi::Slice::from(groupings.as_slice()), + ) + }; + + if status.flags != 0 { + let cstr = unsafe { CStr::from_ptr(status.err) }; + let str = cstr.to_str().unwrap(); + eprintln!("profile adapter failed: {str}"); + } + // Safe to call on OK too. 
+ unsafe { ddog_prof_Status_drop(&mut status)}; + + unsafe { ddog_prof_ProfileAdapter_drop(&mut adapter) }; + unsafe { ddog_prof_ScratchPad_drop(&mut scratchpad) }; + unsafe { ddog_prof_ProfilesDictionary_drop(&mut dict) }; + } + } + } +} diff --git a/datadog-profiling-ffi/src/profiles/profiling_dictionary.rs b/datadog-profiling-ffi/src/profiles/profiling_dictionary.rs new file mode 100644 index 0000000000..34937ee602 --- /dev/null +++ b/datadog-profiling-ffi/src/profiles/profiling_dictionary.rs @@ -0,0 +1,265 @@ +// Copyright 2025-Present Datadog, Inc. https://www.datadoghq.com/ +// SPDX-License-Identifier: Apache-2.0 + +use crate::arc_handle::ArcHandle; +use crate::profiles::utf8::Utf8Option; +use crate::profiles::{ensure_non_null_insert, ensure_non_null_out_parameter}; +use crate::ProfileStatus; +use datadog_profiling::profiles::collections::StringId; +use datadog_profiling::profiles::datatypes::{ + Function, FunctionId, Mapping, MappingId, ProfilesDictionary, +}; +use datadog_profiling::profiles::ProfileError; +use ddcommon_ffi::CharSlice; + +/// A StringId that represents the empty string. +/// This is always available in every string set and can be used without +/// needing to insert it into a string set. +#[no_mangle] +pub static DDOG_PROF_STRINGID_EMPTY: StringId = StringId::EMPTY; + +/// A StringId that represents the string "end_timestamp_ns". +/// This is always available in every string set and can be used without +/// needing to insert it into a string set. +#[no_mangle] +pub static DDOG_PROF_STRINGID_END_TIMESTAMP_NS: StringId = + StringId::END_TIMESTAMP_NS; + +/// A StringId that represents the string "local root span id". +/// This is always available in every string set and can be used without +/// needing to insert it into a string set. +#[no_mangle] +pub static DDOG_PROF_STRINGID_LOCAL_ROOT_SPAN_ID: StringId = + StringId::LOCAL_ROOT_SPAN_ID; + +/// A StringId that represents the string "trace endpoint". 
+/// This is always available in every string set and can be used without +/// needing to insert it into a string set. +#[no_mangle] +pub static DDOG_PROF_STRINGID_TRACE_ENDPOINT: StringId = + StringId::TRACE_ENDPOINT; + +/// A StringId that represents the string "span id". +/// This is always available in every string set and can be used without +/// needing to insert it into a string set. +#[no_mangle] +pub static DDOG_PROF_STRINGID_SPAN_ID: StringId = StringId::SPAN_ID; + +/// Allocates a new `ProfilesDictionary` and writes a handle to it in `handle`. +/// +/// # Safety +/// +/// - `handle` must be non-null and valid for writes of +/// `ProfilesDictionaryHandle`. +/// - The returned handle must eventually drop the resource; see +/// [`ddog_prof_ProfilesDictionary_drop`] for more details. +/// - If you need a copy, use [`ddog_prof_ProfilesDictionary_try_clone`]; +/// don't just memcpy a new handle. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ProfilesDictionary_new( + handle: *mut ArcHandle, +) -> ProfileStatus { + ensure_non_null_out_parameter!(handle); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let dict = ProfilesDictionary::try_new()?; + let h = ArcHandle::new(dict)?; + unsafe { handle.write(h) }; + Ok(()) + }()) +} + +/// Creates a new handle to the same `ProfilesDictionary` by incrementing the +/// internal reference count. +/// +/// # Safety +/// +/// - `out` must be non-null and valid for writes of `ProfilesDictionaryHandle`. +/// - `handle` must point to a live dictionary resource. +/// - Do not duplicate handles via memcpy; always use this API to create new +/// handles so the reference count is maintained correctly. 
+#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ProfilesDictionary_try_clone( + out: *mut ArcHandle, + handle: ArcHandle, +) -> ProfileStatus { + ensure_non_null_out_parameter!(out); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let cloned = handle.try_clone()?; + unsafe { out.write(cloned) }; + Ok(()) + }()) +} + +/// Inserts a `Function` into the dictionary and returns its id. +/// +/// # Safety +/// +/// - `function_id` must be non-null and valid for writes of `FunctionId`. +/// - `handle` must refer to a live dictionary. +/// - `function` must be non-null and point to a valid `Function` for the +/// duration of the call. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ProfilesDictionary_insert_function( + function_id: *mut FunctionId, + handle: ArcHandle, + function: *const Function, +) -> ProfileStatus { + ensure_non_null_out_parameter!(function_id); + ensure_non_null_insert!(function); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let dict = handle.as_inner()?; + let id = dict.functions().try_insert(*function)?; + unsafe { function_id.write(id.into_raw()) }; + Ok(()) + }()) +} + +/// Inserts a `Mapping` into the dictionary and returns its id. +/// +/// # Safety +/// +/// - `mapping_id` must be non-null and valid for writes of `MappingId`. +/// - `handle` must refer to a live dictionary. +/// - `mapping` must be non-null and point to a valid `Mapping` for the +/// duration of the call. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ProfilesDictionary_insert_mapping( + mapping_id: *mut MappingId, + handle: ArcHandle, + mapping: *const Mapping, +) -> ProfileStatus { + ensure_non_null_out_parameter!(mapping_id); + ensure_non_null_insert!(mapping); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let dict = handle.as_inner()?; + let id = dict.mappings().try_insert(*mapping)?; + unsafe { mapping_id.write(id.into_raw()) }; + Ok(()) + }()) +} + +/// Inserts a UTF-8 string into the dictionary string table. 
+/// +/// # Safety +/// +/// - `string_id` must be non-null and valid for writes of `StringId`. +/// - `handle` must refer to a live dictionary. +/// - The UTF-8 policy indicated by `utf8_option` must be respected by caller +/// for the provided `byte_slice`. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ProfilesDictionary_insert_str( + string_id: *mut StringId, + handle: ArcHandle, + byte_slice: CharSlice, + utf8_option: Utf8Option, +) -> ProfileStatus { + ensure_non_null_out_parameter!(string_id); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let dict = handle.as_inner()?; + crate::profiles::utf8::insert_str( + dict.strings(), + byte_slice, + utf8_option, + ) + .map(|id| unsafe { string_id.write(id) }) + }()) +} + +/// Tries to get the string value associated with the string id. Fails if the +/// handle has been taken from, or the result param is null. +/// +/// # Safety +/// +/// 1. The lifetime of the return slice is tied to the underlying storage of +/// the string set, make sure the string set is still alive when using the +/// returned slice. +/// 2. The string id should belong to the string set in this dictionary. +/// Well-known strings are an exception, as they exist in every set. +/// 3. The handle must represent a live profiles dictionary. Remember handles +/// can be copied, and if _any_ handle drops the resource, then all handles +/// pointing the resource are now invalid, even if though they are unaware +/// of it. +/// 4. The result pointer must valid for [`core::ptr::write`]. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ProfilesDictionary_get_str( + result: *mut CharSlice<'static>, + handle: ArcHandle, + string_id: StringId, +) -> ProfileStatus { + ensure_non_null_out_parameter!(result); + ProfileStatus::from(handle.as_inner().map(|dict| { + // SAFETY: It's not actually safe--as indicated in the docs + // for this function, the caller needs to be sure the string + // set in the dictionary outlives the slice. 
+ result.write(unsafe { + std::mem::transmute::, CharSlice<'static>>( + CharSlice::from(dict.strings().get(string_id)), + ) + }) + })) +} + +/// Drops the `ProfilesDictionary` that the handle owns, leaving a valid but +/// useless handle (all operations on it will error). This takes a pointer to +/// the handle to be able to modify it to leave behind an empty handle. +/// +/// # Safety +/// +/// - If non-null, `handle` must point to a valid `ProfilesDictionaryHandle`. +/// - The underlying resource must be dropped exactly once across all copies of +/// the handle. After dropping, all other copies become invalid and must not +/// be used; they should be discarded without dropping. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ProfilesDictionary_drop( + handle: *mut ArcHandle, +) { + if let Some(h) = handle.as_mut() { + h.drop_resource(); + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::profiles::utf8::Utf8Option; + use std::ptr::NonNull; + + #[test] + fn test_basics_including_drop() { + let mut handle = ArcHandle::default(); + unsafe { + Result::from(ddog_prof_ProfilesDictionary_new(&mut handle)) + .unwrap(); + + let mut string_id = StringId::default(); + Result::from(ddog_prof_ProfilesDictionary_insert_str( + &mut string_id, + handle, + CharSlice::from("void main(int, char *[])"), + Utf8Option::Assume, + )) + .unwrap(); + + let mut function_id = NonNull::dangling(); + let function = Function { + name: string_id, + system_name: Default::default(), + file_name: Default::default(), + }; + Result::from(ddog_prof_ProfilesDictionary_insert_function( + &mut function_id, + handle, + &function, + )) + .unwrap(); + + let mut found = CharSlice::empty(); + let status = ddog_prof_ProfilesDictionary_get_str( + &mut found, handle, string_id, + ); + Result::from(status).unwrap(); + + ddog_prof_ProfilesDictionary_drop(&mut handle); + } + } +} diff --git a/datadog-profiling-ffi/src/profiles/sample_builder.rs b/datadog-profiling-ffi/src/profiles/sample_builder.rs 
new file mode 100644 index 0000000000..b3f64ca67f --- /dev/null +++ b/datadog-profiling-ffi/src/profiles/sample_builder.rs @@ -0,0 +1,216 @@ +// Copyright 2025-Present Datadog, Inc. https://www.datadoghq.com/ +// SPDX-License-Identifier: Apache-2.0 + +use crate::arc_handle::ArcHandle; +use crate::profile_handle::ProfileHandle; +use crate::profiles::{ + ensure_non_null_insert, ensure_non_null_out_parameter, Utf8Option, +}; +use crate::ProfileStatus; +use datadog_profiling::profiles::collections::StringId; +use datadog_profiling::profiles::datatypes::{ + self, Link, Profile, ScratchPad, StackId, +}; +use datadog_profiling::profiles::ProfileError; +use ddcommon_ffi::{CharSlice, Timespec}; +use std::time::SystemTime; + +pub struct SampleBuilder { + builder: datatypes::SampleBuilder, + profile: ProfileHandle, // borrowed +} + +/// Creates a `SampleBuilder` backed by the provided `ScratchPad`. +/// +/// Use [`ddog_prof_SampleBuilder_drop`] to free it, see it for more details. +/// +/// # Safety +/// +/// - `out` must be non-null and valid for writes of `SampleBuilderHandle`. +/// - `profile` handle must outlive the sample value, as it borrows it. +/// - `scratchpad` must be a live handle; its resource must outlive all uses of +/// the returned builder handle. +#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_SampleBuilder_new( + out: *mut ProfileHandle, + profile: ProfileHandle, + scratchpad: ArcHandle, +) -> ProfileStatus { + ensure_non_null_out_parameter!(out); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let sp = scratchpad.as_inner()?; + let attributes = sp.attributes().try_clone()?; + let links = sp.links().try_clone()?; + let builder = datatypes::SampleBuilder::new(attributes, links); + let ffi_builder = SampleBuilder { builder, profile }; + let handle = ProfileHandle::try_new(ffi_builder)?; + unsafe { out.write(handle) }; + Ok(()) + }()) +} + +/// Sets the stack id of the builder. 
+/// +/// # Safety +/// +/// - `handle` must refer to a live builder and is treated as a unique mutable +/// reference for the duration of the call. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_SampleBuilder_stack_id( + mut handle: ProfileHandle, + stack_id: StackId, +) -> ProfileStatus { + ProfileStatus::from(|| -> Result<(), ProfileError> { + let ffi_builder = unsafe { handle.as_inner_mut()? }; + ffi_builder.builder.set_stack_id(stack_id); + Ok(()) + }()) +} + +/// Appends a value to the builder. +/// +/// # Safety +/// +/// - `handle` must refer to a live builder and is treated as a unique mutable +/// reference for the duration of the call. +#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_SampleBuilder_value( + mut handle: ProfileHandle, + value: i64, +) -> ProfileStatus { + ProfileStatus::from(|| -> Result<(), ProfileError> { + let ffi_builder = unsafe { handle.as_inner_mut()? }; + ffi_builder.builder.push_value(value)?; + Ok(()) + }()) +} + +/// Adds a string attribute to the builder. +/// +/// # Safety +/// +/// - `handle` must refer to a live builder and is treated as a unique mutable +/// reference for the duration of the call. +/// - `key`/`val` must follow the UTF-8 policy indicated by `utf8`. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_SampleBuilder_attribute_str( + mut handle: ProfileHandle, + key_id: StringId, + val: CharSlice<'_>, + utf8: Utf8Option, +) -> ProfileStatus { + ProfileStatus::from(|| -> Result<(), ProfileError> { + let val = unsafe { utf8.try_as_bytes_convert(val)? }; + let ffi_builder = unsafe { handle.as_inner_mut()? }; + ffi_builder.builder.push_attribute_str(key_id, val.as_ref())?; + Ok(()) + }()) +} + +/// Adds an integer attribute to the builder. +/// +/// # Safety +/// +/// - `handle` must refer to a live builder and is treated as a unique mutable +/// reference for the duration of the call. +/// - `key` must follow the UTF-8 policy indicated by `utf8`. 
+#[no_mangle] +pub unsafe extern "C" fn ddog_prof_SampleBuilder_attribute_int( + mut handle: ProfileHandle, + key_id: StringId, + val: i64, +) -> ProfileStatus { + ProfileStatus::from(|| -> Result<(), ProfileError> { + let ffi_builder = unsafe { handle.as_inner_mut()? }; + ffi_builder.builder.push_attribute_int(key_id, val)?; + Ok(()) + }()) +} + +/// Sets the link on the builder. +/// +/// # Safety +/// +/// - `handle` must refer to a live builder and is treated as a unique mutable +/// reference for the duration of the call. +/// - `link` must be non-null and point to a valid `Link` for the duration of +/// the call. +#[must_use] +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_SampleBuilder_link( + mut handle: ProfileHandle, + link: *const Link, +) -> ProfileStatus { + ensure_non_null_insert!(link); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let ffi_builder = unsafe { handle.as_inner_mut()? }; + let link = unsafe { *link }; + ffi_builder.builder.set_link(link)?; + Ok(()) + }()) +} + +/// Sets a timestamp (in nanoseconds) on the builder. +/// +/// # Safety +/// +/// - `handle` must refer to a live builder and is treated as a unique mutable +/// reference for the duration of the call. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_SampleBuilder_timestamp( + mut handle: ProfileHandle, + timestamp: Timespec, +) -> ProfileStatus { + ProfileStatus::from(|| -> Result<(), ProfileError> { + let timestamp = SystemTime::from(timestamp); + let ffi_builder = unsafe { handle.as_inner_mut()? }; + ffi_builder.builder.set_timestamp(timestamp); + Ok(()) + }()) +} + +/// Build the sample, and insert it into the profile. Done as one operation to +/// avoid boxing and exposing the Sample to FFI, since it isn't FFI-safe. +/// +/// This will steal the contents of the sample builder. It is safe to drop the +/// sample builder afterward, but it isn't necessary if it succeeds. +/// Builds a sample from the builder and inserts it into `profile`. 
+/// +/// # Safety +/// +/// - `builder` must point to a valid `ProfileHandle`. +/// - After a successful build, the builder’s internal state is consumed and +/// must not be used unless rebuilt. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_SampleBuilder_finish( + builder: *mut ProfileHandle, +) -> ProfileStatus { + ProfileStatus::from(|| -> Result<(), ProfileError> { + let builder_handle = + builder.as_mut().ok_or(ProfileError::InvalidInput)?; + // todo: safety + let ffi_builder = unsafe { builder_handle.as_inner_mut()? }; + let sample = ffi_builder.builder.build()?; + // todo: safety + let prof = unsafe { ffi_builder.profile.as_inner_mut()? }; + prof.add_sample(sample) + }()) +} + +/// Free the resource associated with the sample builder handle. +/// +/// # Safety +/// +/// - If non-null, `builder` must point to a valid `ProfileHandle`. +/// - The underlying resource must be dropped at most once across all copies of +/// the handle. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_SampleBuilder_drop( + builder: *mut ProfileHandle, +) { + if let Some(h) = builder.as_mut() { + drop(h.take()); + } +} diff --git a/datadog-profiling-ffi/src/profiles/scratchpad.rs b/datadog-profiling-ffi/src/profiles/scratchpad.rs new file mode 100644 index 0000000000..e66f9e0b43 --- /dev/null +++ b/datadog-profiling-ffi/src/profiles/scratchpad.rs @@ -0,0 +1,419 @@ +// Copyright 2025-Present Datadog, Inc. 
https://www.datadoghq.com/ +// SPDX-License-Identifier: Apache-2.0 + +use crate::arc_handle::ArcHandle; +use crate::profiles::utf8::{insert_str, Utf8Option}; +use crate::profiles::{ensure_non_null_insert, ensure_non_null_out_parameter}; +use crate::{EmptyHandleError, ProfileStatus}; +use datadog_profiling::profiles::collections::StringId; +use datadog_profiling::profiles::datatypes::ProfilesDictionary; +use datadog_profiling::profiles::datatypes::{ + AttributeId, KeyValue, Link, LinkId, Location, LocationId, ScratchPad, + StackId, +}; +use datadog_profiling::profiles::string_writer::FallibleStringWriter; +use datadog_profiling::profiles::ProfileError; +use ddcommon_ffi::CharSlice; + +/// Allocates a new `ScratchPad` and returns a handle to it via the out +/// parameter `handle`. +/// +/// Use [`ddog_prof_ScratchPad_drop`] to free; see its docs for more details. +/// +/// # Safety +/// +/// - `handle` must be non-null and valid for writes of `ScratchPadHandle`. +/// - Don't make C copies to handles, use [`ddog_prof_ScratchPad_try_clone`] +/// to get another refcounted copy (e.g. for another thread). +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ScratchPad_new( + handle: *mut ArcHandle, +) -> ProfileStatus { + ensure_non_null_out_parameter!(handle); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let pad = ScratchPad::try_new()?; + let h = ArcHandle::new(pad)?; + unsafe { handle.write(h) }; + Ok(()) + }()) +} + +/// Creates a new handle to the same `ScratchPad` by incrementing the internal +/// reference count. +/// +/// # Safety +/// +/// - `out` must be non-null and valid for writes of `ScratchPadHandle`. +/// - `handle` must refer to a live `ScratchPad`. +/// - Do not duplicate handles via memcpy; always use this API to create new +/// handles so the reference count is maintained correctly. 
+#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ScratchPad_try_clone( + out: *mut ArcHandle, + handle: ArcHandle, +) -> ProfileStatus { + ensure_non_null_out_parameter!(out); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let cloned = handle.try_clone()?; + unsafe { out.write(cloned) }; + Ok(()) + }()) +} + +/// Decrements the refcount on the underlying `ScratchPad` resource held by +/// `handle` and leaves an empty handle. If the refcount hits zero, it will +/// be destroyed. +/// +/// # Safety +/// +/// - If non-null, `handle` must point to a valid `ScratchPadHandle`. +/// - Only drop properly created/cloned handles. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ScratchPad_drop( + handle: *mut ArcHandle, +) { + if let Some(h) = handle.as_mut() { + h.drop_resource(); + } +} + +/// Inserts a `Location` and returns its id. +/// +/// # Safety +/// +/// - `out_location_id` must be non-null and valid for writes of `LocationId`. +/// - `handle` must refer to a live `ScratchPad`. +/// - `location` must be non-null and valid for the duration of the call. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ScratchPad_insert_location( + out_location_id: *mut LocationId, + handle: ArcHandle, + location: *const Location, +) -> ProfileStatus { + ensure_non_null_out_parameter!(out_location_id); + ensure_non_null_insert!(location); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let pad = handle.as_inner()?; + let id = pad.locations().try_insert(unsafe { *location })?; + unsafe { out_location_id.write(id.into_raw()) }; + Ok(()) + }()) +} + +/// Interns a stack of `LocationId` and returns its `StackId`. +/// +/// # Safety +/// +/// - `out_stack_id` must be non-null and valid for writes of `StackId`. +/// - `handle` must refer to a live `ScratchPad`. +/// - `locations` must point to valid `LocationId`s obtained from the same +/// `ScratchPad` and be valid for the duration of the call. 
+#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ScratchPad_insert_stack( + out_stack_id: *mut StackId, + handle: ArcHandle, + locations: ddcommon_ffi::Slice<'_, LocationId>, +) -> ProfileStatus { + ensure_non_null_out_parameter!(out_stack_id); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let pad = handle.as_inner()?; + let slice = + locations.try_as_slice().map_err(ProfileError::from_thin_error)?; + // SAFETY: re-interpreting LocationId as SetId is safe as + // long as they were made from SetId::into_raw. + let ids = unsafe { + core::slice::from_raw_parts(slice.as_ptr().cast(), slice.len()) + }; + let stack_id = pad.stacks().try_insert(ids)?; + unsafe { out_stack_id.write(stack_id) }; + Ok(()) + }()) +} + +/// Inserts a `Link` and returns its id. +/// +/// # Safety +/// +/// - `out_link_id` must be non-null and valid for writes of `LinkId`. +/// - `handle` must refer to a live `ScratchPad`. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ScratchPad_insert_link( + out_link_id: *mut LinkId, + handle: ArcHandle, + link: Link, +) -> ProfileStatus { + ensure_non_null_out_parameter!(out_link_id); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let pad = handle.as_inner()?; + let id = pad.links().try_insert(link)?; + unsafe { out_link_id.write(id.into_raw()) }; + Ok(()) + }()) +} + +/// Inserts a string attribute key/value pair and returns its id. +/// +/// # Safety +/// +/// - `out_attr_id` must be non-null and valid for writes of `AttributeId`. +/// - `handle` must refer to a live `ScratchPad`. +/// - `key`/`value` must adhere to the UTF-8 policy expressed by `utf8_option`. 
+#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ScratchPad_insert_attribute_str( + out_attr_id: *mut AttributeId, + handle: ArcHandle, + dictionary: ArcHandle, + key: CharSlice<'_>, + value: CharSlice<'_>, + utf8_option: Utf8Option, +) -> ProfileStatus { + ensure_non_null_out_parameter!(out_attr_id); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let pad = handle.as_inner()?; + let key_str = utf8_option.try_as_bytes_convert(key)?; + if key_str.is_empty() { + return Err(ProfileError::InvalidInput); + } + + let value_str = utf8_option.try_as_bytes_convert(value)?; + + // Intern key string into the dictionary string table + let dict = dictionary.as_inner()?; + let key_id = dict.strings().try_insert(key_str.as_ref())?; + + let mut val_writer = FallibleStringWriter::new(); + val_writer.try_push_str(value_str.as_ref())?; + let val_owned = String::from(val_writer); + + let kv = KeyValue { + key: key_id, + value: datadog_profiling::profiles::datatypes::AnyValue::String( + val_owned, + ), + }; + let id = pad.attributes().try_insert(kv)?; + unsafe { out_attr_id.write(id.into_raw()) }; + Ok(()) + }()) +} + +/// Inserts an integer attribute and returns its id. +/// +/// # Safety +/// +/// - `out_attr_id` must be non-null and valid for writes of `AttributeId`. +/// - `handle` must refer to a live `ScratchPad`. +/// - `key` must adhere to the UTF-8 policy expressed by `utf8_option`. 
+#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ScratchPad_insert_attribute_int( + out_attr_id: *mut AttributeId, + handle: ArcHandle, + dictionary: ArcHandle, + key: CharSlice<'_>, + value: i64, + utf8_option: Utf8Option, +) -> ProfileStatus { + ensure_non_null_out_parameter!(out_attr_id); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let pad = handle.as_inner()?; + let key_str = utf8_option.try_as_bytes_convert(key)?; + if key_str.is_empty() { + return Err(ProfileError::InvalidInput); + } + // Intern key string into the dictionary string table + let dict = dictionary.as_inner()?; + let key_id = dict.strings().try_insert(key_str.as_ref())?; + let kv = KeyValue { + key: key_id, + value: datadog_profiling::profiles::datatypes::AnyValue::Integer( + value, + ), + }; + let id = pad.attributes().try_insert(kv)?; + unsafe { out_attr_id.write(id.into_raw()) }; + Ok(()) + }()) +} + +/// Registers a trace endpoint for a local root span id. Returns its `StringId`. +/// +/// # Safety +/// +/// - `out_string_id` must be non-null and valid for writes of `StringId`. +/// - `handle` must refer to a live `ScratchPad`. +/// - `endpoint` must adhere to the UTF-8 policy expressed by `utf8_option`. 
+#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ScratchPad_add_trace_endpoint( + out_string_id: *mut StringId, + handle: ArcHandle, + local_root_span_id: i64, + endpoint: CharSlice<'_>, + utf8_option: Utf8Option, +) -> ProfileStatus { + ensure_non_null_out_parameter!(out_string_id); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let pad = handle.as_inner()?; + // Use the same UTF-8 handling helpers as string insertion + let id = insert_str( + pad.endpoint_tracker().strings(), + endpoint, + utf8_option, + )?; + // Now register the mapping and counts + let _ = pad + .endpoint_tracker() + .add_trace_endpoint(local_root_span_id, unsafe { + pad.endpoint_tracker().strings().get(id) + })?; + unsafe { out_string_id.write(id) }; + Ok(()) + }()) +} + +/// Adds a count to an existing endpoint id. +/// +/// # Safety +/// +/// - `handle` must refer to a live `ScratchPad`. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ScratchPad_add_endpoint_count( + handle: ArcHandle, + endpoint_id: StringId, + count: usize, +) -> ProfileStatus { + ProfileStatus::from(|| -> Result<(), ProfileError> { + let pad = handle.as_inner()?; + pad.endpoint_tracker().add_endpoint_count(endpoint_id, count) + }()) +} + +/// Registers a trace endpoint and adds an initial count; returns its id. +/// +/// # Safety +/// +/// - `out_string_id` must be non-null and valid for writes of `StringId`. +/// - `handle` must refer to a live `ScratchPad`. +/// - `endpoint` must adhere to the UTF-8 policy expressed by `utf8_option`. 
+#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ScratchPad_add_trace_endpoint_with_count( + out_string_id: *mut StringId, + handle: ArcHandle, + local_root_span_id: i64, + endpoint: CharSlice<'_>, + utf8_option: Utf8Option, + count: usize, +) -> ProfileStatus { + ensure_non_null_out_parameter!(out_string_id); + ProfileStatus::from(|| -> Result<(), ProfileError> { + let pad = handle.as_inner()?; + let endpoint_str = utf8_option.try_as_bytes_convert(endpoint)?; + let id = pad.endpoint_tracker().add_trace_endpoint_with_count( + local_root_span_id, + endpoint_str.as_ref(), + count, + )?; + unsafe { out_string_id.write(id) }; + Ok(()) + }()) +} + +#[derive(thiserror::Error, Debug)] +#[error("trace endpoint not found for local root span id 0x{0:X}")] +struct EndpointNotFound(u64); + +/// Returns the endpoint string for `local_root_span_id` if present. +/// +/// # Safety +/// +/// - `result` must be non-null and valid for writes of `CharSlice<'static>`. +/// - The returned slice borrows from the scratchpad’s internal string table; +/// the caller must ensure the scratchpad outlives any use of `*result`. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_ScratchPad_get_trace_endpoint_str( + result: *mut CharSlice<'static>, + handle: ArcHandle, + local_root_span_id: i64, +) -> ProfileStatus { + ensure_non_null_out_parameter!(result); + let Ok(pad) = handle.as_inner() else { + return ProfileStatus::from_ffi_safe_error_message(EmptyHandleError); + }; + if let Some(s) = + pad.endpoint_tracker().get_trace_endpoint_str(local_root_span_id) + { + // SAFETY: the lifetime is _not_ safe, it's not static! It's tied to + // the underlying string set (owned by the ScratchPad). It's up to the + // FFI to use responsibly. 
+ let slice = unsafe { + std::mem::transmute::, CharSlice<'static>>( + CharSlice::from(s), + ) + }; + unsafe { result.write(slice) }; + ProfileStatus::OK + } else { + ProfileStatus::from_error(EndpointNotFound(local_root_span_id as u64)) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::ddog_prof_Status_drop; + use ddcommon_ffi::slice::AsBytes; + use std::ffi::CStr; + + #[test] + fn get_endpoint_str_not_found_has_message() { + unsafe { + let mut handle = ArcHandle::::default(); + Result::from(ddog_prof_ScratchPad_new(&mut handle)).unwrap(); + + let mut out = CharSlice::empty(); + let mut status = ddog_prof_ScratchPad_get_trace_endpoint_str( + &mut out, + handle, + u64::MAX as i64, + ); + + let cstr: &CStr = + (&status).try_into().expect("expected error status"); + let msg = cstr.to_string_lossy(); + assert_eq!( + msg.as_ref(), + "trace endpoint not found for local root span id 0xFFFFFFFFFFFFFFFF" + ); + + ddog_prof_ScratchPad_drop(&mut handle); + ddog_prof_Status_drop(&mut status); + } + } + + #[test] + fn add_and_get_endpoint_str_ok() { + unsafe { + let mut handle = ArcHandle::::default(); + Result::from(ddog_prof_ScratchPad_new(&mut handle)).unwrap(); + + let mut str_id = StringId::default(); + let ep = CharSlice::from("/users/{id}"); + let status = ddog_prof_ScratchPad_add_trace_endpoint( + &mut str_id, + handle, + 0x1234, + ep, + Utf8Option::Validate, + ); + Result::from(status).unwrap(); + + let mut out = CharSlice::empty(); + let status = ddog_prof_ScratchPad_get_trace_endpoint_str( + &mut out, handle, 0x1234, + ); + Result::from(status).unwrap(); + assert_eq!(out.try_to_utf8().unwrap(), "/users/{id}"); + + ddog_prof_ScratchPad_drop(&mut handle); + } + } +} diff --git a/datadog-profiling-ffi/src/profiles/utf8.rs b/datadog-profiling-ffi/src/profiles/utf8.rs new file mode 100644 index 0000000000..d506b56e61 --- /dev/null +++ b/datadog-profiling-ffi/src/profiles/utf8.rs @@ -0,0 +1,171 @@ +// Copyright 2025-Present Datadog, Inc. 
https://www.datadoghq.com/ +// SPDX-License-Identifier: Apache-2.0 + +use datadog_profiling::profiles::collections::{ParallelStringSet, StringId}; +use datadog_profiling::profiles::ProfileError; +use ddcommon::error::FfiSafeErrorMessage; +use ddcommon_ffi::slice::{AsBytes, CharSlice, SliceConversionError}; +use std::borrow::Cow; +use std::collections::TryReserveError; +use std::ffi::CStr; +use std::str::Utf8Error; + +#[repr(C)] +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +pub enum Utf8Option { + /// The string is assumed to be valid UTF-8. If it's not, the behavior + /// is undefined. + Assume, + /// The string is converted to UTF-8 using lossy conversion. + ConvertLossy, + /// The string is validated to be UTF-8. If it's not, an error is + /// returned. + Validate, +} + +pub enum Utf8ConversionError { + OutOfMemory(TryReserveError), + SliceConversionError(SliceConversionError), + Utf8Error(Utf8Error), +} + +impl From for Utf8ConversionError { + fn from(e: TryReserveError) -> Self { + Self::OutOfMemory(e) + } +} + +impl From for Utf8ConversionError { + fn from(e: SliceConversionError) -> Self { + Self::SliceConversionError(e) + } +} + +impl From for Utf8ConversionError { + fn from(e: Utf8Error) -> Self { + Self::Utf8Error(e) + } +} + +impl From for ProfileError { + fn from(err: Utf8ConversionError) -> ProfileError { + match err { + Utf8ConversionError::OutOfMemory(_) => ProfileError::OutOfMemory, + Utf8ConversionError::SliceConversionError(_) => { + ProfileError::InvalidInput + } + Utf8ConversionError::Utf8Error(_) => ProfileError::InvalidInput, + } + } +} + +// SAFETY: all cases are c-str literals, or delegate to the same trait. 
+unsafe impl FfiSafeErrorMessage for Utf8ConversionError { + fn as_ffi_str(&self) -> &'static CStr { + match self { + Utf8ConversionError::OutOfMemory(_) => { + c"out of memory: utf8 conversion failed" + } + Utf8ConversionError::SliceConversionError(err) => err.as_ffi_str(), + Utf8ConversionError::Utf8Error(_) => { + c"invalid input: string was not utf-8" + } + } + } +} + +impl Utf8Option { + /// Converts a byte slice to a UTF-8 string according to the option. + /// - Assume: Borrow without validation (caller guarantees UTF-8) + /// - ConvertLossy: Lossy conversion with fallible allocation + /// - Validate: Validate and borrow on success + /// + /// # Safety + /// + /// When [`Utf8Option::Assume`] is passed, it must be valid UTF-8. + pub unsafe fn convert( + self, + bytes: &[u8], + ) -> Result, Utf8ConversionError> { + // SAFETY: caller asserts validity under Assume + Ok(match self { + Utf8Option::Assume => { + Cow::Borrowed(unsafe { std::str::from_utf8_unchecked(bytes) }) + } + Utf8Option::ConvertLossy => try_from_utf8_lossy(bytes)?, + Utf8Option::Validate => Cow::Borrowed(std::str::from_utf8(bytes)?), + }) + } + + /// # Safety + /// See the safety conditions on [`AsBytes::try_as_bytes`] and also + /// [`Utf8Option::convert`]; both must be upheld. + pub unsafe fn try_as_bytes_convert<'a, T: AsBytes<'a>>( + self, + t: T, + ) -> Result, Utf8ConversionError> { + let bytes = t.try_as_bytes()?; + self.convert(bytes) + } +} + +/// Tries to convert a slice of bytes to a string. The input may have invalid +/// characters. +/// +/// This is the same implementation as [`String::from_utf8_lossy`] except that +/// this uses fallible allocations. 
+pub fn try_from_utf8_lossy(v: &[u8]) -> Result, TryReserveError> { + let mut iter = v.utf8_chunks(); + + let first_valid = if let Some(chunk) = iter.next() { + let valid = chunk.valid(); + if chunk.invalid().is_empty() { + debug_assert_eq!(valid.len(), v.len()); + return Ok(Cow::Borrowed(valid)); + } + valid + } else { + return Ok(Cow::Borrowed("")); + }; + + const REPLACEMENT: &str = "\u{FFFD}"; + const REPLACEMENT_LEN: usize = REPLACEMENT.len(); + + let mut res = String::new(); + res.try_reserve(v.len())?; + res.push_str(first_valid); + res.try_reserve(REPLACEMENT_LEN)?; + res.push_str(REPLACEMENT); + + for chunk in iter { + let valid = chunk.valid(); + res.try_reserve(valid.len())?; + res.push_str(valid); + if !chunk.invalid().is_empty() { + res.try_reserve(REPLACEMENT_LEN)?; + res.push_str(REPLACEMENT); + } + } + + Ok(Cow::Owned(res)) +} + +pub fn insert_str( + set: &ParallelStringSet, + str: CharSlice<'_>, + utf8_options: Utf8Option, +) -> Result { + let bytes = str.try_as_bytes().map_err(ProfileError::from_thin_error)?; + let string = match utf8_options { + Utf8Option::Assume => { + // SAFETY: the caller is asserting the data is valid UTF-8. + Cow::Borrowed(unsafe { std::str::from_utf8_unchecked(bytes) }) + } + Utf8Option::ConvertLossy => try_from_utf8_lossy(bytes)?, + Utf8Option::Validate => Cow::Borrowed( + std::str::from_utf8(bytes) + .map_err(|_| ProfileError::InvalidInput)?, + ), + }; + Ok(set.try_insert(string.as_ref())?) +} diff --git a/datadog-profiling-ffi/src/status.rs b/datadog-profiling-ffi/src/status.rs new file mode 100644 index 0000000000..c7629d98bf --- /dev/null +++ b/datadog-profiling-ffi/src/status.rs @@ -0,0 +1,256 @@ +// Copyright 2025-Present Datadog, Inc. 
https://www.datadoghq.com/ +// SPDX-License-Identifier: Apache-2.0 + +use allocator_api2::alloc::{AllocError, Allocator, Global, Layout}; +use datadog_profiling::profiles::FallibleStringWriter; +use std::borrow::Cow; +use std::ffi::{c_char, CStr, CString}; +use std::hint::unreachable_unchecked; +use std::mem::ManuallyDrop; +use std::ptr::{null, NonNull}; + +const FLAG_OK: usize = 0b00; +const FLAG_STATIC: usize = 0b01; +const FLAG_ALLOCATED: usize = 0b11; + +const MASK_IS_ERROR: usize = 0b01; +const MASK_IS_ALLOCATED: usize = 0b10; +const MASK_UNUSED: usize = !(MASK_IS_ERROR | MASK_IS_ALLOCATED); + +/// Represents the result of an operation that either succeeds with no value, +/// or fails with an error message. This is like `Result<(), Cow<'static, CStr>>` except +/// its representation is smaller, and is FFI-stable. +/// +/// The OK status is guaranteed to have a representation of `{ 0, null }`. +#[repr(C)] +#[derive(Debug)] +pub struct ProfileStatus { + /// 0 means okay, everything else is opaque in C. + /// In Rust, the bits help us know whether it is heap allocated or not. + pub flags: libc::size_t, + /// If not null, this is a pointer to a valid null-terminated string in + /// UTF-8 encoding. + /// This is null if `flags` == 0.
+ pub err: *const c_char, +} + +impl Default for ProfileStatus { + fn default() -> Self { + Self { flags: 0, err: null() } + } +} + +unsafe impl Send for ProfileStatus {} +unsafe impl Sync for ProfileStatus {} + +impl From> for ProfileStatus { + fn from(result: Result<(), E>) -> Self { + match result { + Ok(_) => ProfileStatus::OK, + Err(err) => ProfileStatus::from_error(err), + } + } +} + +impl From<&'static CStr> for ProfileStatus { + fn from(value: &'static CStr) -> Self { + Self { flags: FLAG_STATIC, err: value.as_ptr() } + } +} + +impl From for ProfileStatus { + fn from(cstring: CString) -> Self { + Self { flags: FLAG_ALLOCATED, err: cstring.into_raw() } + } +} + +impl TryFrom for CString { + type Error = usize; + + fn try_from(status: ProfileStatus) -> Result { + if status.flags == FLAG_ALLOCATED { + Ok(unsafe { CString::from_raw(status.err.cast_mut()) }) + } else { + Err(status.flags) + } + } +} + +impl TryFrom<&ProfileStatus> for &CStr { + type Error = usize; + + fn try_from(status: &ProfileStatus) -> Result { + if status.flags != FLAG_OK { + Ok(unsafe { CStr::from_ptr(status.err.cast_mut()) }) + } else { + Err(status.flags) + } + } +} + +impl From for Result<(), Cow<'static, CStr>> { + fn from(status: ProfileStatus) -> Self { + let flags = status.flags; + let is_error = (flags & MASK_IS_ERROR) != 0; + let is_allocated = (flags & MASK_IS_ALLOCATED) != 0; + #[allow(clippy::panic)] + if cfg!(debug_assertions) && (status.flags & MASK_UNUSED) != 0 { + panic!("invalid bit pattern: {flags:b}"); + } + match (is_allocated, is_error) { + (false, false) => Ok(()), + (false, true) => { + Err(Cow::Borrowed(unsafe { CStr::from_ptr(status.err) })) + } + (true, true) => Err(Cow::Owned(unsafe { + CString::from_raw(status.err.cast_mut()) + })), + (true, false) => { + #[allow(clippy::panic)] + if cfg!(debug_assertions) { + panic!("invalid bit pattern: {flags:b}"); + } + unsafe { unreachable_unchecked() } + } + } + } +} + +impl From<()> for ProfileStatus { + fn from(_: ()) -> 
Self { + Self::OK + } +} + +/// Tries to shrink a vec to exactly fit its length. +/// On success, the vector's capacity equals its length. +/// Returns an allocation error if the allocator cannot shrink. +fn vec_try_shrink_to_fit(vec: &mut Vec) -> Result<(), AllocError> { + let len = vec.len(); + if vec.capacity() == len || core::mem::size_of::() == 0 { + return Ok(()); + } + + // Take ownership temporarily to manipulate raw parts; put an empty vec + // in its place. + let mut md = ManuallyDrop::new(core::mem::take(vec)); + + // Avoid len=0 case for allocators by dropping the allocation and replacing + // it with a new empty vec. + if len == 0 { + // SAFETY: we have exclusive access, and we're not exposing the zombie + // bits to safe code since we're just returning (original vec was + // replaced by an empty vec). + unsafe { ManuallyDrop::drop(&mut md) }; + return Ok(()); + } + + let ptr = md.as_mut_ptr(); + let cap = md.capacity(); + + // SAFETY: Vec invariants ensure `cap >= len`, and capacity/len fit isize. + let old_layout = unsafe { Layout::array::(cap).unwrap_unchecked() }; + let new_layout = unsafe { Layout::array::(len).unwrap_unchecked() }; + + // SAFETY: `ptr` is non-null and properly aligned for T (Vec invariant). + let old_ptr_u8 = unsafe { NonNull::new_unchecked(ptr.cast::()) }; + + match unsafe { Global.shrink(old_ptr_u8, old_layout, new_layout) } { + Ok(new_ptr_u8) => { + let new_ptr = new_ptr_u8.as_ptr().cast::(); + // SAFETY: new allocation valid for len Ts; capacity == len. + let new_vec = unsafe { Vec::from_raw_parts(new_ptr, len, len) }; + *vec = new_vec; + Ok(()) + } + Err(_) => { + // Reconstruct original and put it back; report OOM. + let orig = unsafe { Vec::from_raw_parts(ptr, len, cap) }; + *vec = orig; + Err(AllocError) + } + } +} + +fn string_try_shrink_to_fit(string: &mut String) -> Result<(), AllocError> { + // Take ownership to get access to the backing Vec. 
+ let mut bytes = core::mem::take(string).into_bytes(); + let res = vec_try_shrink_to_fit(&mut bytes); + // SAFETY: bytes came from a valid UTF-8 String and were not mutated. + *string = unsafe { String::from_utf8_unchecked(bytes) }; + res +} + +impl ProfileStatus { + pub const OK: ProfileStatus = ProfileStatus { flags: FLAG_OK, err: null() }; + + const OUT_OF_MEMORY: ProfileStatus = ProfileStatus { + flags: FLAG_STATIC, + err: c"out of memory while trying to display error".as_ptr(), + }; + const NULL_BYTE_IN_ERROR_MESSAGE: ProfileStatus = ProfileStatus { + flags: FLAG_STATIC, + err: c"another error occured, but cannot be displayed because it has interior null bytes".as_ptr(), + }; + + pub fn from_ffi_safe_error_message< + E: ddcommon::error::FfiSafeErrorMessage, + >( + err: E, + ) -> Self { + ProfileStatus::from(err.as_ffi_str()) + } + + pub fn from_error(err: E) -> Self { + use core::fmt::Write; + let mut writer = FallibleStringWriter::new(); + if write!(writer, "{}", err).is_err() { + return ProfileStatus::OUT_OF_MEMORY; + } + + let mut str = String::from(writer); + + // std doesn't expose memchr even though it has it, but fortunately + // libc has it, and we use the libc crate already in FFI. + let pos = unsafe { libc::memchr(str.as_ptr().cast(), 0, str.len()) }; + if !pos.is_null() { + return ProfileStatus::NULL_BYTE_IN_ERROR_MESSAGE; + } + + // Reserve memory exactly. We have to shrink later in order to turn + // it into a box, so we don't want any excess capacity. + if str.try_reserve_exact(1).is_err() { + return ProfileStatus::OUT_OF_MEMORY; + } + str.push('\0'); + + if string_try_shrink_to_fit(&mut str).is_err() { + return ProfileStatus::OUT_OF_MEMORY; + } + + // Pop the null off because CString::from_vec_unchecked adds one. 
+ _ = str.pop(); + + // And finally, this is why we went through the pain of + // string_try_shrink_to_fit: this method will call shrink_to_fit, so + // to avoid an allocation failure here, we had to make a String with + // no excess capacity. + let cstring = unsafe { CString::from_vec_unchecked(str.into_bytes()) }; + ProfileStatus::from(cstring) + } +} + +/// Frees any error associated with the status, and replaces it with an OK. +/// +/// # Safety +/// +/// The pointer should point at a valid Status object, if it's not null. +#[no_mangle] +pub unsafe extern "C" fn ddog_prof_Status_drop(status: *mut ProfileStatus) { + if status.is_null() { + return; + } + // SAFETY: safe when the user respects ddog_prof_Status_drop's conditions. + let status = unsafe { core::ptr::replace(status, ProfileStatus::OK) }; + drop(Result::from(status)); +} diff --git a/datadog-profiling-ffi/src/string_storage.rs b/datadog-profiling-ffi/src/string_storage.rs deleted file mode 100644 index b57ea4ef25..0000000000 --- a/datadog-profiling-ffi/src/string_storage.rs +++ /dev/null @@ -1,326 +0,0 @@ -// Copyright 2023-Present Datadog, Inc. https://www.datadoghq.com/ -// SPDX-License-Identifier: Apache-2.0 - -use anyhow::Context; -use datadog_profiling::api::ManagedStringId; -use datadog_profiling::collections::string_storage::ManagedStringStorage as InternalManagedStringStorage; -use ddcommon_ffi::slice::AsBytes; -use ddcommon_ffi::{CharSlice, Error, MaybeError, Slice, StringWrapperResult}; -use libc::c_void; -use std::mem::MaybeUninit; -use std::num::NonZeroU32; -use std::sync::Arc; -use std::sync::Mutex; - -// A note about this being Copy: -// We're writing code for C with C semantics but with Rust restrictions still -// around. In terms of C, this is just a pointer with some unknown lifetime -// that is the programmer's job to handle. -// Normally, Rust is taking care of that lifetime for us. 
Because we need to -// uncouple this so that lifetimes can bridge C and Rust, the lifetime of the -// object isn't managed in Rust, but in the sequence of API calls. -#[derive(Copy, Clone)] -#[repr(C)] -pub struct ManagedStringStorage { - // This may be null, but if not it will point to a valid InternalManagedStringStorage, - // wrapped as needed for correct concurrency. This type is made opaque for cbindgen. - inner: *const c_void, -} - -#[allow(dead_code)] -#[repr(C)] -pub enum ManagedStringStorageNewResult { - Ok(ManagedStringStorage), - #[allow(dead_code)] - Err(Error), -} - -#[no_mangle] -#[must_use] -pub extern "C" fn ddog_prof_ManagedStringStorage_new() -> ManagedStringStorageNewResult { - let storage = InternalManagedStringStorage::new(); - - ManagedStringStorageNewResult::Ok(ManagedStringStorage { - inner: Arc::into_raw(Arc::new(Mutex::new(storage))) as *const c_void, - }) -} - -#[no_mangle] -/// TODO: @ivoanjo Should this take a `*mut ManagedStringStorage` like Profile APIs do? -pub unsafe extern "C" fn ddog_prof_ManagedStringStorage_drop(storage: ManagedStringStorage) { - if let Ok(storage) = get_inner_string_storage(storage, false) { - drop(storage); - } -} - -#[repr(C)] -#[allow(dead_code)] -pub enum ManagedStringStorageInternResult { - Ok(ManagedStringId), - Err(Error), -} - -#[must_use] -#[no_mangle] -/// TODO: @ivoanjo Should this take a `*mut ManagedStringStorage` like Profile APIs do? -pub unsafe extern "C" fn ddog_prof_ManagedStringStorage_intern( - storage: ManagedStringStorage, - string: CharSlice, -) -> ManagedStringStorageInternResult { - // Empty strings always get assigned id 0, no need to check. - if string.is_empty() { - return anyhow::Ok(ManagedStringId::empty()).into(); - } - - (|| { - let storage = get_inner_string_storage(storage, true)?; - - let string_id = storage - .lock() - .map_err(|_| anyhow::anyhow!("string storage lock was poisoned"))? 
- .intern(string.try_to_utf8()?)?; - - anyhow::Ok(ManagedStringId::new(string_id)) - })() - .context("ddog_prof_ManagedStringStorage_intern failed") - .into() -} - -/// Interns all the strings in `strings`, writing the resulting id to the same -/// offset in `output_ids`. -/// -/// This can fail if: -/// 1. The given `output_ids_size` doesn't match the size of the input slice. -/// 2. The internal storage pointer is null. -/// 3. It fails to acquire a lock (e.g. it was poisoned). -/// 4. Defensive checks against bugs fail. -/// -/// If a failure occurs, do not use any of the ids in the output array. After -/// this point, you should only use read-only routines (except for drop) on -/// the managed string storage. -#[no_mangle] -/// TODO: @ivoanjo Should this take a `*mut ManagedStringStorage` like Profile APIs do? -pub unsafe extern "C" fn ddog_prof_ManagedStringStorage_intern_all( - storage: ManagedStringStorage, - strings: Slice, - output_ids: *mut MaybeUninit, - output_ids_size: usize, -) -> MaybeError { - let result = (|| { - if strings.len() != output_ids_size { - anyhow::bail!("input and output arrays have different sizes") - } - - let storage = get_inner_string_storage(storage, true)?; - - let mut write_locked_storage = storage - .lock() - .map_err(|_| anyhow::anyhow!("string storage lock was poisoned"))?; - - let output_slice = core::slice::from_raw_parts_mut(output_ids, output_ids_size); - - for (output_id, input_str) in output_slice.iter_mut().zip(strings.iter()) { - let string_id = if input_str.is_empty() { - ManagedStringId::empty() - } else { - ManagedStringId::new(write_locked_storage.intern(input_str.try_to_utf8()?)?) - }; - output_id.write(string_id); - } - - anyhow::Ok(()) - })() - .context("ddog_prof_ManagedStringStorage_intern failed"); - - match result { - Ok(_) => MaybeError::None, - Err(e) => MaybeError::Some(e.into()), - } -} - -#[no_mangle] -/// TODO: @ivoanjo Should this take a `*mut ManagedStringStorage` like Profile APIs do? 
-pub unsafe extern "C" fn ddog_prof_ManagedStringStorage_unintern( - storage: ManagedStringStorage, - id: ManagedStringId, -) -> MaybeError { - let Some(non_empty_string_id) = NonZeroU32::new(id.value) else { - return MaybeError::None; // Empty string, nothing to do - }; - - let result = (|| { - let storage = get_inner_string_storage(storage, true)?; - - let mut write_locked_storage = storage - .lock() - .map_err(|_| anyhow::anyhow!("string storage lock was poisoned"))?; - - write_locked_storage.unintern(non_empty_string_id) - })() - .context("ddog_prof_ManagedStringStorage_unintern failed"); - - match result { - Ok(_) => MaybeError::None, - Err(e) => MaybeError::Some(e.into()), - } -} - -#[no_mangle] -/// TODO: @ivoanjo Should this take a `*mut ManagedStringStorage` like Profile APIs do? -pub unsafe extern "C" fn ddog_prof_ManagedStringStorage_unintern_all( - storage: ManagedStringStorage, - ids: Slice, -) -> MaybeError { - let result = (|| { - let storage = get_inner_string_storage(storage, true)?; - - let mut write_locked_storage = storage - .lock() - .map_err(|_| anyhow::anyhow!("string storage lock was poisoned"))?; - - for non_empty_string_id in ids.iter().filter_map(|id| NonZeroU32::new(id.value)) { - write_locked_storage.unintern(non_empty_string_id)?; - } - - anyhow::Ok(()) - })() - .context("ddog_prof_ManagedStringStorage_unintern failed"); - - match result { - Ok(_) => MaybeError::None, - Err(e) => MaybeError::Some(e.into()), - } -} - -#[must_use] -#[no_mangle] -/// Returns a string given its id. -/// This API is mostly for testing, overall you should avoid reading back strings from libdatadog -/// once they've been interned and should instead always operate on the id. -/// Remember to `ddog_StringWrapper_drop` the string once you're done with it. -/// TODO: @ivoanjo Should this take a `*mut ManagedStringStorage` like Profile APIs do? 
-pub unsafe extern "C" fn ddog_prof_ManagedStringStorage_get_string( - storage: ManagedStringStorage, - id: ManagedStringId, -) -> StringWrapperResult { - (|| { - let storage = get_inner_string_storage(storage, true)?; - let string: String = (*storage - .lock() - .map_err(|_| { - anyhow::anyhow!("acquisition of read lock on string storage should succeed") - })? - .get_string(id.value)?) - .to_owned(); - - anyhow::Ok(string) - })() - .context("ddog_prof_ManagedStringStorage_get_string failed") - .into() -} - -#[no_mangle] -/// TODO: @ivoanjo Should this take a `*mut ManagedStringStorage` like Profile APIs do? -pub unsafe extern "C" fn ddog_prof_ManagedStringStorage_advance_gen( - storage: ManagedStringStorage, -) -> MaybeError { - let result = (|| { - let storage = get_inner_string_storage(storage, true)?; - - storage - .lock() - .map_err(|_| anyhow::anyhow!("string storage lock was poisoned"))? - .advance_gen(); - - anyhow::Ok(()) - })() - .context("ddog_prof_ManagedStringStorage_advance_gen failed"); - - match result { - Ok(_) => MaybeError::None, - Err(e) => MaybeError::Some(e.into()), - } -} - -pub unsafe fn get_inner_string_storage( - storage: ManagedStringStorage, - // This should be `true` in every case EXCEPT when implementing `drop`, which uses `false`. - // (E.g. we use this flag to know if we need to increment the refcount for the copy we create - // or not). - for_use: bool, -) -> anyhow::Result>> { - if storage.inner.is_null() { - anyhow::bail!("storage inner pointer is null"); - } - - let storage_ptr = storage.inner; - - if for_use { - // By incrementing strong count here we ensure that the returned Arc represents a "clone" of - // the original and will thus not trigger a drop of the underlying data when out of - // scope. NOTE: We can't simply do Arc::from_raw(storage_ptr).clone() because when we - // return, the Arc created through `Arc::from_raw` would go out of scope and decrement - // strong count. 
- Arc::increment_strong_count(storage_ptr); - } - Ok(Arc::from_raw( - storage_ptr as *const Mutex, - )) -} - -impl From> for ManagedStringStorageInternResult { - fn from(value: anyhow::Result) -> Self { - match value { - Ok(v) => Self::Ok(v), - Err(err) => Self::Err(err.into()), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_string_storage() { - let storage = match ddog_prof_ManagedStringStorage_new() { - ManagedStringStorageNewResult::Ok(ok) => ok, - ManagedStringStorageNewResult::Err(err) => panic!("{err}"), - }; - let string_rs = [ - CharSlice::from("I'm running out of time."), - CharSlice::from("My zoom meeting ends in 2 minutes."), - ]; - let strings = Slice::new(&string_rs); - - // We're going to intern the same group of strings twice to make sure - // that we get the same ids. - let mut ids_rs1 = [ManagedStringId::empty(); 2]; - let ids1 = ids_rs1.as_mut_ptr(); - let result = unsafe { - ddog_prof_ManagedStringStorage_intern_all(storage, strings, ids1.cast(), strings.len()) - }; - if let MaybeError::Some(err) = result { - panic!("{err}"); - } - - let mut ids_rs2 = [ManagedStringId::empty(); 2]; - let ids2 = ids_rs2.as_mut_ptr(); - let result = unsafe { - ddog_prof_ManagedStringStorage_intern_all(storage, strings, ids2.cast(), strings.len()) - }; - if let MaybeError::Some(err) = result { - panic!("{err}"); - } - - // Check the ids match and aren't zero. 
- { - assert_eq!(ids_rs1, ids_rs2); - for id in ids_rs1 { - assert_ne!(id.value, 0); - } - } - - unsafe { ddog_prof_ManagedStringStorage_drop(storage) } - } -} diff --git a/datadog-profiling-protobuf/src/label.rs b/datadog-profiling-protobuf/src/label.rs index 04b1ad7f67..4f42f501cf 100644 --- a/datadog-profiling-protobuf/src/label.rs +++ b/datadog-profiling-protobuf/src/label.rs @@ -2,10 +2,12 @@ // SPDX-License-Identifier: Apache-2.0 use super::{Record, StringOffset, Value, WireType, OPT_ZERO}; +use std::hash::Hash; use std::io::{self, Write}; /// A label includes additional context for this sample. It can include things /// like a thread id, allocation size, etc. +/// This repr omits `num_unit` to save 8 bytes (4 from padding). #[repr(C)] #[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Hash)] #[cfg_attr(feature = "bolero", derive(bolero::generator::TypeGenerator))] @@ -16,14 +18,6 @@ pub struct Label { pub str: Record, /// At most, one of the str and num should be used. pub num: Record, - - // todo: if we don't use num_unit, then we can save 8 bytes--4 from - // num_unit plus 4 from padding. - /// Should only be present when num is present. - /// Specifies the units of num. - /// Use arbitrary string (for example, "requests") as a custom count unit. - /// If no unit is specified, consumer may apply heuristic to deduce it. - pub num_unit: Record, } /// # Safety @@ -32,17 +26,13 @@ unsafe impl Value for Label { const WIRE_TYPE: WireType = WireType::LengthDelimited; fn proto_len(&self) -> u64 { - self.key.proto_len() - + self.str.proto_len() - + self.num.proto_len() - + self.num_unit.proto_len() + self.key.proto_len() + self.str.proto_len() + self.num.proto_len() } fn encode(&self, writer: &mut W) -> io::Result<()> { self.key.encode(writer)?; self.str.encode(writer)?; - self.num.encode(writer)?; - self.num_unit.encode(writer) + self.num.encode(writer) } } @@ -56,13 +46,10 @@ impl From