diff --git a/.gitignore b/.gitignore
index 7d268e51..9ed5b1a7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -38,3 +38,7 @@
 __pycache__/
 build/
 bld
+
+# LLM Implementation
+*_api_key.txt
+*_chat_history.txt
\ No newline at end of file
diff --git a/CMakeLists.txt b/CMakeLists.txt
index d75628ff..ef82619d 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -205,6 +205,13 @@ set(XEUS_CPP_SRC
     src/xutils.cpp
 )
 
+if(NOT EMSCRIPTEN)
+    list(APPEND XEUS_CPP_SRC
+        src/xmagics/xassist.hpp
+        src/xmagics/xassist.cpp
+    )
+endif()
+
 if(EMSCRIPTEN)
     list(APPEND XEUS_CPP_SRC src/xinterpreter_wasm.cpp)
 endif()
@@ -309,9 +316,41 @@ macro(xeus_cpp_create_target target_name linkage output_name)
     else ()
         set(XEUS_CPP_XEUS_TARGET xeus-static)
     endif ()
+
+    # This is a workaround for the libcurl target issue on Windows, specifically for xassist
+    if (WIN32)
+        # Set the MSVC runtime library
+        set(CMAKE_MSVC_RUNTIME_LIBRARY "MultiThreaded$<$<CONFIG:Debug>:Debug>DLL")
+
+        # Find libcurl
+        find_package(CURL REQUIRED)
+
+        # Add CURL_STATICLIB definition if linking statically
+        if (CURL_STATICLIB)
+            target_compile_definitions(${target_name} PUBLIC CURL_STATICLIB)
+        endif()
 
-    target_link_libraries(${target_name} PUBLIC ${XEUS_CPP_XEUS_TARGET} clangCppInterOp pugixml argparse::argparse)
+        # Link against the correct libcurl target
+        if (CURL_FOUND)
+            target_include_directories(${target_name} PRIVATE ${CURL_INCLUDE_DIRS})
+            target_link_libraries(${target_name} PRIVATE ${CURL_LIBRARIES})
+        endif()
+        # Existing target_link_libraries call, adjusted for clarity
+        target_link_libraries(${target_name} PUBLIC ${XEUS_CPP_XEUS_TARGET} clangCppInterOp pugixml argparse::argparse)
+
+        # Ensure all linked libraries use the same runtime library
+        if (MSVC)
+            target_compile_options(${target_name} PRIVATE "/MD$<$<CONFIG:Debug>:d>")
+        endif()
+    elseif (NOT EMSCRIPTEN)
+        # curl is linked here specifically for xassist
+        target_link_libraries(${target_name} PUBLIC ${XEUS_CPP_XEUS_TARGET} clangCppInterOp pugixml argparse::argparse curl)
+    else ()
+        # TODO: Add curl support for emscripten
+        target_link_libraries(${target_name} PUBLIC ${XEUS_CPP_XEUS_TARGET} clangCppInterOp pugixml argparse::argparse)
+    endif()
+
     if (WIN32 OR CYGWIN)
 
 # elseif (APPLE)
diff --git a/docs/source/gemini.png b/docs/source/gemini.png
new file mode 100644
index 00000000..f99f86d9
Binary files /dev/null and b/docs/source/gemini.png differ
diff --git a/docs/source/index.rst b/docs/source/index.rst
index 3a7ee4b1..6d59becc 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -34,6 +34,7 @@ The Xeus-Cpp is a Jupyter kernel for the C++ programming language
    InstallationAndUsage
    UsingXeus-Cpp
    tutorials
+   magics
    dev-build-options
    debug
    FAQ
diff --git a/docs/source/magics.rst b/docs/source/magics.rst
new file mode 100644
index 00000000..d8e64274
--- /dev/null
+++ b/docs/source/magics.rst
@@ -0,0 +1,41 @@
+Magic commands
+--------------------
+
+Magics are special commands for the kernel that are not part of the C++
+programming language.
+
+They are defined with the symbol ``%`` for a line magic and ``%%`` for a cell
+magic.
+
+Here are the magics available in xeus-cpp.
+
+%%xassist
+========================
+
+Leverage large language models to assist in your development process. The currently supported models are Gemini (gemini-1.5-flash) and OpenAI (gpt-3.5-turbo-16k).
+
+- Save the API key
+
+.. code::
+
+    %%xassist model --save-key
+    key
+
+- Use the model
+
+.. code::
+
+    %%xassist model
+    prompt
+
+- Reset the model and clear chat history
+
+.. code::
+
+    %%xassist model --refresh
+
+- Example
+
+.. image:: gemini.png
+
+A new prompt is sent to the model every time, and the functionality to use previous context will be added soon.
\ No newline at end of file
diff --git a/environment-wasm-build.yml b/environment-wasm-build.yml
index 27d414af..130ba900 100644
--- a/environment-wasm-build.yml
+++ b/environment-wasm-build.yml
@@ -4,4 +4,4 @@ channels:
 dependencies:
   - cmake
   - emsdk >=3.1.11
-  - empack >=2.0.1
+  - empack >=2.0.1
\ No newline at end of file
diff --git a/environment-wasm-host.yml b/environment-wasm-host.yml
index 99db9689..7d310366 100644
--- a/environment-wasm-host.yml
+++ b/environment-wasm-host.yml
@@ -8,4 +8,4 @@ dependencies:
   - xeus
   - CppInterOp>=1.3.0
   - cpp-argparse
-  - pugixml
+  - pugixml
\ No newline at end of file
diff --git a/src/xinterpreter.cpp b/src/xinterpreter.cpp
index e715d457..0bbc7329 100644
--- a/src/xinterpreter.cpp
+++ b/src/xinterpreter.cpp
@@ -28,6 +28,9 @@
 #include "xinput.hpp"
 #include "xinspect.hpp"
 #include "xmagics/os.hpp"
+#ifndef EMSCRIPTEN
+#include "xmagics/xassist.hpp"
+#endif
 #include "xparser.hpp"
 #include "xsystem.hpp"
 
@@ -404,5 +407,8 @@ __get_cxx_version ()
         // preamble_manager["magics"].get_cast<xmagics_manager&>().register_magic("file", writefile());
         // preamble_manager["magics"].get_cast<xmagics_manager&>().register_magic("timeit", timeit(&m_interpreter));
         // preamble_manager["magics"].get_cast<xmagics_manager&>().register_magic("python", pythonexec());
+#ifndef EMSCRIPTEN
+        preamble_manager["magics"].get_cast<xmagics_manager&>().register_magic("xassist", xassist());
+#endif
     }
 }
diff --git a/src/xmagics/xassist.cpp b/src/xmagics/xassist.cpp
new file mode 100644
index 00000000..a2985e7d
--- /dev/null
+++ b/src/xmagics/xassist.cpp
@@ -0,0 +1,328 @@
+/************************************************************************************
+ * Copyright (c) 2023, xeus-cpp contributors                                        *
+ *                                                                                  *
+ * Distributed under the terms of the BSD 3-Clause License.                         *
+ *                                                                                  *
+ * The full license is in the file LICENSE, distributed with this software.         *
+ ************************************************************************************/
+#include "xassist.hpp"
+
+#define CURL_STATICLIB
+#include <curl/curl.h>
+#include <fstream>
+#include <iostream>
+#include <nlohmann/json.hpp>
+#include <sstream>
+#include <string>
+#include <vector>
+
+using json = nlohmann::json;
+
+// TODO: Implement xplugin to separate the magics from the main code.
+// TODO: Add support for open-source models.
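+//
+// Overview of the helpers below:
+//   - api_key_manager persists one API key per model in <model>_api_key.txt.
+//   - chat_history appends each user prompt and each model reply to
+//     <model>_chat_history.txt in the provider's message format and returns the
+//     accumulated contents, which gemini()/openai() splice into the request body.
+//   - curl_helper owns the libcurl handle and performs the JSON POST requests.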
+namespace xcpp
+{
+    class api_key_manager
+    {
+    public:
+
+        static void save_api_key(const std::string& model, const std::string& api_key)
+        {
+            std::string api_key_file_path = model + "_api_key.txt";
+            std::ofstream out(api_key_file_path);
+            if (out)
+            {
+                out << api_key;
+                out.close();
+                std::cout << "API key saved for model " << model << std::endl;
+            }
+            else
+            {
+                std::cerr << "Failed to open file for writing API key for model " << model << std::endl;
+            }
+        }
+
+        // Method to load the API key for a specific model
+        static std::string load_api_key(const std::string& model)
+        {
+            std::string api_key_file_path = model + "_api_key.txt";
+            std::ifstream in(api_key_file_path);
+            std::string api_key;
+            if (in)
+            {
+                std::getline(in, api_key);
+                in.close();
+                return api_key;
+            }
+
+            std::cerr << "Failed to open file for reading API key for model " << model << std::endl;
+            return "";
+        }
+    };
+
+    class chat_history
+    {
+    public:
+
+        static std::string chat(const std::string& model, const std::string& user, const std::string& cell)
+        {
+            return append_and_read_back(model, user, "\"" + cell + "\"");
+        }
+
+        static std::string chat(const std::string& model, const std::string& user, const nlohmann::json& cell)
+        {
+            return append_and_read_back(model, user, cell.dump());
+        }
+
+        static void refresh(const std::string& model)
+        {
+            std::string chat_history_file_path = model + "_chat_history.txt";
+            std::ofstream out(chat_history_file_path, std::ios::out);
+        }
+
+    private:
+
+        static std::string
+        append_and_read_back(const std::string& model, const std::string& user, const std::string& serialized_cell)
+        {
+            std::string chat_history_file_path = model + "_chat_history.txt";
+            std::ofstream out;
+            bool is_empty = is_file_empty(chat_history_file_path);
+
+            out.open(chat_history_file_path, std::ios::app);
+            if (!out)
+            {
+                std::cerr << "Failed to open file for writing chat history for model " << model << std::endl;
+                return "";
+            }
+
+            if (!is_empty)
+            {
+                out << ", ";
+            }
+
+            if (model == "gemini")
+            {
+                out << R"({ "role": ")" << user << R"(", "parts": [ { "text": )" << serialized_cell << "}]}\n";
+            }
+            else
+            {
+                out << R"({ "role": ")" << user << R"(", "content": )" << serialized_cell << "}\n";
+            }
+
+            out.close();
+
+            return read_file_content(chat_history_file_path);
+        }
+
+        static bool is_file_empty(const std::string& file_path)
+        {
+            std::ifstream file(file_path, std::ios::ate);  // Open the file at the end
+            if (!file)  // If the file cannot be opened, it might not exist
+            {
+                return true;  // Consider non-existent files as empty
+            }
+            return file.tellg() == 0;
+        }
+
+        static std::string read_file_content(const std::string& file_path)
+        {
+            std::ifstream in(file_path);
+            std::stringstream buffer_stream;
+            buffer_stream << in.rdbuf();
+            return buffer_stream.str();
+        }
+    };
+
+    class curl_helper
+    {
+    private:
+
+        CURL* m_curl;
+        curl_slist* m_headers;
+
+    public:
+
+        curl_helper()
+            : m_curl(curl_easy_init())
+            , m_headers(curl_slist_append(nullptr, "Content-Type: application/json"))
+        {
+        }
+
+        ~curl_helper()
+        {
+            if (m_curl)
+            {
+                curl_easy_cleanup(m_curl);
+            }
+            if (m_headers)
+            {
+                curl_slist_free_all(m_headers);
+            }
+        }
+
+        // Delete copy constructor and copy assignment operator
+        curl_helper(const curl_helper&) = delete;
+        curl_helper& operator=(const curl_helper&) = delete;
+
+        // Delete move constructor and move assignment operator
+        curl_helper(curl_helper&&) = delete;
+        curl_helper& operator=(curl_helper&&) = delete;
+
+        std::string
+        perform_request(const std::string& url, const std::string& post_data, const std::string& auth_header = "")
+        {
+            if (!auth_header.empty())
+            {
+                m_headers = curl_slist_append(m_headers, auth_header.c_str());
+            }
+
+            curl_easy_setopt(m_curl, CURLOPT_URL, url.c_str());
+            curl_easy_setopt(m_curl, CURLOPT_HTTPHEADER, m_headers);
+            curl_easy_setopt(m_curl, CURLOPT_POSTFIELDS, post_data.c_str());
+
+            std::string response;
+            curl_easy_setopt(
+                m_curl,
+                CURLOPT_WRITEFUNCTION,
+                +[](const char* in, size_t size, size_t num, std::string* out)
+                {
+                    const size_t total_bytes(size * num);
+                    out->append(in, total_bytes);
+                    return total_bytes;
+                }
+            );
+            curl_easy_setopt(m_curl, CURLOPT_WRITEDATA, &response);
+
+            CURLcode res = curl_easy_perform(m_curl);
+            if (res != CURLE_OK)
+            {
+                std::cerr << "CURL request failed: " << curl_easy_strerror(res) << std::endl;
+                return "";
+            }
+
+            return response;
+        }
+    };
+
+    std::string gemini(const std::string& cell, const std::string& key)
+    {
+        curl_helper curl_helper;
+        const std::string chat_message = xcpp::chat_history::chat("gemini", "user", cell);
+        const std::string url = "https://generativelanguage.googleapis.com/v1beta/models/gemini-1.5-flash:generateContent?key="
+                                + key;
+        const std::string post_data = R"({"contents": [ )" + chat_message + R"(]})";
+
+        std::string response = curl_helper.perform_request(url, post_data);
+
+        json j = json::parse(response);
+        if (j.find("error") != j.end())
+        {
+            std::cerr << "Error: " << j["error"]["message"] << std::endl;
+            return "";
+        }
+
+        const std::string chat = xcpp::chat_history::chat(
+            "gemini",
+            "model",
+            j["candidates"][0]["content"]["parts"][0]["text"]
+        );
+
+        return j["candidates"][0]["content"]["parts"][0]["text"];
+    }
+
+    std::string openai(const std::string& cell, const std::string& key)
+    {
+        curl_helper curl_helper;
+        const std::string url = "https://api.openai.com/v1/chat/completions";
+        const std::string chat_message = xcpp::chat_history::chat("openai", "user", cell);
+        const std::string post_data = R"({
+            "model": "gpt-3.5-turbo-16k",
+            "messages": [)" + chat_message
+                                      + R"(],
+            "temperature": 0.7
+        })";
+        std::string auth_header = "Authorization: Bearer " + key;
+
+        std::string response = curl_helper.perform_request(url, post_data, auth_header);
+
+        json j = json::parse(response);
+
+        if (j.find("error") != j.end())
+        {
+            std::cerr << "Error: " << j["error"]["message"] << std::endl;
+            return "";
+        }
+
+        const std::string chat = xcpp::chat_history::chat(
+            "openai",
+            "assistant",
+            j["choices"][0]["message"]["content"]
+        );
+
+        return j["choices"][0]["message"]["content"];
+    }
+
+    void xassist::operator()(const std::string& line, const std::string& cell)
+    {
+        try
+        {
+            std::istringstream iss(line);
+            std::vector<std::string> tokens(
+                std::istream_iterator<std::string>{iss},
+                std::istream_iterator<std::string>()
+            );
+
+            std::vector<std::string> models = {"gemini", "openai"};
+            std::string model = tokens[1];
+
+            if (std::find(models.begin(), models.end(), model) == models.end())
+            {
+                std::cerr << "Model not found." << std::endl;
+                return;
+            }
+
+            if (tokens.size() > 2)
+            {
+                if (tokens[2] == "--save-key")
+                {
+                    xcpp::api_key_manager::save_api_key(model, cell);
+                    return;
+                }
+
+                if (tokens[2] == "--refresh")
+                {
+                    xcpp::chat_history::refresh(model);
+                    return;
+                }
+            }
+
+            std::string key = xcpp::api_key_manager::load_api_key(model);
+            if (key.empty())
+            {
+                std::cerr << "API key for model " << model << " is not available." << std::endl;
+                return;
+            }
+
+            std::string response;
+            if (model == "gemini")
+            {
+                response = gemini(cell, key);
+            }
+            else if (model == "openai")
+            {
+                response = openai(cell, key);
+            }
+
+            std::cout << response;
+        }
+        catch (const std::runtime_error& e)
+        {
+            std::cerr << "Caught an exception: " << e.what() << std::endl;
+        }
+        catch (...)
+        {
+            std::cerr << "Caught an unknown exception" << std::endl;
+        }
+    }
+}  // namespace xcpp
\ No newline at end of file
diff --git a/src/xmagics/xassist.hpp b/src/xmagics/xassist.hpp
new file mode 100644
index 00000000..363dcbd0
--- /dev/null
+++ b/src/xmagics/xassist.hpp
@@ -0,0 +1,26 @@
+/************************************************************************************
+ * Copyright (c) 2023, xeus-cpp contributors                                        *
+ *                                                                                  *
+ * Distributed under the terms of the BSD 3-Clause License.                         *
+ *                                                                                  *
+ * The full license is in the file LICENSE, distributed with this software.         *
+ ************************************************************************************/
+
+#ifndef XEUS_CPP_XASSIST_MAGIC_HPP
+#define XEUS_CPP_XASSIST_MAGIC_HPP
+
+#include <string>
+
+#include "xeus-cpp/xmagics.hpp"
+
+namespace xcpp
+{
+    class xassist : public xmagic_cell
+    {
+    public:
+
+        XEUS_CPP_API
+        void operator()(const std::string& line, const std::string& cell) override;
+    };
+}  // namespace xcpp
+#endif
\ No newline at end of file
diff --git a/test/test_interpreter.cpp b/test/test_interpreter.cpp
index 364e1fd7..eea3c28a 100644
--- a/test/test_interpreter.cpp
+++ b/test/test_interpreter.cpp
@@ -19,6 +19,7 @@
 #include "../src/xparser.hpp"
 #include "../src/xsystem.hpp"
 #include "../src/xmagics/os.hpp"
+#include "../src/xmagics/xassist.hpp"
 #include "../src/xinspect.hpp"
 
 
@@ -886,4 +887,79 @@ TEST_SUITE("xinspect"){
         cmp.child_value = "nonexistentMethod";
         REQUIRE(cmp(node) == false);
     }
+}
+
+TEST_SUITE("xassist"){
+
+    TEST_CASE("model_not_found"){
+        xcpp::xassist assist;
+        std::string line = "%%xassist testModel";
+        std::string cell = "test input";
+
+        StreamRedirectRAII redirect(std::cerr);
+
+        assist(line, cell);
+
+        REQUIRE(redirect.getCaptured() == "Model not found.\n");
+    }
+
+    TEST_CASE("gemini_save"){
+        xcpp::xassist assist;
+        std::string line = "%%xassist gemini --save-key";
+        std::string cell = "1234";
+
+        assist(line, cell);
+
+        std::ifstream infile("gemini_api_key.txt");
+        std::string content;
+        std::getline(infile, content);
+
+        REQUIRE(content == "1234");
+        infile.close();
+
+        StreamRedirectRAII redirect(std::cerr);
+
+        assist("%%xassist gemini", "hello");
+
+        REQUIRE(!redirect.getCaptured().empty());
+
+        std::remove("gemini_api_key.txt");
+    }
+
+    TEST_CASE("gemini"){
+        xcpp::xassist assist;
+        std::string line = "%%xassist gemini";
+        std::string cell = "hello";
+
+        StreamRedirectRAII redirect(std::cerr);
+
+        assist(line, cell);
+
+        REQUIRE(!redirect.getCaptured().empty());
+    }
+
+    TEST_CASE("openai"){
+        xcpp::xassist assist;
+        std::string line = "%%xassist openai --save-key";
+        std::string cell = "1234";
+
+        assist(line, cell);
+
+        std::ifstream infile("openai_api_key.txt");
+        std::string content;
+        std::getline(infile, content);
+
+        REQUIRE(content == "1234");
+        infile.close();
+
+        StreamRedirectRAII redirect(std::cerr);
+
+        assist("%%xassist openai", "hello");
+
+        REQUIRE(!redirect.getCaptured().empty());
+
+        std::remove("openai_api_key.txt");
+    }
+}
\ No newline at end of file
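
Below is a minimal usage sketch of the notebook cells this patch enables, based on the magics.rst
documentation and the xassist implementation above; the key value is a placeholder and the prompt is
an arbitrary example.

Cell 1, store the key (written to gemini_api_key.txt in the kernel's working directory):

    %%xassist gemini --save-key
    YOUR_GEMINI_API_KEY

Cell 2, send a prompt (the cell body is forwarded to gemini-1.5-flash and the reply is printed):

    %%xassist gemini
    How do I read a file line by line in C++?

Cell 3, reset the stored conversation (truncates gemini_chat_history.txt):

    %%xassist gemini --refresh

The same flow applies to the openai model name, which stores openai_api_key.txt and
openai_chat_history.txt and calls gpt-3.5-turbo-16k.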