Skip to content
This repository was archived by the owner on Jul 4, 2025. It is now read-only.

Commit 9912012

Browse files
authored
Merge branch 'dev' into big-refactor
2 parents c99f538 + 81211a4 commit 9912012

15 files changed

+32
-14
lines changed

docs/docusaurus.config.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -476,7 +476,7 @@ const config: Config = {
476476
},
477477
{
478478
label: "Careers",
479-
href: "https://homebrew.bamboohr.com/careers",
479+
href: "https://menlo.bamboohr.com/careers",
480480
},
481481
],
482482
},

engine/common/assistant_code_interpreter_tool.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ struct AssistantCodeInterpreterTool : public AssistantTool {
2020

2121
static cpp::result<AssistantCodeInterpreterTool, std::string> FromJson() {
2222
AssistantCodeInterpreterTool tool;
23-
return std::move(tool);
23+
return tool;
2424
}
2525

2626
cpp::result<Json::Value, std::string> ToJson() override {

engine/common/thread.h

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -150,9 +150,11 @@ struct Thread : JsonSerializable {
150150
if (auto code_interpreter =
151151
dynamic_cast<CodeInterpreter*>(tool_resources.get())) {
152152
tool_json["code_interpreter"] = tool_result.value();
153+
(void) code_interpreter;
153154
} else if (auto file_search =
154155
dynamic_cast<FileSearch*>(tool_resources.get())) {
155156
tool_json["file_search"] = tool_result.value();
157+
(void) file_search;
156158
}
157159
json["tool_resources"] = tool_json;
158160
}

engine/e2e-test/api/engines/test_api_engine.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ def test_engines_get_llamacpp_should_be_successful(self):
2828

2929
# engines install
3030
def test_engines_install_llamacpp_specific_version_and_variant(self):
31-
data = {"version": "v0.1.40-b4354", "variant": "linux-amd64-avx-cuda-11-7"}
31+
data = {"version": "v0.1.40-b4354", "variant": "linux-amd64-avx"}
3232
response = requests.post(
3333
"http://localhost:3928/v1/engines/llama-cpp/install", json=data
3434
)

engine/e2e-test/api/engines/test_api_engine_install_nightly.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@ def test_engines_install_llamacpp_should_be_successful(self):
2323
assert response.status_code == 200
2424

2525
def test_engines_install_llamacpp_specific_version_and_variant(self):
26-
data = {"version": latest_pre_release_tag, "variant": "linux-amd64-avx-cuda-11-7"}
26+
data = {"version": latest_pre_release_tag, "variant": "linux-amd64-avx"}
2727
response = requests.post(
2828
"http://localhost:3928/v1/engines/llama-cpp/install", json=data
2929
)

engine/e2e-test/api/engines/test_api_get_default_engine.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ def setup_and_teardown(self):
2424
def test_api_get_default_engine_successfully(self):
2525
# Data test
2626
engine= "llama-cpp"
27-
name= "linux-amd64-avx-cuda-11-7"
27+
name= "linux-amd64-avx"
2828
version= "v0.1.35-27.10.24"
2929

3030
data = {"version": version, "variant": name}

engine/e2e-test/api/engines/test_api_get_list_engine.py

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,11 @@ def test_api_get_list_engines_successfully(self):
2626
engine= "llama-cpp"
2727
name= "linux-amd64-avx"
2828
version= "v0.1.35-27.10.24"
29+
30+
post_install_url = f"http://localhost:3928/v1/engines/{engine}/install"
31+
response = requests.delete(
32+
post_install_url
33+
)
2934

3035
data = {"version": version, "variant": name}
3136
post_install_url = f"http://localhost:3928/v1/engines/{engine}/install"

engine/extensions/python-engine/python_engine.cc

Lines changed: 10 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ size_t StreamWriteCallback(char* ptr, size_t size, size_t nmemb,
5656
return size * nmemb;
5757
}
5858

59-
static size_t WriteCallback(char* ptr, size_t size, size_t nmemb,
59+
[[maybe_unused]] static size_t WriteCallback(char* ptr, size_t size, size_t nmemb,
6060
std::string* data) {
6161
data->append(ptr, size * nmemb);
6262
return size * nmemb;
@@ -185,6 +185,7 @@ void PythonEngine::GetModels(
185185
status["status_code"] = k200OK;
186186

187187
callback(std::move(status), std::move(response_json));
188+
(void) json_body;
188189
}
189190

190191
void PythonEngine::LoadModel(
@@ -386,6 +387,8 @@ void PythonEngine::HandleChatCompletion(
386387
std::shared_ptr<Json::Value> json_body,
387388
std::function<void(Json::Value&&, Json::Value&&)>&& callback) {
388389
LOG_WARN << "Does not support yet!";
390+
(void) json_body;
391+
(void) callback;
389392
}
390393

391394
CurlResponse PythonEngine::MakeStreamPostRequest(
@@ -623,7 +626,9 @@ Json::Value PythonEngine::GetRemoteModels() {
623626
return Json::Value();
624627
}
625628

626-
void PythonEngine::StopInferencing(const std::string& model_id) {}
629+
void PythonEngine::StopInferencing(const std::string& model_id) {
630+
(void)model_id;
631+
}
627632

628633
void PythonEngine::HandleRouteRequest(
629634
std::shared_ptr<Json::Value> json_body,
@@ -893,12 +898,14 @@ void PythonEngine::SetLogLevel(trantor::Logger::LogLevel log_level) {
893898

894899
void PythonEngine::Load(EngineLoadOption opts) {
895900
// Develop register model here on loading engine
901+
(void) opts;
896902
};
897903

898904
void PythonEngine::Unload(EngineUnloadOption opts) {
899905
for (const auto& pair : models_) {
900906
TerminateModelProcess(pair.first);
901907
}
908+
(void) opts;
902909
};
903910

904-
} // namespace python_engine
911+
} // namespace python_engine

engine/main.cc

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -63,7 +63,7 @@ void RunServer(std::optional<std::string> host, std::optional<int> port,
6363
bool ignore_cout) {
6464
#if defined(__unix__) || (defined(__APPLE__) && defined(__MACH__))
6565
auto signal_handler = +[](int sig) -> void {
66-
std::cout << "\rCaught interrupt signal, shutting down\n";
66+
std::cout << "\rCaught interrupt signal:" << sig << ", shutting down\n";;
6767
shutdown_signal = true;
6868
};
6969
signal(SIGINT, signal_handler);
@@ -145,7 +145,7 @@ void RunServer(std::optional<std::string> host, std::optional<int> port,
145145
return;
146146
}
147147

148-
using Event = cortex::event::Event;
148+
// using Event = cortex::event::Event; //unused
149149
using EventQueue =
150150
eventpp::EventQueue<EventType,
151151
void(const eventpp::AnyData<eventMaxSize>&)>;

engine/services/download_service.h

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -234,6 +234,8 @@ class DownloadService {
234234
break;
235235
}
236236
}
237+
(void) ultotal;
238+
(void) ulnow;
237239

238240
return 0;
239241
}

0 commit comments

Comments (0)