From 78ff49893e60263cc2b9eaedfe0270e48b672d9c Mon Sep 17 00:00:00 2001
From: Wovchena
Date: Sun, 8 Oct 2023 03:06:04 +0400
Subject: [PATCH] onnx

---
 .github/workflows/llm_demo.yml    | 2 +-
 demos/llm_demo/cpp/CMakeLists.txt | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/llm_demo.yml b/.github/workflows/llm_demo.yml
index 2066261d284..176f1ce3e23 100644
--- a/.github/workflows/llm_demo.yml
+++ b/.github/workflows/llm_demo.yml
@@ -34,7 +34,7 @@ jobs:
       working-directory: open_llama_3b_v2
       run: |
         git lfs checkout
-        python -m pip install --extra-index-url https://download.pytorch.org/whl/cpu git+https://github.com/huggingface/optimum-intel.git
+        python -m pip install --extra-index-url https://download.pytorch.org/whl/cpu onnx git+https://github.com/huggingface/optimum-intel.git
         source ../ov/setupvars.sh
         python -c "from optimum.intel.openvino import OVModelForCausalLM; model = OVModelForCausalLM.from_pretrained('.', export=True); model.save_pretrained('.')"
         python ../demos/thirdparty/llama.cpp/convert.py . --vocab-only --outfile vocab.gguf
diff --git a/demos/llm_demo/cpp/CMakeLists.txt b/demos/llm_demo/cpp/CMakeLists.txt
index 665efd97205..1d47ee3dc36 100644
--- a/demos/llm_demo/cpp/CMakeLists.txt
+++ b/demos/llm_demo/cpp/CMakeLists.txt
@@ -4,7 +4,7 @@ add_demo(NAME llm_demo SOURCES main.cpp DEPENDENCIES llama common)
 if(COMPILER_IS_GCC_LIKE)
     # Disable the warnings from llama.cpp
-    target_compile_options(llm_demo PRIVATE -Wimplicit-fallthrough=0 -Wno-unused-function)
+    target_compile_options(llm_demo PRIVATE -Wno-implicit-fallthrough -Wno-unused-function)
 elseif(MSVC)
     target_compile_options(llm_demo PRIVATE /wd4146 /wd4305 /wd4715 /wd4996)
 endif()