
Commit

onnx
Wovchena committed Oct 7, 2023
1 parent ed255b9 commit 78ff498
Showing 2 changed files with 2 additions and 2 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/llm_demo.yml
@@ -34,7 +34,7 @@ jobs:
 working-directory: open_llama_3b_v2
 run: |
 git lfs checkout
-python -m pip install --extra-index-url https://download.pytorch.org/whl/cpu git+https://github.com/huggingface/optimum-intel.git
+python -m pip install --extra-index-url https://download.pytorch.org/whl/cpu onnx git+https://github.com/huggingface/optimum-intel.git
 source ../ov/setupvars.sh
 python -c "from optimum.intel.openvino import OVModelForCausalLM; model = OVModelForCausalLM.from_pretrained('.', export=True); model.save_pretrained('.')"
 python ../demos/thirdparty/llama.cpp/convert.py . --vocab-only --outfile vocab.gguf
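Context for the workflow change: the only edit is adding the onnx package to the pip install line, presumably because the optimum-intel export used two lines later can rely on an ONNX intermediate when converting the checkpoint to OpenVINO IR. The following is a minimal sketch of that same export step as a standalone script; the working directory and the sourced setupvars.sh are assumptions carried over from the workflow, not part of this commit.

# Minimal sketch of the export step from the workflow above. Assumes it runs
# from the open_llama_3b_v2 checkout with OpenVINO's setupvars.sh already
# sourced (both taken from the surrounding workflow, not guaranteed here).
from optimum.intel.openvino import OVModelForCausalLM

# export=True converts the original checkpoint into OpenVINO IR; depending on
# the optimum-intel version this conversion can go through ONNX, which is
# presumably why the `onnx` package is added to the pip install line.
model = OVModelForCausalLM.from_pretrained('.', export=True)
model.save_pretrained('.')  # writes the OpenVINO IR files into the current directory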
2 changes: 1 addition & 1 deletion demos/llm_demo/cpp/CMakeLists.txt
@@ -4,7 +4,7 @@

 add_demo(NAME llm_demo SOURCES main.cpp DEPENDENCIES llama common)
 if(COMPILER_IS_GCC_LIKE) # Disable the warnings from llama.cpp
-target_compile_options(llm_demo PRIVATE -Wimplicit-fallthrough=0 -Wno-unused-function)
+target_compile_options(llm_demo PRIVATE -Wno-implicit-fallthrough -Wno-unused-function)
 elseif(MSVC)
 target_compile_options(llm_demo PRIVATE /wd4146 /wd4305 /wd4715 /wd4996)
 endif()
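A note on the flag spelling, as I read it: -Wimplicit-fallthrough=0 is the GCC-specific =level form, while -Wno-implicit-fallthrough is accepted by both GCC and Clang, which is presumably what COMPILER_IS_GCC_LIKE covers; keeping target_compile_options with PRIVATE scopes the suppressions to the llm_demo target instead of every target in the directory, matching the MSVC branch's /wd suppressions.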
