main.go (forked from mudler/LocalAI)
package main

import (
    "embed"
    "os"
    "os/signal"
    "path/filepath"
    "syscall"

    "github.com/alecthomas/kong"
    "github.com/joho/godotenv"
    "github.com/mudler/LocalAI/core/cli"
    "github.com/mudler/LocalAI/internal"
    "github.com/rs/zerolog"
    "github.com/rs/zerolog/log"

    _ "github.com/mudler/LocalAI/swagger"
)

// backendAssets embeds the backend assets shipped with the binary
// (declaration restored from upstream LocalAI; the "embed" import and this
// variable are required because main() references backendAssets below).
//go:embed backend-assets/*
var backendAssets embed.FS
func main() {
    var err error

    // Initialize zerolog at INFO; the desired level is set after the CLI
    // options are parsed.
    log.Logger = log.Output(zerolog.ConsoleWriter{Out: os.Stderr})
    zerolog.SetGlobalLevel(zerolog.InfoLevel)

    // Catch signals from the OS requesting us to exit
    go func() {
        c := make(chan os.Signal, 1) // buffer size 1 so the notifier is not blocked
        signal.Notify(c, os.Interrupt, syscall.SIGTERM)
        <-c
        os.Exit(1)
    }()
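    // Note: os.Exit(1) above terminates the process immediately; deferred
    // functions do not run, so any cleanup must happen before that call.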
    // Handle loading environment variables from .env files
    envFiles := []string{".env", "localai.env"}
    homeDir, err := os.UserHomeDir()
    if err == nil {
        envFiles = append(envFiles, filepath.Join(homeDir, "localai.env"), filepath.Join(homeDir, ".config/localai.env"))
    }
    envFiles = append(envFiles, "/etc/localai.env")

    for _, envFile := range envFiles {
        if _, err := os.Stat(envFile); err == nil {
            log.Info().Str("envFile", envFile).Msg("env file found, loading environment variables from file")
            err = godotenv.Load(envFile)
            if err != nil {
                log.Error().Err(err).Str("envFile", envFile).Msg("failed to load environment variables from file")
                continue
            }
        }
    }
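    // The files above are plain KEY=VALUE dotenv files. An illustrative
    // ~/.config/localai.env (variable names assumed from the LocalAI CLI
    // flags, which bind to LOCALAI_*-prefixed environment variables):
    //
    //   LOCALAI_ADDRESS=:8080
    //   LOCALAI_THREADS=4
    //   LOCALAI_MODELS_PATH=/var/lib/localai/models
    //
    // godotenv.Load never overrides a variable that is already set, so the
    // shell environment wins over any file, and earlier files in envFiles
    // win over later ones.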
    // Actually parse the CLI options
    ctx := kong.Parse(&cli.CLI,
        kong.Description(
            ` LocalAI is a drop-in replacement OpenAI API for running LLM, GPT and genAI models locally on CPUs and GPUs with consumer-grade hardware.

Some of the compatible models are:
- Vicuna
- Koala
- GPT4ALL
- GPT4ALL-J
- Cerebras
- Alpaca
- StableLM (ggml quantized)

For a list of compatible models, check out: https://localai.io/model-compatibility/index.html

Copyright: Ettore Di Giacinto
Version: ${version}
`,
        ),
        kong.UsageOnError(),
        kong.Vars{
            "basepath":         kong.ExpandPath("."),
            "remoteLibraryURL": "https://raw.githubusercontent.com/mudler/LocalAI/master/embedded/model_library.yaml",
            "galleries":        `[{"name":"localai", "url":"github:mudler/LocalAI/gallery/index.yaml@master"}]`,
            "version":          internal.PrintableVersion(),
        },
    )
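    // kong interpolates the Vars above into help text and struct tags with
    // ${name} syntax: ${version} in the description expands to
    // internal.PrintableVersion(), and flags defined in cli.CLI can take
    // defaults such as ${basepath} or ${galleries}.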
    // Configure the logging level before we run the application
    // This is here to preserve the existing --debug flag functionality
    logLevel := "info"
    if cli.CLI.Debug && cli.CLI.LogLevel == nil {
        logLevel = "debug"
        zerolog.SetGlobalLevel(zerolog.DebugLevel)
        cli.CLI.LogLevel = &logLevel
    }
    if cli.CLI.LogLevel == nil {
        cli.CLI.LogLevel = &logLevel
    }

    switch *cli.CLI.LogLevel {
    case "error":
        zerolog.SetGlobalLevel(zerolog.ErrorLevel)
        log.Info().Msg("Setting logging to error")
    case "warn":
        zerolog.SetGlobalLevel(zerolog.WarnLevel)
        log.Info().Msg("Setting logging to warn")
    case "info":
        zerolog.SetGlobalLevel(zerolog.InfoLevel)
        log.Info().Msg("Setting logging to info")
    case "debug":
        zerolog.SetGlobalLevel(zerolog.DebugLevel)
        log.Debug().Msg("Setting logging to debug")
    case "trace":
        zerolog.SetGlobalLevel(zerolog.TraceLevel)
        log.Trace().Msg("Setting logging to trace")
    }
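    // Precedence: an explicit --log-level always wins; the legacy --debug
    // flag only takes effect when --log-level is unset, and any unrecognized
    // value leaves the global level at the "info" default set above.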
    // Populate the application with the embedded backend assets
    cli.CLI.Context.BackendAssets = backendAssets

    // Run the thing!
    err = ctx.Run(&cli.CLI.Context)
    if err != nil {
        log.Fatal().Err(err).Msg("Error running the application")
    }
}
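// Example invocation once built (subcommand and flags are illustrative;
// run `local-ai --help` for the authoritative list):
//
//   local-ai run --address :8080 --models-path ./models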