feat: cleanups, small enhancements
Signed-off-by: mudler <mudler@localai.io>
mudler committed Jul 4, 2023
1 parent 6d19a8b commit b722e7e
Showing 6 changed files with 35 additions and 15 deletions.
3 changes: 3 additions & 0 deletions api/api.go
@@ -51,6 +51,9 @@ func App(opts ...AppOption) (*fiber.App, error) {
}))
}

log.Info().Msgf("Starting LocalAI using %d threads, with models path: %s", options.threads, options.loader.ModelPath)
log.Info().Msgf("LocalAI version: %s", internal.PrintableVersion())

cm := NewConfigMerger()
if err := cm.LoadConfigs(options.loader.ModelPath); err != nil {
log.Error().Msgf("error loading config files: %s", err.Error())
10 changes: 7 additions & 3 deletions api/openai.go
@@ -214,15 +214,17 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {

if input.Stream {
if len(config.PromptStrings) > 1 {
return errors.New("cannot handle more than 1 `PromptStrings` when `Stream`ing")
return errors.New("cannot handle more than 1 `PromptStrings` when Streaming")
}

predInput := config.PromptStrings[0]

// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
templatedInput, err := o.loader.TemplatePrefix(templateFile, struct {
Input string
- }{Input: predInput})
+ }{
+ Input: predInput,
+ })
if err == nil {
predInput = templatedInput
log.Debug().Msgf("Template found, input modified to: %s", predInput)
@@ -268,7 +270,9 @@ func completionEndpoint(cm *ConfigMerger, o *Option) func(c *fiber.Ctx) error {
// A model can have a "file.bin.tmpl" file associated with a prompt template prefix
templatedInput, err := o.loader.TemplatePrefix(templateFile, struct {
Input string
- }{Input: i})
+ }{
+ Input: i,
+ })
if err == nil {
i = templatedInput
log.Debug().Msgf("Template found, input modified to: %s", i)
16 changes: 16 additions & 0 deletions api/options.go
@@ -3,9 +3,11 @@ package api
import (
"context"
"embed"
"encoding/json"

"github.com/go-skynet/LocalAI/pkg/gallery"
model "github.com/go-skynet/LocalAI/pkg/model"
"github.com/rs/zerolog/log"
)

type Option struct {
@@ -69,6 +71,20 @@ func WithBackendAssets(f embed.FS) AppOption {
}
}

+ func WithStringGalleries(galls string) AppOption {
+ return func(o *Option) {
+ if galls == "" {
+ log.Debug().Msgf("no galleries to load")
+ return
+ }
+ var galleries []gallery.Gallery
+ if err := json.Unmarshal([]byte(galls), &galleries); err != nil {
+ log.Error().Msgf("failed loading galleries: %s", err.Error())
+ }
+ o.galleries = append(o.galleries, galleries...)
+ }
+ }
+
func WithGalleries(galleries []gallery.Gallery) AppOption {
return func(o *Option) {
o.galleries = append(o.galleries, galleries...)
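For context, the string handed to WithStringGalleries is expected to be a JSON array of gallery definitions. A minimal standalone sketch of how such a string parses, assuming the json field names (name, url) from pkg/gallery's Gallery struct and an illustrative gallery URL not taken from this diff:

package main

import (
	"encoding/json"
	"fmt"
)

// Gallery mirrors the shape WithStringGalleries unmarshals into;
// the json tags here are assumptions for illustration.
type Gallery struct {
	URL  string `json:"url"`
	Name string `json:"name"`
}

func main() {
	// Example value for the galleries string; the URL is illustrative.
	galls := `[{"name":"model-gallery","url":"github:go-skynet/model-gallery/index.yaml"}]`

	var galleries []Gallery
	if err := json.Unmarshal([]byte(galls), &galleries); err != nil {
		fmt.Println("failed loading galleries:", err)
		return
	}
	fmt.Printf("loaded %d galleries: %+v\n", len(galleries), galleries)
}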
2 changes: 1 addition & 1 deletion internal/version.go
@@ -6,5 +6,5 @@ var Version = ""
var Commit = ""

func PrintableVersion() string {
return fmt.Sprintf("LocalAI %s (%s)", Version, Commit)
return fmt.Sprintf("%s (%s)", Version, Commit)
}
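The "LocalAI" prefix is dropped here because api/api.go (above) now logs the version as "LocalAI version: %s", so keeping the prefix would have produced a doubled "LocalAI version: LocalAI ..." line. A small sketch of the resulting output, using placeholder Version and Commit values (in the real build these are presumably injected at build time):

package main

import "fmt"

// Placeholder values; LocalAI sets Version and Commit at build time.
var (
	Version = "v1.20.0"
	Commit  = "b722e7e"
)

func PrintableVersion() string {
	return fmt.Sprintf("%s (%s)", Version, Commit)
}

func main() {
	// Mirrors the new log call added in api/api.go above.
	fmt.Printf("LocalAI version: %s\n", PrintableVersion())
	// Prints: LocalAI version: v1.20.0 (b722e7e)
}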
14 changes: 3 additions & 11 deletions main.go
@@ -1,14 +1,11 @@
package main

import (
"encoding/json"
"fmt"
"os"
"path/filepath"

api "github.com/go-skynet/LocalAI/api"
"github.com/go-skynet/LocalAI/internal"
"github.com/go-skynet/LocalAI/pkg/gallery"
model "github.com/go-skynet/LocalAI/pkg/model"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
@@ -126,19 +123,13 @@ Some of the models compatible are:
- Alpaca
- StableLM (ggml quantized)
It uses llama.cpp, ggml and gpt4all as backend with golang c bindings.
For a list of compatible model, check out: https://localai.io/model-compatibility/index.html
`,
UsageText: `local-ai [options]`,
Copyright: "go-skynet authors",
Copyright: "Ettore Di Giacinto",
Action: func(ctx *cli.Context) error {
fmt.Printf("Starting LocalAI using %d threads, with models path: %s\n", ctx.Int("threads"), ctx.String("models-path"))
galls := ctx.String("galleries")
var galleries []gallery.Gallery
err := json.Unmarshal([]byte(galls), &galleries)
fmt.Println(err)
app, err := api.App(
api.WithConfigFile(ctx.String("config-file")),
- api.WithGalleries(galleries),
api.WithJSONStringPreload(ctx.String("preload-models")),
api.WithYAMLConfigPreload(ctx.String("preload-models-config")),
api.WithModelLoader(model.NewModelLoader(ctx.String("models-path"))),
@@ -147,6 +138,7 @@ It uses llama.cpp, ggml and gpt4all as backend with golang c bindings.
api.WithImageDir(ctx.String("image-path")),
api.WithAudioDir(ctx.String("audio-path")),
api.WithF16(ctx.Bool("f16")),
+ api.WithStringGalleries(ctx.String("galleries")),
api.WithDisableMessage(false),
api.WithCors(ctx.Bool("cors")),
api.WithCorsAllowOrigins(ctx.String("cors-allow-origins")),
5 changes: 5 additions & 0 deletions pkg/gallery/gallery.go
@@ -4,6 +4,7 @@ import (
"fmt"
"os"
"path/filepath"
"strings"

"github.com/go-skynet/LocalAI/pkg/utils"
"github.com/imdario/mergo"
@@ -17,6 +18,10 @@ type Gallery struct {

// Installs a model from the gallery (galleryname@modelname)
func InstallModelFromGallery(galleries []Gallery, name string, basePath string, req GalleryModel, downloadStatus func(string, string, string, float64)) error {
+
+ // os.PathSeparator is not allowed in model names. Replace them with "__" to avoid conflicts with file paths.
+ name = strings.ReplaceAll(name, string(os.PathSeparator), "__")
+
models, err := AvailableGalleryModels(galleries, basePath)
if err != nil {
return err
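For illustration, the effect of the new sanitization on a hypothetical model name containing path separators (the name below is invented; gallery model names follow the galleryname@modelname form):

package main

import (
	"fmt"
	"os"
	"strings"
)

func main() {
	// Hypothetical model name with path separators in the model part.
	name := "huggingface@TheBloke/WizardLM-7B-GGML/wizardlm-7b.q4_0.bin"
	sanitized := strings.ReplaceAll(name, string(os.PathSeparator), "__")
	fmt.Println(sanitized)
	// On Linux/macOS: huggingface@TheBloke__WizardLM-7B-GGML__wizardlm-7b.q4_0.bin
}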
