Unverified commit 77ccbf04, authored by Michael Yang, committed by GitHub

Merge pull request #6128 from ollama/mxyng/lint

enable gofmt/gofumpt/goimports/tenv
parents 4addf6b5 b732beba
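
The hunks below are mostly mechanical rewrites driven by the formatters and linters this PR enables. As a rough, hand-written sketch of the recurring patterns (illustrative only, not taken from the diff; names and paths are made up):

```go
package example

import (
	"errors"
	"fmt"
	"os"
	"strconv"
)

// before shows the patterns the tooling flags: fmt.Errorf with no format
// verbs, fmt.Sprintf("%d", ...) for plain integer formatting, and old-style
// octal literals.
func before(n uint64) (string, error) {
	if err := os.MkdirAll("/tmp/example", 0755); err != nil {
		return "", fmt.Errorf("could not create dir")
	}
	return fmt.Sprintf("%d", n), nil
}

// after shows the rewritten forms used throughout this change: errors.New
// for constant messages, strconv for integer formatting, and 0o-prefixed
// octal literals (gofumpt style).
func after(n uint64) (string, error) {
	if err := os.MkdirAll("/tmp/example", 0o755); err != nil {
		return "", errors.New("could not create dir")
	}
	return strconv.FormatUint(n, 10), nil
}
```

goimports also regroups import blocks (standard library first, then third-party), which accounts for the reordered import hunks below.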
@@ -2,7 +2,7 @@ package cmd
 
 import (
 	"context"
-	"fmt"
+	"errors"
 	"os"
 	"os/exec"
 	"strings"
@@ -20,7 +20,7 @@ func startApp(ctx context.Context, client *api.Client) error {
 		return err
 	}
 	if !strings.Contains(link, "Ollama.app") {
-		return fmt.Errorf("could not find ollama app")
+		return errors.New("could not find ollama app")
 	}
 	path := strings.Split(link, "Ollama.app")
 	if err := exec.Command("/usr/bin/open", "-a", path[0]+"Ollama.app").Run(); err != nil {
...
@@ -4,11 +4,11 @@ package cmd
 
 import (
 	"context"
-	"fmt"
+	"errors"
 
 	"github.com/ollama/ollama/api"
 )
 
 func startApp(ctx context.Context, client *api.Client) error {
-	return fmt.Errorf("could not connect to ollama server, run 'ollama serve' to start it")
+	return errors.New("could not connect to ollama server, run 'ollama serve' to start it")
 }
@@ -31,7 +31,7 @@ func startApp(ctx context.Context, client *api.Client) error {
 		// Finally look in the path
 		appExe, err = exec.LookPath(AppName)
 		if err != nil {
-			return fmt.Errorf("could not locate ollama app")
+			return errors.New("could not locate ollama app")
 		}
 	}
 }
...
@@ -5,9 +5,10 @@ import (
 	"fmt"
 	"strings"
 
+	"github.com/ollama/ollama/llm"
+
 	"github.com/pdevine/tensor"
 	"github.com/pdevine/tensor/native"
-	"github.com/ollama/ollama/llm"
 )
 
 type llama struct {
...
@@ -2,6 +2,7 @@ package convert
 
 import (
 	"crypto/sha256"
+	"encoding/hex"
 	"encoding/json"
 	"flag"
 	"fmt"
@@ -14,8 +15,9 @@ import (
 	"slices"
 	"testing"
 
+	"github.com/ollama/ollama/llm"
+
 	"golang.org/x/exp/maps"
-	"github.com/ollama/ollama/llm"
 )
 
 func convertFull(t *testing.T, fsys fs.FS) (*os.File, llm.KV, llm.Tensors) {
@@ -99,7 +101,7 @@ func TestConvertFull(t *testing.T) {
 			t.Fatal(err)
 		}
-		actual[tensor.Name] = fmt.Sprintf("%x", sha256sum.Sum(nil))
+		actual[tensor.Name] = hex.EncodeToString(sha256sum.Sum(nil))
 	}
 	expectFile, err := os.Open(filepath.Join("testdata", fmt.Sprintf("%s.json", tt)))
...
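
For reference, hex.EncodeToString yields the same lowercase hex string that fmt.Sprintf("%x") produced for the digest; a minimal standalone check (not part of this change):

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

func main() {
	sum := sha256.Sum256([]byte("hello"))

	// Both render the byte slice as lowercase hex.
	a := fmt.Sprintf("%x", sum[:])
	b := hex.EncodeToString(sum[:])
	fmt.Println(a == b) // true
}
```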
@@ -111,8 +111,9 @@ func (st safetensor) WriteTo(w io.Writer) (int64, error) {
 			return 0, err
 		}
 
-		for _, b := range u16s {
-			f32s = append(f32s, float16.Frombits(b).Float32())
+		f32s = make([]float32, len(u16s))
+		for i := range u16s {
+			f32s[i] = float16.Frombits(u16s[i]).Float32()
 		}
 	case "BF16":
...
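
The safetensors hunk above replaces an append loop with a preallocated slice and indexed writes. A minimal sketch of the same pattern, assuming the x448/float16 package (the import path is inferred; it is not shown in this excerpt):

```go
package main

import (
	"fmt"

	"github.com/x448/float16"
)

// bitsToFloat32 converts raw IEEE-754 half-precision bit patterns to float32.
// Sizing the destination once with make avoids the repeated growth that
// append would incur inside the loop.
func bitsToFloat32(u16s []uint16) []float32 {
	f32s := make([]float32, len(u16s))
	for i := range u16s {
		f32s[i] = float16.Frombits(u16s[i]).Float32()
	}
	return f32s
}

func main() {
	// 0x3C00 is 1.0 and 0xC000 is -2.0 in half precision.
	fmt.Println(bitsToFloat32([]uint16{0x3C00, 0xC000})) // [1 -2]
}
```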
@@ -3,6 +3,7 @@ package format
 import (
 	"fmt"
 	"math"
+	"strconv"
 )
 
 const (
@@ -28,6 +29,6 @@ func HumanNumber(b uint64) string {
 	case b >= Thousand:
 		return fmt.Sprintf("%.0fK", float64(b)/Thousand)
 	default:
-		return fmt.Sprintf("%d", b)
+		return strconv.FormatUint(b, 10)
 	}
 }
@@ -3,7 +3,7 @@
 package gpu
 
 import (
-	"fmt"
+	"errors"
 	"log/slog"
 	"os"
 	"path/filepath"
@@ -95,5 +95,5 @@ func commonAMDValidateLibDir() (string, error) {
 		}
 	}
-	return "", fmt.Errorf("no suitable rocm found, falling back to CPU")
+	return "", errors.New("no suitable rocm found, falling back to CPU")
 }
 package gpu
 
 import (
+	"errors"
 	"fmt"
 	"log/slog"
 	"syscall"
@@ -76,7 +77,7 @@ func (hl *HipLib) Release() {
 func (hl *HipLib) AMDDriverVersion() (driverMajor, driverMinor int, err error) {
 	if hl.dll == 0 {
-		return 0, 0, fmt.Errorf("dll has been unloaded")
+		return 0, 0, errors.New("dll has been unloaded")
 	}
 	var version int
 	status, _, err := syscall.SyscallN(hl.hipDriverGetVersion, uintptr(unsafe.Pointer(&version)))
@@ -110,7 +111,7 @@ func (hl *HipLib) HipGetDeviceCount() int {
 func (hl *HipLib) HipSetDevice(device int) error {
 	if hl.dll == 0 {
-		return fmt.Errorf("dll has been unloaded")
+		return errors.New("dll has been unloaded")
 	}
 	status, _, err := syscall.SyscallN(hl.hipSetDevice, uintptr(device))
 	if status != hipSuccess {
@@ -121,7 +122,7 @@ func (hl *HipLib) HipSetDevice(device int) error {
 func (hl *HipLib) HipGetDeviceProperties(device int) (*hipDevicePropMinimal, error) {
 	if hl.dll == 0 {
-		return nil, fmt.Errorf("dll has been unloaded")
+		return nil, errors.New("dll has been unloaded")
 	}
 	var props hipDevicePropMinimal
 	status, _, err := syscall.SyscallN(hl.hipGetDeviceProperties, uintptr(unsafe.Pointer(&props)), uintptr(device))
@@ -134,7 +135,7 @@ func (hl *HipLib) HipGetDeviceProperties(device int) (*hipDevicePropMinimal, err
 // free, total, err
 func (hl *HipLib) HipMemGetInfo() (uint64, uint64, error) {
 	if hl.dll == 0 {
-		return 0, 0, fmt.Errorf("dll has been unloaded")
+		return 0, 0, errors.New("dll has been unloaded")
 	}
 	var totalMemory uint64
 	var freeMemory uint64
...
@@ -393,7 +393,7 @@ func AMDValidateLibDir() (string, error) {
 	// If we still haven't found a usable rocm, the user will have to install it on their own
 	slog.Warn("amdgpu detected, but no compatible rocm library found. Either install rocm v6, or follow manual install instructions at https://github.com/ollama/ollama/blob/main/docs/linux.md#manual-install")
-	return "", fmt.Errorf("no suitable rocm found, falling back to CPU")
+	return "", errors.New("no suitable rocm found, falling back to CPU")
 }
 
 func AMDDriverVersion() (driverMajor, driverMinor int, err error) {
...
@@ -2,7 +2,7 @@ package gpu
 
 import (
 	"bytes"
-	"fmt"
+	"errors"
 	"log/slog"
 	"os"
 	"path/filepath"
@@ -85,7 +85,7 @@ func AMDGetGPUInfo() []RocmGPUInfo {
 		n = bytes.IndexByte(props.GcnArchName[:], 0)
 		gfx := string(props.GcnArchName[:n])
 		slog.Debug("hip device", "id", i, "name", name, "gfx", gfx)
-		//slog.Info(fmt.Sprintf("[%d] Integrated: %d", i, props.iGPU)) // DOESN'T REPORT CORRECTLY! Always 0
+		// slog.Info(fmt.Sprintf("[%d] Integrated: %d", i, props.iGPU)) // DOESN'T REPORT CORRECTLY! Always 0
 		// TODO Why isn't props.iGPU accurate!?
 		if strings.EqualFold(name, iGPUName) {
 			slog.Info("unsupported Radeon iGPU detected skipping", "id", i, "name", name, "gfx", gfx)
@@ -161,7 +161,7 @@ func AMDValidateLibDir() (string, error) {
 	// Should not happen on windows since we include it in the installer, but stand-alone binary might hit this
 	slog.Warn("amdgpu detected, but no compatible rocm library found. Please install ROCm")
-	return "", fmt.Errorf("no suitable rocm found, falling back to CPU")
+	return "", errors.New("no suitable rocm found, falling back to CPU")
 }
 
 func (gpus RocmGPUInfoList) RefreshFreeMemory() error {
...
@@ -42,7 +42,7 @@ func PayloadsDir() (string, error) {
 			return "", fmt.Errorf("failed to generate tmp dir: %w", err)
 		}
 	} else {
-		err = os.MkdirAll(tmpDir, 0755)
+		err = os.MkdirAll(tmpDir, 0o755)
 		if err != nil {
 			return "", fmt.Errorf("failed to generate tmp dir %s: %w", tmpDir, err)
 		}
@@ -54,7 +54,7 @@ func PayloadsDir() (string, error) {
 		if err != nil {
 			return "", err
 		}
-		if _, err := pidFile.Write([]byte(fmt.Sprint(os.Getpid()))); err != nil {
+		if _, err := pidFile.Write([]byte(strconv.Itoa(os.Getpid()))); err != nil {
 			return "", err
 		}
...
@@ -7,9 +7,9 @@ package gpu
 #cgo windows LDFLAGS: -lpthread
 #include "gpu_info.h"
 */
 import "C"
 import (
 	"fmt"
 	"log/slog"
@@ -70,7 +70,6 @@ var CudaTegra string = os.Getenv("JETSON_JETPACK")
 // Note: gpuMutex must already be held
 func initCudaHandles() *cudaHandles {
 	// TODO - if the ollama build is CPU only, don't do these checks as they're irrelevant and confusing
-
 	cHandles := &cudaHandles{}
@@ -211,14 +210,16 @@ func GetGPUInfo() GpuInfoList {
 		if err != nil {
 			slog.Warn("error looking up system memory", "error", err)
 		}
-		cpus = []CPUInfo{CPUInfo{
-			GpuInfo: GpuInfo{
-				memInfo: mem,
-				Library: "cpu",
-				Variant: cpuCapability,
-				ID:      "0",
-			},
-		}}
+		cpus = []CPUInfo{
+			{
+				GpuInfo: GpuInfo{
+					memInfo: mem,
+					Library: "cpu",
+					Variant: cpuCapability,
+					ID:      "0",
+				},
+			},
+		}
 
 		// Fallback to CPU mode if we're lacking required vector extensions on x86
 		if cpuCapability < GPURunnerCPUCapability && runtime.GOARCH == "amd64" {
...
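
The cpus literal above is the gofmt -s style simplification: inside a slice composite literal, the element type can be elided. A small sketch with made-up, trimmed-down types:

```go
package main

import "fmt"

type GpuInfo struct {
	Library string
	ID      string
}

type CPUInfo struct {
	GpuInfo GpuInfo
}

func main() {
	// Redundant element type, which gofmt -s rewrites:
	verbose := []CPUInfo{CPUInfo{GpuInfo: GpuInfo{Library: "cpu", ID: "0"}}}

	// Simplified form: the element type is implied by the slice type.
	simplified := []CPUInfo{
		{
			GpuInfo: GpuInfo{Library: "cpu", ID: "0"},
		},
	}

	fmt.Println(verbose[0] == simplified[0]) // true
}
```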
@@ -8,6 +8,7 @@ package gpu
 #include "gpu_info_darwin.h"
 */
 import "C"
+
 import (
 	"runtime"
...
@@ -43,10 +43,12 @@ var OneapiGlobs = []string{
 	"/usr/lib*/libze_intel_gpu.so*",
 }
 
-var CudartMgmtName = "libcudart.so*"
-var NvcudaMgmtName = "libcuda.so*"
-var NvmlMgmtName = "" // not currently wired on linux
-var OneapiMgmtName = "libze_intel_gpu.so"
+var (
+	CudartMgmtName = "libcudart.so*"
+	NvcudaMgmtName = "libcuda.so*"
+	NvmlMgmtName   = "" // not currently wired on linux
+	OneapiMgmtName = "libze_intel_gpu.so"
+)
 
 func GetCPUMem() (memInfo, error) {
 	var mem memInfo
...
@@ -40,10 +40,12 @@ var OneapiGlobs = []string{
 	"c:\\Windows\\System32\\DriverStore\\FileRepository\\*\\ze_intel_gpu64.dll",
 }
 
-var CudartMgmtName = "cudart64_*.dll"
-var NvcudaMgmtName = "nvcuda.dll"
-var NvmlMgmtName = "nvml.dll"
-var OneapiMgmtName = "ze_intel_gpu64.dll"
+var (
+	CudartMgmtName = "cudart64_*.dll"
+	NvcudaMgmtName = "nvcuda.dll"
+	NvmlMgmtName   = "nvml.dll"
+	OneapiMgmtName = "ze_intel_gpu64.dll"
+)
 
 func GetCPUMem() (memInfo, error) {
 	memStatus := MEMORYSTATUSEX{length: sizeofMemoryStatusEx}
...
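
The two var hunks above collapse adjacent top-level declarations into a single grouped block. A minimal sketch of the before/after shape (variable names are placeholders):

```go
package example

// Before: one var keyword per declaration.
var cudartName = "cudart64_*.dll"
var nvcudaName = "nvcuda.dll"

// After: adjacent declarations grouped into one block, the form this
// change adopts.
var (
	nvmlName   = "nvml.dll"
	oneapiName = "ze_intel_gpu64.dll"
)
```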
@@ -162,7 +162,7 @@ func PullIfMissing(ctx context.Context, client *api.Client, modelName string) er
 	fn := func(resp api.ProgressResponse) error {
 		// fmt.Print(".")
 		if !stallTimer.Reset(stallDuration) {
-			return fmt.Errorf("stall was detected, aborting status reporting")
+			return errors.New("stall was detected, aborting status reporting")
 		}
 		return nil
 	}
@@ -180,7 +180,7 @@ func PullIfMissing(ctx context.Context, client *api.Client, modelName string) er
 	select {
 	case <-stallTimer.C:
-		return fmt.Errorf("download stalled")
+		return errors.New("download stalled")
 	case <-done:
 		return pullError
 	}
@@ -243,7 +243,7 @@ func DoGenerate(ctx context.Context, t *testing.T, client *api.Client, genReq ap
 		// fmt.Print(".")
 		buf.Write([]byte(response.Response))
 		if !stallTimer.Reset(streamTimeout) {
-			return fmt.Errorf("stall was detected while streaming response, aborting")
+			return errors.New("stall was detected while streaming response, aborting")
 		}
 		return nil
 	}
...
@@ -11,8 +11,9 @@ package llm
 // #include <stdlib.h>
 // #include "llama.h"
 import "C"
+
 import (
-	"fmt"
+	"errors"
 	"unsafe"
 )
@@ -33,7 +34,7 @@ func Quantize(infile, outfile string, ftype fileType) error {
 	params.ftype = ftype.Value()
 	if rc := C.llama_model_quantize(cinfile, coutfile, &params); rc != 0 {
-		return fmt.Errorf("failed to quantize model. This model architecture may not be supported, or you may need to upgrade Ollama to the latest version")
+		return errors.New("failed to quantize model. This model architecture may not be supported, or you may need to upgrade Ollama to the latest version")
 	}
 	return nil
...