@@ -8,27 +8,21 @@ export const recommendedModels: ModelRecommendation[] = [{
 
     fileOptions: [{
         huggingFace: {
-            model: "bartowski/Meta-Llama-3.1-8B-Instruct-GGUF",
+            model: "mradermacher/Meta-Llama-3.1-8B-Instruct-GGUF",
             branch: "main",
-            file: "Meta-Llama-3.1-8B-Instruct-Q8_0.gguf"
+            file: "Meta-Llama-3.1-8B-Instruct.Q8_0.gguf"
         }
     }, {
         huggingFace: {
-            model: "bartowski/Meta-Llama-3.1-8B-Instruct-GGUF",
+            model: "mradermacher/Meta-Llama-3.1-8B-Instruct-GGUF",
             branch: "main",
-            file: "Meta-Llama-3.1-8B-Instruct-Q6_K_L.gguf"
+            file: "Meta-Llama-3.1-8B-Instruct.Q6_K.gguf"
         }
     }, {
         huggingFace: {
-            model: "bartowski/Meta-Llama-3.1-8B-Instruct-GGUF",
+            model: "mradermacher/Meta-Llama-3.1-8B-Instruct-GGUF",
             branch: "main",
-            file: "Meta-Llama-3.1-8B-Instruct-Q5_K_L.gguf"
-        }
-    }, {
-        huggingFace: {
-            model: "bartowski/Meta-Llama-3.1-8B-Instruct-GGUF",
-            branch: "main",
-            file: "Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf"
+            file: "Meta-Llama-3.1-8B-Instruct.Q4_K_M.gguf"
         }
     }]
 }, {
@@ -40,33 +34,27 @@ export const recommendedModels: ModelRecommendation[] = [{
 
     fileOptions: [{
         huggingFace: {
-            model: "bartowski/Meta-Llama-3.1-70B-Instruct-GGUF",
+            model: "mradermacher/Meta-Llama-3.1-70B-Instruct-GGUF",
             branch: "main",
-            file: "Meta-Llama-3.1-70B-Instruct-Q8_0/Meta-Llama-3.1-70B-Instruct-Q8_0-00001-of-00002.gguf"
+            file: "Meta-Llama-3.1-70B-Instruct.Q8_0.gguf.part1of2"
         }
     }, {
         huggingFace: {
-            model: "bartowski/Meta-Llama-3.1-70B-Instruct-GGUF",
+            model: "mradermacher/Meta-Llama-3.1-70B-Instruct-GGUF",
             branch: "main",
-            file: "Meta-Llama-3.1-70B-Instruct-Q6_K_L/Meta-Llama-3.1-70B-Instruct-Q6_K_L-00001-of-00002.gguf"
+            file: "Meta-Llama-3.1-70B-Instruct.Q6_K.gguf.part1of2"
         }
     }, {
         huggingFace: {
-            model: "bartowski/Meta-Llama-3.1-70B-Instruct-GGUF",
+            model: "mradermacher/Meta-Llama-3.1-70B-Instruct-GGUF",
             branch: "main",
-            file: "Meta-Llama-3.1-70B-Instruct-Q5_K_L/Meta-Llama-3.1-70B-Instruct-Q5_K_L-00001-of-00002.gguf"
+            file: "Meta-Llama-3.1-70B-Instruct.Q4_K_M.gguf"
         }
     }, {
         huggingFace: {
-            model: "bartowski/Meta-Llama-3.1-70B-Instruct-GGUF",
+            model: "mradermacher/Meta-Llama-3.1-70B-Instruct-GGUF",
             branch: "main",
-            file: "Meta-Llama-3.1-70B-Instruct-Q4_K_M.gguf"
-        }
-    }, {
-        huggingFace: {
-            model: "bartowski/Meta-Llama-3.1-70B-Instruct-GGUF",
-            branch: "main",
-            file: "Meta-Llama-3.1-70B-Instruct-IQ4_XS.gguf"
+            file: "Meta-Llama-3.1-70B-Instruct.Q4_K_S.gguf"
         }
     }]
 }, {
@@ -82,6 +70,12 @@ export const recommendedModels: ModelRecommendation[] = [{
             branch: "main",
             file: "Meta-Llama-3.1-405B-Instruct.Q3_K_L.gguf.part1of5"
         }
+    }, {
+        huggingFace: {
+            model: "mradermacher/Meta-Llama-3.1-405B-Instruct-GGUF",
+            branch: "main",
+            file: "Meta-Llama-3.1-405B-Instruct.Q3_K_M.gguf.part1of4"
+        }
     }]
 }, {
     name: "Phi 3 3.8B",