forked from oppiliappan/llm
-
Notifications
You must be signed in to change notification settings - Fork 3
/
launch.json
124 lines (124 loc) · 3.31 KB
/
launch.json
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug BLOOM Inference",
            "cargo": {
                "args": ["build", "--example=inference", "--package=llm"],
                "filter": {
                    "name": "inference",
                    "kind": "example"
                }
            },
            "args": ["bloom", "${env:HOME}/.ggml-models/bloom-7b.bin"],
            "cwd": "${workspaceFolder}"
        },
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug GPT-2 Inference",
            "cargo": {
                "args": ["build", "--example=inference", "--package=llm"],
                "filter": {
                    "name": "inference",
                    "kind": "example"
                }
            },
            "args": ["gpt2", "${env:HOME}/.ggml-models/cerebras-gpt-13b.bin"],
            "cwd": "${workspaceFolder}"
        },
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug GPT-J Inference",
            "cargo": {
                "args": ["build", "--example=inference", "--package=llm"],
                "filter": {
                    "name": "inference",
                    "kind": "example"
                }
            },
            "args": ["gptj", "${env:HOME}/.ggml-models/gpt-j-6b.bin"],
            "cwd": "${workspaceFolder}"
        },
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug LLaMA Inference",
            "cargo": {
                "args": ["build", "--example=inference", "--package=llm"],
                "filter": {
                    "name": "inference",
                    "kind": "example"
                }
            },
            "args": ["llama", "${env:HOME}/.ggml-models/gpt4all-7b.bin"],
            "cwd": "${workspaceFolder}"
        },
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug MPT Inference",
            "cargo": {
                "args": ["build", "--example=inference", "--package=llm"],
                "filter": {
                    "name": "inference",
                    "kind": "example"
                }
            },
            "args": ["mpt", "${env:HOME}/.ggml-models/mpt-7b.bin"],
            "cwd": "${workspaceFolder}"
        },
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug GPT-NeoX Inference",
            "cargo": {
                "args": ["build", "--example=inference", "--package=llm"],
                "filter": {
                    "name": "inference",
                    "kind": "example"
                }
            },
            "args": ["gptneox", "${env:HOME}/.ggml-models/stablelm-base-alpha-3b.bin"],
            "cwd": "${workspaceFolder}"
        },
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug RedPajama Inference",
            "cargo": {
                "args": ["build", "--example=inference", "--package=llm"],
                "filter": {
                    "name": "inference",
                    "kind": "example"
                }
            },
            "args": ["redpajama", "${env:HOME}/.ggml-models/redpajama-incite-7b.bin"],
            "cwd": "${workspaceFolder}"
        },
        {
            "type": "lldb",
            "request": "launch",
            "name": "Debug Vicuna Chat",
            "cargo": {
                "args": ["build", "--example=vicuna-chat", "--package=llm"],
                "filter": {
                    "name": "vicuna-chat",
                    "kind": "example"
                }
            },
            "args": ["llama", "${env:HOME}/.ggml-models/wizardlm-7b.bin"],
            "cwd": "${workspaceFolder}"
        }
    ]
}