forked from caching-tools/next-shared-cache
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathdiffscribe.ts
202 lines (166 loc) · 6.5 KB
/
diffscribe.ts
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
#!/usr/bin/env node
import { execFileSync } from 'node:child_process';
import { createInterface } from 'node:readline/promises';
import { OpenAI } from 'openai';
import 'dotenv/config';
// OpenAI client; the API key is read from the environment (loaded via dotenv above).
// NOTE(review): if OPENAI_API_KEY is unset, apiKey is undefined and the first API
// call will fail at runtime — confirm whether an explicit startup check is wanted.
const openai = new OpenAI({
    apiKey: process.env.OPENAI_API_KEY,
    timeout: 60 * 1000, // per-request timeout in milliseconds (60 s)
});

// Readline interface for interactive question/answer with the user on stdin/stdout.
const rl = createInterface({
    input: process.stdin,
    output: process.stdout,
});

// Defaults and bounds for the CLI flags: --model, --modelTemperature, --commitLength.
const DEFAULT_MODEL = 'gpt-4-1106-preview';
const SUPPORTED_MODELS = ['gpt-4', 'gpt-3.5-turbo', 'gpt-4-1106-preview'];
const DEFAULT_TEMPERATURE = 0.05;
// Inclusive range for the requested commit-message length, in characters.
const COMMIT_LENGTH_MIN = 50;
const COMMIT_LENGTH_MAX = 500;
/**
 * Parse `--key=value` CLI arguments into a plain string record.
 *
 * Arguments that are not of the `--key=value` shape (no `--` prefix, no `=`,
 * empty key, or empty value) are silently ignored. The value may itself
 * contain `=` characters: only the FIRST `=` separates key from value, so
 * `--a=b=c` yields `{ a: 'b=c' }` (the previous `split('=')` implementation
 * truncated the value at the second `=`).
 *
 * @param args - raw argv entries (typically `process.argv.slice(2)`)
 * @returns map of flag name (without the `--` prefix) to its string value
 */
function parseArguments(args: string[]): Record<string, string> {
    return args.reduce<Record<string, string>>((acc, arg) => {
        if (arg.startsWith('--')) {
            // Locate the first '=' only; everything after it is the value.
            const separatorIndex = arg.indexOf('=');
            // separatorIndex > 2 guarantees a non-empty key after the '--' prefix.
            if (separatorIndex > 2) {
                const key = arg.slice(2, separatorIndex);
                const value = arg.slice(separatorIndex + 1);
                if (value) {
                    acc[key] = value;
                }
            }
        }
        return acc;
    }, {});
}
// Parse CLI flags and fall back to the defaults above for anything omitted.
const args = parseArguments(process.argv.slice(2));
const {
    model = DEFAULT_MODEL,
    modelTemperature = `${DEFAULT_TEMPERATURE}`,
    commitLength = `${COMMIT_LENGTH_MIN}`,
} = args;

// Validate --commitLength: must parse as an integer within
// [COMMIT_LENGTH_MIN, COMMIT_LENGTH_MAX] (inclusive).
const commitLengthNumber = Number.parseInt(commitLength, 10);
if (
    Number.isNaN(commitLengthNumber) ||
    commitLengthNumber < COMMIT_LENGTH_MIN ||
    commitLengthNumber > COMMIT_LENGTH_MAX
) {
    throw new Error(`Invalid commit length ${commitLength}`);
}

// Validate --modelTemperature: any parseable float is accepted.
const temperature = Number.parseFloat(modelTemperature);
if (Number.isNaN(temperature)) {
    throw new Error(`Invalid temperature ${modelTemperature}`);
}

// Validate --model against the supported-model allow-list.
if (!SUPPORTED_MODELS.includes(model)) {
    throw new Error(`Model ${model} is not supported`);
}
/**
 * Prompt the user on stdin with the given question and resolve with
 * whatever they type in response.
 *
 * @param question - text shown to the user before the "Answer:" prompt
 * @returns the user's raw reply
 */
async function getUserPrompt(question: string): Promise<string> {
    return rl.question(`\n${question}\n\nAnswer: `);
}
/**
 * Entry point: collects the staged diffstat and git status, then drives a
 * chat-completion conversation until the model produces a commit message,
 * which is written to stdout.
 *
 * The model may call the `getUserPrompt` function to ask the user clarifying
 * questions; each answer is appended to the conversation and the model is
 * queried again, until it stops requesting function calls.
 *
 * @throws if the API returns no choices, requests an unknown function, or
 *         reports a function call without a payload.
 */
async function main(): Promise<void> {
    // Summary of staged changes (file names + change counts, not the full patch).
    const diffBuffer = execFileSync('git', ['diff', '--cached', '--stat']);
    const diff = diffBuffer.toString('utf-8').trim();
    // Working-tree status, with renames detected.
    const statusBuffer = execFileSync('git', ['status', '--renames']);
    const status = statusBuffer.toString('utf-8').trim();

    const messages: OpenAI.ChatCompletionMessageParam[] = [
        {
            role: 'system',
            content: `
Context:
You are an assistant tasked with creating commit messages for the user working on JavaScript or TypeScript projects.
Inputs:
You will receive the Git status and Git diffstat.
Guidelines:
Using the Git status, identify which files have been affected and the branch name.
Using the Git diffstat, identify files and directories with significant changes.
If the most significant change is a package-lock.json file, ignore it.
Try to understand then tell what the user tries to achieve with this commit. Be explicit.
Ask the user to correct you. Keep asking until you have a good understanding.
Use all your information and context to determine what the user is trying to achieve.
Based on this information, create a short commit message in an imperative style. E.g., "Update...", "Refactor...", or "Fix..." etc.
Try to keep it under ${commitLengthNumber} characters.
If the message length exceeds ${commitLengthNumber} characters, use markdown syntax to break it into multiple paragraphs.
`.trim(),
        },
        {
            role: 'system',
            content: `Git status:
${status}`.trim(),
        },
        {
            role: 'system',
            content: `Git diffstat:
${diff}`.trim(),
        },
    ];

    // Function schema advertised to the model (legacy "functions" API).
    const functions: OpenAI.ChatCompletionCreateParams.Function[] = [
        {
            name: 'getUserPrompt',
            parameters: {
                type: 'object',
                properties: {
                    question: {
                        type: 'string',
                        description: 'Question to prompt the user with.',
                    },
                },
                required: ['question'],
            },
            description: 'Ask the user to correct',
        },
    ];

    // Implementations the model may call back into; loop-invariant, so built once.
    const availableFunctions: Record<string, (question: string) => Promise<string>> = {
        getUserPrompt,
    };

    const response = await openai.chat.completions.create({
        messages,
        model,
        temperature,
        functions,
    });
    const initialChoice = response.choices.at(0);
    if (!initialChoice) {
        throw new Error('No conversation choice');
    }
    let conversationChoice: OpenAI.Chat.Completions.ChatCompletion.Choice = initialChoice;

    // Keep relaying question/answer rounds while the model asks for function calls.
    while (conversationChoice.finish_reason === 'function_call') {
        const conversationMessage = conversationChoice.message;
        if (!conversationMessage.function_call) {
            // Defensive: finish_reason claimed a function call but no payload is
            // present. Previously this spun in an infinite loop; fail loudly instead.
            throw new Error('Conversation finished with "function_call" but no function_call payload');
        }
        const functionName = conversationMessage.function_call.name;
        const functionToCall = availableFunctions[functionName];
        if (!functionToCall) {
            throw new Error(`Function ${functionName} is not available`);
        }
        // NOTE(review): arguments come from the model and are parsed without schema
        // validation — assumed to match { question: string }; confirm tolerance.
        const functionArgs = JSON.parse(conversationMessage.function_call.arguments) as unknown as {
            question: string;
        };
        // eslint-disable-next-line no-await-in-loop -- We need to wait for the response
        const functionResponse = await functionToCall(functionArgs.question);
        // Echo the model's function-call message and our answer back into the thread.
        messages.push(conversationMessage);
        messages.push({
            role: 'function',
            name: functionName,
            content: functionResponse,
        });
        // eslint-disable-next-line no-await-in-loop -- We need to wait for the response
        const nextResponse = await openai.chat.completions.create({
            messages,
            model,
            temperature,
            functions,
        });
        const nextChoice = nextResponse.choices.at(0);
        if (!nextChoice) {
            throw new Error('No conversation choice');
        }
        conversationChoice = nextChoice;
    }

    // Final assistant message is the commit message; content may be null per the API.
    process.stdout.write(`\n${conversationChoice.message.content ?? ''}`);
}
// Run the CLI, then exit explicitly so the open readline interface does not
// keep the event loop alive.
main()
    .then(() => {
        process.exit(0);
    })
    .catch((error: unknown) => {
        // Surface the failure before exiting non-zero; the previous handler
        // swallowed the error silently, making failures impossible to diagnose.
        console.error(error);
        process.exit(1);
    });