diff --git a/package.json b/package.json
index 0a812cb..375526a 100644
--- a/package.json
+++ b/package.json
@@ -24,8 +24,8 @@
 		"build": "babel --out-dir=dist source",
 		"dev": "babel --out-dir=dist --watch source",
 		"clean": "npm run clean:linux",
-		"clean:linux": "rm -rf dist",
-		"clean:windows": "rmdir /s /q dist",
+		"clean:linux": "rm -rf dist || echo Directory not found, skipping removal",
+		"clean:windows": "rmdir /s /q dist || echo Directory not found, skipping removal",
 		"setup": "npm run setup:linux",
 		"setup:linux": "npm run clean:linux && npm run build && npm i -g .",
 		"setup:windows": "npm run clean:windows && npm run build",
diff --git a/source/app.js b/source/app.js
index c084ca7..0c10106 100644
--- a/source/app.js
+++ b/source/app.js
@@ -1,48 +1,7 @@
 import React from 'react';
-import {Text, Newline} from 'ink';
-import BigText from 'ink-big-text';
-import Gradient from 'ink-gradient';
-import isGit from 'is-git-repository';
-import isCommitterSet from './utils/errors.js';
-import info from './utils/info.js';
-import askForCommitMessage from './utils/commit.js';
-import { getOpenAIKey, setOpenAIKey, deleteOPenAIKey } from './utils/api.js';
+import Logo from './utils/logo.js';

 export default function App({flags}) {
-  if(flags.setopenai) {
-    setOpenAIKey(flags.setopenai);
-  }
-  if(flags.delopenai) {
-    deleteOPenAIKey();
-  }
-  if (!getOpenAIKey()) {
-    console.log('Please provide an OpenAI API key.');
-    console.log('You can get one from https://platform.openai.com/account/api-keys')
-    console.log('Run `magicc --setopenai=` to save your API key and try again.');
-  } else {
-    console.log('You have an OpenAI API key, you can now generate a commit message.');
-    const gitCheck = isGit();
-    const committerCheck = isCommitterSet();
-    if (gitCheck && committerCheck) {
-      askForCommitMessage();
-    } else {
-      console.log('This is not a git repository.');
-    }
-  }
-  return (
-    <>
-
-      You can do `magicc`, you can build anything that you desire. 🪄
-
-
-      Version: {info('version')} |
-      Author: {info('author')}
-
-      Need Help? magicc --help
-
-      ==================================================
-
-
-  );
+
+  return <Logo />;
 }
diff --git a/source/cli.js b/source/cli.js
index 4300b4e..cd03af5 100755
--- a/source/cli.js
+++ b/source/cli.js
@@ -1,12 +1,14 @@
 #!/usr/bin/env node
 import React from 'react';
-import {render} from 'ink';
 import meow from 'meow';
 import App from './app.js';
+import Logo from './utils/logo.js';
+import { render } from 'ink';
+
+render(<Logo />, {patchConsole: false});

 const cli = meow(
-  `
-  Usage
+  `Usage
     $ magicc

   Options
@@ -33,4 +35,4 @@ const cli = meow(
   },
 );

-render(<App flags={cli.flags} />);
+render(<App flags={cli.flags} />, {patchConsole: false});
diff --git a/source/models/ollama.js b/source/models/ollama.js
new file mode 100644
index 0000000..14e9b02
--- /dev/null
+++ b/source/models/ollama.js
@@ -0,0 +1,36 @@
+import Ollama from 'ollama'; // Import the Ollama client
+import config from '../utils/config.json';
+
+async function ollamaModel(model, flags, diffContent) {
+  try {
+    // Use the prompt from the config file emoji and send to Ollama
+    const categoryResponse = await Ollama.chat({
+      messages: [
+        {role: 'system', content: config.commitConfig.emoji},
+        {role: 'user', content: diffContent},
+      ],
+      model,
+    });
+    // Use the prompt from the config file message and send to Ollama
+
+    const messageResponse = await Ollama.chat({
+      messages: [
+        {role: 'system', content: config.commitConfig.message},
+        {role: 'user', content: diffContent},
+      ],
+      model,
+    });
+    console.log('categoryResponse', categoryResponse);
+    console.log('messageResponse', messageResponse);
+    return {
+      category: categoryResponse?.message?.content,
+      message: messageResponse?.message?.content,
+    };
+  } catch (error) {
+    throw new Error(
+      'Failed to connect to local Ollama instance. To start Ollama, first download it at https://ollama.ai.',
+    );
+  }
+}
+
+export default ollamaModel;
diff --git a/source/models/openai.js b/source/models/openai.js
new file mode 100644
index 0000000..c70ee9c
--- /dev/null
+++ b/source/models/openai.js
@@ -0,0 +1,54 @@
+import OpenAI from 'openai';
+import config from '../utils/config.json';
+import dotenv from 'dotenv';
+
+import {getOpenAIKey, setOpenAIKey, deleteOPenAIKey} from '../utils/api.js';
+
+dotenv.config();
+
+async function openAiModel(model, flags, diffContent) {
+  if (flags.setopenai) {
+    setOpenAIKey(flags.setopenai);
+  }
+  if (flags.delopenai) {
+    deleteOPenAIKey();
+  }
+  if (!getOpenAIKey()) {
+    return {
+      message:
+        'Please provide an OpenAI API key.\n' +
+        'You can get one from https://platform.openai.com/account/api-keys\n' +
+        'Run `magicc --setopenai=` to save your API key and try again.',
+    };
+  } else {
+    console.log(
+      'You have an OpenAI API key, you can now generate a commit message.',
+    );
+
+    const apiKey = await getOpenAIKey();
+    const openai = new OpenAI({apiKey: apiKey});
+
+    const category = await openai.chat.completions.create({
+      messages: [
+        {role: 'system', content: config.commitConfig.emoji},
+        {role: 'user', content: diffContent},
+      ],
+      model,
+    });
+    // use the prompt from the config file message and send to openai
+    const message = await openai.chat.completions.create({
+      messages: [
+        {role: 'system', content: config.commitConfig.message},
+        {role: 'user', content: diffContent},
+      ],
+      model,
+    });
+
+    return {
+      category: category.choices[0].message.content,
+      message: message.choices[0].message.content,
+    };
+  }
+}
+
+export default openAiModel;
diff --git a/source/utils/commit.js b/source/utils/commit.js
index ac92f81..432c109 100644
--- a/source/utils/commit.js
+++ b/source/utils/commit.js
@@ -1,12 +1,13 @@
-import generatePrompt from './openai.js';
+import generateCommitMessage from './generateCommitMessage.js';
 import {execa} from 'execa';
 import readline from 'readline';
 import React from 'react';
 import {Box, render, Text, useApp} from 'ink';
 import SelectInput from 'ink-select-input';
+import Logo from './logo.js';

-async function askForCommitMessage() {
-  const prompt = await generatePrompt();
+async function askForCommitMessage(flags, model) {
+  const prompt = await generateCommitMessage(flags, model);

   const rl = readline.createInterface({
     input: process.stdin,
@@ -24,10 +25,10 @@ async function askForCommitMessage() {
         .catch(error => {
           console.error('Failed to commit changes:', error);
         });
-    }
-    else {
+    } else {
       console.log('Changes not committed.');
     }
+    rl.close();
     exit();
   };
@@ -43,13 +44,14 @@
   ];

   return (
-
-      {`Suggested commit message: ${prompt}\nDo you want to proceed?`}
-
-
+
+
+      {`Suggested commit message: ${prompt}\nDo you want to proceed?`}
+
+
+
   );
   };
-
   if (prompt) {
     render();
   } else {
@@ -58,4 +60,54 @@
 }

+export async function initGit() {
+  try {
+    await execa('git', ['restore', '--staged', '.']);
+  } catch (error) {
+    console.error(error);
+  }
+}
+
+// git status to see if there are any changes
+// if there's any changes add the first file in the list of changes
+let firstFilePath = '';
+
+export async function gitStatus() {
+  try {
+    const {stdout: status} = await execa('git', ['status', '--porcelain']);
+    if (status) {
+      // get the first file path in the list of changes
+      const lines = status.split('\n');
+      const filePaths = lines
+        .map(line => line.split(' ').slice(2).join(' ').trim())
+        .filter(filePath => filePath !== '')
+        .concat(
+          lines
+            .filter(line => line.startsWith('??'))
+            .map(line => line.split(' ').slice(1).join(' ').trim()),
+        );
+      // git add the first file in the list of changes
+      firstFilePath = filePaths[0];
+      await execa('git', ['add', firstFilePath]);
+      console.log(`${firstFilePath} has been added to the staging area.`);
+    } else {
+      console.log('No changes to commit.');
+      return false;
+    }
+  } catch (error) {
+    console.error(error);
+  }
+}
+
+// get the diff of the staged changes
+export async function gitDiff() {
+  try {
+    const {stdout: gitDiff} = await execa('git', ['diff', '--staged']);
+    return gitDiff;
+  } catch (error) {
+    console.error(error);
+  }
+}
+
 export default askForCommitMessage;
diff --git a/source/utils/config.json b/source/utils/config.json
index 95a1471..bbab717 100644
--- a/source/utils/config.json
+++ b/source/utils/config.json
@@ -1,6 +1,22 @@
 {
-  "emoji": "YYou are the author of the commit message. Your task is to select the appropriate category for the git diff based on the changes. Use the following categories (emoji category name => usage): 📦 new => for new files or new features; ✨ tweak => for enhancements or updates to the codebase; ☕ chore => for updates or changes outside the project codebase, including README.md; 🐞 fix => for fixing code bugs and errors. Please reply with the category name only.",
-  "message": "You are the author of the changes, you are going to provide a professional git commit message that is no longer than 25 characters in imperative present tense. Stricly no emojis are allowed and no conventional commit message as prefix is already provided. For example, instead of 'fix: fix a bug' make it 'fix a bug'. The message should be in lower case and no period at the end.",
-  "default_model": "gpt-4o-mini",
-  "maxDiffSize": 4000
+  "commitConfig": {
+    "emoji": "You are the author of the commit message. Your task is to select the appropriate category for the git diff based on the changes. Use the following categories (emoji category name => usage): 📦 new => for new files or new features; ✨ tweak => for enhancements or updates to the codebase; ☕ chore => for updates or changes outside the project codebase, including README.md; 🐞 fix => for fixing code bugs and errors. Please reply with the category name only.",
+    "message": "You are the author of the changes, and you will provide a professional git commit message that is no longer than 25 characters in imperative present tense. Strictly no emojis are allowed, and do not add a conventional commit prefix, as the prefix is already provided. For example, instead of 'fix: fix a bug', make it 'fix a bug'. The message should be in lowercase and should not have a period at the end."
+  },
+
+  "models": [
+    {
+      "title": "GPT-4o Mini",
+      "provider": "openai",
+      "model": "gpt-4o-mini",
+      "maxDiffSize": 4000,
+      "apiKey": ""
+    },
+    {
+      "title": "Llama 3.1 8B",
+      "provider": "ollama",
+      "model": "llama3.1:8b",
+      "maxDiffSize": 4000
+    }
+  ]
 }
diff --git a/source/utils/generateCommitMessage.js b/source/utils/generateCommitMessage.js
new file mode 100644
index 0000000..0745930
--- /dev/null
+++ b/source/utils/generateCommitMessage.js
@@ -0,0 +1,61 @@
+import openAiModel from '../models/openai.js';
+import ollamaModel from '../models/ollama.js';
+import config from './config.json';
+import {gitDiff, gitStatus, initGit} from './commit.js';
+
+// remove any staged changes in git
+
+async function generateCommitMessage(flags, model) {
+  const maxDiffSize = config.maxDiffSize;
+
+  await initGit();
+  const status = await gitStatus();
+
+  const gitDiffContent = await gitDiff();
+  const {category, message} = await getModelResponse(
+    model,
+    flags,
+    gitDiffContent,
+  );
+
+  if (gitDiffContent.length > maxDiffSize) {
+    console.log('Diff content is too large. Skipping OpenAI request.');
+    return `✨ tweak (${firstFilePath}): update ${firstFilePath}`;
+  }
+
+  if (status !== false) {
+    return `${category} (${firstFilePath}): ${message}`;
+  } else {
+    return false;
+  }
+}
+
+async function getModelResponse(model, flags, gitDiffContent) {
+  let response;
+
+  try {
+    switch (model) {
+      case 'gpt-4o-mini':
+        response = await openAiModel(model, flags, gitDiffContent);
+        break;
+      case 'llama3.1:8b':
+        response = await ollamaModel(model, flags, gitDiffContent);
+        break;
+      default:
+        throw new Error('Unsupported model selected');
+    }
+    console.log('response', response);
+
+    if (response && response.category && response.message) {
+      // Destructure and return the required fields
+      const {category, message} = response;
+      return {category, message};
+    } else {
+      throw new Error(response.message);
+    }
+  } catch (error) {
+    console.log(error.message);
+  }
+}
+
+export default generateCommitMessage;
diff --git a/source/utils/logo.js b/source/utils/logo.js
new file mode 100644
index 0000000..3be7548
--- /dev/null
+++ b/source/utils/logo.js
@@ -0,0 +1,31 @@
+import React from 'react';
+import info from './info.js';
+import BigText from 'ink-big-text';
+import Gradient from 'ink-gradient';
+import {Text, Newline} from 'ink';
+import ModelSelection from './modelSelection.js';
+
+export default function Logo(children) {
+  return (
+    <>
+
+
+
+      You can do `magicc`, you can build anything that you desire. 🪄
+
+
+
+      Version: {info('version')} | Author:{' '}
+      {info('author')}
+
+
+      Need Help? magicc --help
+
+
+      ==================================================
+
+
+
+
+  );
+}
diff --git a/source/utils/modelSelection.js b/source/utils/modelSelection.js
new file mode 100644
index 0000000..a794f09
--- /dev/null
+++ b/source/utils/modelSelection.js
@@ -0,0 +1,60 @@
+import React, {useState} from 'react';
+import readline from 'readline';
+
+import isGit from 'is-git-repository';
+import SelectInput from 'ink-select-input';
+import askForCommitMessage from './commit.js';
+import isCommitterSet from './errors.js';
+import {Text, Box, useApp} from 'ink';
+import config from './config.json';
+
+const ModelSelection = ({flags}) => {
+  const {exit} = useApp();
+  const [isDisabled, setIsDisabled] = useState(false); // State to disable the options after selection
+
+  const rl = readline.createInterface({
+    input: process.stdin,
+    output: process.stdout,
+  });
+  const models = config.models;
+
+  const handleSelect = async item => {
+    if (item.value) {
+      const model = item.value;
+      const gitCheck = isGit();
+      const committerCheck = await isCommitterSet();
+      if (gitCheck && committerCheck) {
+        try {
+          await askForCommitMessage(flags, model); // Await the function that handles commit message
+        } catch (error) {
+          rl.close();
+
+          // console.error('Error generating commit message:', error);
+          exit();
+        }
+      } else {
+        console.log('This is not a git repository.');
+      }
+    } else {
+      exit();
+      rl.close();
+    }
+  };
+
+  const modelItems = models.map(model => ({
+    label: model.title,
+    value: model.model, // Model name to pass to generateCommitMessage
+  }));
+
+  return (
+
+    Select a model:
+    {!isDisabled && ( // Conditionally render the SelectInput based on whether a selection has been made
+
+    )}
+    {isDisabled && Model selection in progress...}
+
+  );
+};
+
+export default ModelSelection;
diff --git a/source/utils/openai.js b/source/utils/openai.js
deleted file mode 100644
index 979d1fa..0000000
--- a/source/utils/openai.js
+++ /dev/null
@@ -1,97 +0,0 @@
-import {execa} from 'execa';
-import OpenAI from 'openai';
-import config from './config.json';
-import { getOpenAIKey } from './api.js';
-import dotenv from 'dotenv';
-
-dotenv.config();
-
-// remove any staged changes in git
-async function initGit() {
-  try {
-    await execa('git', ['restore', '--staged', '.']);
-  } catch (error) {
-    console.error(error);
-  }
-}
-
-// git status to see if there are any changes
-// if there's any changes add the first file in the list of changes
-let firstFilePath = '';
-
-async function gitStatus() {
-  try {
-    const { stdout: status } = await execa('git', ['status', '--porcelain']);
-    if (status) {
-      // get the first file path in the list of changes
-      const lines = status.split('\n');
-      const filePaths = lines
-        .map(line => line.split(' ').slice(2).join(' ').trim())
-        .filter(filePath => filePath !== '')
-        .concat(lines
-          .filter(line => line.startsWith('??'))
-          .map(line => line.split(' ').slice(1).join(' ').trim())
-        );
-      // git add the first file in the list of changes
-      firstFilePath = filePaths[0];
-      await execa('git', ['add', firstFilePath]);
-      console.log(`${firstFilePath} has been added to the staging area.`);
-    } else {
-      console.log('No changes to commit.');
-      return false;
-    }
-  } catch (error) {
-    console.error(error);
-  }
-}
-
-// get the diff of the staged changes
-async function gitDiff() {
-  try {
-    const { stdout: gitDiff } = await execa('git', ['diff', '--staged']);
-    return gitDiff;
-  } catch (error) {
-    console.error(error);
-  }
-}
-
-async function generatePrompt() {
-  const apiKey = await getOpenAIKey();
-  const openai = new OpenAI({apiKey: apiKey});
-  const maxDiffSize = config.maxDiffSize;
-
-  // get the staged changes
-  await initGit();
-  await gitStatus();
-  const gitDiffContent = await gitDiff();
-
-  if (gitDiffContent.length > maxDiffSize) {
-    console.log('Diff content is too large. Skipping OpenAI request.');
-    return `✨ tweak (${firstFilePath}): update ${firstFilePath}`;
-  }
-
-  // use the prompt from the config file emoji and send to openai
-  const category = await openai.chat.completions.create({
-    messages: [
-      { role: "system", content: config.emoji },
-      { role: "user", content: gitDiffContent },
-    ],
-    model: config.default_model,
-  });
-  // use the prmopt from the config file message and send to openai
-  const message = await openai.chat.completions.create({
-    messages: [
-      { role: "system", content: config.message },
-      { role: "user", content: gitDiffContent },
-    ],
-    model: config.default_model,
-  });
-
-  if (await gitStatus() !== false) {
-    return `${category.choices[0].message.content} (${firstFilePath}): ${message.choices[0].message.content}`;
-  } else {
-    return false;
-  }
-}
-
-export default generatePrompt;
\ No newline at end of file