feat(prompting): prompt without any input files and specify prompts within a file
peterdanwan committed Oct 6, 2024
1 parent 845eb10 commit 69e0a61
Showing 6 changed files with 127 additions and 6 deletions.
48 changes: 48 additions & 0 deletions _examples/prompt-files/prompt-1.txt
@@ -0,0 +1,48 @@
Take the code from each file sent and produce a comprehensive response in markdown format explaining the code.

## Structure Requirements:
- Start with a level 1 heading (#) that serves as an appropriate title for the document. This title should not be "README.md".
- Create a level 2 heading (##) titled "Project Structure" to outline the directory structure of the project in a tree format.
- For each file sent, provide a separate level 2 heading (##) with the file name (do not include the relative path).
- Under each file heading, give a detailed explanation of the file's purpose and functionality, including code snippets where relevant.

## Explanation Requirements:
- Refer to code snippets from the provided files to illustrate your explanations.
- Ensure the explanation is clear, concise, and uses semantic markdown headings to separate different sections and concepts.
- If the file is a script or module, explain the key functions, classes, or exports it contains.
- If there are any dependencies or connections between the files, explain them under the appropriate headings.

## Example Output:
\`\`\`markdown
# Appropriate Title for the Document

## Project Structure
\`
├── src
│   ├── ai_models
│   │   └── geminiModels.js
│   ├── commanderProgram.js
│   ├── getFileContent.js
│   ├── ai.js
│   ├── defaultPrompt.js
│   └── _gr.js
\`

## ai_models/geminiModels.js
Explanation of the contents of geminiModels.js, including key functions, classes, and usage examples.

## commanderProgram.js
Explanation of commanderProgram.js, highlighting its role in the project, key functionality, and any important code snippets.

... (continue for each file)
\`\`\`

Ensure that the markdown uses the appropriate level of headings and references relevant code snippets for clarity.

---

In addition to following the rules above, please look out for the following regarding the files I'm about to send you.

I'd like you to help me create the appropriate associations for the models within my `models` folder.

I also believe that the names of each table's columns in my `_db-scripts/init.sql` need to match what I have listed in my models.
6 changes: 2 additions & 4 deletions src/_gr.js
Expand Up @@ -7,6 +7,7 @@ import program from './commanderProgram.js';
import handleConfigOption from './option_handlers/handleConfigOption.js';
import handleHelpOption from './option_handlers/handleHelpOption.js';
import handleFilesOption from './option_handlers/handleFilesOption.js';
import handleNoFilesOption from './option_handlers/handleNoFilesOption.js';

async function main() {
  const args = process.argv;
@@ -26,10 +27,7 @@ async function main() {
    const files = options['files'];
    await handleFilesOption(files, options);
  } else {
    console.error(
      "No files specified to process. Use '--files' or '-f option or configure files='..' in .toml file."
    );
    process.exit(0);
    await handleNoFilesOption(options);
  }
}

1 change: 1 addition & 0 deletions src/commanderProgram.js
Expand Up @@ -38,6 +38,7 @@ program.option(
`specify which free-tier model you'd want to use \ne.g., ${modelsString}`
);
program.option('-p, --prompt <string>', 'specify a custom prompt');
program.option('-pf, --promptFile <string>', 'specify a prompt file');
program.option(
  '-c, --config',
  "makes a .gimme_readme_config file if it doesn't exist and/or shows the location of this config file"
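For reference, a minimal sketch (not a file from the repository) of how the two prompt flags surface through commander's Command API; the script name and invocation below are illustrative:

import { Command } from 'commander';

const program = new Command();
program.option('-p, --prompt <string>', 'specify a custom prompt');
program.option('-pf, --promptFile <string>', 'specify a prompt file');

// e.g. node sketch.js --promptFile _examples/prompt-files/prompt-1.txt
program.parse(process.argv);

// commander exposes parsed flags via opts(), keyed by the camel-cased long name,
// which is why the option handlers read options.prompt and options.promptFile
const options = program.opts();
console.log(options.prompt, options.promptFile);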
22 changes: 20 additions & 2 deletions src/option_handlers/handleFilesOption.js
Expand Up @@ -14,10 +14,28 @@ import defaultPrompt from '../defaultPrompt.js';
import { glob } from 'glob'; // Add glob for pattern matching

export default async function handleFilesOption(files, options) {
  // cli || .gimme_readme_config || process.env.VAL || hardcoded default;
  const toml = getTOMLFileValues();

  let prompt = options.prompt || toml?.CUSTOM_PROMPT || process.env.CUSTOM_PROMPT || defaultPrompt;
  let prompt;

  // Check if both -p and -pf are used
  if (options.prompt && options.promptFile) {
    console.error(chalk.red("Error: Cannot use both '-p' and '-pf' simultaneously"));
    process.exit(1);
  }

  if (options.promptFile) {
    const promptFilePath = path.resolve(process.cwd(), options.promptFile);
    try {
      prompt = getFileContent(promptFilePath);
    } catch (error) {
      console.error(chalk.red(`Error reading prompt file: ${error.message}`));
      process.exit(1);
    }
  } else {
    prompt = options.prompt || toml?.CUSTOM_PROMPT || process.env.CUSTOM_PROMPT || defaultPrompt;
  }

  const model = options.model || toml?.preferences.MODEL || process.env.MODEL || 'gemini-1.5-flash';
  const outputFile = options.outputFile || toml?.OUTPUT_FILE || process.env.OUTPUT_FILE || null; // with no output file specified, the response is printed to the terminal
  const temperature =
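Both handlers repeat the same -p/-pf resolution. A condensed sketch of the intended precedence (explicit prompt or prompt file from the CLI, then config/environment values, then the built-in default) could look like this; resolvePrompt and its synchronous read are hypothetical, not part of the commit:

import fs from 'node:fs';
import path from 'node:path';

function resolvePrompt(options, toml, defaultPrompt) {
  // the two flags are mutually exclusive, mirroring the check in the handlers
  if (options.prompt && options.promptFile) {
    throw new Error("Cannot use both '-p' and '-pf' simultaneously");
  }
  if (options.promptFile) {
    // use the whole file contents (e.g. _examples/prompt-files/prompt-1.txt) as the prompt
    const promptFilePath = path.resolve(process.cwd(), options.promptFile);
    return fs.readFileSync(promptFilePath, 'utf8');
  }
  return options.prompt || toml?.CUSTOM_PROMPT || process.env.CUSTOM_PROMPT || defaultPrompt;
}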
55 changes: 55 additions & 0 deletions src/option_handlers/handleNoFilesOption.js
@@ -0,0 +1,55 @@
// src/option_handlers/handleNoFilesOption.js

import chalk from 'chalk';
import ora from 'ora';
import path from 'path';

import getFileContent from '../file_functions/getFileContent.js';
import getTOMLFileValues from '../file_functions/getTOMLFileValues.js';
import promptAI from '../ai.js';
import defaultPrompt from '../defaultPrompt.js';

export default async function handleNoFilesOption(options) {
  const toml = getTOMLFileValues();

  let prompt;

  // Check if both -p and -pf are used
  if (options.prompt && options.promptFile) {
    console.error(chalk.red("Error: Cannot use both '-p' and '-pf' options simultaneously."));
    process.exit(1);
  }

  if (options.promptFile) {
    const promptFilePath = path.resolve(process.cwd(), options.promptFile);
    try {
      prompt = getFileContent(promptFilePath);
    } catch (error) {
      console.error(chalk.red(`Error reading prompt file: ${error.message}`));
      process.exit(1);
    }
  } else {
    prompt = options.prompt || defaultPrompt;
  }

  const model = options.model || toml?.preferences.MODEL || process.env.MODEL || 'gemini-1.5-flash';
  const outputFile = options.outputFile || toml?.OUTPUT_FILE || process.env.OUTPUT_FILE || null;
  const temperature =
    options.temperature || toml?.preferences.TEMPERATURE || process.env.TEMPERATURE || 0.5;
  const needToken = options.token || toml?.TOKEN || false;

  console.log(chalk.blue('Sending prompt to the model...'));

  const spinner = ora(` Waiting for a response from the ${chalk.blue(model)} model...\n`).start();

  try {
    await promptAI(prompt, model, temperature, outputFile, needToken);
    spinner.succeed(` Response received from ${chalk.blue(model)} model`);
  } catch (error) {
    spinner.fail(` Failed to receive response from ${chalk.red(model)} model`);
    console.error(error);
    process.exit(1);
  }

  process.exit(0);
}
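As a usage illustration, the no-files handler can be driven directly with only a prompt file; this driver is hypothetical (the module path assumes the layout in this commit, and the real entry point remains src/_gr.js):

// run-prompt-only.js (illustrative, ESM)
import handleNoFilesOption from './src/option_handlers/handleNoFilesOption.js';

// No --files value is involved: only the prompt is sent to the model.
await handleNoFilesOption({
  promptFile: '_examples/prompt-files/prompt-1.txt',
  model: 'gemini-1.5-flash', // same default the handler falls back to
  temperature: 0.7,
});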
1 change: 1 addition & 0 deletions tests/unit/commanderProgram.test.js
Expand Up @@ -36,6 +36,7 @@ describe('src/commanderProgram.js tests', () => {
description: `specify which free-tier model you'd want to use \ne.g., ${modelsString}`,
},
{ flags: '-p, --prompt <string>', description: 'specify a custom prompt' },
{ flags: '-pf, --promptFile <string>', description: 'specify a prompt file' },
{
flags: '-c, --config',
description:
