fix: knip, logger and formatting
sshivaditya committed Jan 13, 2025
1 parent 06bcb54 commit a687136
Showing 11 changed files with 71 additions and 494 deletions.
3 changes: 1 addition & 2 deletions src/adapters/openai/helpers/completions.ts
@@ -69,7 +69,7 @@ export class Completions extends SuperOpenAi {
maxTokens: number
): Promise<CompletionsType> {
const numTokens = await this.findTokenLength(query, additionalContext, localContext, groundTruths);
logger.info(`Number of tokens: ${numTokens}`);
logger.debug(`Number of tokens: ${numTokens}`);

const sysMsg = [
"You Must obey the following ground truths: ",
@@ -84,7 +84,6 @@
].join("\n");

logger.info(`System message: ${sysMsg}`);
logger.info(`Query: ${query}`);

const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({
model: model,
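Most of the changes in this commit demote verbose prompt and token logs from info to debug, as in the hunk above. A minimal sketch of the level distinction being relied on, using a hypothetical leveled logger rather than the project's actual logger implementation:

// Hypothetical leveled logger (illustration only, not the plugin's logger):
// messages below the configured threshold are dropped, so debug-level prompt
// dumps stay out of normal output while info-level messages are still emitted.
const LEVELS = { debug: 10, info: 20 } as const;
type Level = keyof typeof LEVELS;

function makeLogger(threshold: Level) {
  const log = (level: Level, msg: string) => {
    if (LEVELS[level] >= LEVELS[threshold]) console.log(`[${level}] ${msg}`);
  };
  return { debug: (m: string) => log("debug", m), info: (m: string) => log("info", m) };
}

const demoLogger = makeLogger("info");
demoLogger.debug(`Number of tokens: ${4096}`); // suppressed at the "info" threshold
demoLogger.info("completion finished"); // still emitted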
6 changes: 3 additions & 3 deletions src/handlers/ask-llm.ts
@@ -17,7 +17,7 @@ export async function askQuestion(context: Context, question: string) {
// build a nicely structured system message containing a streamlined chat history
// includes the current issue, any linked issues, and any linked PRs
const formattedChat = await formatChatHistory(context, maxDepth);
logger.info("Formatted chat history " + formattedChat.join("\n"));
logger.debug("Formatted chat history " + formattedChat.join("\n"));
return await askLlm(context, question, []);
}

@@ -46,7 +46,7 @@ export async function askLlm(context: Context, question: string, formattedChat:
...(similarIssues?.map((issue: IssueSimilaritySearchResult) => issue.issue_plaintext) || []),
];

context.logger.info("Similar text: " + similarText.join("\n"));
context.logger.debug("Similar text: " + similarText.join("\n"));

// filter out any empty strings
formattedChat = formattedChat.filter((text) => text);
@@ -56,7 +56,7 @@
// gather structural data about the payload repository
const [languages, { dependencies, devDependencies }] = await Promise.all([fetchRepoLanguageStats(context), fetchRepoDependencies(context)]);

context.logger.info("Languages: " + languages.join(", "));
context.logger.debug("Languages: " + languages.join(", "));
let groundTruths: string[] = [];

if (!languages.length) {
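For orientation, the askLlm hunks above gather context in three steps: similarity-search text is collected, empty chat entries are filtered out, and repository languages plus dependencies are fetched concurrently. A simplified sketch of that shape, with placeholder helpers standing in for the plugin's real ones:

// Sketch only; fetchSimilar, fetchLanguages and fetchDeps are hypothetical stand-ins.
async function gatherContext(
  fetchSimilar: () => Promise<string[]>,
  fetchLanguages: () => Promise<string[]>,
  fetchDeps: () => Promise<{ dependencies: string[]; devDependencies: string[] }>,
  formattedChat: string[]
) {
  const similarText = await fetchSimilar();
  // Drop empty strings before the chat history reaches the prompt builder.
  const chat = formattedChat.filter((text) => text);
  // Structural repo data is fetched in parallel, mirroring the Promise.all in the diff.
  const [languages, deps] = await Promise.all([fetchLanguages(), fetchDeps()]);
  return { similarText, chat, languages, ...deps };
}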
104 changes: 0 additions & 104 deletions src/handlers/comments.ts

This file was deleted.

16 changes: 8 additions & 8 deletions src/helpers/format-chat-history.ts
@@ -177,22 +177,22 @@ async function buildTree(
// Early return checks to prevent unnecessary processing
if (depth > maxDepth || processingStack.has(key)) {
// Processing stack is used to prevent infinite loops
logger.info(`Skip ${key} - max depth/already processing`);
logger.debug(`Skip ${key} - max depth/already processing`);
return processedNodes.get(key) || null;
}

if (processedNodes.has(key)) {
logger.info(`Return cached node: ${key}`);
logger.debug(`Return cached node: ${key}`);
return processedNodes.get(key) || null;
}

if (linkedIssueKeys.has(key)) {
logger.info(`Skip ${key} - already linked`);
logger.debug(`Skip ${key} - already linked`);
return null;
}

if (failedFetches.has(key)) {
logger.info(`Skip ${key} - previous fetch failed`);
logger.debug(`Skip ${key} - previous fetch failed`);
return null;
}

@@ -201,7 +201,7 @@
try {
const [owner, repo, issueNum] = splitKey(key);
const response = await fetchIssueComments({ context, owner, repo, issueNum: parseInt(issueNum) }, tokenLimit);
logger.info(`Tokens: ${tokenLimit.runningTokenCount}/${tokenLimit.tokensRemaining}`);
logger.debug(`Tokens: ${tokenLimit.runningTokenCount}/${tokenLimit.tokensRemaining}`);
const issue = response.issue;

if (!issue) {
@@ -254,8 +254,8 @@ async function buildTree(
// Process valid references
for (const ref of references) {
//Uses references found so far to create child nodes
const childNode = await createNode(ref, depth + 1); // Recursively create child nodes untill max depth is reached
logger.info(`Created child node for ${ref}`);
const childNode = await createNode(ref, depth + 1); // Recursively create child nodes until max depth is reached
logger.debug(`Created child node for ${ref}`);
if (childNode) {
childNode.parent = node;
node.children.push(childNode);
@@ -384,7 +384,7 @@ export async function formatChatHistory(context: Context, maxDepth: number = 2):
treeOutput.push(headerLine, "");

await processTreeNode(tree, "", treeOutput, tokenLimits);
logger.info(`Final tokens: ${tokenLimits.runningTokenCount}/${tokenLimits.tokensRemaining}`);
logger.debug(`Final tokens: ${tokenLimits.runningTokenCount}/${tokenLimits.tokensRemaining}`);

return treeOutput;
}
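The buildTree hunks above keep the existing guard pattern: a processing set breaks reference cycles, a cache of processed nodes avoids refetching, and previously failed fetches are skipped. A self-contained sketch of that pattern with simplified types (the fetchRefs callback is a hypothetical stand-in for fetching an issue's references):

type TreeNode = { key: string; children: TreeNode[] };

async function build(
  key: string,
  fetchRefs: (k: string) => Promise<string[]>,
  processing = new Set<string>(),
  cache = new Map<string, TreeNode>()
): Promise<TreeNode | null> {
  if (processing.has(key)) return null; // cycle guard: A -> B -> A stops here
  if (cache.has(key)) return cache.get(key) ?? null; // reuse nodes already built elsewhere in the tree
  processing.add(key);
  const node: TreeNode = { key, children: [] };
  for (const ref of await fetchRefs(key)) {
    const child = await build(ref, fetchRefs, processing, cache);
    if (child) node.children.push(child);
  }
  processing.delete(key);
  cache.set(key, node);
  return node;
}

The real implementation additionally enforces a maximum depth and a running token budget, as the logger.debug lines above show.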
55 changes: 52 additions & 3 deletions src/helpers/issue-fetching.ts
@@ -1,9 +1,58 @@
import { FetchParams, Issue, LinkedIssues, SimplifiedComment } from "../types/github-types";
import { TokenLimits } from "../types/llm";
import { logger } from "./errors";
import { idIssueFromComment } from "./issue";
import { fetchPullRequestComments, fetchPullRequestDetails } from "./pull-request-fetching";
import { createDefaultTokenLimits } from "./token-utils";

/**
* Create a unique key for an issue based on its URL and optional issue number
* @param issueUrl - The URL of the issue
* @param issue - The optional issue number
* @returns The unique key for the issue
*/
export function createKey(issueUrl: string, issue?: number) {
const urlParts = issueUrl.split("/");

let key;

// Handle PR review comment URLs which have 'pull' and 'comments' in the path
if (urlParts.includes("pull") && urlParts.includes("comments")) {
// Extract the PR number from the URL
const prIndex = urlParts.indexOf("pull");
if (prIndex >= 0 && prIndex + 1 < urlParts.length) {
const prNumber = urlParts[prIndex + 1];
const [, , , issueOrg, issueRepo] = urlParts;
key = `${issueOrg}/${issueRepo}/${prNumber}`;
}
} else if (urlParts.length === 7) {
const [, , , issueOrg, issueRepo, , issueNumber] = urlParts;
key = `${issueOrg}/${issueRepo}/${issueNumber}`;
} else if (urlParts.length === 5) {
const [, , issueOrg, issueRepo] = urlParts;
key = `${issueOrg}/${issueRepo}/${issue}`;
} else if (urlParts.length === 8) {
const [, , , issueOrg, issueRepo, , , issueNumber] = urlParts;
key = `${issueOrg}/${issueRepo}/${issueNumber || issue}`;
} else if (urlParts.length === 3) {
const [issueOrg, issueRepo, issueNumber] = urlParts;
key = `${issueOrg}/${issueRepo}/${issueNumber || issue}`;
}

if (!key) {
throw logger.error("Invalid issue URL", {
issueUrl,
issueNumber: issue,
});
}

if (key.includes("#")) {
key = key.split("#")[0];
}

return key;
}

export async function fetchIssue(params: FetchParams, tokenLimits?: TokenLimits): Promise<Issue | null> {
const { octokit, payload, logger } = params.context;
const { issueNum, owner, repo } = params;
@@ -58,7 +107,7 @@ export async function fetchIssueComments(params: FetchParams, tokenLimits?: Toke
},
currentTokenLimits
);
logger.info(`Fetched issue #${targetIssueNum}`);
logger.debug(`Fetched issue #${targetIssueNum}`);

if (!issue) {
return { issue: null, comments: null, linkedIssues: null };
@@ -122,7 +171,7 @@ export async function fetchIssueComments(params: FetchParams, tokenLimits?: Toke
issue_number: targetIssueNum,
});

logger.info(`Fetched comments for issue #${targetIssueNum}`);
logger.debug(`Fetched comments for issue #${targetIssueNum}`);

comments = response.data
.filter((comment): comment is typeof comment & { body: string } => comment.user?.type !== "Bot" && typeof comment.body === "string")
@@ -169,6 +218,6 @@ export async function fetchIssueComments(params: FetchParams, tokenLimits?: Toke
});
}
}
logger.info(`Processed ${comments.length} comments and ${linkedIssues.length} linked issues`);
logger.debug(`Processed ${comments.length} comments and ${linkedIssues.length} linked issues`);
return { issue, comments, linkedIssues };
}
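As a quick check of the new createKey helper added above, these are the key shapes it should produce for common URL forms (the owner, repo, and issue numbers are made-up illustrative values):

createKey("https://github.com/owner/repo/issues/123"); // "owner/repo/123" (7-part URL branch)
createKey("https://github.com/owner/repo/issues/123#issuecomment-1"); // "owner/repo/123" (trailing "#..." fragment is stripped)
createKey("https://example.com/not/an/issue"); // no branch matches, so the "Invalid issue URL" error is thrown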