From a6871364dab80e2f199d93b95f32df1903ce10ba Mon Sep 17 00:00:00 2001
From: Shivaditya Shivganesh
Date: Mon, 13 Jan 2025 02:05:18 -0500
Subject: [PATCH] fix: knip, logger and formatting

---
 src/adapters/openai/helpers/completions.ts |   3 +-
 src/handlers/ask-llm.ts                    |   6 +-
 src/handlers/comments.ts                   | 104 -------
 src/helpers/format-chat-history.ts         |  16 +-
 src/helpers/issue-fetching.ts              |  55 +++-
 src/helpers/issue.ts                       | 343 +--------
 src/helpers/pull-request-fetching.ts       |   2 +-
 src/helpers/pull-request-parsing.ts        |   6 +-
 src/types/github-types.ts                  |  26 --
 src/types/plugin-input.ts                  |   2 +-
 tests/main.test.ts                         |   2 +-
 11 files changed, 71 insertions(+), 494 deletions(-)
 delete mode 100644 src/handlers/comments.ts

diff --git a/src/adapters/openai/helpers/completions.ts b/src/adapters/openai/helpers/completions.ts
index e8e64c7..480cdba 100644
--- a/src/adapters/openai/helpers/completions.ts
+++ b/src/adapters/openai/helpers/completions.ts
@@ -69,7 +69,7 @@ export class Completions extends SuperOpenAi {
     maxTokens: number
   ): Promise<CompletionsType> {
     const numTokens = await this.findTokenLength(query, additionalContext, localContext, groundTruths);
-    logger.info(`Number of tokens: ${numTokens}`);
+    logger.debug(`Number of tokens: ${numTokens}`);
 
     const sysMsg = [
       "You Must obey the following ground truths: ",
@@ -84,7 +84,6 @@ export class Completions extends SuperOpenAi {
     ].join("\n");
 
     logger.info(`System message: ${sysMsg}`);
-    logger.info(`Query: ${query}`);
 
     const res: OpenAI.Chat.Completions.ChatCompletion = await this.client.chat.completions.create({
       model: model,
diff --git a/src/handlers/ask-llm.ts b/src/handlers/ask-llm.ts
index c3d84a3..59e06d1 100644
--- a/src/handlers/ask-llm.ts
+++ b/src/handlers/ask-llm.ts
@@ -17,7 +17,7 @@ export async function askQuestion(context: Context, question: string) {
   // build a nicely structure system message containing a streamlined chat history
   // includes the current issue, any linked issues, and any linked PRs
   const formattedChat = await formatChatHistory(context, maxDepth);
-  logger.info("Formatted chat history " + formattedChat.join("\n"));
+  logger.debug("Formatted chat history " + formattedChat.join("\n"));
   return await askLlm(context, question, []);
 }
 
@@ -46,7 +46,7 @@ export async function askLlm(context: Context, question: string, formattedChat: string[]) {
     ...(similarIssues?.map((issue: IssueSimilaritySearchResult) => issue.issue_plaintext) || []),
   ];
 
-  context.logger.info("Similar text: " + similarText.join("\n"));
+  context.logger.debug("Similar text: " + similarText.join("\n"));
 
   // filter out any empty strings
   formattedChat = formattedChat.filter((text) => text);
@@ -56,7 +56,7 @@ export async function askLlm(context: Context, question: string, formattedChat: string[]) {
   // gather structural data about the payload repository
   const [languages, { dependencies, devDependencies }] = await Promise.all([fetchRepoLanguageStats(context), fetchRepoDependencies(context)]);
 
-  context.logger.info("Languages: " + languages.join(", "));
+  context.logger.debug("Languages: " + languages.join(", "));
 
   let groundTruths: string[] = [];
   if (!languages.length) {
diff --git a/src/handlers/comments.ts b/src/handlers/comments.ts
deleted file mode 100644
index 10d5ee1..0000000
--- a/src/handlers/comments.ts
+++ /dev/null
@@ -1,104 +0,0 @@
-import { logger } from "../helpers/errors";
-import { splitKey } from "../helpers/issue";
-import { LinkedIssues, SimplifiedComment } from "../types/github-types";
-import { StreamlinedComment } from "../types/llm";
-
-/**
- * Get all streamlined comments from linked issues
- * @param linkedIssues - The linked issues to get comments from
- * @returns The streamlined comments which are grouped by issue key
- */
-export async function getAllStreamlinedComments(linkedIssues: LinkedIssues[]) {
-  const streamlinedComments: Record<string, StreamlinedComment[]> = {};
-
-  for (const issue of linkedIssues) {
-    const linkedIssueComments = issue.comments || [];
-    if (linkedIssueComments.length === 0) continue;
-
-    const linkedStreamlinedComments = streamlineComments(linkedIssueComments);
-    if (!linkedStreamlinedComments) continue;
-
-    for (const [key, value] of Object.entries(linkedStreamlinedComments)) {
-      streamlinedComments[key] = [...(streamlinedComments[key] || []), ...value];
-    }
-  }
-  return streamlinedComments;
-}
-
-/**
- * Create a unique key for an issue based on its URL and optional issue number
- * @param issueUrl - The URL of the issue
- * @param issue - The optional issue number
- * @returns The unique key for the issue
- */
-export function createKey(issueUrl: string, issue?: number) {
-  const urlParts = issueUrl.split("/");
-  let key;
-
-  // Handle PR review comment URLs which have 'pull' and 'comments' in the path
-  if (urlParts.includes("pull") && urlParts.includes("comments")) {
-    // Extract the PR number from the URL
-    const prIndex = urlParts.indexOf("pull");
-    if (prIndex >= 0 && prIndex + 1 < urlParts.length) {
-      const prNumber = urlParts[prIndex + 1];
-      const [, , , issueOrg, issueRepo] = urlParts;
-      key = `${issueOrg}/${issueRepo}/${prNumber}`;
-    }
-  } else if (urlParts.length === 7) {
-    const [, , , issueOrg, issueRepo, , issueNumber] = urlParts;
-    key = `${issueOrg}/${issueRepo}/${issueNumber}`;
-  } else if (urlParts.length === 5) {
-    const [, , issueOrg, issueRepo] = urlParts;
-    key = `${issueOrg}/${issueRepo}/${issue}`;
-  } else if (urlParts.length === 8) {
-    const [, , , issueOrg, issueRepo, , , issueNumber] = urlParts;
-    key = `${issueOrg}/${issueRepo}/${issueNumber || issue}`;
-  } else if (urlParts.length === 3) {
-    const [issueOrg, issueRepo, issueNumber] = urlParts;
-    key = `${issueOrg}/${issueRepo}/${issueNumber || issue}`;
-  }
-
-  if (!key) {
-    throw logger.error("Invalid issue URL", {
-      issueUrl,
-      issueNumber: issue,
-    });
-  }
-
-  if (key.includes("#")) {
-    key = key.split("#")[0];
-  }
-
-  return key;
-}
-
-/**
- * Streamline comments by filtering out bot comments and organizing them by issue key
- * @param comments - The comments to streamline
- * @returns The streamlined comments grouped by issue key
- */
-export function streamlineComments(comments: SimplifiedComment[]) {
-  const streamlined: Record<string, StreamlinedComment[]> = {};
-
-  for (const comment of comments) {
-    const { user, issueUrl: url, body } = comment;
-    if (user?.type === "Bot") continue;
-
-    const key = createKey(url);
-    const [owner, repo] = splitKey(key);
-    streamlined[key] ??= [];
-
-    if (user && body) {
-      streamlined[key].push({
-        user: user.login,
-        body,
-        id: parseInt(comment.id, 10),
-        org: owner,
-        repo,
-        issueUrl: url,
-      });
-    }
-  }
-  return streamlined;
-}
diff --git a/src/helpers/format-chat-history.ts b/src/helpers/format-chat-history.ts
index 3b04a3d..f8bcb2d 100644
--- a/src/helpers/format-chat-history.ts
+++ b/src/helpers/format-chat-history.ts
@@ -177,22 +177,22 @@ async function buildTree(
   // Early return checks to prevent unnecessary processing
   if (depth > maxDepth || processingStack.has(key)) {
     // Processing stack is used to prevent infinite loops
-    logger.info(`Skip ${key} - max depth/already processing`);
+    logger.debug(`Skip ${key} - max depth/already processing`);
     return processedNodes.get(key) || null;
   }
 
   if (processedNodes.has(key)) {
-    logger.info(`Return cached node: ${key}`);
+    logger.debug(`Return cached node: ${key}`);
     return processedNodes.get(key) || null;
   }
 
   if (linkedIssueKeys.has(key)) {
-    logger.info(`Skip ${key} - already linked`);
+    logger.debug(`Skip ${key} - already linked`);
     return null;
   }
 
   if (failedFetches.has(key)) {
-    logger.info(`Skip ${key} - previous fetch failed`);
+    logger.debug(`Skip ${key} - previous fetch failed`);
     return null;
   }
 
@@ -201,7 +201,7 @@ async function buildTree(
   try {
     const [owner, repo, issueNum] = splitKey(key);
     const response = await fetchIssueComments({ context, owner, repo, issueNum: parseInt(issueNum) }, tokenLimit);
-    logger.info(`Tokens: ${tokenLimit.runningTokenCount}/${tokenLimit.tokensRemaining}`);
+    logger.debug(`Tokens: ${tokenLimit.runningTokenCount}/${tokenLimit.tokensRemaining}`);
 
     const issue = response.issue;
     if (!issue) {
@@ -254,8 +254,8 @@ async function buildTree(
     // Process valid references
     for (const ref of references) {
       //Uses references found so far to create child nodes
-      const childNode = await createNode(ref, depth + 1); // Recursively create child nodes untill max depth is reached
-      logger.info(`Created child node for ${ref}`);
+      const childNode = await createNode(ref, depth + 1); // Recursively create child nodes until max depth is reached
+      logger.debug(`Created child node for ${ref}`);
       if (childNode) {
         childNode.parent = node;
         node.children.push(childNode);
@@ -384,7 +384,7 @@ export async function formatChatHistory(context: Context, maxDepth: number = 2): Promise<string[]> {
   treeOutput.push(headerLine, "");
 
   await processTreeNode(tree, "", treeOutput, tokenLimits);
-  logger.info(`Final tokens: ${tokenLimits.runningTokenCount}/${tokenLimits.tokensRemaining}`);
+  logger.debug(`Final tokens: ${tokenLimits.runningTokenCount}/${tokenLimits.tokensRemaining}`);
 
   return treeOutput;
 }
diff --git a/src/helpers/issue-fetching.ts b/src/helpers/issue-fetching.ts
index 660ebb7..7c49442 100644
--- a/src/helpers/issue-fetching.ts
+++ b/src/helpers/issue-fetching.ts
@@ -1,9 +1,58 @@
 import { FetchParams, Issue, LinkedIssues, SimplifiedComment } from "../types/github-types";
 import { TokenLimits } from "../types/llm";
+import { logger } from "./errors";
 import { idIssueFromComment } from "./issue";
 import { fetchPullRequestComments, fetchPullRequestDetails } from "./pull-request-fetching";
 import { createDefaultTokenLimits } from "./token-utils";
 
+/**
+ * Create a unique key for an issue based on its URL and optional issue number
+ * @param issueUrl - The URL of the issue
+ * @param issue - The optional issue number
+ * @returns The unique key for the issue
+ */
+export function createKey(issueUrl: string, issue?: number) {
+  const urlParts = issueUrl.split("/");
+  let key;
+
+  // Handle PR review comment URLs which have 'pull' and 'comments' in the path
+  if (urlParts.includes("pull") && urlParts.includes("comments")) {
+    // Extract the PR number from the URL
+    const prIndex = urlParts.indexOf("pull");
+    if (prIndex >= 0 && prIndex + 1 < urlParts.length) {
+      const prNumber = urlParts[prIndex + 1];
+      const [, , , issueOrg, issueRepo] = urlParts;
+      key = `${issueOrg}/${issueRepo}/${prNumber}`;
+    }
+  } else if (urlParts.length === 7) {
+    const [, , , issueOrg, issueRepo, , issueNumber] = urlParts;
+    key = `${issueOrg}/${issueRepo}/${issueNumber}`;
+  } else if (urlParts.length === 5) {
+    const [, , issueOrg, issueRepo] = urlParts;
+    key = `${issueOrg}/${issueRepo}/${issue}`;
+  } else if (urlParts.length === 8) {
+    const [, , , issueOrg, issueRepo, , , issueNumber] = urlParts;
+    key = `${issueOrg}/${issueRepo}/${issueNumber || issue}`;
+  } else if (urlParts.length === 3) {
+    const [issueOrg, issueRepo, issueNumber] = urlParts;
+    key = `${issueOrg}/${issueRepo}/${issueNumber || issue}`;
+  }
+
+  if (!key) {
+    throw logger.error("Invalid issue URL", {
+      issueUrl,
+      issueNumber: issue,
+    });
+  }
+
+  if (key.includes("#")) {
+    key = key.split("#")[0];
+  }
+
+  return key;
+}
+
 export async function fetchIssue(params: FetchParams, tokenLimits?: TokenLimits): Promise<Issue | null> {
   const { octokit, payload, logger } = params.context;
   const { issueNum, owner, repo } = params;
@@ -58,7 +107,7 @@ export async function fetchIssueComments(params: FetchParams, tokenLimits?: TokenLimits
     },
     currentTokenLimits
   );
-  logger.info(`Fetched issue #${targetIssueNum}`);
+  logger.debug(`Fetched issue #${targetIssueNum}`);
   if (!issue) {
     return { issue: null, comments: null, linkedIssues: null };
   }
@@ -122,7 +171,7 @@ export async function fetchIssueComments(params: FetchParams, tokenLimits?: TokenLimits
       issue_number: targetIssueNum,
     });
 
-    logger.info(`Fetched comments for issue #${targetIssueNum}`);
+    logger.debug(`Fetched comments for issue #${targetIssueNum}`);
 
     comments = response.data
      .filter((comment): comment is typeof comment & { body: string } => comment.user?.type !== "Bot" && typeof comment.body === "string")
@@ -169,6 +218,6 @@ export async function fetchIssueComments(params: FetchParams, tokenLimits?: TokenLimits
       });
     }
   }
-  logger.info(`Processed ${comments.length} comments and ${linkedIssues.length} linked issues`);
+  logger.debug(`Processed ${comments.length} comments and ${linkedIssues.length} linked issues`);
   return { issue, comments, linkedIssues };
 }
diff --git a/src/helpers/issue.ts b/src/helpers/issue.ts
index 15366c7..17af77a 100644
--- a/src/helpers/issue.ts
+++ b/src/helpers/issue.ts
@@ -1,45 +1,5 @@
-import {
-  FetchedCodes,
-  FetchParams,
-  GqlIssueCommentSearchResult,
-  GqlIssueSearchResult,
-  GqlPullRequestReviewCommentSearchResult,
-  GqlPullRequestSearchResult,
-  LinkedIssues,
-} from "../types/github-types";
-import { StreamlinedComment, TokenLimits } from "../types/llm";
-import { Context } from "../types/context";
+import { FetchParams, LinkedIssues } from "../types/github-types";
 import { logger } from "./errors";
-import { encode } from "gpt-tokenizer";
-
-function updateTokenCount(text: string, tokenLimits: TokenLimits): void {
-  const tokenCount = encode(text, { disallowedSpecial: new Set() }).length;
-  tokenLimits.runningTokenCount += tokenCount;
-  tokenLimits.tokensRemaining = tokenLimits.modelMaxTokenLimit - tokenLimits.maxCompletionTokens - tokenLimits.runningTokenCount;
-}
-
-export function dedupeStreamlinedComments(streamlinedComments: Record<string, StreamlinedComment[]>) {
-  for (const key of Object.keys(streamlinedComments)) {
-    streamlinedComments[key] = streamlinedComments[key].filter(
-      (comment: StreamlinedComment, index: number, self: StreamlinedComment[]) => index === self.findIndex((t: StreamlinedComment) => t.body === comment.body)
-    );
-  }
-  return streamlinedComments;
-}
-
-export function mergeStreamlinedComments(existingComments: Record<string, StreamlinedComment[]>, newComments: Record<string, StreamlinedComment[]>) {
-  if (!existingComments) {
-    existingComments = {};
-  }
-  for (const [key, value] of Object.entries(newComments)) {
-    if (!existingComments[key]) {
-      existingComments[key] = [];
-    }
-    const previous = existingComments[key] || [];
-    existingComments[key] = [...previous, ...value];
-  }
-  return existingComments;
-}
 
 export function splitKey(key: string): [string, string, string] {
   try {
@@ -194,304 +154,3 @@ export function idIssueFromComment(comment?: string | null, params?: FetchParams
   }
   return response.length > 0 ? response : null;
 }
-
-export async function fetchCodeLinkedFromIssue(
-  issue: string,
-  context: Context,
-  url: string,
-  extensions: string[] = [".ts", ".json", ".sol"],
-  tokenLimits?: TokenLimits
-): Promise<FetchedCodes[]> {
-  const { octokit } = context;
-
-  function parseGitHubUrl(url: string): { owner: string; repo: string; path: string } | null {
-    const cleanUrl = cleanGitHubUrl(url);
-    const match = cleanUrl.match(/https?:\/\/(?:www\.)?github\.com\/([^/]+)\/([^/]+?)\/blob\/[^/]+\/(.+)/);
-    return match ? { owner: match[1], repo: match[2], path: match[3] } : null;
-  }
-
-  function hasValidExtension(path: string) {
-    const cleanPath = path.split("#")[0];
-    return extensions.some((ext) => cleanPath.toLowerCase().endsWith(ext.toLowerCase()));
-  }
-
-  function removeLineNumbers(url: string) {
-    const match = url.match(/(.*?)(#L\d+(-L\d+)?)/);
-    return match ? match[1] : url;
-  }
-
-  const urls = issue.match(/https?:\/\/(?:www\.)?github\.com\/[^\s]+/g) || [];
-  const results = await Promise.all(
-    urls.map(async (url) => {
-      let parsedUrl = parseGitHubUrl(url);
-      parsedUrl = parsedUrl ? { ...parsedUrl, path: removeLineNumbers(parsedUrl.path) } : null;
-      if (!parsedUrl || !hasValidExtension(parsedUrl.path)) return null;
-
-      try {
-        const commitSha = url.match(/https?:\/\/github\.com\/[^/]+\/[^/]+?\/blob\/([^/]+)\/.+/);
-        let response;
-        if (commitSha) {
-          response = await octokit.rest.repos.getContent({
-            owner: parsedUrl.owner,
-            repo: parsedUrl.repo,
-            ref: commitSha[1],
-            path: parsedUrl.path,
-          });
-        } else {
-          response = await octokit.rest.repos.getContent({
-            owner: parsedUrl.owner,
-            repo: parsedUrl.repo,
-            path: parsedUrl.path,
-          });
-        }
-
-        if ("content" in response.data) {
-          const content = Buffer.from(response.data.content, "base64").toString();
-          if (tokenLimits) {
-            updateTokenCount(content, tokenLimits);
-          }
-          return { body: content, id: parsedUrl.path };
-        }
-      } catch (error) {
-        logger.error(`Error fetching content from ${url}:`, { stack: error instanceof Error ? error.stack : String(error) });
-      }
-      return null;
-    })
-  );
-
-  return results
-    .filter((result): result is { body: string; id: string } => result !== null)
-    .map((result) => ({
-      ...result,
-      org: context.payload.repository.owner.login,
-      repo: context.payload.repository.name,
-      issueNumber: parseInt(issue.match(/\/issues\/(\d+)/)?.[1] || "0", 10),
-      issueUrl: url,
-      user: context.payload.sender,
-    }));
-}
-
-export async function pullReadmeFromRepoForIssue(params: FetchParams, tokenLimits?: TokenLimits): Promise<string | undefined> {
-  let readme;
-  try {
-    const response = await params.context.octokit.rest.repos.getContent({
-      owner: params.context.payload.repository.owner?.login || params.context.payload.organization?.login || "",
-      repo: params.context.payload.repository.name,
-      path: "README.md",
-    });
-    if ("content" in response.data) {
-      readme = Buffer.from(response.data.content, "base64").toString();
-      if (tokenLimits) {
-        updateTokenCount(readme, tokenLimits);
-      }
-    }
-  } catch (error) {
-    throw logger.error(`Error fetching README from repository:`, { stack: error instanceof Error ? error.stack : String(error) });
-  }
-  logger.info("Readme fetched from repository");
-  return readme;
-}
-
-export async function fetchSimilarIssues(context: Context, question: string, tokenLimits?: TokenLimits): Promise<LinkedIssues[]> {
-  const {
-    adapters: {
-      supabase: { issue },
-    },
-    octokit,
-    config: { similarityThreshold },
-  } = context;
-
-  try {
-    const similarIssues = await issue.findSimilarIssues(question, 1 - similarityThreshold, "");
-    const linkedIssues: LinkedIssues[] = [];
-
-    if (similarIssues) {
-      for (const similarIssue of similarIssues) {
-        try {
-          const issueId = similarIssue.issue_id;
-          const issueFetched: GqlIssueSearchResult = await octokit.graphql(
-            `
-            query ($nodeId: ID!) {
-              node(id: $nodeId) {
-                ... on Issue {
-                  number
-                  url
-                  body
-                  repository {
-                    owner {
-                      login
-                    }
-                    name
-                  }
-                }
-              }
-            }
-            `,
-            {
-              nodeId: issueId,
-            }
-          );
-
-          if (issueFetched?.node) {
-            if (tokenLimits && issueFetched.node.body) {
-              updateTokenCount(issueFetched.node.body, tokenLimits);
-            }
-            linkedIssues.push({
-              issueNumber: issueFetched.node.number,
-              owner: issueFetched.node.repository.owner.login,
-              repo: issueFetched.node.repository.name,
-              url: issueFetched.node.url,
-              body: issueFetched.node.body,
-            });
-          }
-        } catch (error) {
-          logger.error(`Error fetching similar issue ${similarIssue.issue_id}:`, { stack: error instanceof Error ? error.stack : String(error) });
-          continue;
-        }
-      }
-    }
-    return linkedIssues;
-  } catch (error) {
-    logger.error("Error in fetchSimilarIssues:", { stack: error instanceof Error ? error.stack : String(error) });
-    return [];
-  }
-}
-
-export async function fetchLinkedIssuesFromComment(
-  context: Context,
-  commentBody: string,
-  params: FetchParams,
-  tokenLimits?: TokenLimits
-): Promise<LinkedIssues[]> {
-  const {
-    adapters: {
-      supabase: { comment },
-    },
-    octokit,
-    config: { similarityThreshold },
-  } = context;
-
-  try {
-    const similarComments = await comment.findSimilarComments(commentBody, 1 - similarityThreshold, "");
-    const linkedIssues: LinkedIssues[] = [];
-
-    if (similarComments) {
-      for (const similarComment of similarComments) {
-        try {
-          const commentId = similarComment.comment_id;
-          const commentFetched: { node: { __typename: string } } = await octokit.graphql(
-            `
-            query ($nodeId: ID!) {
-              node(id: $nodeId) {
-                __typename
-                ... on PullRequest {
-                  id
-                  body
-                  closingIssuesReferences(first: 1) {
-                    nodes {
-                      number
-                      url
-                      body
-                      repository {
-                        owner {
-                          login
-                        }
-                        name
-                      }
-                    }
-                  }
-                }
-                ... on PullRequestReviewComment {
-                  id
-                  body
-                  pullRequest {
-                    id
-                    title
-                    url
-                    body
-                    repository {
-                      owner {
-                        login
-                      }
-                      name
-                    }
-                  }
-                }
-                ... on IssueComment {
-                  id
-                  body
-                  issue {
-                    number
-                    url
-                    body
-                    repository {
-                      owner {
-                        login
-                      }
-                      name
-                    }
-                  }
-                }
-              }
-            }
-            `,
-            {
-              nodeId: commentId,
-            }
-          );
-
-          if (commentFetched?.node?.__typename) {
-            if (commentFetched.node.__typename === "IssueComment") {
-              const issueCommentNode = commentFetched as unknown as GqlIssueCommentSearchResult;
-              if (tokenLimits && issueCommentNode.node.issue.body) {
-                updateTokenCount(issueCommentNode.node.issue.body, tokenLimits);
-              }
-              linkedIssues.push({
-                issueNumber: issueCommentNode.node.issue.number,
-                owner: issueCommentNode.node.issue.repository.owner.login,
-                repo: issueCommentNode.node.issue.repository.name,
-                url: issueCommentNode.node.issue.url,
-                body: issueCommentNode.node.issue.body,
-              });
-            } else if (commentFetched.node.__typename === "PullRequest") {
-              const pullRequestNode = commentFetched as unknown as GqlPullRequestSearchResult;
-              const issueData = pullRequestNode.node.closingIssuesReferences.nodes[0];
-              if (tokenLimits && issueData.body) {
-                updateTokenCount(issueData.body, tokenLimits);
-              }
-              linkedIssues.push({
-                issueNumber: issueData.number,
-                owner: issueData.repository.owner.login,
-                repo: issueData.repository.name,
-                url: issueData.url,
-                body: issueData.body,
-              });
-            } else if (commentFetched.node.__typename === "PullRequestReviewComment") {
-              const pullRequestReviewCommentNode = commentFetched as unknown as GqlPullRequestReviewCommentSearchResult;
-              const issueData = pullRequestReviewCommentNode.node.pullRequest.closingIssuesReferences.nodes[0];
-              if (tokenLimits && issueData.body) {
-                updateTokenCount(issueData.body, tokenLimits);
-              }
-              linkedIssues.push({
-                issueNumber: issueData.number,
-                owner: issueData.repository.owner.login,
-                repo: issueData.repository.name,
-                url: issueData.url,
-                body: issueData.body,
-              });
-            }
-          }
-        } catch (error) {
-          const err = error instanceof Error ? error : new Error(String(error));
-          logger.error(`Error processing comment ${similarComment.comment_id}:`, { stack: err.stack });
-          continue;
-        }
-      }
-    }
-    return linkedIssues;
-  } catch (error) {
-    const err = error instanceof Error ? error : new Error(String(error));
-    logger.error("Error in fetchLinkedIssuesFromComment:", { stack: err.stack });
-    return [];
-  }
-}
diff --git a/src/helpers/pull-request-fetching.ts b/src/helpers/pull-request-fetching.ts
index 6df2efc..f6f713d 100644
--- a/src/helpers/pull-request-fetching.ts
+++ b/src/helpers/pull-request-fetching.ts
@@ -91,7 +91,7 @@ export async function fetchPullRequestComments(params: FetchParams) {
     }
 
     pageCount++;
-    logger.info(`Fetching PR comments page ${pageCount}`, { owner, repo, issueNum });
+    logger.debug(`Fetching PR comments page ${pageCount}`, { owner, repo, issueNum });
     const prData: PullRequestGraphQlResponse = await octokit.graphql(
       `
       query($owner: String!, $repo: String!, $number: Int!, $commentsAfter: String, $reviewsAfter: String) {
diff --git a/src/helpers/pull-request-parsing.ts b/src/helpers/pull-request-parsing.ts
index 5a356e1..9703311 100644
--- a/src/helpers/pull-request-parsing.ts
+++ b/src/helpers/pull-request-parsing.ts
@@ -20,7 +20,7 @@ export async function processPullRequestDiff(diff: string, tokenLimits: TokenLimits
   // Using the quick estimate, include as many files as possible without exceeding token limits
   for (const file of estimatedFileDiffStats) {
     if (tokenLimits.runningTokenCount + file.estimatedTokenCount > tokenLimits.tokensRemaining) {
-      logger.info(`Skipping ${file.filename} content to stay within token limits.`);
+      logger.debug(`Skipping ${file.filename} content to stay within token limits.`);
       continue;
     }
     includedFileDiffs.push(file);
@@ -51,7 +51,7 @@ export async function processPullRequestDiff(diff: string, tokenLimits: TokenLimits
   for (const file of accurateFileDiffStats) {
     tokenLimits.runningTokenCount += file.tokenCount;
     tokenLimits.tokensRemaining = tokenLimits.modelMaxTokenLimit - tokenLimits.maxCompletionTokens - tokenLimits.runningTokenCount;
-    logger.info(
+    logger.debug(
       `Added ${file.tokenCount} tokens for ${file.filename}. Running total: ${tokenLimits.runningTokenCount}. Remaining: ${tokenLimits.tokensRemaining}`
     );
   }
@@ -62,7 +62,7 @@ export async function processPullRequestDiff(diff: string, tokenLimits: TokenLimits
     if (removedFile) {
       tokenLimits.runningTokenCount -= removedFile.tokenCount;
       tokenLimits.tokensRemaining = tokenLimits.modelMaxTokenLimit - tokenLimits.maxCompletionTokens - tokenLimits.runningTokenCount;
-      logger.info(
+      logger.debug(
        `Excluded ${removedFile.filename} content after accurate token count exceeded limits. Removed ${removedFile.tokenCount} tokens. New total: ${tokenLimits.runningTokenCount}`
       );
     }
diff --git a/src/types/github-types.ts b/src/types/github-types.ts
index c32813a..d862f47 100644
--- a/src/types/github-types.ts
+++ b/src/types/github-types.ts
@@ -1,14 +1,11 @@
 import { RestEndpointMethodTypes } from "@octokit/rest";
 import { Context } from "./context";
-import { StreamlinedComment } from "./llm";
 
 type BaseIssue = RestEndpointMethodTypes["issues"]["get"]["response"]["data"];
 export interface Issue extends BaseIssue {
   prDetails?: PullRequestDetails;
 }
 
-export type IssueComments = RestEndpointMethodTypes["issues"]["listComments"]["response"]["data"][0];
-export type ReviewComments = RestEndpointMethodTypes["pulls"]["listReviewComments"]["response"]["data"][0];
 export type User = RestEndpointMethodTypes["users"]["getByUsername"]["response"]["data"];
 
 export type FetchParams = {
@@ -71,12 +68,6 @@ export type GqlIssueCommentSearchResult = {
   node: IssueCommentNode;
 };
 
-export interface PullRequestFile {
-  filename: string;
-  diffContent: string;
-  status: "added" | "modified" | "deleted";
-}
-
 export interface PullRequestDetails {
   diff: string | null;
 }
@@ -134,20 +125,3 @@ export interface TreeProcessingQueue {
   parent?: string;
   priority: number;
 }
-
-export interface ProcessingContext {
-  params: FetchParams;
-  issueTree: Record;
-  seen: Set<string>;
-  processingQueue: TreeProcessingQueue[];
-  linkedIssues: LinkedIssues[];
-  specAndBodies: Record<string, string>;
-  streamlinedComments: Record<string, StreamlinedComment[]>;
-  maxDepth: number;
-}
-
-export interface CommentProcessingResult {
-  processedComments: number;
-  linkedIssues: LinkedIssues[];
-  hasCodeReferences: boolean;
-}
diff --git a/src/types/plugin-input.ts b/src/types/plugin-input.ts
index 31a9ff3..37f0060 100644
--- a/src/types/plugin-input.ts
+++ b/src/types/plugin-input.ts
@@ -12,7 +12,7 @@ export const pluginSettingsSchema = T.Object({
   model: T.String({ default: "o1-mini" }),
   openAiBaseUrl: T.Optional(T.String()),
   similarityThreshold: T.Number({ default: 0.9 }),
-  maxDepth: T.Optional(T.Number({ default: 3 })),
+  maxDepth: T.Optional(T.Number({ default: 3 })), // max depth of the chat history to be fetched
   maxTokens: T.Number({ default: 10000 }),
 });
 
diff --git a/tests/main.test.ts b/tests/main.test.ts
index 5195095..88a4f80 100644
--- a/tests/main.test.ts
+++ b/tests/main.test.ts
@@ -11,7 +11,7 @@ import { envSchema } from "../src/types/env";
 import { CompletionsType } from "../src/adapters/openai/helpers/completions";
 import { logger } from "../src/helpers/errors";
 import { Octokit } from "@octokit/rest";
-import { createKey } from "../src/handlers/comments";
+import { createKey } from "../src/helpers/issue-fetching";
 
 const TEST_QUESTION = "what is pi?";
 const LOG_CALLER = "_Logs.";
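
Reviewer note: createKey moved verbatim from the deleted src/handlers/comments.ts into
src/helpers/issue-fetching.ts, so callers only change their import path (see the
tests/main.test.ts hunk above). A minimal usage sketch of the relocated helper; the
owner/repo URL below is hypothetical, chosen to exercise the length-7 issue-URL branch:

    import { createKey } from "./src/helpers/issue-fetching";

    // "https://github.com/owner/repo/issues/123".split("/") yields 7 segments
    // ("https:", "", "github.com", "owner", "repo", "issues", "123"),
    // so createKey returns the "org/repo/number" map key: "owner/repo/123".
    const key = createKey("https://github.com/owner/repo/issues/123");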