diff --git a/.gitignore b/.gitignore index 771961ced618..3384fc1ae5f8 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,7 @@ out/ .env .idea/ .run/ +.test/ **/*.iml **/*.vsix index.scip diff --git a/.vscode/settings.json b/.vscode/settings.json index cf59f1700dcb..fb52789c99c2 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -26,6 +26,9 @@ }, "editor.insertSpaces": true, "cSpell.words": ["Supercompletion", "Supercompletions"], + "[json]": { + "editor.defaultFormatter": "biomejs.biome", + }, "[typescript]": { "editor.defaultFormatter": "biomejs.biome" }, diff --git a/agent/src/cli/cody-bench/strategy-fix.ts b/agent/src/cli/cody-bench/strategy-fix.ts index c6bf18c84b8f..84bd42272615 100644 --- a/agent/src/cli/cody-bench/strategy-fix.ts +++ b/agent/src/cli/cody-bench/strategy-fix.ts @@ -3,7 +3,7 @@ import { PromptString, ps } from '@sourcegraph/cody-shared' import { glob } from 'glob' import * as vscode from 'vscode' import { ProtocolTextDocumentWithUri } from '../../../../vscode/src/jsonrpc/TextDocumentWithUri' -import { fileExists } from '../../../../vscode/src/local-context/download-symf' +import { pathExists } from '../../../../vscode/src/local-context/utils' import { redactAuthorizationHeader } from '../../../../vscode/src/testutils/CodyPersister' import { AgentTextDocument } from '../../AgentTextDocument' import { TestClient } from '../../TestClient' @@ -31,7 +31,7 @@ export async function evaluateFixStrategy( token: options.srcAccessToken, }, }) - if (!(await fileExists(path.join(options.workspace, 'node_modules')))) { + if (!(await pathExists(path.join(options.workspace, 'node_modules')))) { // Run pnpm install only when `node_modules` doesn't exist. await runVoidCommand(options.installCommand, options.workspace) } diff --git a/agent/src/cli/cody-bench/strategy-unit-test.ts b/agent/src/cli/cody-bench/strategy-unit-test.ts index dbd5f80b4743..e2b0fa129e05 100644 --- a/agent/src/cli/cody-bench/strategy-unit-test.ts +++ b/agent/src/cli/cody-bench/strategy-unit-test.ts @@ -3,7 +3,7 @@ import _ from 'lodash' import * as vscode from 'vscode' import yaml from 'yaml' import type { RpcMessageHandler } from '../../../../vscode/src/jsonrpc/jsonrpc' -import { fileExists } from '../../../../vscode/src/local-context/download-symf' +import { fileExists } from '../../../../vscode/src/local-context/utils' import { redactAuthorizationHeader } from '../../../../vscode/src/testutils/CodyPersister' import { TestClient } from '../../TestClient' import { getLanguageForFileName } from '../../language' diff --git a/lib/shared/src/sourcegraph-api/graphql/url.ts b/lib/shared/src/sourcegraph-api/graphql/url.ts index 36a7ff7f5e18..60a4f802e9ba 100644 --- a/lib/shared/src/sourcegraph-api/graphql/url.ts +++ b/lib/shared/src/sourcegraph-api/graphql/url.ts @@ -1,5 +1,3 @@ -import { trimEnd } from 'lodash' - const GRAPHQL_URI = '/.api/graphql' interface BuildGraphQLUrlOptions { @@ -12,5 +10,5 @@ interface BuildGraphQLUrlOptions { export const buildGraphQLUrl = ({ request, baseUrl }: BuildGraphQLUrlOptions): string => { const nameMatch = request ? request.match(/^\s*(?:query|mutation)\s+(\w+)/) : '' const apiURL = `${GRAPHQL_URI}${nameMatch ? `?${nameMatch[1]}` : ''}` - return baseUrl ? new URL(trimEnd(baseUrl, '/') + apiURL).href : apiURL + return baseUrl ? 
new URL(apiURL, baseUrl).href : apiURL
 }
diff --git a/lib/shared/src/utils.ts b/lib/shared/src/utils.ts
index a51ebba9e02a..b462d7913f53 100644
--- a/lib/shared/src/utils.ts
+++ b/lib/shared/src/utils.ts
@@ -99,3 +99,25 @@ export function createSubscriber(): Subscriber {
 export function nextTick() {
     return new Promise(resolve => process.nextTick(resolve))
 }
+
+export type SemverString<Prefix extends string> = `${Prefix}${number}.${number}.${number}`
+
+export namespace SemverString {
+    const splitPrefixRegex = /^(?<prefix>.*)(?<version>\d+\.\d+\.\d+)$/
+    export function forcePrefix<P extends string>(prefix: P, value: string): SemverString<P> {
+        const match = splitPrefixRegex.exec(value)
+        if (!match || !match.groups?.version) {
+            throw new Error(`Invalid semver string: ${value}`)
+        }
+        return `${prefix}${match.groups?.version}` as SemverString<P>
+ } +} + +type TupleFromUnion = [T] extends [never] + ? [] + : T extends any + ? [T, ...TupleFromUnion>] + : [] + +// Helper type to ensure an array contains all members of T +export type ArrayContainsAll = TupleFromUnion diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index e0c5d2d7d8bb..b962fb292e5e 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -56,7 +56,7 @@ importers: version: 8.0.5(react-dom@18.2.0)(react@18.2.0)(typescript@5.4.2)(vite@5.2.9) '@testing-library/jest-dom': specifier: ^6.4.2 - version: 6.4.2(vitest@1.5.0) + version: 6.4.2(vitest@1.6.0) '@testing-library/react': specifier: ^14.2.2 version: 14.2.2(react-dom@18.2.0)(react@18.2.0) @@ -104,7 +104,7 @@ importers: version: 5.2.9(@types/node@20.12.7) vitest: specifier: ^1.5.0 - version: 1.5.0(@types/node@20.12.7)(happy-dom@14.3.10)(jsdom@22.1.0) + version: 1.6.0(@types/node@20.12.7)(happy-dom@14.3.10)(jsdom@22.1.0) agent: dependencies: @@ -493,6 +493,9 @@ importers: glob: specifier: ^7.2.3 version: 7.2.3 + graceful-fs: + specifier: ^4.2.11 + version: 4.2.11 he: specifier: ^1.2.0 version: 1.2.0 @@ -562,6 +565,9 @@ importers: semver: specifier: ^7.5.4 version: 7.5.4 + signal-exit: + specifier: ^4.1.0 + version: 4.1.0 socks-proxy-agent: specifier: ^8.0.1 version: 8.0.1 @@ -593,9 +599,15 @@ importers: '@google-cloud/pubsub': specifier: ^3.7.3 version: 3.7.3 + '@npmcli/promise-spawn': + specifier: ^7.0.2 + version: 7.0.2 '@playwright/test': specifier: 1.44.1 version: 1.44.1 + '@pollyjs/adapter': + specifier: ^6.0.6 + version: 6.0.6 '@pollyjs/adapter-node-http': specifier: ^6.0.6 version: 6.0.6 @@ -629,6 +641,9 @@ importers: '@types/glob': specifier: ^8.0.0 version: 8.0.0 + '@types/graceful-fs': + specifier: ^4.1.9 + version: 4.1.9 '@types/ini': specifier: ^4.1.0 version: 4.1.0 @@ -647,6 +662,9 @@ importers: '@types/mocha': specifier: ^10.0.6 version: 10.0.6 + '@types/npmcli__promise-spawn': + specifier: ^6.0.3 + version: 6.0.3 '@types/pako': specifier: ^2.0.3 version: 2.0.3 @@ -656,6 +674,9 @@ importers: '@types/semver': specifier: ^7.5.0 version: 7.5.0 + '@types/signal-exit': + specifier: ^3.0.4 + version: 3.0.4 '@types/unzipper': specifier: ^0.10.7 version: 0.10.7 @@ -683,6 +704,9 @@ importers: ajv-formats: specifier: ^3.0.1 version: 3.0.1(ajv@8.14.0) + chokidar: + specifier: ^3.6.0 + version: 3.6.0 concurrently: specifier: ^8.2.0 version: 8.2.0 @@ -704,6 +728,9 @@ importers: fuzzysort: specifier: ^2.0.4 version: 2.0.4 + http-proxy-middleware: + specifier: ^3.0.0 + version: 3.0.0 mocha: specifier: ^10.2.0 version: 10.2.0 @@ -731,6 +758,9 @@ importers: typescript-language-server: specifier: ^4.3.3 version: 4.3.3 + ulidx: + specifier: ^2.3.0 + version: 2.3.0 vite-plugin-svgr: specifier: ^4.2.0 version: 4.2.0(typescript@5.4.2)(vite@5.2.11) @@ -743,6 +773,9 @@ importers: yaml: specifier: ^2.3.4 version: 2.3.4 + zod: + specifier: ^3.23.8 + version: 3.23.8 web: dependencies: @@ -876,7 +909,7 @@ packages: '@babel/traverse': 7.24.5 '@babel/types': 7.24.5 convert-source-map: 2.0.0 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 gensync: 1.0.0-beta.2 json5: 2.2.3 semver: 6.3.1 @@ -957,7 +990,7 @@ packages: '@babel/core': 7.24.5 '@babel/helper-compilation-targets': 7.23.6 '@babel/helper-plugin-utils': 7.24.5 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 lodash.debounce: 4.0.8 resolve: 1.22.8 transitivePeerDependencies: @@ -2123,7 +2156,7 @@ packages: '@babel/helper-split-export-declaration': 7.24.5 '@babel/parser': 7.24.5 '@babel/types': 7.24.5 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 globals: 11.12.0 
transitivePeerDependencies: - supports-color @@ -3002,7 +3035,7 @@ packages: resolution: {integrity: sha512-ribfPYfHb+Uw3b27Eiw6NPqjhIhTpVFzEWLwyc/1Xp+DCdwRRyIlAUODX+9bPARF6aQtUu1+/PHzdNvRzcs/+Q==} engines: {node: '>= 12'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 http-errors: 2.0.0 koa-compose: 4.1.0 methods: 1.1.2 @@ -3324,6 +3357,13 @@ packages: rimraf: 3.0.2 dev: false + /@npmcli/promise-spawn@7.0.2: + resolution: {integrity: sha512-xhfYPXoV5Dy4UkY0D+v2KkwvnDfiA/8Mt3sWCGI/hM03NsYIH8ZaG6QzS9x7pje5vHZBZJ2v6VRFVTWACnqcmQ==} + engines: {node: ^16.14.0 || >=18.0.0} + dependencies: + which: 4.0.0 + dev: true + /@openctx/client@0.0.19: resolution: {integrity: sha512-zyfCojoQlkqsNwEJERdCpAs1ytJnAQ/bFEfPM0wv6npkGDpjG0pagQonx7wY7gR2g1+gRDonS29hYeRU/i98lQ==} dependencies: @@ -5785,7 +5825,7 @@ packages: pretty-format: 27.5.1 dev: true - /@testing-library/jest-dom@6.4.2(vitest@1.5.0): + /@testing-library/jest-dom@6.4.2(vitest@1.6.0): resolution: {integrity: sha512-CzqH0AFymEMG48CpzXFriYYkOjk6ZGPCLMhW9e9jg3KMCn5OfJecF8GtGW7yGfR/IgCe3SX8BSwjdzI6BBbZLw==} engines: {node: '>=14', npm: '>=6', yarn: '>=1'} peerDependencies: @@ -5814,7 +5854,7 @@ packages: dom-accessibility-api: 0.6.3 lodash: 4.17.21 redent: 3.0.0 - vitest: 1.5.0(@types/node@20.12.7)(happy-dom@14.3.10)(jsdom@22.1.0) + vitest: 1.6.0(@types/node@20.12.7)(happy-dom@14.3.10)(jsdom@22.1.0) dev: true /@testing-library/react@14.2.2(react-dom@18.2.0)(react@18.2.0): @@ -6029,6 +6069,12 @@ packages: resolution: {integrity: sha512-40um9QqwHjRS92qnOaDpL7RmDK15NuZYo9HihiJRbYkMQZlWnuH8AdvbMy8/o6lgLmKbDUKa+OALCltHdbOTpQ==} dev: false + /@types/graceful-fs@4.1.9: + resolution: {integrity: sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==} + dependencies: + '@types/node': 20.12.7 + dev: true + /@types/hast@2.3.10: resolution: {integrity: sha512-McWspRw8xx8J9HurkVBfYj0xKoE25tOFlHGdx4MJ5xORQrMGZNqJhVQWaIbm6Oyla5kYOXtDiopzKRJzEOkwJw==} dependencies: @@ -6048,6 +6094,12 @@ packages: resolution: {integrity: sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==} dev: true + /@types/http-proxy@1.17.14: + resolution: {integrity: sha512-SSrD0c1OQzlFX7pGu1eXxSEjemej64aaNPRhhVYUGqXh0BtldAAx37MG8btcumvpgKyZp1F5Gn3JkktdxiFv6w==} + dependencies: + '@types/node': 20.12.7 + dev: true + /@types/ini@4.1.0: resolution: {integrity: sha512-mTehMtc+xtnWBBvqizcqYCktKDBH2WChvx1GU3Sfe4PysFDXiNe+1YwtpVX1MDtCa4NQrSPw2+3HmvXHY3gt1w==} dev: true @@ -6074,6 +6126,7 @@ packages: /@types/lodash@4.17.0: resolution: {integrity: sha512-t7dhREVv6dbNj0q17X12j7yDG4bD/DHYX7o5/DbDxobP0HnGPgpRz2Ej77aL7TZT3DSw13fqUTj8J4mMnqa7WA==} + dev: false /@types/long@4.0.2: resolution: {integrity: sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==} @@ -6165,6 +6218,12 @@ packages: resolution: {integrity: sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==} dev: true + /@types/npmcli__promise-spawn@6.0.3: + resolution: {integrity: sha512-4EQlesp5HtYHPXMXd4uuI+Q9hELEU0eVg/HBRLkqGC5U2ohwXZduCottmzPpb4tWCB+w4kQ3XNPlHIdXvCTyFw==} + dependencies: + '@types/node': 20.12.7 + dev: true + /@types/open@6.2.1: resolution: {integrity: sha512-CzV16LToFaKwm1FfplVTF08E3pznw4fQNCQ87N+A1RU00zu/se7npvb6IC9db3/emnSThQ6R8qFKgrei2M4EYQ==} deprecated: This is a stub types definition. open provides its own type definitions, so you do not need this installed. 
@@ -6251,6 +6310,10 @@ packages: resolution: {integrity: sha512-9Hp0ObzwwO57DpLFF0InUjUm/II8GmKAvzbefxQTihCb7KI6yc9yzf0nLc4mVdby5N4DRCgQM2wCup9KTieeww==} dev: false + /@types/signal-exit@3.0.4: + resolution: {integrity: sha512-e7EUPfU9afHyWc5CXtlqbvVHEshrb05uPlDCenWIbMgtWoFrTuTDVYNLKk6o4X2/4oHTfNqrJX/vaJ3uBhtXTg==} + dev: true + /@types/svg2ttf@5.0.3: resolution: {integrity: sha512-hL+/A4qMISvDbDTtdY73R0zuvsdc7YRYnV5FyAfKVGk8OsluXu/tCFxop7IB5Sgr+ZCS0hHtFxylD0REmm+abA==} dev: false @@ -6349,29 +6412,12 @@ packages: - supports-color dev: true - /@vitest/expect@1.5.0: - resolution: {integrity: sha512-0pzuCI6KYi2SIC3LQezmxujU9RK/vwC1U9R0rLuGlNGcOuDWxqWKu6nUdFsX9tH1WU0SXtAxToOsEjeUn1s3hA==} - dependencies: - '@vitest/spy': 1.5.0 - '@vitest/utils': 1.5.0 - chai: 4.4.1 - dev: true - /@vitest/expect@1.6.0: resolution: {integrity: sha512-ixEvFVQjycy/oNgHjqsL6AZCDduC+tflRluaHIzKIsdbzkLn2U/iBnVeJwB6HsIjQBdfMR8Z0tRxKUsvFJEeWQ==} dependencies: '@vitest/spy': 1.6.0 '@vitest/utils': 1.6.0 chai: 4.4.1 - dev: false - - /@vitest/runner@1.5.0: - resolution: {integrity: sha512-7HWwdxXP5yDoe7DTpbif9l6ZmDwCzcSIK38kTSIt6CFEpMjX4EpCgT6wUmS0xTXqMI6E/ONmfgRKmaujpabjZQ==} - dependencies: - '@vitest/utils': 1.5.0 - p-limit: 5.0.0 - pathe: 1.1.2 - dev: true /@vitest/runner@1.6.0: resolution: {integrity: sha512-P4xgwPjwesuBiHisAVz/LSSZtDjOTPYZVmNAnpHHSR6ONrf8eCJOFRvUwdHn30F5M1fxhqtl7QZQUk2dprIXAg==} @@ -6379,15 +6425,6 @@ packages: '@vitest/utils': 1.6.0 p-limit: 5.0.0 pathe: 1.1.2 - dev: false - - /@vitest/snapshot@1.5.0: - resolution: {integrity: sha512-qpv3fSEuNrhAO3FpH6YYRdaECnnRjg9VxbhdtPwPRnzSfHVXnNzzrpX4cJxqiwgRMo7uRMWDFBlsBq4Cr+rO3A==} - dependencies: - magic-string: 0.30.10 - pathe: 1.1.2 - pretty-format: 29.7.0 - dev: true /@vitest/snapshot@1.6.0: resolution: {integrity: sha512-+Hx43f8Chus+DCmygqqfetcAZrDJwvTj0ymqjQq4CvmpKFSTVteEOBzCusu1x2tt4OJcvBflyHUE0DZSLgEMtQ==} @@ -6395,28 +6432,11 @@ packages: magic-string: 0.30.10 pathe: 1.1.2 pretty-format: 29.7.0 - dev: false - - /@vitest/spy@1.5.0: - resolution: {integrity: sha512-vu6vi6ew5N5MMHJjD5PoakMRKYdmIrNJmyfkhRpQt5d9Ewhw9nZ5Aqynbi3N61bvk9UvZ5UysMT6ayIrZ8GA9w==} - dependencies: - tinyspy: 2.2.1 - dev: true /@vitest/spy@1.6.0: resolution: {integrity: sha512-leUTap6B/cqi/bQkXUu6bQV5TZPx7pmMBKBQiI0rJA8c3pB56ZsaTbREnF7CJfmvAS4V2cXIBAh/3rVwrrCYgw==} dependencies: tinyspy: 2.2.1 - dev: false - - /@vitest/utils@1.5.0: - resolution: {integrity: sha512-BDU0GNL8MWkRkSRdNFvCUCAVOeHaUlVJ9Tx0TYBZyXaaOTmGtUFObzchCivIBrIwKzvZA7A9sCejVhXM2aY98A==} - dependencies: - diff-sequences: 29.6.3 - estree-walker: 3.0.3 - loupe: 2.3.7 - pretty-format: 29.7.0 - dev: true /@vitest/utils@1.6.0: resolution: {integrity: sha512-21cPiuGMoMZwiOHa2i4LXkMkMkCGzA+MVFV70jRwHo95dL4x/ts5GZhML1QWuy7yfp3WzK3lRvZi3JnXTYqrBw==} @@ -6425,7 +6445,6 @@ packages: estree-walker: 3.0.3 loupe: 2.3.7 pretty-format: 29.7.0 - dev: false /@vitest/web-worker@1.4.0(vitest@1.6.0): resolution: {integrity: sha512-JgLAVtPpF2/AJTI3y79eq8RrKEdK4lFS7gxT9O2gAbke1rbhRqpPoM/acQHWA6RrrX9Jci+Yk+ZQuOGON4D4ZA==} @@ -6587,12 +6606,12 @@ packages: acorn: 7.4.1 dev: true - /acorn-jsx@5.3.2(acorn@8.11.3): + /acorn-jsx@5.3.2(acorn@8.12.0): resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} peerDependencies: acorn: ^6.0.0 || ^7.0.0 || ^8.0.0 dependencies: - acorn: 8.11.3 + acorn: 8.12.0 dev: true /acorn-walk@7.2.0: @@ -6604,6 +6623,12 @@ packages: resolution: {integrity: sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==} 
engines: {node: '>=0.4.0'} + /acorn-walk@8.3.3: + resolution: {integrity: sha512-MxXdReSRhGO7VlFe1bRG/oI7/mdLV9B9JJT0N8vZOhF7gFRR5l3M8W9G8JxmKV+JC5mGqJ0QvqfSOLsCPa4nUw==} + engines: {node: '>=0.4.0'} + dependencies: + acorn: 8.12.0 + /acorn@7.4.1: resolution: {integrity: sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A==} engines: {node: '>=0.4.0'} @@ -6615,6 +6640,11 @@ packages: engines: {node: '>=0.4.0'} hasBin: true + /acorn@8.12.0: + resolution: {integrity: sha512-RTvkC4w+KNXrM39/lWCUaG0IbRkWdCv7W/IOW9oU6SawyxulvkQy5HQPVTKxEjczcUvapcrw3cFx/60VN/NRNw==} + engines: {node: '>=0.4.0'} + hasBin: true + /address@1.2.2: resolution: {integrity: sha512-4B/qKCfeE/ODUaAUpSwfzazo5x29WD4r3vXiWsB7I2mSDAihwEqKO+g8GELZUQSSAo5e1XTYh3ZVfLyxBc12nA==} engines: {node: '>= 10.0.0'} @@ -6624,7 +6654,7 @@ packages: resolution: {integrity: sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==} engines: {node: '>= 6.0.0'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color @@ -6632,7 +6662,7 @@ packages: resolution: {integrity: sha512-H0TSyFNDMomMNJQBn8wFV5YC/2eJ+VXECwOadZJT554xP6cODZHPX3H9QMQECxvrgiSOP1pHjy1sMWQVYJOUOA==} engines: {node: '>= 14'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color @@ -6902,7 +6932,7 @@ packages: /axios@1.3.6: resolution: {integrity: sha512-PEcdkk7JcdPiMDkvM4K6ZBRYq9keuVJsToxm2zQIM70Qqo2WHTdJZMXcG9X+RmRp2VPNUQC8W1RAGbgt6b1yMg==} dependencies: - follow-redirects: 1.15.6 + follow-redirects: 1.15.6(debug@4.3.5) form-data: 4.0.0 proxy-from-env: 1.1.0 transitivePeerDependencies: @@ -7366,7 +7396,7 @@ packages: dependencies: assertion-error: 1.1.0 check-error: 1.0.3 - deep-eql: 4.1.3 + deep-eql: 4.1.4 get-func-name: 2.0.2 loupe: 2.3.7 pathval: 1.1.1 @@ -7995,6 +8025,17 @@ packages: ms: 2.1.2 supports-color: 8.1.1 + /debug@4.3.5: + resolution: {integrity: sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.2 + /decamelize-keys@1.1.1: resolution: {integrity: sha512-WiPxgEirIV0/eIOMcnFBA3/IJZAZqKnwAwWyvvdi4lsr1WCN22nhdf/3db3DoZcUjTV2SqfzIwNyp6y2xs3nmg==} engines: {node: '>=0.10.0'} @@ -8051,8 +8092,8 @@ packages: optional: true dev: false - /deep-eql@4.1.3: - resolution: {integrity: sha512-WaEtAOpRA1MQ0eohqZjpGD8zdI0Ovsm8mmFhaDN8dvDZzyoUMcYDnf5Y6iu7HTXxf8JDS23qWa4a+hKCDyOPzw==} + /deep-eql@4.1.4: + resolution: {integrity: sha512-SUwdGfqdKOwxCPeVYjwSyRpJ7Z+fhpwIAtmCUdZIWZ/YP5R9WAsyuSgpLVDi9bjWoN2LXHNss/dk3urXtdQxGg==} engines: {node: '>=6'} dependencies: type-detect: 4.0.8 @@ -8241,7 +8282,7 @@ packages: hasBin: true dependencies: address: 1.2.2 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color dev: true @@ -8496,7 +8537,7 @@ packages: peerDependencies: esbuild: '>=0.12 <1' dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 esbuild: 0.18.20 transitivePeerDependencies: - supports-color @@ -8620,8 +8661,8 @@ packages: resolution: {integrity: sha512-oruZaFkjorTpF32kDSI5/75ViwGeZginGGy2NoOSg3Q9bnwlnmDm4HLnkl0RE3n+njDXR037aY1+x58Z/zFdwQ==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} dependencies: - acorn: 8.11.3 - acorn-jsx: 5.3.2(acorn@8.11.3) + acorn: 8.12.0 + acorn-jsx: 5.3.2(acorn@8.12.0) eslint-visitor-keys: 3.4.3 dev: true 
@@ -8664,6 +8705,10 @@ packages: resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} engines: {node: '>=6'} + /eventemitter3@4.0.7: + resolution: {integrity: sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==} + dev: true + /events@3.3.0: resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} engines: {node: '>=0.8.x'} @@ -9060,7 +9105,7 @@ packages: engines: {node: '>=0.4.0'} dev: true - /follow-redirects@1.15.6: + /follow-redirects@1.15.6(debug@4.3.5): resolution: {integrity: sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==} engines: {node: '>=4.0'} peerDependencies: @@ -9068,6 +9113,8 @@ packages: peerDependenciesMeta: debug: optional: true + dependencies: + debug: 4.3.5 /for-each@0.3.3: resolution: {integrity: sha512-jqYfLp7mo9vIyQf8ykW2v7A+2N4QjeCeI5+Dz9XraiO1ign81wjiH7Fb9vSOWvQfNtmSa4H2RoQTrrXivdUZmw==} @@ -9302,7 +9349,7 @@ packages: dependencies: basic-ftp: 5.0.5 data-uri-to-buffer: 6.0.2 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 fs-extra: 11.2.0 transitivePeerDependencies: - supports-color @@ -9539,7 +9586,7 @@ packages: source-map: 0.6.1 wordwrap: 1.0.0 optionalDependencies: - uglify-js: 3.17.4 + uglify-js: 3.18.0 dev: true /happy-dom@14.3.10: @@ -9802,7 +9849,7 @@ packages: resolution: {integrity: sha512-Ci5LRufQ8AtrQ1U26AevS8QoMXDOhnAHCJI3eZu1com7mZGHxREmw3dNj85ftpQokQCvak8nI2pnFS8zyM1M+Q==} engines: {node: '>=4.0.0'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color @@ -9812,7 +9859,7 @@ packages: dependencies: '@tootallnate/once': 1.1.2 agent-base: 6.0.2 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color dev: true @@ -9823,7 +9870,7 @@ packages: dependencies: '@tootallnate/once': 2.0.0 agent-base: 6.0.2 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color @@ -9836,12 +9883,37 @@ packages: transitivePeerDependencies: - supports-color + /http-proxy-middleware@3.0.0: + resolution: {integrity: sha512-36AV1fIaI2cWRzHo+rbcxhe3M3jUDCNzc4D5zRl57sEWRAxdXYtw7FSQKYY6PDKssiAKjLYypbssHk+xs/kMXw==} + engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0} + dependencies: + '@types/http-proxy': 1.17.14 + debug: 4.3.5 + http-proxy: 1.18.1(debug@4.3.5) + is-glob: 4.0.3 + is-plain-obj: 3.0.0 + micromatch: 4.0.5 + transitivePeerDependencies: + - supports-color + dev: true + + /http-proxy@1.18.1(debug@4.3.5): + resolution: {integrity: sha512-7mz/721AbnJwIVbnaSv1Cz3Am0ZLT/UBwkC92VlxhXv/k/BBQfM2fXElQNC27BVGr0uwUpplYPQM9LnaBMR5NQ==} + engines: {node: '>=8.0.0'} + dependencies: + eventemitter3: 4.0.7 + follow-redirects: 1.15.6(debug@4.3.5) + requires-port: 1.0.0 + transitivePeerDependencies: + - debug + dev: true + /https-proxy-agent@5.0.1: resolution: {integrity: sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==} engines: {node: '>= 6'} dependencies: agent-base: 6.0.2 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 transitivePeerDependencies: - supports-color @@ -10253,6 +10325,11 @@ packages: engines: {node: '>=8'} dev: true + /is-plain-obj@3.0.0: + resolution: {integrity: sha512-gwsOE28k+23GP1B6vFl1oVh/WOzmawBrKwo5Ev6wMKzPkaXaCDIQKzLnvsA42DRlbVTWorkgTKIviAKCWkfUwA==} + engines: {node: '>=10'} + dev: true + /is-plain-obj@4.1.0: resolution: {integrity: 
sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} engines: {node: '>=12'} @@ -10373,6 +10450,11 @@ packages: /isexe@2.0.0: resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + /isexe@3.1.1: + resolution: {integrity: sha512-LpB/54B+/2J5hqQ7imZHfdU31OlgQqx7ZicVlkm9kzg9/w8GKLEcFfJl/t7DCEDueOyBAD6zCCwTO6Fzs0NoEQ==} + engines: {node: '>=16'} + dev: true + /isobject@2.1.0: resolution: {integrity: sha512-+OUdGJlgjOBZDfxnDjYYG6zp487z0JGNQq3cYQYg5f5hKR+syHMsaztzGeml/4kGG55CSpKSpWTY+jYGgsHLgA==} engines: {node: '>=0.10.0'} @@ -10732,7 +10814,7 @@ packages: resolution: {integrity: sha512-rm71jaA/P+6HeCpoRhmCv8KVBIi0tfGuO/dMKicbQnQW/YJntJ6MnnspkodoA4QstMVEZArsCphmd0bJEtoMjQ==} engines: {node: '>= 7.6.0'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 koa-compose: 4.1.0 transitivePeerDependencies: - supports-color @@ -10742,7 +10824,7 @@ packages: resolution: {integrity: sha512-tmcyQ/wXXuxpDxyNXv5yNNkdAMdFRqwtegBXUaowiQzUKqJehttS0x2j0eOZDQAyloAth5w6wwBImnFzkUz3pQ==} engines: {node: '>= 8'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 http-errors: 1.8.1 resolve-path: 1.4.0 transitivePeerDependencies: @@ -10768,7 +10850,7 @@ packages: content-disposition: 0.5.4 content-type: 1.0.5 cookies: 0.9.1 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 delegates: 1.0.0 depd: 2.0.0 destroy: 1.2.0 @@ -10790,6 +10872,10 @@ packages: - supports-color dev: true + /layerr@2.1.0: + resolution: {integrity: sha512-xDD9suWxfBYeXgqffRVH/Wqh+mqZrQcqPRn0I0ijl7iJQ7vu8gMGPt1Qop59pEW/jaIDNUN7+PX1Qk40+vuflg==} + dev: true + /lazy-universal-dotenv@4.0.0: resolution: {integrity: sha512-aXpZJRnTkpK6gQ/z4nk+ZBLd/Qdp118cvPruLSIQzQNRhKwEcdXCOzXuF55VDqIiuAaY3UGZ10DJtvZzDcvsxg==} engines: {node: '>=14.0.0'} @@ -10855,8 +10941,8 @@ packages: resolution: {integrity: sha512-ok6z3qlYyCDS4ZEU27HaU6x/xZa9Whf8jD4ptH5UZTQYZVYeb9bnZ3ojVhiJNLiXK1Hfc0GNbLXcmZ5plLDDBg==} engines: {node: '>=14'} dependencies: - mlly: 1.6.1 - pkg-types: 1.1.0 + mlly: 1.7.1 + pkg-types: 1.1.1 /locate-path@3.0.0: resolution: {integrity: sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==} @@ -11646,7 +11732,7 @@ packages: resolution: {integrity: sha512-o/sd0nMof8kYff+TqcDx3VSrgBTcZpSvYcAHIfHhv5VAuNmisCxjhx6YmxS8PFEpb9z5WKWKPdzf0jM23ro3RQ==} dependencies: '@types/debug': 4.1.12 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 decode-named-character-reference: 1.0.2 devlop: 1.1.0 micromark-core-commonmark: 2.0.1 @@ -11859,12 +11945,12 @@ packages: hasBin: true dev: false - /mlly@1.6.1: - resolution: {integrity: sha512-vLgaHvaeunuOXHSmEbZ9izxPx3USsk8KCQ8iC+aTlp5sKRSoZvwhHh5L9VbKSaVC6sJDqbyohIS76E2VmHIPAA==} + /mlly@1.7.1: + resolution: {integrity: sha512-rrVRZRELyQzrIUAVMHxP97kv+G786pHmOKzuFII8zDYahFBS7qnHh2AlYSl1GAHhaMPCz6/oHjVMcfFYgFYHgA==} dependencies: - acorn: 8.11.3 + acorn: 8.12.0 pathe: 1.1.2 - pkg-types: 1.1.0 + pkg-types: 1.1.1 ufo: 1.5.3 /mocha@10.2.0: @@ -12013,7 +12099,7 @@ packages: resolution: {integrity: sha512-yAyTfdeNJGGBFxWdzSKCBYxs5FxLbCg5X5Q4ets974hcQzG1+qCxvIyOo4j2Ry6MUlhWVMX4OoYDefAIIwupjw==} engines: {node: '>= 10.13'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 json-stringify-safe: 5.0.1 propagate: 2.0.1 transitivePeerDependencies: @@ -12344,7 +12430,7 @@ packages: dependencies: '@vscode/vsce': 2.22.0 commander: 6.2.1 - follow-redirects: 1.15.6 + follow-redirects: 1.15.6(debug@4.3.5) is-ci: 2.0.0 leven: 3.1.0 semver: 7.6.0 
@@ -12416,7 +12502,7 @@ packages: dependencies: '@tootallnate/quickjs-emscripten': 0.23.0 agent-base: 7.1.1 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 get-uri: 6.0.3 http-proxy-agent: 7.0.2 https-proxy-agent: 7.0.4 @@ -12641,11 +12727,11 @@ packages: find-up: 5.0.0 dev: true - /pkg-types@1.1.0: - resolution: {integrity: sha512-/RpmvKdxKf8uILTtoOhAgf30wYbP2Qw+L9p3Rvshx1JZVX+XQNZQFjlbmGHEGIm4CkVPlSn+NXmIM8+9oWQaSA==} + /pkg-types@1.1.1: + resolution: {integrity: sha512-ko14TjmDuQJ14zsotODv7dBlwxKhUKQEhuhmbqo1uCi9BB0Z2alo/wAXg6q1dTR5TyuqYyWhjtfe/Tsh+X28jQ==} dependencies: confbox: 0.1.7 - mlly: 1.6.1 + mlly: 1.7.1 pathe: 1.1.2 /playwright-core@1.43.1: @@ -13563,7 +13649,7 @@ packages: resolution: {integrity: sha512-nQFEv9gRw6SJAwWD2LrL0NmQvAcO7FBwJbwmr2ttPAacfy0xuiOjE5zt+zM4xDyuyvUaxBi/9gb2SoCyNEVJcw==} engines: {node: '>=8.6.0'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 module-details-from-path: 1.0.3 resolve: 1.22.8 transitivePeerDependencies: @@ -13635,7 +13721,7 @@ packages: resolution: {integrity: sha512-wfI3pk7EE80lCIXprqh7ym48IHYdwmAAzESdbU8Q9l7pnRCk9LEhpbOTNKjz6FARLm/Bl5m+4F0ABxOkYUujSQ==} engines: {node: '>=12'} dependencies: - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 extend: 3.0.2 transitivePeerDependencies: - supports-color @@ -14034,7 +14120,7 @@ packages: engines: {node: '>= 10'} dependencies: agent-base: 6.0.2 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 socks: 2.8.3 transitivePeerDependencies: - supports-color @@ -14056,7 +14142,7 @@ packages: engines: {node: '>= 14'} dependencies: agent-base: 7.1.1 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 socks: 2.8.3 transitivePeerDependencies: - supports-color @@ -15046,6 +15132,21 @@ packages: requiresBuild: true dev: true + /uglify-js@3.18.0: + resolution: {integrity: sha512-SyVVbcNBCk0dzr9XL/R/ySrmYf0s372K6/hFklzgcp2lBFyXtw4I7BOdDjlLhE1aVqaI/SHWXWmYdlZxuyF38A==} + engines: {node: '>=0.8.0'} + hasBin: true + requiresBuild: true + dev: true + optional: true + + /ulidx@2.3.0: + resolution: {integrity: sha512-36piWNqcdp9hKlQewyeehCaALy4lyx3FodsCxHuV6i0YdexSkjDOubwxEVr2yi4kh62L/0MgyrxqG4K+qtovnw==} + engines: {node: '>=16'} + dependencies: + layerr: 2.1.0 + dev: true + /underscore@1.13.6: resolution: {integrity: sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A==} dev: true @@ -15224,7 +15325,7 @@ packages: resolution: {integrity: sha512-d6Mhq8RJeGA8UfKCu54Um4lFA0eSaRa3XxdAJg8tIdxbu1ubW0hBCZUL7yI2uGyYCRndvbK8FLHzqy2XKfeMsg==} engines: {node: '>=14.0.0'} dependencies: - acorn: 8.11.3 + acorn: 8.12.0 chokidar: 3.6.0 webpack-sources: 3.2.3 webpack-virtual-modules: 0.6.1 @@ -15400,34 +15501,13 @@ packages: vfile-message: 4.0.2 dev: false - /vite-node@1.5.0(@types/node@20.12.7): - resolution: {integrity: sha512-tV8h6gMj6vPzVCa7l+VGq9lwoJjW8Y79vst8QZZGiuRAfijU+EEWuc0kFpmndQrWhMMhet1jdSF+40KSZUqIIw==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - dependencies: - cac: 6.7.14 - debug: 4.3.4(supports-color@8.1.1) - pathe: 1.1.2 - picocolors: 1.0.1 - vite: 5.2.9(@types/node@20.12.7) - transitivePeerDependencies: - - '@types/node' - - less - - lightningcss - - sass - - stylus - - sugarss - - supports-color - - terser - dev: true - /vite-node@1.6.0(@types/node@20.12.7): resolution: {integrity: sha512-de6HJgzC+TFzOu0NTC4RAIsyf/DY/ibWDYQUcuEA84EMHhcefTUGkjFHKKEJhQN4A+6I0u++kr3l36ZF2d7XRw==} engines: {node: ^18.0.0 || >=20.0.0} hasBin: true dependencies: cac: 6.7.14 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 pathe: 1.1.2 picocolors: 1.0.1 
vite: 5.2.11(@types/node@20.12.7) @@ -15440,7 +15520,6 @@ packages: - sugarss - supports-color - terser - dev: false /vite-plugin-svgr@4.2.0(typescript@5.4.2)(vite@5.2.11): resolution: {integrity: sha512-SC7+FfVtNQk7So0XMjrrtLAbEC8qjFPifyD7+fs/E6aaNdVde6umlVVh0QuwDLdOMu7vp5RiGFsB70nj5yo0XA==} @@ -15528,64 +15607,6 @@ packages: fsevents: 2.3.3 dev: true - /vitest@1.5.0(@types/node@20.12.7)(happy-dom@14.3.10)(jsdom@22.1.0): - resolution: {integrity: sha512-d8UKgR0m2kjdxDWX6911uwxout6GHS0XaGH1cksSIVVG8kRlE7G7aBw7myKQCvDI5dT4j7ZMa+l706BIORMDLw==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - peerDependencies: - '@edge-runtime/vm': '*' - '@types/node': ^18.0.0 || >=20.0.0 - '@vitest/browser': 1.5.0 - '@vitest/ui': 1.5.0 - happy-dom: '*' - jsdom: '*' - peerDependenciesMeta: - '@edge-runtime/vm': - optional: true - '@types/node': - optional: true - '@vitest/browser': - optional: true - '@vitest/ui': - optional: true - happy-dom: - optional: true - jsdom: - optional: true - dependencies: - '@types/node': 20.12.7 - '@vitest/expect': 1.5.0 - '@vitest/runner': 1.5.0 - '@vitest/snapshot': 1.5.0 - '@vitest/spy': 1.5.0 - '@vitest/utils': 1.5.0 - acorn-walk: 8.3.2 - chai: 4.4.1 - debug: 4.3.4(supports-color@8.1.1) - execa: 8.0.1 - happy-dom: 14.3.10 - jsdom: 22.1.0 - local-pkg: 0.5.0 - magic-string: 0.30.10 - pathe: 1.1.2 - picocolors: 1.0.0 - std-env: 3.7.0 - strip-literal: 2.1.0 - tinybench: 2.8.0 - tinypool: 0.8.4 - vite: 5.2.9(@types/node@20.12.7) - vite-node: 1.5.0(@types/node@20.12.7) - why-is-node-running: 2.2.2 - transitivePeerDependencies: - - less - - lightningcss - - sass - - stylus - - sugarss - - supports-color - - terser - dev: true - /vitest@1.6.0(@types/node@20.12.7)(happy-dom@14.3.10)(jsdom@22.1.0): resolution: {integrity: sha512-H5r/dN06swuFnzNFhq/dnz37bPXnq8xB2xB5JOVk8K09rUtoeNN+LHWkoQ0A/i3hvbUKKcCei9KpbxqHMLhLLA==} engines: {node: ^18.0.0 || >=20.0.0} @@ -15617,9 +15638,9 @@ packages: '@vitest/snapshot': 1.6.0 '@vitest/spy': 1.6.0 '@vitest/utils': 1.6.0 - acorn-walk: 8.3.2 + acorn-walk: 8.3.3 chai: 4.4.1 - debug: 4.3.4(supports-color@8.1.1) + debug: 4.3.5 execa: 8.0.1 happy-dom: 14.3.10 jsdom: 22.1.0 @@ -15642,7 +15663,6 @@ packages: - sugarss - supports-color - terser - dev: false /vlq@0.2.3: resolution: {integrity: sha512-DRibZL6DsNhIgYQ+wNdWDL2SL3bKPlVrRiBqV5yuMm++op8W4kGFtaQfCs4KEJn0wBZcHVHJ3eoywX8983k1ow==} @@ -15802,6 +15822,14 @@ packages: dependencies: isexe: 2.0.0 + /which@4.0.0: + resolution: {integrity: sha512-GlaYyEb07DPxYCKhKzplCWBJtvxZcZMrL+4UkrTSJHHPyZU4mYYTv3qaOe77H7EODLSSopAUFAc6W8U4yqvscg==} + engines: {node: ^16.13.0 || >=18.0.0} + hasBin: true + dependencies: + isexe: 3.1.1 + dev: true + /why-is-node-running@2.2.2: resolution: {integrity: sha512-6tSwToZxTOcotxHeA+qGCq1mVzKR3CwcJGmVcY+QE8SHy6TnpFnh8PAvPNHYr7EcuVeG0QSMxtYCuO1ta/G/oA==} engines: {node: '>=8'} @@ -16114,6 +16142,10 @@ packages: resolution: {integrity: sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==} dev: true + /zod@3.23.8: + resolution: {integrity: sha512-XBx9AXhXktjUqnepgTiE5flcKIYWi/rme0Eaj+5Y0lftuGBq+jyRu/md4WnuxqgP1ubdpNCsYEYPxrzVHD8d6g==} + dev: true + /zwitch@2.0.4: resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==} dev: false diff --git a/recordings/vscode/e2e_2083974793/example-test-ts_2069432700/Show-off-v2-features_1731159314/recording.har.yaml b/recordings/vscode/e2e_2083974793/example-test-ts_2069432700/Show-off-v2-features_1731159314/recording.har.yaml new file 
mode 100644 index 000000000000..9e5a07e5b58b --- /dev/null +++ b/recordings/vscode/e2e_2083974793/example-test-ts_2069432700/Show-off-v2-features_1731159314/recording.har.yaml @@ -0,0 +1,929 @@ +log: + _recordingName: e2e/example.test.ts/Show off v2 features + creator: + comment: persister:fs + name: Polly.JS + version: 6.0.6 + entries: + - _id: fd79a8c38d53d2648b1b68d15d1f77c6 + _order: 0 + cache: {} + request: + bodySize: 217 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "217" + - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 303 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |- + + query CodyConfigFeaturesResponse { + site { + codyConfigFeatures { + chat + autoComplete + commands + attribution + } + } + } + variables: {} + queryString: + - name: CodyConfigFeaturesResponse + value: null + url: https://sourcegraph.com/.api/graphql?CodyConfigFeaturesResponse + response: + bodySize: 152 + content: + encoding: base64 + mimeType: application/json + size: 152 + text: "[\"H4sIAAAAAAAAAzyLwQqAIBAF/2XPfYFXof/YdC0h3dDnIcR/Dws6DQwznTyDyXSqETLp1\ + N9Wc4j7KoxWpL72YJBBabIQN6jVdJ0yj885TYmzr38DlLg1RM1kAp9VxhjjAQAA//8D\ + AIfOLkJuAAAA\"]" + textDecoded: + data: + site: + codyConfigFeatures: + attribution: false + autoComplete: true + chat: true + commands: true + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:04 GMT + - name: content-type + value: application/json + - name: transfer-encoding + value: chunked + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + - name: content-encoding + value: gzip + headersSize: 1328 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:04.040Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 09b59ac55ce3c40d6f9ab8c79846a2c6 + _order: 0 + cache: {} + request: + bodySize: 144 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "144" + - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 291 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |- + + query ContextFilters { + site { + codyContextFilters(version: V1) { + raw + } + } + } + variables: {} + queryString: + - name: ContextFilters + value: null + url: https://sourcegraph.com/.api/graphql?ContextFilters + response: + bodySize: 111 + content: + encoding: base64 + mimeType: application/json + size: 
111 + text: "[\"H4sIAAAAAAAAA6pWSkksSVSyqlY=\",\"Ks4sSQXRyfkplc75eSWpFSVumTklqUXFINGi\ + xHIlq7zSnJza2tpaAAAAAP//AwA2LshlNQAAAA==\"]" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:03 GMT + - name: content-type + value: application/json + - name: transfer-encoding + value: chunked + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + - name: content-encoding + value: gzip + headersSize: 1328 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:03.358Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 0b63afd2aa308ea31206484848de021f + _order: 0 + cache: {} + request: + bodySize: 183 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "183" + - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 296 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |2 + + query EvaluateFeatureFlag($flagName: String!) 
{ + evaluateFeatureFlag(flagName: $flagName) + } + variables: + flagName: cody-use-sourcegraph-embeddings + queryString: + - name: EvaluateFeatureFlag + value: null + url: https://sourcegraph.com/.api/graphql?EvaluateFeatureFlag + response: + bodySize: 37 + content: + mimeType: application/json + size: 37 + text: "{\"data\":{\"evaluateFeatureFlag\":null}}" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:02 GMT + - name: content-type + value: application/json + - name: content-length + value: "37" + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + headersSize: 1296 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:02.463Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 97bed95811980e1a8d1c114065f65cdf + _order: 0 + cache: {} + request: + bodySize: 177 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "177" + - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 296 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |2 + + query EvaluateFeatureFlag($flagName: String!) 
{ + evaluateFeatureFlag(flagName: $flagName) + } + variables: + flagName: cody-autocomplete-tracing + queryString: + - name: EvaluateFeatureFlag + value: null + url: https://sourcegraph.com/.api/graphql?EvaluateFeatureFlag + response: + bodySize: 37 + content: + mimeType: application/json + size: 37 + text: "{\"data\":{\"evaluateFeatureFlag\":null}}" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:02 GMT + - name: content-type + value: application/json + - name: content-length + value: "37" + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + headersSize: 1296 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:02.464Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: a3d3d0f7feb229a85bd641be136a8edd + _order: 0 + cache: {} + request: + bodySize: 181 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "181" + - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 296 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |2 + + query EvaluateFeatureFlag($flagName: String!) 
{ + evaluateFeatureFlag(flagName: $flagName) + } + variables: + flagName: cody-embeddings-auto-indexing + queryString: + - name: EvaluateFeatureFlag + value: null + url: https://sourcegraph.com/.api/graphql?EvaluateFeatureFlag + response: + bodySize: 37 + content: + mimeType: application/json + size: 37 + text: "{\"data\":{\"evaluateFeatureFlag\":null}}" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:03 GMT + - name: content-type + value: application/json + - name: content-length + value: "37" + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + headersSize: 1296 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:02.749Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 0b8e788d38a93fd0c53c921e768bff46 + _order: 0 + cache: {} + request: + bodySize: 177 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "177" + - name: accept + value: "*/*" + - name: traceparent + value: 00-f0577df6943a2bfaaffc899734ad881e-924cb5ce9934c0cc-01 + - name: content-type + value: application/json; charset=utf-8 + headersSize: 366 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |2 + + query EvaluateFeatureFlag($flagName: String!) 
{ + evaluateFeatureFlag(flagName: $flagName) + } + variables: + flagName: cody-interactive-tutorial + queryString: + - name: EvaluateFeatureFlag + value: null + url: https://sourcegraph.com/.api/graphql?EvaluateFeatureFlag + response: + bodySize: 37 + content: + mimeType: application/json + size: 37 + text: "{\"data\":{\"evaluateFeatureFlag\":null}}" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:03 GMT + - name: content-type + value: application/json + - name: content-length + value: "37" + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + headersSize: 1296 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:03.713Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 4ec84765b3d45b8a8f6392da0304fa77 + _order: 0 + cache: {} + request: + bodySize: 178 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "178" + - name: accept + value: "*/*" + - name: traceparent + value: 00-f3df72a135c611acb86086fc824e6e1f-b32eb6211074902f-01 + - name: content-type + value: application/json; charset=utf-8 + headersSize: 366 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |2 + + query EvaluateFeatureFlag($flagName: String!) 
{ + evaluateFeatureFlag(flagName: $flagName) + } + variables: + flagName: cody-chat-context-preamble + queryString: + - name: EvaluateFeatureFlag + value: null + url: https://sourcegraph.com/.api/graphql?EvaluateFeatureFlag + response: + bodySize: 37 + content: + mimeType: application/json + size: 37 + text: "{\"data\":{\"evaluateFeatureFlag\":null}}" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:03 GMT + - name: content-type + value: application/json + - name: content-length + value: "37" + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + headersSize: 1296 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:03.727Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 62498f2d11167bd2d5d002a799a49338 + _order: 0 + cache: {} + request: + bodySize: 147 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "147" + - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 289 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |2 + + query FeatureFlags { + evaluatedFeatureFlags() { + name + value + } + } + variables: {} + queryString: + - name: FeatureFlags + value: null + url: https://sourcegraph.com/.api/graphql?FeatureFlags + response: + bodySize: 37 + content: + mimeType: application/json + size: 37 + text: "{\"data\":{\"evaluatedFeatureFlags\":[]}}" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:02 GMT + - name: content-type + value: application/json + - name: content-length + value: "37" + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + headersSize: 1296 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:02.205Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 8d297306aeea324b87ef494954016fba + _order: 0 + cache: {} + request: + bodySize: 164 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "164" 
+ - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 295 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |- + + query SiteIdentification { + site { + siteID + productSubscription { + license { + hashedKey + } + } + } + } + variables: {} + queryString: + - name: SiteIdentification + value: null + url: https://sourcegraph.com/.api/graphql?SiteIdentification + response: + bodySize: 219 + content: + encoding: base64 + mimeType: application/json + size: 219 + text: "[\"H4sIAAAAAAAAAzTLsQ6CMBCA4Xc=\",\"udmF9q4FZhfjyOB87V2liQHSlsEQ391g4r/8\ + 03eAcGMYD6i56f+3K4wwrXuJ+iy8zQ8NcIGtrLLHNu2hxpK3ltflBK8cdak/O3OdVe7\ + 6hhGG6LvQGZv6JJoMYu9EGZWc86jRGiTjDUXqHAnZIIgpOI+G7cCcCD5nXwAAAP//Aw\ + AY9rt+oAAAAA==\"]" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:02 GMT + - name: content-type + value: application/json + - name: transfer-encoding + value: chunked + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: strict-transport-security + value: max-age=31536000; includeSubDomains; preload + - name: content-encoding + value: gzip + headersSize: 1328 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:02.158Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + - _id: 203c1896021c3a09dfe619120ea1b725 + _order: 0 + cache: {} + request: + bodySize: 101 + cookies: [] + headers: + - name: connection + value: close + - name: host + value: sourcegraph.com + - name: accept-encoding + value: gzip,deflate + - name: user-agent + value: node-fetch/1.0 (+https://github.com/bitinn/node-fetch) + - name: content-length + value: "101" + - name: accept + value: "*/*" + - name: content-type + value: application/json; charset=utf-8 + headersSize: 368 + httpVersion: HTTP/1.1 + method: POST + postData: + mimeType: application/json; charset=utf-8 + params: [] + textJSON: + query: |- + + query SiteProductVersion { + site { + productVersion + } + } + variables: {} + queryString: + - name: SiteProductVersion + value: null + url: https://sourcegraph.com/.api/graphql?SiteProductVersion + response: + bodySize: 139 + content: + encoding: base64 + mimeType: application/json + size: 139 + text: "[\"H4sIAAAAAAAAA6pWSkksSVSyqlY=\",\"Ks4sSQXRBUX5KaXJJWGpRcWZ+XlKVkpGFgZm\ + pgbxRgZGJroG5roGRvGmeia6hkmWJqaWyYlJ5kbJSrW1tQAAAAD//wMA1rHtQ0kAAAA=\ + \"]" + cookies: [] + headers: + - name: date + value: Tue, 02 Jul 2024 14:12:02 GMT + - name: content-type + value: application/json + - name: transfer-encoding + value: chunked + - name: connection + value: close + - name: access-control-allow-credentials + value: "true" + - name: access-control-allow-origin + value: "" + - name: cache-control + value: no-cache, max-age=0 + - name: vary + value: Cookie,Accept-Encoding,Authorization,Cookie, Authorization, + X-Requested-With,Cookie + - name: x-content-type-options + value: nosniff + - name: x-frame-options + value: DENY + - name: x-xss-protection + value: 1; mode=block + - name: 
strict-transport-security + value: max-age=31536000; includeSubDomains; preload + - name: content-encoding + value: gzip + headersSize: 1328 + httpVersion: HTTP/1.1 + redirectURL: "" + status: 200 + statusText: OK + startedDateTime: 2024-07-02T14:12:02.161Z + time: 0 + timings: + blocked: -1 + connect: -1 + dns: -1 + receive: 0 + send: 0 + ssl: -1 + wait: 0 + pages: [] + version: "1.2" diff --git a/vscode/.gitignore b/vscode/.gitignore index 261d34db33b7..b6a7aeae3dc0 100644 --- a/vscode/.gitignore +++ b/vscode/.gitignore @@ -3,6 +3,7 @@ out/ .vscode-test/ dist/ .vscode-test-web/ +.test-reports resources/wasm/ GITHUB_CHANGELOG.md walkthroughs/cody_tutorial.py diff --git a/vscode/e2e/README.md b/vscode/e2e/README.md new file mode 100644 index 000000000000..59ac888b31e4 --- /dev/null +++ b/vscode/e2e/README.md @@ -0,0 +1,6 @@ +TODO: This will be a nice guide + +- `pnpm run test:e2e2:run --ui` to view trace UI locally +- How to update network recordings +- How to record a test + - Issue tests diff --git a/vscode/e2e/TODO.md b/vscode/e2e/TODO.md new file mode 100644 index 000000000000..54669d24059e --- /dev/null +++ b/vscode/e2e/TODO.md @@ -0,0 +1,10 @@ +- [x] Fail test on proxy failure +- [ ] Credentials loading +- [ ] Configurable endpoint proxies +- [ ] Migrate existing e2e tests +- [ ] Fixutres repo +- [ ] Wait for Cody progress-bars +- [ ] Better DX around Telemetry evaluation +- [ ] Fuzzy testing with API delays +- [ ] Allow logging of requests + debugger modification (nullifying timeout) +- [ ] Copy kill-tree from vscode (https://www.npmjs.com/package/kill-sync) diff --git a/vscode/e2e/example.test.ts b/vscode/e2e/example.test.ts new file mode 100644 index 000000000000..470e38991da3 --- /dev/null +++ b/vscode/e2e/example.test.ts @@ -0,0 +1,57 @@ +import { expect } from '@playwright/test' +import { fixture as test, uix } from './utils/vscody' + +test.describe('Demonstrations', () => { + test.use({ + templateWorkspaceDir: 'test/fixtures/workspace', + }) + test('Show off v2 features', async ({ + page, + sourcegraphMitM, + vscodeUI, + polly, + executeCommand, + workspaceDir, + }) => { + polly.server.host(sourcegraphMitM.target, () => { + polly.server + .post('/.api/graphql') + .filter(req => 'RecordTelemetryEvents' in req.query) + .intercept((req, res, interceptor) => { + console.log('Custom interceptor') + res.sendStatus(500) + }) + }) + await uix.workspace.modifySettings( + existing => ({ ...existing, 'workbench.colorTheme': 'Default Light Modern' }), + { workspaceDir } + ) + await uix.vscode.startSession({ page, vscodeUI, executeCommand, workspaceDir }) + await uix.cody.waitForStartup() + + await executeCommand('workbench.action.closeAllEditors') + await executeCommand('workbench.action.showRuntimeExtensions') + + await page.click('[aria-label="Cody"]') + + await executeCommand('workbench.explorer.fileView.focus') + + await page.click('[aria-label="Cody"]') + + const [signInView, ...otherWebviews] = await uix.cody.WebView.all({ page }, { atLeast: 1 }) + + expect(signInView).toBeTruthy() + expect(otherWebviews).toHaveLength(0) + + await signInView.waitUntilReady() + await expect(signInView.wrapper).toBeVisible() + + await expect( + signInView.content.getByRole('button', { name: 'Sign In to Your Enterprise Instance' }) + ).toBeVisible() + }) + + test('also works', async ({ page, sourcegraphMitM, vscodeUI, executeCommand }) => { + await uix.cody.dummy() + }) +}) diff --git a/vscode/e2e/issues/CODY-2392.test.ts b/vscode/e2e/issues/CODY-2392.test.ts new file mode 100644 index 
000000000000..387ee14e0222 --- /dev/null +++ b/vscode/e2e/issues/CODY-2392.test.ts @@ -0,0 +1,79 @@ +// CTX(linear-issue): https://linear.app/sourcegraph/issue/CODY-2392 +import { expect } from '@playwright/test' +import { fixture as test } from '../utils/vscody' +//TODO(rnauta): wjow + +test.fixme('CODY-2392', () => { + expect(true).toBeFalsy() + // import { expect } from '@playwright/test' + // import { + // chatMessageRows, + // createEmptyChatPanel, + // disableNotifications, + // focusSidebar, + // openFileInEditorTab, + // selectLineRangeInEditorTab, + // } from '../../e2e/common' + // import { + // type ExpectedEvents, + // type ExpectedV2Events, + // type TestConfiguration, + // executeCommandInPalette, + // test, + // } from '../../e2e/helpers' + + // test.extend({ + // expectedEvents: [], + // expectedV2Events: [], + // preAuthenticate: true, + // })('@issue [CODY-2392](https://linear.app/sourcegraph/issue/CODY-2392)', async ({ page }) => { + // await disableNotifications(page) + + // //open a file + // await openFileInEditorTab(page, 'buzz.ts') + // await focusSidebar(page) + // const [chatPanel, lastChatInput, firstChatInput, chatInputs] = await createEmptyChatPanel(page) + // await firstChatInput.fill('show me a code snippet') + // await firstChatInput.press('Enter') + + // // wait for assistant response + // const messageRows = chatMessageRows(chatPanel) + // const assistantRow = messageRows.nth(1) + // await expect(assistantRow).toContainText('Here is a code snippet:') + + // // we now start editing the original message + // await firstChatInput.click() + // //now write some text + // await firstChatInput.fill('I want to include some context') + // await selectLineRangeInEditorTab(page, 1, 10) + // await executeCommandInPalette(page, 'Cody: Add Selection to Cody Chat') + + // // we now expect the first input to contain the selected context + // // the last input should still be empty + // const lastChatInputText = await lastChatInput.textContent() + // await expect(lastChatInput).toBeEmpty() + // await expect(firstChatInput).toContainText('@buzz.ts:1-10') + // }) +}) + +//TODO: Make a template +/** + * // TODO: update this file-level comment +// CTX(linear): https://linear.app/sourcegraph/issue/CODY-1234 + +import { disableNotifications } from '../../e2e/common' +import type { TestConfiguration } from '../../e2e/helpers' +import { test } from '../../e2e/helpers' + +// TODO: add a .only() to actually run this test +test.extend({ + expectedEvents: [], + expectedV2Events: [], + preAuthenticate: true, +})('@issue [CODY-1234](https://linear.app/sourcegraph/issue/CODY-1234)', async ({ page }) => { + // TODO: The test name should include the @issue tag and a markdown link to the issue + await disableNotifications(page) + //do your worst +}) + + */ diff --git a/vscode/e2e/issues/README.md b/vscode/e2e/issues/README.md new file mode 100644 index 000000000000..73de6416c774 --- /dev/null +++ b/vscode/e2e/issues/README.md @@ -0,0 +1,28 @@ +# Issue Tests + +This as a "low-threshold staging area" to put tests that can help replicate or diagnose a problem. + +These tests don't run as part of CI. Instead, the goal is to make it easier for anyone to contribute even rough or partial test as part of every bug report. + +Doing so will make diagnosing and verifying the results a lot easier for everyone. + +## Rules: +- Ideally test files are named with the Linear/Github Issue ID to make it easy to find them or pull in additional context. +- Only tests explicitly marked with `only()` should run. 
Issue tests are by definition very tied to the specific issue someone is trying to diagnose, so running other tests would just be noise. This should already be the case if you extend the base test for the specific type. See [e2e/template.test.ts](./e2e/template.test.ts) +- (Optional) I'm hoping to do [some experiments soon](#sidenote-openctx-experiment). So if you can please: + - start each test with a `//CTX(linear-issue): ` comment + - use the `@issue` tag and a markdown link to the issue in the test title. + + + + +### Sidenote: OpenCtx Experiment: + +I'd like to see how we can use Cody to assist with replicating issues from bug reports. So it would be really helpful if each Issue test contains context on what issue it is trying to demonstrate / replicate. + +There is some rudimentary OpenCtx support for Linear issues [in the works](https://github.com/sourcegraph/openctx/pull/154), providing both additional context to the UI & the AI. So for now, if nothing else, it should at least make these tests a bit easier to understand. Especially as test comments and issue comments might drift. + +## TODO: +- [ ] Make sure we automatically clean up tests for issues marked as closed +- [ ] Automatically tag issues in Linear that have corresponding tests +- [ ] CI fast-path to limit the number of needless tests to run when just trying to merge a Test-Only PR diff --git a/vscode/e2e/issues/ignore.test.ts b/vscode/e2e/issues/ignore.test.ts new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/vscode/e2e/utils/helpers.ts b/vscode/e2e/utils/helpers.ts new file mode 100644 index 000000000000..509ef66b6397 --- /dev/null +++ b/vscode/e2e/utils/helpers.ts @@ -0,0 +1,47 @@ +import type { TestInfo } from '@playwright/test' + +/** + * Stretches the test timeout by at most `max` ms, but never by more than + * needed to finish the operation. This way you can effectively nullify the time + * a certain operation takes.
+ */ +export async function stretchTimeout<T>( + fn: () => Promise<T>, + { + max, + testInfo, + }: { + max: number + testInfo: TestInfo + } +): Promise<T> { + // Warning: For some reason Playwright doesn't report the modified timeout + // correctly so we can't rely on it being updated after we call setTimeout + const timeout = testInfo.timeout + if (timeout === 0) { + return await fn() + } + testInfo.setTimeout(timeout + max) + const startTime = Date.now() + try { + return await fn() + } finally { + const totalTime = Date.now() - startTime + testInfo.setTimeout(timeout + totalTime) + } +} + +export async function retry<T>(fn: () => Promise<T>, retries = 5, delay = 1000): Promise<T> { + for (let i = 0; i < retries; i++) { + try { + return await fn() + } catch (err) { + if (i < retries - 1) { + await new Promise(res => setTimeout(res, delay)) + } else { + throw err + } + } + } + throw new Error('Could not execute retryable function') +} diff --git a/vscode/e2e/utils/symlink-extensions.setup.ts b/vscode/e2e/utils/symlink-extensions.setup.ts new file mode 100644 index 000000000000..aa7a7771be3b --- /dev/null +++ b/vscode/e2e/utils/symlink-extensions.setup.ts @@ -0,0 +1,73 @@ +import fs from 'node:fs/promises' +import path from 'node:path' +import { test as setup } from '@playwright/test' + +//TODO: make options with nice descriptions and validation +export type SymlinkExtensions = + | { + vscodeExtensionCacheDir: string + symlinkExtensions: [string, ...string[]] + } + | { + vscodeExtensionCacheDir?: unknown + symlinkExtensions?: [] | null // these paths will get symlinked to the shared extension cache as pre-installed extensions + } + + // biome-ignore lint/complexity/noBannedTypes: +setup.extend<{}, SymlinkExtensions>({ + vscodeExtensionCacheDir: [undefined, { scope: 'worker', option: true }], + symlinkExtensions: [undefined, { scope: 'worker', option: true }], +})('symlink extensions', async ({ vscodeExtensionCacheDir, symlinkExtensions }) => { + if (typeof vscodeExtensionCacheDir === 'string') { + await fs.mkdir(vscodeExtensionCacheDir, { recursive: true }) + } + if (!symlinkExtensions || symlinkExtensions.length === 0) { + return + } + if (typeof vscodeExtensionCacheDir !== 'string') { + throw new TypeError('vscodeExtensionCacheDir is required to symlink extensions') + } + for (const extension of symlinkExtensions) { + const absoluteDir = path.resolve(process.cwd(), extension) + //read the package.json as json + const packageJsonPath = await fs.readFile(path.join(absoluteDir, 'package.json')) + const packageJson = JSON.parse(packageJsonPath.toString()) + const { publisher, name, version } = packageJson + if (!publisher || !name || !version) { + throw new TypeError( + `package.json for extension ${extension} must have publisher, name, and version` + ) + } + try { + // we look for any extensions with that same name (because they could be an older version) + const extensions = await fs.readdir(vscodeExtensionCacheDir) + const removePromises = [ + fs.unlink(path.join(vscodeExtensionCacheDir, 'extensions.json')).catch(() => void 0), + fs.unlink(path.join(vscodeExtensionCacheDir, '.obsolete')).catch(() => void 0), + ] + for (const extension of extensions) { + if (path.basename(extension).startsWith(`${publisher}.${name}-`)) { + // check if this is a symlink or a directory + const extensionPath = path.join(vscodeExtensionCacheDir, extension) + console.log(extensionPath) + removePromises.push( + fs.lstat(extensionPath).then(async stat => { + if (stat.isSymbolicLink()) { + await fs.unlink(extensionPath) + } + await 
fs.rm(extensionPath, { force: true, recursive: true }) + }) + ) + } + } + await Promise.all(removePromises) + } catch { + //ignore + } + await fs.symlink( + absoluteDir, + path.join(vscodeExtensionCacheDir, `${publisher}.${name}-${version}`), + 'dir' + ) + } +}) diff --git a/vscode/e2e/utils/uix.test.ts b/vscode/e2e/utils/uix.test.ts new file mode 100644 index 000000000000..cdb6cc84f65c --- /dev/null +++ b/vscode/e2e/utils/uix.test.ts @@ -0,0 +1,20 @@ +import { expect } from '@playwright/test' +import { fixture as test, uix } from './vscody' + +test.describe('UIX', () => { + test.use({ + templateWorkspaceDir: 'test/fixtures/workspace', + }) + test('VSCode Sidebar', async ({ page, vscodeUI, executeCommand, workspaceDir }) => { + await uix.vscode.startSession({ page, vscodeUI, executeCommand, workspaceDir }) + const sidebar = uix.vscode.Sidebar.get({ page }) + + await executeCommand('workbench.view.explorer') + expect(await sidebar.isVisible()).toBe(true) + expect(await sidebar.activeView).toBe('workbench.view.explorer') + await executeCommand('workbench.action.closeSidebar') + expect(await sidebar.isVisible()).toBe(false) + await executeCommand('workbench.view.extension.cody') + expect(await sidebar.activeView).toBe(uix.vscode.Sidebar.CODY_VIEW_ID) + }) +}) diff --git a/vscode/e2e/utils/vscody/fixture.ts b/vscode/e2e/utils/vscody/fixture.ts new file mode 100644 index 000000000000..b2f871fd009b --- /dev/null +++ b/vscode/e2e/utils/vscody/fixture.ts @@ -0,0 +1,715 @@ +// TODO/WARNING/APOLOGY: I know that this is an unreasonably large file right +// now. I'll refactor and cut it down this down once everything is working +// first. +import { spawn } from 'node:child_process' +import type { Dirent } from 'node:fs' +import fs from 'node:fs/promises' +import 'node:http' +import 'node:https' +import type { AddressInfo } from 'node:net' +import path from 'node:path' +import { EventEmitter } from 'node:stream' + +import pspawn from '@npmcli/promise-spawn' +import { type TestInfo, test as _test, expect, mergeTests } from '@playwright/test' +import NodeHttpAdapter from '@pollyjs/adapter-node-http' +import { type EXPIRY_STRATEGY, type MODE, Polly } from '@pollyjs/core' +import type { ArrayContainsAll } from '@sourcegraph/cody-shared/src/utils' +import { + ConsoleReporter, + type ProgressReport, + ProgressReportStage, + resolveCliArgsFromVSCodeExecutablePath, +} from '@vscode/test-electron' +import { downloadAndUnzipVSCode } from '@vscode/test-electron/out/download' +import express from 'express' +import { copy as copyExt } from 'fs-extra' +import { createProxyMiddleware } from 'http-proxy-middleware' +import type { loggerPlugin as ProxyMiddlewarePlugin } from 'http-proxy-middleware' +import zod from 'zod' + +import { waitForLock } from '../../../src/lockfile' +import { CodyPersister } from '../../../src/testutils/CodyPersister' +import { defaultMatchRequestsBy } from '../../../src/testutils/polly' +import { retry, stretchTimeout } from '../helpers' + +export type Directory = string + +const DOWNLOAD_GRACE_TIME = 5 * 60 * 1000 //5 minutes + +// TODO(rnauta): finish all variable descriptions +const workerOptionsSchema = zod.object({ + repoRootDir: zod + .string() + .describe( + 'DEPRECATED: The .git root of this project. 
Might still get used for some path defaults so must be set' + ), + vscodeExtensionCacheDir: zod.string(), + globalTmpDir: zod.string(), + vscodeTmpDir: zod.string(), + vscodeServerTmpDir: zod.string(), + binaryTmpDir: zod.string(), + recordingDir: zod.string(), + vscodeServerPortRange: zod.tuple([zod.number(), zod.number()]).default([33100, 33200]), + keepRuntimeDirs: zod.enum(['all', 'failed', 'none']).default('none'), + allowGlobalVSCodeModification: zod.boolean().default(false), +}) + +const testOptionsSchema = zod.object({ + vscodeVersion: zod.string().default('stable'), + vscodeExtensions: zod.array(zod.string().toLowerCase()).default([]), + templateWorkspaceDir: zod.string(), + recordingMode: zod.enum([ + 'passthrough', + 'record', + 'replay', + 'stopped', + ] satisfies ArrayContainsAll), + recordIfMissing: zod.boolean(), + keepUnusedRecordings: zod.boolean().default(true), + recordingExpiryStrategy: zod + .enum(['record', 'warn', 'error'] satisfies ArrayContainsAll) + .default('record'), + recordingExpiresIn: zod.string().nullable().default(null), +}) + +export type TestOptions = zod.infer +export type WorkerOptions = zod.infer + +export interface WorkerContext { + validWorkerOptions: WorkerOptions +} +export interface TestContext { + vscodeUI: { + url: string + token: string + } + serverRootDir: Directory + validOptions: TestOptions & WorkerOptions + polly: Polly + sourcegraphMitM: { endpoint: string; target: string } + workspaceDir: Directory + //TODO(rnauta): Make the typing inferred from VSCode directly + executeCommand: (commandId: string, ...args: any[]) => Promise +} + +function schemaOptions, S extends 'worker' | 'test'>(o: T, s: S) { + return Object.fromEntries( + Object.keys(o.shape).map(key => [key, [undefined, { scope: s, option: true }]]) + ) as unknown as { [k in keyof T]: [T[k], { scope: S; option: true }] } +} + +// We split out the options fixutre from the implementation fixture so that in +// the implementaiton fixture we don't accidentally use any options directly, +// instead having to use validated options +const optionsFixture: ReturnType< + typeof _test.extend, Pick> +> = _test.extend< + TestOptions & Pick, + WorkerOptions & Pick +>({ + ...schemaOptions(workerOptionsSchema, 'worker'), + ...schemaOptions(testOptionsSchema, 'test'), + validWorkerOptions: [ + async ( + { + repoRootDir, + binaryTmpDir, + recordingDir, + globalTmpDir, + vscodeTmpDir, + vscodeServerTmpDir, + vscodeExtensionCacheDir, + keepRuntimeDirs, + vscodeServerPortRange, + allowGlobalVSCodeModification, + }, + use + ) => { + const validOptionsWithDefaults = await workerOptionsSchema.safeParseAsync( + { + repoRootDir, + binaryTmpDir, + recordingDir, + globalTmpDir, + vscodeTmpDir, + vscodeServerTmpDir, + vscodeExtensionCacheDir, + keepRuntimeDirs, + vscodeServerPortRange, + allowGlobalVSCodeModification, + } satisfies { [key in keyof WorkerOptions]-?: WorkerOptions[key] }, + {} + ) + if (!validOptionsWithDefaults.success) { + throw new TypeError( + `Invalid worker arguments:\n${JSON.stringify( + validOptionsWithDefaults.error.flatten().fieldErrors, + null, + 2 + )}` + ) + } + use(validOptionsWithDefaults.data) + }, + { scope: 'worker', auto: true }, + ], + validOptions: [ + async ( + { + vscodeExtensions, + vscodeVersion, + templateWorkspaceDir, + recordIfMissing, + recordingMode, + keepUnusedRecordings, + recordingExpiresIn, + recordingExpiryStrategy, + validWorkerOptions, + }, + use + ) => { + const validOptionsWithDefaults = await testOptionsSchema.safeParseAsync( + { + vscodeExtensions, + 
vscodeVersion, + keepUnusedRecordings, + recordingExpiresIn, + recordingExpiryStrategy, + templateWorkspaceDir, + recordIfMissing, + recordingMode, + } satisfies { [key in keyof TestOptions]-?: TestOptions[key] }, + {} + ) + if (!validOptionsWithDefaults.success) { + throw new TypeError( + `Invalid test arguments:\n${JSON.stringify( + validOptionsWithDefaults.error.flatten().fieldErrors, + null, + 2 + )}` + ) + } + use({ ...validOptionsWithDefaults.data, ...validWorkerOptions }) + }, + { scope: 'test', auto: true }, + ], +}) + +const implFixture = _test.extend({ + serverRootDir: [ + async ({ validWorkerOptions }, use, testInfo) => { + const dir = await fs.mkdtemp( + path.resolve(validWorkerOptions.globalTmpDir, 'test-vscode-server-') + ) + await use(dir) + const attachmentPromises = [] + const logDir = path.join(dir, 'data/logs') + + for (const file of await getFilesRecursive(logDir)) { + const filePath = path.join(file.path, file.name) + const relativePath = path.relative(logDir, filePath) + attachmentPromises.push( + testInfo.attach(relativePath, { + path: filePath, + }) + ) + } + if (attachmentPromises.length > 0) { + await Promise.allSettled(attachmentPromises) + } + if ( + validWorkerOptions.keepRuntimeDirs === 'none' || + (validWorkerOptions.keepRuntimeDirs === 'failed' && + ['failed', 'timedOut'].includes(testInfo.status ?? 'unknown')) + ) { + await retry(() => fs.rm(logDir, { force: true, recursive: true }), 20, 500) + } + }, + { scope: 'test' }, + ], + workspaceDir: [ + async ({ validOptions }, use, testInfo) => { + const dir = await fs.mkdtemp(path.resolve(validOptions.globalTmpDir, 'test-workspace-')) + + await copyExt(path.resolve(process.cwd(), validOptions.templateWorkspaceDir), dir, { + overwrite: true, + preserveTimestamps: true, + dereference: true, // we can't risk the test modifying the symlink + }) + await use(dir) + if ( + validOptions.keepRuntimeDirs === 'none' || + (validOptions.keepRuntimeDirs === 'failed' && + ['failed', 'timedOut'].includes(testInfo.status ?? 'unknown')) + ) { + await retry(() => fs.rm(dir, { force: true, recursive: true }), 20, 500) + } + }, + { + scope: 'test', + }, + ], + //#region Polly & Proxies + sourcegraphMitM: [ + // biome-ignore lint/correctness/noEmptyPattern: + async ({}, use, testInfo) => { + const app = express() + //TODO: Credentials & Configuration TODO: I can see a use-case where + //you can switch endpoints dynamically. For instance wanting to try + //signing out of one and then signing into another. You could + //probably do that already using env variables in the workspace + //config but it's not a super smooth experience yet. If you run into + //this please give me a ping so we can brainstorm. 
+ const target = 'https://sourcegraph.com' + const testFailureSignal = new EventEmitter<{ error: [Error] }>() + testFailureSignal.on('error', err => { + throw err + }) + const middleware = createProxyMiddleware({ + target, + changeOrigin: true, + ejectPlugins: true, + plugins: [failOrRetryRecordingOnError(testFailureSignal, testInfo)], + }) + app.use(middleware) + let server: ReturnType = null as any + const serverInfo = await new Promise((resolve, reject) => { + server = app.listen(0, '127.0.0.1', () => { + const address = server.address() + if (address === null || typeof address === 'string') { + reject('address is not a valid object') + } else { + resolve(address) + } + }) + }) + + await use({ + endpoint: `http://${serverInfo.address}:${serverInfo.port}`, + target, + }) + + server.closeAllConnections() + await new Promise(resolve => server.close(resolve)) + }, + { scope: 'test' }, + ], + polly: [ + async ({ validOptions, sourcegraphMitM }, use, testInfo) => { + const relativeTestPath = path.relative( + path.resolve(process.cwd(), testInfo.project.testDir), + testInfo.file + ) + const polly = new Polly(`${testInfo.project.name}/${relativeTestPath}/${testInfo.title}`, { + flushRequestsOnStop: true, + recordIfMissing: validOptions.recordIfMissing ?? validOptions.recordingMode === 'record', + mode: validOptions.recordingMode, + persister: 'fs', + adapters: ['node-http'], + recordFailedRequests: true, + logLevel: 'SILENT', + matchRequestsBy: defaultMatchRequestsBy, + persisterOptions: { + keepUnusedRequests: validOptions.keepUnusedRecordings ?? true, + fs: { + recordingsDir: path.resolve(process.cwd(), validOptions.recordingDir), + }, + }, + }) + + polly.server + .any() + .filter(req => !req.url.startsWith(sourcegraphMitM.target)) + .intercept((req, res, interceptor) => { + interceptor.stopPropagation() + interceptor.passthrough() + }) + polly.server.host(sourcegraphMitM.target, () => { + polly.server + .post('/.api/graphql') + .filter(req => 'RecordTelemetryEvents' in req.query) + .on('request', (req, inter) => { + //TODO(rnauta): Store telemetry & allow for custom validation (if needed) + }) + + // NOTE: this might seem counter intuitive that the user could + // override these functions given that PollyJS calls them in the + // order they were defined. However, these intercept handlers + // don't work like normal middleware in that it's the first to + // respond. Instead if you call sendStatus(400) in a subsequent + // handler you change the resoponse. So although handlers are + // called in the order they are defined, it's the last handler + // to modify the response that actually dictates the response. 
+ // This took me ages to figure out, and feels like a terrible + // API...why they didn't just go with normal well-understood + // middleware API 🤷‍♂️ + polly.server + .post('/.api/graphql') + .filter( + req => 'RecordTelemetryEvents' in req.query || 'LogEventMutation' in req.query + ) + .intercept((req, res, interceptor) => { + res.send('{}') + res.status(200) + }) + + polly.server.get('/healthz').intercept((req, res, interceptor) => { + res.sendStatus(200) + }) + }) + + await use(polly) + await polly.flush() + await polly.stop() + }, + { scope: 'test' }, + ], + //#region vscode agent + vscodeUI: [ + async ({ validOptions, serverRootDir, sourcegraphMitM, page }, use, testInfo) => { + const executableDir = path.resolve(process.cwd(), validOptions.vscodeTmpDir) + await fs.mkdir(executableDir, { recursive: true }) + const serverExecutableDir = path.resolve(process.cwd(), validOptions.vscodeServerTmpDir) + await fs.mkdir(serverExecutableDir, { recursive: true }) + // We nullify the time it takes to download VSCode as it can vary wildly! + const [_, codeTunnelCliPath] = await stretchTimeout( + () => downloadOrWaitForVSCode({ validOptions, executableDir }), + { + max: DOWNLOAD_GRACE_TIME, + testInfo, + } + ) + + // Machine settings should simply serve as a baseline to ensure + // tests by default work smoothly. Any test specific preferences + // should be set in workspace settings instead. + + // Note: Not all settings can be set as machine settings, especially + // those with security implications. These are set as user settings + // which live inside the browser's IndexDB. There's + const machineDir = path.join(serverRootDir, 'data/Machine') + await fs.mkdir(machineDir, { recursive: true }) + await fs.writeFile( + path.join(machineDir, 'settings.json'), + JSON.stringify( + { + 'extensions.ignoreRecommendations': true, + 'workbench.editor.empty.hint': 'hidden', + 'workbench.startupEditor': 'none', + 'workbench.tips.enabled': false, + 'workbench.welcomePage.walkthroughs.openOnInstall': false, + 'workbench.colorTheme': 'Default Dark Modern', + // sane defaults + 'cody.debug.verbose': true, + }, + null, + 2 + ) + ) + + // Here we install the extensions requested. To speed things up we make use of a shared extension cache that we symlink to. + const extensionsDir = path.join(serverRootDir, 'extensions') + await fs.mkdir(extensionsDir, { recursive: true }) + const userDataDir = path.join(serverRootDir, 'data/User') + await fs.mkdir(userDataDir, { recursive: true }) + if (validOptions.vscodeExtensions.length > 0) { + //TODO(rnauta): Add lockfile wrapper to avoid race conditions + const sharedExtensionsDir = path.resolve( + process.cwd(), + validOptions.vscodeExtensionCacheDir + ) + if (!sharedExtensionsDir.endsWith('.vscode-server/extensions')) { + //right now there's no way of setting the extension installation directoy. Instead they are always install in ~/.vscode-server/extensions + throw new Error( + "Unfortunately VSCode doesn't provide a way yet to cache extensions isolated from a global installation. Please use ~/.code-server/extensions for now." 
+ ) + } + await fs.mkdir(sharedExtensionsDir, { recursive: true }) + const releaseLock = await waitForLock(sharedExtensionsDir, { + lockfilePath: path.join(sharedExtensionsDir, '.lock'), + delay: 1000, + }) + try { + const args = [ + ...validOptions.vscodeExtensions.flatMap(v => ['--install-extension', v]), + ] + await pspawn(codeTunnelCliPath, args, { + env: { + ...process.env, + // VSCODE_EXTENSIONS: sharedExtensionsDir, This doesn't work either + }, + stdio: ['inherit', 'inherit', 'inherit'], + }) + } catch (e) { + console.log('I AM HERE') + if ( + typeof e === 'string' && + e.includes('code version use stable --install-dir /path/to/installation') + ) { + } + console.error(e) + throw e + } finally { + releaseLock() + } + //we now read all the folders in the shared cache dir and + //symlink the relevant ones to our isolated extension dir + for (const sharedExtensionDir of await fs.readdir(sharedExtensionsDir)) { + const [_, extensionName] = /^(.*)-\d+\.\d+\.\d+$/.exec(sharedExtensionDir) ?? [] + if (!validOptions.vscodeExtensions.includes(extensionName?.toLowerCase())) { + continue + } + const sharedExtensionPath = path.join(sharedExtensionsDir, sharedExtensionDir) + const extensionPath = path.join(extensionsDir, sharedExtensionDir) + await fs.symlink(sharedExtensionPath, extensionPath) + } + } + //TODO: Fixed Port Ranges + + // We can now start the server + const connectionToken = '0000-0000' + const serverPort = validOptions.vscodeServerPortRange[0] + testInfo.parallelIndex + if (serverPort > validOptions.vscodeServerPortRange[1]) { + throw new Error( + 'Port range is exhausted. Either reduce the amount of workers or increase the port range.' + ) + } + const args = [ + 'serve-web', + `--user-data-dir=${userDataDir}`, + '--accept-server-license-terms', + `--port=${serverPort}`, + `--connection-token=${connectionToken}`, + `--cli-data-dir=${serverExecutableDir}`, + `--server-data-dir=${serverRootDir}`, + `--extensions-dir=${extensionsDir}`, // cli doesn't handle quotes properly so just escape spaces, + ] + + const env = { + ...process.env, + ...(['stable', 'insiders'].includes(validOptions.vscodeVersion) + ? { VSCODE_CLI_QUALITY: validOptions.vscodeVersion } + : { VSCODE_CLI_COMMIT: validOptions.vscodeVersion }), + TESTING_DOTCOM_URL: sourcegraphMitM.endpoint, + CODY_TESTING_BFG_DIR: path.resolve(process.cwd(), validOptions.binaryTmpDir), + CODY_TESTING_SYMF_DIR: path.resolve(process.cwd(), validOptions.binaryTmpDir), + } + const codeProcess = spawn(codeTunnelCliPath, args, { + env, + stdio: ['inherit', 'ignore', 'inherit'], + detached: false, + }) + + const config = { url: `http://127.0.0.1:${serverPort}/`, token: connectionToken } + await stretchTimeout(() => waitForVSCodeServer({ url: config.url, serverExecutableDir }), { + max: DOWNLOAD_GRACE_TIME, + testInfo, + }) + + await use(config) + + // Turn of logging browser logging and navigate away from the UI + // Otherwise we needlessly add a bunch of noisy error logs + if (page.url().startsWith(config.url)) { + await page.evaluate(() => { + console.log = () => {} + console.info = () => {} + console.warn = () => {} + console.error = () => {} + window.onerror = () => {} + }) + await page.goto('about:blank') + await page.waitForLoadState('domcontentloaded') + } + const exitPromise = new Promise(resolve => { + codeProcess.on('exit', () => { + resolve(void 0) + }) + }) + codeProcess.kill() + await exitPromise + }, + { scope: 'test' }, + ], + // This exposes some bare-bones VSCode APIs in the browser context. 
You can + // now simply execute a command from the chrome debugger which is a lot less + // flaky then relying on Button Clicks etc. + executeCommand: [ + async ({ page }, use) => { + const commandFn = async (command: string, ...args: any[]): Promise => { + return await _test.step( + 'executeCommand', + async () => { + await expect(page.locator('meta[name="__exposed-vscode-api__"]')).toBeAttached({ + timeout: 4000, + }) + const res = await page.evaluate( + async ({ command, args }) => { + //@ts-ignore + return await window._executeCommand(command, ...args) + }, + { + command, + args, + } + ) + return res + }, + { box: true } + ) + } + use(commandFn) + }, + { scope: 'test' }, + ], +}) + +export const fixture = mergeTests(optionsFixture, implFixture) as ReturnType< + typeof _test.extend +> + +fixture.beforeAll(async () => { + // This just registers polly adapters, it doesn't actually wire anything up + await fixture.step('Polly Register', () => { + Polly.register(NodeHttpAdapter) + Polly.register(CodyPersister) + }) +}) + +/** + * Waits for server components to be downloaded and that the server is ready to + * accept connections + */ +async function waitForVSCodeServer(config: { + url: string + serverExecutableDir: string + maxConnectionRetries?: number +}) { + const releaseServerDownloadLock = await waitForLock(config.serverExecutableDir, { + delay: 1000, + lockfilePath: path.join(config.serverExecutableDir, '.lock'), + }) + try { + let connectionIssueTries = config.maxConnectionRetries ?? 5 + while (true) { + try { + const res = await fetch(config.url) + if (res.status === 202) { + // we are still downloading here + } else if (res.status === 200 || res.status === 403) { + // 403 simply means we haven't supplied the token + // 200 probably means we didn't require a token + // either way we are ready to accept connections + return + } else { + console.error(`Unexpected status code ${res.status}`) + } + } catch (err) { + connectionIssueTries-- + if (connectionIssueTries <= 0) { + throw err + } + } + await new Promise(resolve => setTimeout(resolve, 1000)) + } + } finally { + releaseServerDownloadLock() + } +} + +/** + * This ensures only a single process is actually downloading VSCode + */ +async function downloadOrWaitForVSCode({ + executableDir, + validOptions, +}: Pick & { executableDir: string }) { + const lockfilePath = path.join(executableDir, '.lock') + const releaseLock = await waitForLock(executableDir, { lockfilePath, delay: 500 }) + + try { + const electronPath = await downloadAndUnzipVSCode({ + cachePath: executableDir, + version: 'stable', + reporter: new CustomConsoleReporter(process.stdout.isTTY), + }) + const installPath = path.join( + executableDir, + path.relative(executableDir, electronPath).split(path.sep)[0] + ) + const [cliPath] = resolveCliArgsFromVSCodeExecutablePath(electronPath) + //replce code with code-tunnel(.exe) either if the last binary or if code.exe + const tunnelPath = cliPath + .replace(/code$/, 'code-tunnel') + .replace(/code\.(?:exe|cmd)$/, 'code-tunnel.exe') + + // we need to make sure vscode has global configuration set + const res = await pspawn(tunnelPath, ['version', 'show'], { + stdio: ['inherit', 'pipe', 'inherit'], + }) + if (res.code !== 0 || res.stdout.includes('No existing installation found')) { + if (!validOptions.allowGlobalVSCodeModification) { + throw new Error('Global VSCode path modification is not allowed') + } + await pspawn(tunnelPath, ['version', 'use', 'stable', '--install-dir', installPath], { + stdio: ['inherit', 'inherit', 
'inherit'], + }) + } else if (res.code !== 0) { + throw new Error(JSON.stringify(res)) + } + return [cliPath, tunnelPath] + //If this fails I assume we haven't configured VSCode globally. Since + //getting portable mode to work is annoying we just set this + //installation as the global one. + } finally { + releaseLock() + } +} + +async function getFilesRecursive(dir: string): Promise> { + // lists all dirents recursively in a directory + let dirs: Array>> = [fs.readdir(dir, { withFileTypes: true })] + const files: Array = [] + while (dirs.length > 0) { + const ents = (await Promise.allSettled(dirs)).flat() + dirs = [] + for (const promise of ents) { + if (promise.status === 'rejected') { + // we don't care, we just don't want to leave out other logs that did succeed + continue + } + for (const ent of promise.value) { + if (ent.isFile()) { + files.push(ent) + } else if (ent.isDirectory()) { + dirs.push(fs.readdir(path.join(ent.path, ent.name), { withFileTypes: true })) + } + } + } + } + return files +} + +function failOrRetryRecordingOnError( + emitter: EventEmitter<{ error: [Error] }>, + testInfo: TestInfo +): typeof ProxyMiddlewarePlugin { + return (proxyServer, options) => { + //TODO(rnauta): retry with different settings if user accepts in cli prompt + proxyServer.on('error', (err, req, res) => { + emitter.emit('error', err) + }) + } +} + +// A custom version of the VS Code download reporter that silences matching installation +// notifications as these otherwise are emitted on every test run +class CustomConsoleReporter extends ConsoleReporter { + public report(report: ProgressReport): void { + if (report.stage !== ProgressReportStage.FoundMatchingInstall) { + super.report(report) + } + } +} diff --git a/vscode/e2e/utils/vscody/index.ts b/vscode/e2e/utils/vscody/index.ts new file mode 100644 index 000000000000..bfba3678c8d6 --- /dev/null +++ b/vscode/e2e/utils/vscody/index.ts @@ -0,0 +1,3 @@ +export type * from './fixture' +export { fixture } from './fixture' +export * as uix from './uix' diff --git a/vscode/e2e/utils/vscody/uix/README.md b/vscode/e2e/utils/vscody/uix/README.md new file mode 100644 index 000000000000..91e6967a8d79 --- /dev/null +++ b/vscode/e2e/utils/vscody/uix/README.md @@ -0,0 +1 @@ +These are a set of utility functions and common UX patterns that make tests more readable and composable. diff --git a/vscode/e2e/utils/vscody/uix/cody.ts b/vscode/e2e/utils/vscody/uix/cody.ts new file mode 100644 index 000000000000..8e29d6c71a12 --- /dev/null +++ b/vscode/e2e/utils/vscody/uix/cody.ts @@ -0,0 +1,79 @@ +import { expect, test as t } from '@playwright/test' +import type { UIXContextFnContext } from '.' 
+type WebViewCtx = Pick<UIXContextFnContext, 'page'> + +/** + * A web view can be positioned anywhere + */ +export class WebView { + private constructor( + public readonly id: string, + private ctx: WebViewCtx + ) {} + + public async waitUntilReady(timeout?: number): Promise<WebView> { + await this.ctx.page.waitForSelector(`iframe.webview.ready[name="${this.id}"]`, { + strict: true, + state: 'attached', + timeout: timeout, + }) + return this + } + + /** + * Can be used to check visibility + */ + public get wrapper() { + return this.ctx.page.locator(`div:has(> iframe.webview[name="${this.id}"])`) + } + + /** + * Can be used for accessing WebView Content + */ + public get content() { + return this.ctx.page.frameLocator(`.webview[name="${this.id}"]`).frameLocator('#active-frame') + } + + public static all( + ctx: WebViewCtx, + opts: { atLeast?: number; ignoring?: Array<WebView | string>; timeout?: number } = {} + ) { + return t.step('Cody.WebView.all', async () => { + const excludedIds = opts.ignoring?.map(id => (typeof id === 'string' ? id : id.id)) ?? [] + const nots = excludedIds.map(id => `:not([name="${id}"`).join('') + const validOptions = ctx.page.locator( + `iframe.webview[src*="extensionId=sourcegraph.cody-ai"]${nots}` + ) + + if (opts.atLeast) { + await expect(validOptions.nth(opts.atLeast - 1)).toBeAttached({ timeout: opts.timeout }) + } + + const ids = await validOptions.evaluateAll(frames => { + return frames.map(frame => frame.getAttribute('name')!).filter(Boolean) + }) + return ids.map(id => new WebView(id, ctx)) + }) + } +} + +export async function dummy() { + console.log('DUMMY') +} + +export async function waitForBinaryDownloads() {} + +export async function waitForIndexing() {} + +export async function waitForStartup() { + await Promise.all([waitForBinaryDownloads(), waitForIndexing()]) +} + +export async function sidebar<T>( + withSidebar: (sidebar: any) => Promise<T>, + ctx: Pick<UIXContextFnContext, 'page'> +): Promise<T> { + //todo: IFRAME Locator + const frame = await ctx.page.frameLocator('iframe') + return await withSidebar(frame) +} diff --git a/vscode/e2e/utils/vscody/uix/index.ts b/vscode/e2e/utils/vscody/uix/index.ts new file mode 100644 index 000000000000..497718ce68d1 --- /dev/null +++ b/vscode/e2e/utils/vscody/uix/index.ts @@ -0,0 +1,7 @@ +import type { PlaywrightTestArgs, PlaywrightWorkerArgs } from '@playwright/test' +import type { TestContext, WorkerContext } from '../fixture' +export * as vscode from './vscode' +export * as cody from './cody' +export * as workspace from './workspace' + +export type UIXContextFnContext = TestContext & WorkerContext & PlaywrightTestArgs & PlaywrightWorkerArgs diff --git a/vscode/e2e/utils/vscody/uix/vscode.ts b/vscode/e2e/utils/vscody/uix/vscode.ts new file mode 100644 index 000000000000..97b3ccda388c --- /dev/null +++ b/vscode/e2e/utils/vscody/uix/vscode.ts @@ -0,0 +1,170 @@ +import path from 'node:path' +import { test as t } from '@playwright/test' +import type { UIXContextFnContext } from '.' + +type SidebarCtx = Pick<UIXContextFnContext, 'page'> +export class Sidebar { + public static readonly CODY_VIEW_ID = 'workbench.view.extension.cody' + + private constructor(private ctx: SidebarCtx) {} + + public static get(ctx: SidebarCtx) { + return new Sidebar(ctx) + } + + public get locator() { + return this.ctx.page.locator('#workbench\\.parts\\.sidebar') + } + + private get splitViewContainer() { + return this.locator.locator('xpath=ancestor::*[contains(@class, "split-view-view")]').last() + } + + /** + * The viewlet is the content of the sidebar. Any webview will get + * positioned as anchored to this. 
+ */ + private get viewlet() { + return this.locator.locator('.composite.viewlet').first() + } + + public async isVisible() { + return await t.step('Sidebar.isVisible', async () => { + const classes = await this.splitViewContainer.getAttribute('class') + return classes?.split(' ').includes('visible') + }) + } + + public get activeView() { + return this.viewlet.getAttribute('id') + } +} + +export function startSession({ + page, + vscodeUI, + executeCommand, + workspaceDir, +}: Pick) { + return t.step('Start VSCode Session', async () => { + // we dummy route here so that we can modify the state etc. Which would + // otherwise be protected by the browser to match the domain + await page.route( + vscodeUI.url, + route => { + route.fulfill({ + status: 200, + body: '', + }) + }, + { times: 1 } + ) + await page.goto(vscodeUI.url) + // User settings are stored in IndexDB though so we need to get a bit + // clever. Normal "user settings" are better stored in Machine settings + // so that they can be easily edited as a normal file. Machine settings + // don't cover security sensitive settings though. + const userSettingsOk = await page.evaluate(async () => { + const openDatabase = () => { + return new Promise((resolve, reject) => { + const request = indexedDB.open('vscode-web-db') + + request.onupgradeneeded = (event: any) => { + const db = event.target.result + if (!db.objectStoreNames.contains('vscode-userdata-store')) { + db.createObjectStore('vscode-userdata-store') + } + } + + request.onsuccess = (event: any) => { + resolve(event.target.result) + } + + request.onerror = (event: any) => { + reject(event.target.errorCode) + } + }) + } + const putData = (db: any) => { + return new Promise((resolve, reject) => { + const transaction = db.transaction(['vscode-userdata-store'], 'readwrite') + const store = transaction.objectStore('vscode-userdata-store') + //TODO: Configurable overwrites + const settingsJSON = JSON.stringify( + { + 'security.workspace.trust.enabled': false, + 'extensions.autoCheckUpdates': false, + 'extensions.autoUpdate': false, + 'update.mode': 'none', + 'update.showReleaseNotes': false, + }, + null, + 2 + ) + const settingsData = new TextEncoder().encode(settingsJSON) + const putRequest = store.put(settingsData, '/User/settings.json') + putRequest.onsuccess = () => { + resolve(void 0) + } + putRequest.onerror = (event: any) => { + console.error(event) + reject(event.target.errorCode) + } + }) + } + + try { + const db = await openDatabase() + await putData(db) + return true + } catch (error) { + console.error('Error accessing IndexedDB:', error) + return false + } + }) + + if (!userSettingsOk) { + throw new Error('Failed to initialize VSCode User Settings') + } + + // We also make sure that on page loads we expose the VSCodeAPI + await page.addInitScript(async () => { + // only run this in the main frame + if (window && window.self === window.top) { + if (document.querySelector('meta[name="__exposed-vscode-api__"]') !== null) { + return + } + while (true) { + try { + const code = window.require('vs/workbench/workbench.web.main') + //@ts-ignore + window._vscode = code + //@ts-ignore + window._executeCommand = code.commands.executeCommand + // insert the meta tag if it doesn't already exist + // await page.waitForSelector('meta[name="__exposed-vscode-api__"]', { timeout: 1000 }) + const meta = document.createElement('meta') + meta.setAttribute('name', '__exposed-vscode-api__') + meta.setAttribute('content', 'true') + document.head.appendChild(meta) + return + } catch (err) { + // 
We'll try again in a bit. Eitehr require wasn't loaded yet or the module isn't imported yet + await new Promise(resolve => { + setTimeout(resolve, 100) + }) + } + } + } + }) + + // We can now authenticate and navigate to the UI + await page.goto(`${vscodeUI.url}?tkn=${vscodeUI.token}&folder=${path.resolve(workspaceDir)}`) + + // wait for the UI to be ready + await page.locator('iframe.web-worker-ext-host-iframe').waitFor({ + state: 'attached', + timeout: 10000, + }) + }) +} diff --git a/vscode/e2e/utils/vscody/uix/workspace.ts b/vscode/e2e/utils/vscody/uix/workspace.ts new file mode 100644 index 000000000000..a94fe4928a67 --- /dev/null +++ b/vscode/e2e/utils/vscody/uix/workspace.ts @@ -0,0 +1,29 @@ +import fs from 'node:fs/promises' +import path from 'node:path' +import { test as t } from '@playwright/test' +import type { UIXContextFnContext } from '.' +export function modifySettings( + modifyFn: (settings: Record | undefined) => Record, + { workspaceDir }: Pick +) { + return t.step( + 'Modify Workspace Settings', + async () => { + const existingConfig: string | undefined = await fs + .readFile(path.join(workspaceDir, '.vscode', 'settings.json'), 'utf-8') + .catch(err => { + if (err.code === 'ENOENT') { + return undefined + } + throw err + }) + const updatedConfig = modifyFn(existingConfig ? JSON.parse(existingConfig) : undefined) + await fs.mkdir(path.join(workspaceDir, '.vscode'), { recursive: true }) + fs.writeFile( + path.join(workspaceDir, '.vscode', 'settings.json'), + JSON.stringify(updatedConfig, null, 2) + ) + }, + { box: true } + ) +} diff --git a/vscode/package.json b/vscode/package.json index 4734c0afb036..3c72b681b039 100644 --- a/vscode/package.json +++ b/vscode/package.json @@ -40,6 +40,9 @@ "storybook": "storybook dev -p 6007 --no-open --no-version-updates", "test:e2e": "playwright install && tsc --build && node dist/tsc/test/e2e/install-deps.js && pnpm run -s build:dev:desktop && pnpm run -s test:e2e:run", "test:e2e:run": "playwright test", + "test:e2e2": "pnpm -s test:e2e2:deps && pnpm -s build:dev:desktop && pnpm -s test:e2e2:run", + "test:e2e2:run": "playwright test -c playwright.v2.config.ts", + "test:e2e2:deps": "playwright install chromium --with-deps", "test:integration": "tsc --build ./test/integration && pnpm run -s build:dev:desktop && node --inspect -r ts-node/register dist/tsc/test/integration/main.js", "test:unit": "vitest", "bench": "vitest bench", @@ -1382,6 +1385,7 @@ "diff": "^5.2.0", "fast-xml-parser": "^4.3.2", "glob": "^7.2.3", + "graceful-fs": "^4.2.11", "he": "^1.2.0", "http-proxy-agent": "^7.0.2", "https-proxy-agent": "^7.0.4", @@ -1405,6 +1409,7 @@ "rehype-sanitize": "^6.0.0", "remark-gfm": "^4.0.0", "semver": "^7.5.4", + "signal-exit": "^4.1.0", "socks-proxy-agent": "^8.0.1", "tailwind-merge": "^2.3.0", "tailwindcss": "^3.4.3", @@ -1417,7 +1422,9 @@ }, "devDependencies": { "@google-cloud/pubsub": "^3.7.3", + "@npmcli/promise-spawn": "^7.0.2", "@playwright/test": "1.44.1", + "@pollyjs/adapter": "^6.0.6", "@pollyjs/adapter-node-http": "^6.0.6", "@pollyjs/core": "^6.0.6", "@pollyjs/persister": "^6.0.6", @@ -1429,15 +1436,18 @@ "@types/express": "^4.17.17", "@types/fs-extra": "^11.0.4", "@types/glob": "^8.0.0", + "@types/graceful-fs": "^4.1.9", "@types/ini": "^4.1.0", "@types/isomorphic-fetch": "^0.0.39", "@types/js-levenshtein": "^1.1.1", "@types/lodash": "^4.14.195", "@types/marked": "^5.0.0", "@types/mocha": "^10.0.6", + "@types/npmcli__promise-spawn": "^6.0.3", "@types/pako": "^2.0.3", "@types/progress": "^2.0.5", "@types/semver": "^7.5.0", + 
"@types/signal-exit": "^3.0.4", "@types/unzipper": "^0.10.7", "@types/uuid": "^9.0.2", "@types/vscode": "^1.79.0", @@ -1447,6 +1457,7 @@ "ajv": "^8.14.0", "ajv-errors": "^3.0.0", "ajv-formats": "^3.0.1", + "chokidar": "^3.6.0", "concurrently": "^8.2.0", "dedent": "^0.7.0", "express": "^4.18.2", @@ -1454,6 +1465,7 @@ "franc-min": "^6.2.0", "fs-extra": "^11.2.0", "fuzzysort": "^2.0.4", + "http-proxy-middleware": "^3.0.0", "mocha": "^10.2.0", "ovsx": "^0.8.2", "pako": "^2.1.0", @@ -1463,9 +1475,11 @@ "progress": "^2.0.3", "react-head": "^3.4.2", "typescript-language-server": "^4.3.3", + "ulidx": "^2.3.0", "vite-plugin-svgr": "^4.2.0", "vscode-jsonrpc": "^8.2.0", "vscode-languageserver-protocol": "^3.17.5", - "yaml": "^2.3.4" + "yaml": "^2.3.4", + "zod": "^3.23.8" } } diff --git a/vscode/playwright.v2.config.ts b/vscode/playwright.v2.config.ts new file mode 100644 index 000000000000..ef88753fa122 --- /dev/null +++ b/vscode/playwright.v2.config.ts @@ -0,0 +1,111 @@ +import { mkdirSync, readdirSync, rmSync } from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import { type ReporterDescription, defineConfig } from '@playwright/test' +import { ulid } from 'ulidx' +import type { SymlinkExtensions } from './e2e/utils/symlink-extensions.setup' +import type { TestOptions, WorkerOptions } from './e2e/utils/vscody' + +const isWin = process.platform.startsWith('win') +const isCI = !!process.env.CI + +// This makes sure that each run gets a unique run id. This shouldn't really be +// used other than to invalidate lockfiles etc. +process.env.RUN_ID = process.env.RUN_ID || ulid() + +const globalTmpDir = path.resolve(__dirname, `../.test/runs/${process.env.RUN_ID}/`) +mkdirSync(globalTmpDir, { recursive: true }) +// get previous runs and delete them +for (const run of readdirSync(path.resolve(__dirname, '../.test/runs/'))) { + if (run !== process.env.RUN_ID) { + console.log('clearing previous run', run) + rmSync(path.resolve(__dirname, `../.test/runs/${run}`), { force: true, recursive: true }) + } +} + +export default defineConfig({ + workers: '50%', + retries: 0, // NO MORE FLAKE ALLOWED! It's a slippery slope. + forbidOnly: isCI, + fullyParallel: true, + timeout: isWin || isCI ? 30000 : 20000, + expect: { + timeout: isWin || isCI ? 10000 : 5000, + }, + use: { + // You can override options easily per project/worker/test so they are + // unlikely to need to be modified here. These are just some sane + // defaults + browserName: 'chromium', + repoRootDir: '../', //deprecated + vscodeExtensions: ['sourcegraph.cody-ai'], + symlinkExtensions: ['.'], + globalTmpDir: `../.test/runs/${process.env.RUN_ID}/`, //os.tmpdir(), + vscodeVersion: 'stablefff', + vscodeTmpDir: '../.test/global/vscode', + vscodeExtensionCacheDir: `${os.homedir()}/.vscode-server/extensions`, + vscodeServerTmpDir: '../.test/global/vscode-server', + binaryTmpDir: '../.test/global/bin', + recordIfMissing: + typeof process.env.CODY_RECORD_IF_MISSING === 'string' + ? process.env.CODY_RECORD_IF_MISSING === 'true' + : false, + recordingMode: (process.env.CODY_RECORDING_MODE as any) ?? 'replay', + recordingDir: '../recordings/vscode/', + keepUnusedRecordings: true, + bypassCSP: true, + locale: 'en-US', + timezoneId: 'America/Los_Angeles', + permissions: ['clipboard-read', 'clipboard-write'], + geolocation: { longitude: -122.40825783227943, latitude: 37.78124453182266 }, + acceptDownloads: false, + keepRuntimeDirs: 'all', + allowGlobalVSCodeModification: isCI, + trace: { + mode: isCI ? 
'retain-on-failure' : 'on', + attachments: true, + screenshots: true, + snapshots: true, + sources: true, + }, + }, + projects: [ + { + name: 'symlink-extensions', + testDir: './e2e/utils', + testMatch: ['symlink-extensions.setup.ts'], + }, + { + name: 'utils', + testDir: './e2e/utils', + testMatch: ['**/*.test.ts'], + dependencies: ['symlink-extensions'], + }, + { + name: 'e2e', + testDir: './e2e', + testMatch: ['**/*.test.ts'], + testIgnore: ['issues/**/*', 'utils/**/*'], + dependencies: ['symlink-extensions'], + use: { + // recordIfMissing: true, //uncomment for quick manual override + }, + }, + { + name: 'issues', + testDir: './e2e/issues', + retries: 0, + testMatch: ['**/*.test.ts'], + dependencies: ['symlink-extensions'], + use: { + // recordIfMissing: true, //uncomment for quick manual override + }, + }, + ], + reporter: [ + ['line', { printSteps: true, includeProjectInTestName: true }], + ['html', { outputFolder: '.test-reports', fileName: 'report.html', open: 'never' }], + ['json', { outputFile: '.test-reports/report.json', open: 'never' }], + ...(isCI ? [['github', {}] satisfies ReporterDescription] : []), + ], +}) diff --git a/vscode/src/graph/bfg/bfg.test.ts b/vscode/src/graph/bfg/bfg.test.ts new file mode 100644 index 000000000000..8db09f23e0cc --- /dev/null +++ b/vscode/src/graph/bfg/bfg.test.ts @@ -0,0 +1,82 @@ +import { mkdtemp, open, rm } from 'node:fs/promises' +import { tmpdir } from 'node:os' +import path from 'node:path' +import { describe, expect, it, vi } from 'vitest' +import { downloadFile } from '../../local-context/utils' +import { getOSArch } from '../../os' +import { _config, _getNamesForPlatform, _upsertBfgForPlatform, defaultBfgVersion } from './download-bfg' + +//@ts-ignore +_config.FILE_LOCK_RETRY_DELAY = 1 + +vi.mock('../../local-context/utils', async importOriginal => { + const mod = await importOriginal() + let firstDownload = true + return { + ...mod, + downloadFile: vi.fn(async (url: string, dest: string) => { + // we abandon the first download + if (firstDownload) { + await makeEmptyFile(dest) + firstDownload = false + throw new Error('Test Mock Deliberate Abandon') + } + await sleep(2) + // make an empty file + await makeEmptyFile(dest) + }), + unzip: vi.fn(async (zipPath: string, dest: string) => { + await sleep(2) + // just check the zip file exists first + if (!(await mod.fileExists(zipPath))) { + throw new Error("File doesn't exist") + } + // we ensure that at leats the expected file exists + const { platform, arch } = getOSArch() + const { bfgUnzippedFilename } = _getNamesForPlatform(platform!, arch!, defaultBfgVersion) + const bfgUnzippedPath = path.join(dest, bfgUnzippedFilename) + await makeEmptyFile(bfgUnzippedPath) + }), + } +}) + +describe('upsertBfgForPlatform', () => { + // NOTE: This really only checks downloads in the same Node process Instead + // we probably want to mock the fs and network layer directly and ensure + // that this works regardless of Mutex locks + it('prevents parallel downloads', async () => { + const dir = await mkdtemp(path.join(tmpdir(), 'bfg-')) + try { + // we first create a "abandoned" download so that we can ensure that + // after some expiration time one of the processes will forcefully + // download regardless + const abandonedDownload = _upsertBfgForPlatform(dir, defaultBfgVersion) + expect(await abandonedDownload).toBeNull() + vi.mocked(downloadFile).mockClear() + + // // we now start parallel async functions + const results = await Promise.all([ + _upsertBfgForPlatform(dir, defaultBfgVersion), + 
_upsertBfgForPlatform(dir, defaultBfgVersion), + _upsertBfgForPlatform(dir, defaultBfgVersion), + _upsertBfgForPlatform(dir, defaultBfgVersion), + ]) + // // only one actual download should have happened + expect(downloadFile).toHaveBeenCalledOnce() + // // expect all results to be the same and valid strings + expect(new Set(results).size).toBe(1) + expect(results[0]).toBeTruthy() + } finally { + await rm(dir, { recursive: true }) + } + }) +}) + +async function makeEmptyFile(filePath: string) { + const file = await open(filePath, 'w') + await file.close() +} + +function sleep(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)) +} diff --git a/vscode/src/graph/bfg/download-bfg.ts b/vscode/src/graph/bfg/download-bfg.ts index 2a1ff2336059..a750e3526c23 100644 --- a/vscode/src/graph/bfg/download-bfg.ts +++ b/vscode/src/graph/bfg/download-bfg.ts @@ -1,146 +1,190 @@ -import * as fs from 'node:fs' -import { promises as fspromises } from 'node:fs' +import fs from 'node:fs/promises' +import os from 'node:os' import path from 'node:path' - -import axios from 'axios' -import * as unzipper from 'unzipper' +import { SemverString } from '@sourcegraph/cody-shared/src/utils' import * as vscode from 'vscode' - -import { fileExists } from '../../local-context/download-symf' -import { logDebug } from '../../log' +import { downloadFile, fileExists, unzip } from '../../local-context/utils' +import { waitForLock } from '../../lockfile' +import { logDebug, logError } from '../../log' import { Arch, Platform, getOSArch } from '../../os' import { captureException } from '../../services/sentry/sentry' // Available releases: https://github.com/sourcegraph/bfg/releases -// Do not include 'v' in this string. -const defaultBfgVersion = '5.4.6040' - -// We use this Promise to only have one downloadBfg running at once. -let serializeBfgDownload: Promise = Promise.resolve(null) - -export async function downloadBfg(context: vscode.ExtensionContext): Promise { - // First, wait for any in-progress downloads. - await serializeBfgDownload - - // Now we are the in-progress download. - serializeBfgDownload = (async () => { - const config = vscode.workspace.getConfiguration() - const userBfgPath = config.get('cody.experimental.cody-engine.path') - if (userBfgPath) { - const bfgStat = await fspromises.stat(userBfgPath) - if (!bfgStat.isFile()) { - throw new Error(`not a file: ${userBfgPath}`) - } - logDebug('CodyEngine', `using user-provided path: ${userBfgPath} ${bfgStat.isFile()}`) - return userBfgPath +export type BfgVersionString = SemverString<''> +export const defaultBfgVersion: BfgVersionString = '5.4.6040' + +export const _config = { + //delay before trying to re-lock a active file + FILE_LOCK_RETRY_DELAY: 500, +} as const + +/** + * Get the path to `bfg` binary. If possible it will be downloaded. + */ +export async function getBfgPath(context: vscode.ExtensionContext): Promise { + // If user-specified symf path is set, use that + // TODO: maybe we do want an option to download bfg if it's not found? + const config = vscode.workspace.getConfiguration() + const userBfgPath = config.get('cody.experimental.cody-engine.path') + if (userBfgPath) { + if (!(await fileExists(userBfgPath))) { + throw new Error(`bfg can't be loaded from user provided path: ${userBfgPath}`) } + logDebug('CodyEngine', `Skipping download. 
Using user-provided bfg path: ${userBfgPath}`) + return userBfgPath + } - const osArch = getOSArch() - if (!osArch) { - logDebug('CodyEngine', 'getOSArch returned nothing') - return null - } - const { platform, arch } = osArch + const bfgContainingDir = + typeof process !== 'undefined' && process.env.CODY_TESTING_BFG_DIR + ? process.env.CODY_TESTING_BFG_DIR + : path.join(context.globalStorageUri.fsPath, 'cody-engine') - if (!arch) { - logDebug('CodyEngine', 'getOSArch returned undefined arch') - return null - } + // remove any preceding v symbol + const bfgVersion = SemverString.forcePrefix( + '', + config.get('cody.experimental.cody-engine.version', defaultBfgVersion) + ) - if (!platform) { - logDebug('CodyEngine', 'getOSArch returned undefined platform') - return null - } - // Rename returned architecture to match RFC 795 conventions - // https://docs.google.com/document/d/11cw-7dAp93JmasITNSNCtx31xrQsNB1L2OoxVE6zrTc/edit - const archRenames = new Map([ - ['aarch64', 'arm64'], - ['x86_64', 'x64'], - ]) - let rfc795Arch = archRenames.get(arch ?? '') ?? arch - if (rfc795Arch === Arch.Arm64 && platform === Platform.Windows) { - // On Windows Arm PCs, we rely on emulation and use the x64 binary. - // See https://learn.microsoft.com/en-us/windows/arm/apps-on-arm-x86-emulation - rfc795Arch = Arch.X64 - } + const bfgPath = await _upsertBfgForPlatform(bfgContainingDir, bfgVersion) + return bfgPath +} - const bfgContainingDir = path.join(context.globalStorageUri.fsPath, 'cody-engine') - const bfgVersion = config.get('cody.experimental.cody-engine.version', defaultBfgVersion) - await fspromises.mkdir(bfgContainingDir, { recursive: true }) - const bfgFilename = `cody-engine-${bfgVersion}-${platform}-${rfc795Arch}` - const bfgPath = path.join(bfgContainingDir, bfgFilename) - const isAlreadyDownloaded = await fileExists(bfgPath) - if (isAlreadyDownloaded) { - logDebug('CodyEngine', `using downloaded path "${bfgPath}"`) - return bfgPath - } +export async function _upsertBfgForPlatform( + containingDir: string, + version: BfgVersionString +): Promise { + const { platform, arch } = getOSArch() + if (!platform || !arch) { + // show vs code error message + void vscode.window.showErrorMessage( + `No bfg binary available for ${os.platform()}/${os.machine()}` + ) + logError('CodyEngine', `No bfg binary available for ${os.platform()}/${os.machine()}`) + return null + } + const { bfgFilename, bfgUnzippedFilename, rfc795Arch } = _getNamesForPlatform( + platform, + arch, + version + ) + const bfgPath = path.join(containingDir, bfgFilename) + + if (await fileExists(bfgPath)) { + logDebug('CodyEngine', `using downloaded bfg path "${bfgPath}"`) + return bfgPath + } - const bfgURL = `https://github.com/sourcegraph/bfg/releases/download/v${bfgVersion}/bfg-${platform}-${rfc795Arch}.zip` - try { - await vscode.window.withProgress( - { - location: vscode.ProgressLocation.Window, - title: 'Downloading cody-engine', - cancellable: false, - }, - async progress => { - progress.report({ message: 'Downloading cody-engine' }) - const bfgZip = path.join(bfgContainingDir, 'bfg.zip') - await downloadBfgBinary(bfgURL, bfgZip) - await unzipBfg(bfgZip, bfgContainingDir) - logDebug('CodyEngine', bfgPath) - // The zip file contains a binary named `bfg` or `bfg.exe`. We unzip it with that name first and then rename into - // a version-specific binary so that we can delete old versions of bfg. - const unzipPath = platform === Platform.Windows ? 
'bfg.exe' : 'bfg' - await fspromises.rename(path.join(bfgContainingDir, unzipPath), bfgPath) - await fspromises.chmod(bfgPath, 0o755) - await fspromises.rm(bfgZip) - logDebug('CodyEngine', `downloaded cody-engine to ${bfgPath}`) - } - ) - void removeOldBfgBinaries(bfgContainingDir, bfgFilename) - } catch (error) { - captureException(error) - void vscode.window.showErrorMessage(`Failed to download bfg from URL ${bfgURL}: ${error}`) - return null + const bfgURL = `https://github.com/sourcegraph/bfg/releases/download/v${version}/bfg-${platform}-${rfc795Arch}.zip` + + try { + const wasDownloaded = await downloadBfgBinary({ + bfgPath, + bfgURL, + bfgFilename, + bfgUnzippedFilename, + }) + if (wasDownloaded) { + //TODO: we can't always assume that nobody is using these still + void removeOldBfgBinaries(containingDir, bfgFilename) } return bfgPath - })() - return serializeBfgDownload + } catch (error) { + captureException(error) + void vscode.window.showErrorMessage(`Failed to download bfg: ${error}`) + return null + } } -async function unzipBfg(zipFile: string, destinationDir: string): Promise { - const zip = fs.createReadStream(zipFile).pipe(unzipper.Parse({ forceStream: true })) - for await (const entry of zip) { - if (entry.path.endsWith('/')) { - continue - } - entry.pipe(fs.createWriteStream(path.join(destinationDir, entry.path))) +export function _getNamesForPlatform( + platform: Platform, + arch: Arch, + version: BfgVersionString +): { bfgFilename: string; bfgUnzippedFilename: string; rfc795Arch: string } { + // Rename returned architecture to match RFC 795 conventions + // https://docs.google.com/document/d/11cw-7dAp93JmasITNSNCtx31xrQsNB1L2OoxVE6zrTc/edit + const archRenames = new Map([ + ['aarch64', 'arm64'], + ['x86_64', 'x64'], + ]) + let rfc795Arch = archRenames.get(arch ?? '') ?? arch + if (rfc795Arch === Arch.Arm64 && platform === Platform.Windows) { + // On Windows Arm PCs, we rely on emulation and use the x64 binary. + // See https://learn.microsoft.com/en-us/windows/arm/apps-on-arm-x86-emulation + rfc795Arch = Arch.X64 } + + const bfgFilename = `cody-engine-${version}-${platform}-${rfc795Arch}` + const bfgUnzippedFilename = platform === Platform.Windows ? 
'bfg.exe' : 'bfg' + return { bfgFilename, rfc795Arch, bfgUnzippedFilename } } -async function downloadBfgBinary(url: string, destination: string): Promise { - logDebug('CodyEngine', `downloading from URL ${url}`) - const response = await axios({ - url, - method: 'GET', - responseType: 'stream', - maxRedirects: 10, - }) - - const stream = fs.createWriteStream(destination) - response.data.pipe(stream) - - await new Promise((resolve, reject) => { - stream.on('finish', resolve) - stream.on('error', reject) - }) +async function downloadBfgBinary({ + bfgPath, + bfgFilename, + bfgUnzippedFilename, + bfgURL, +}: { + bfgPath: string + bfgFilename: string + bfgUnzippedFilename: string + bfgURL: string +}): Promise { + logDebug('CodyEngine', `downloading bfg from ${bfgURL}`) + return await vscode.window.withProgress( + { + location: vscode.ProgressLocation.Notification, + title: 'Downloading Cody search engine (bfg)', + cancellable: false, + }, + async (progress, cancel) => { + progress.report({ message: 'Checking bfg status' }) + const abortController = new AbortController() + cancel.onCancellationRequested(() => abortController.abort()) + + const bfgDir = path.dirname(bfgPath) + await fs.mkdir(bfgDir, { recursive: true }) + const unlockFn = await waitForLock(bfgDir, { + delay: _config.FILE_LOCK_RETRY_DELAY, + lockfilePath: `${bfgPath}.lock`, + }) + try { + if (await fileExists(bfgPath)) { + logDebug('CodyEngine', 'bfg already downloaded, reusing') + return false + } + + progress.report({ message: 'Downloading bfg' }) + + const bfgTmpDir = `${bfgPath}.tmp` + await fs.mkdir(bfgTmpDir, { recursive: true }) + + const bfgZipFile = path.join(bfgTmpDir, `${bfgFilename}.zip`) + + await downloadFile(bfgURL, bfgZipFile, abortController.signal) + progress.report({ message: 'Extracting bfg' }) + await unzip(bfgZipFile, bfgTmpDir) + logDebug('CodyEngine', `downloaded bfg to ${bfgTmpDir}`) + + const tmpFile = path.join(bfgTmpDir, bfgUnzippedFilename) + await fs.chmod(tmpFile, 0o755) + await fs.rename(tmpFile, bfgPath) + await fs.rm(bfgTmpDir, { recursive: true }) + + logDebug('CodyEngine', `extracted bfg to ${bfgPath}`) + return true + } finally { + unlockFn?.() + } + } + ) } async function removeOldBfgBinaries(containingDir: string, currentBfgPath: string): Promise { - const bfgDirContents = await fspromises.readdir(containingDir) - const oldBfgBinaries = bfgDirContents.filter(f => f.startsWith('bfg') && f !== currentBfgPath) + const bfgDirContents = await fs.readdir(containingDir) + const oldBfgBinaries = bfgDirContents.filter( + f => f.startsWith('cody-engine-') && f !== currentBfgPath + ) for (const oldBfgBinary of oldBfgBinaries) { - await fspromises.rm(path.join(containingDir, oldBfgBinary)) + await fs.rm(path.join(containingDir, oldBfgBinary)) } } diff --git a/vscode/src/graph/bfg/spawn-bfg.ts b/vscode/src/graph/bfg/spawn-bfg.ts index 92067648eae4..3168c7d7a2a9 100644 --- a/vscode/src/graph/bfg/spawn-bfg.ts +++ b/vscode/src/graph/bfg/spawn-bfg.ts @@ -5,13 +5,13 @@ import * as vscode from 'vscode' import { StreamMessageReader, StreamMessageWriter, createMessageConnection } from 'vscode-jsonrpc/node' import { MessageHandler } from '../../jsonrpc/jsonrpc' import { logDebug } from '../../log' -import { downloadBfg } from './download-bfg' +import { getBfgPath } from './download-bfg' export async function spawnBfg( context: vscode.ExtensionContext, reject: (reason?: any) => void ): Promise { - const codyrpc = await downloadBfg(context) + const codyrpc = await getBfgPath(context) if (!codyrpc) { throw new Error( 
'Failed to download BFG binary. To fix this problem, set the "cody.experimental.cody-engine.path" configuration to the path of your BFG binary' diff --git a/vscode/src/local-context/download-symf.ts b/vscode/src/local-context/download-symf.ts index 1f4dddf34644..03d55fa856f2 100644 --- a/vscode/src/local-context/download-symf.ts +++ b/vscode/src/local-context/download-symf.ts @@ -1,98 +1,116 @@ -import * as fs from 'node:fs' -import fspromises from 'node:fs/promises' -import * as os from 'node:os' -import * as path from 'node:path' - -import axios from 'axios' -import * as unzipper from 'unzipper' +import fs from 'node:fs/promises' +import os from 'node:os' +import path from 'node:path' +import type { SemverString } from '@sourcegraph/cody-shared/src/utils' import * as vscode from 'vscode' - -import { Mutex } from 'async-mutex' -import { logDebug } from '../log' -import { Platform, getOSArch } from '../os' +import { waitForLock } from '../lockfile' +import { logDebug, logError } from '../log' +import { type Arch, Platform, getOSArch } from '../os' import { captureException } from '../services/sentry/sentry' +import { downloadFile, fileExists, unzip } from './utils' + +export type SymfVersionString = SemverString<'v'> +const symfVersion: SymfVersionString = 'v0.0.12' -const symfVersion = 'v0.0.12' +export const _config = { + // delay before trying to re-lock an active file + FILE_LOCK_RETRY_DELAY: 500, +} as const /** - * Get the path to `symf`. If the symf binary is not found, download it. + * Get the path to the `symf` binary. If possible, it will be downloaded. */ export async function getSymfPath(context: vscode.ExtensionContext): Promise { // If user-specified symf path is set, use that + // TODO: maybe we do want an option to download symf if it's not found? const config = vscode.workspace.getConfiguration() const userSymfPath = config.get('cody.experimental.symf.path') ?? config.get('cody.internal.symf.path') if (userSymfPath) { - logDebug('symf', `using user symf: ${userSymfPath}`) + if (!(await fileExists(userSymfPath))) { + throw new Error(`symf can't be loaded from user provided path: ${userSymfPath}`) + } + logDebug('symf', `Skipping download. Using user specified symf path: ${userSymfPath}`) return userSymfPath } - const symfContainingDir = path.join(context.globalStorageUri.fsPath, 'symf') - return await _getSymfPath(symfContainingDir) + // TODO(rnauta): move all test overrides to helper class + const symfContainingDir = + typeof process !== 'undefined' && process.env.CODY_TESTING_SYMF_DIR + ?
process.env.CODY_TESTING_SYMF_DIR + : path.join(context.globalStorageUri.fsPath, 'symf') + + const symfPath = await _upsertSymfForPlatform(symfContainingDir) + return symfPath } -const downloadLock = new Mutex() - -export async function _getSymfPath( - symfContainingDir: string, - actualDownloadSymf: (op: { - symfPath: string - symfFilename: string - symfUnzippedFilename: string - symfURL: string - }) => Promise = downloadSymf -): Promise { +/** + * Returns the platform specific symf path or downloads it if needed + * @param containingDir the directory in which the symf binary will be stored + * @returns symf path for platform + */ +export async function _upsertSymfForPlatform(containingDir: string): Promise { const { platform, arch } = getOSArch() if (!platform || !arch) { // show vs code error message void vscode.window.showErrorMessage( `No symf binary available for ${os.platform()}/${os.machine()}` ) + logError('CodyEngine', `No symf binary available for ${os.platform()}/${os.machine()}`) return null } - // Releases (eg at https://github.com/sourcegraph/symf/releases) are named with the Zig platform - // identifier (linux-musl, windows-gnu, macos). - const zigPlatform = - platform === Platform.Linux - ? 'linux-musl' - : platform === Platform.Windows - ? 'windows-gnu' - : platform + const { symfFilename, symfUnzippedFilename, zigPlatform } = _getNamesForPlatform(platform, arch) + const symfPath = path.join(containingDir, symfFilename) - const symfFilename = `symf-${symfVersion}-${arch}-${platform}` - const symfUnzippedFilename = `symf-${arch}-${zigPlatform}` // the filename inside the zip - const symfPath = path.join(symfContainingDir, symfFilename) if (await fileExists(symfPath)) { logDebug('symf', `using downloaded symf "${symfPath}"`) return symfPath } const symfURL = `https://github.com/sourcegraph/symf/releases/download/${symfVersion}/symf-${arch}-${zigPlatform}.zip` - // Download symf binary with vscode progress api try { - await downloadLock.acquire() - // Re-check if it has been downloaded - if (await fileExists(symfPath)) { - logDebug('symf', 'symf already downloaded, reusing') - return symfPath + const wasDownloaded = await downloadSymfBinary({ + symfPath, + symfURL, + symfFilename, + symfUnzippedFilename, + }) + if (wasDownloaded) { + void removeOldSymfBinaries(containingDir, symfFilename) } - - await actualDownloadSymf({ symfPath, symfURL, symfFilename, symfUnzippedFilename }) - void removeOldSymfBinaries(symfContainingDir, symfFilename) + return symfPath } catch (error) { captureException(error) void vscode.window.showErrorMessage(`Failed to download symf: ${error}`) return null - } finally { - downloadLock.release() } +} - return symfPath +export function _getNamesForPlatform( + platform: Platform, + arch: Arch +): { symfFilename: string; symfUnzippedFilename: string; zigPlatform: string } { + // Releases (eg at https://github.com/sourcegraph/symf/releases) are named with the Zig platform + // identifier (linux-musl, windows-gnu, macos). + const zigPlatform = + platform === Platform.Linux + ? 'linux-musl' + : platform === Platform.Windows + ? 'windows-gnu' + : platform + + const symfFilename = `symf-${symfVersion}-${arch}-${platform}` + const symfUnzippedFilename = `symf-${arch}-${zigPlatform}` // the filename inside the zip + return { symfFilename, symfUnzippedFilename, zigPlatform } } -async function downloadSymf({ +/** + * Downloads symf from the given URL to a given path. 
+ * @returns true if the file was downloaded new or false if the file already existed + */ +async function downloadSymfBinary({ symfPath, symfFilename, symfUnzippedFilename, @@ -102,76 +120,60 @@ async function downloadSymf({ symfFilename: string symfUnzippedFilename: string symfURL: string -}): Promise { +}): Promise { logDebug('symf', `downloading symf from ${symfURL}`) - - await vscode.window.withProgress( + return await vscode.window.withProgress( { location: vscode.ProgressLocation.Notification, title: 'Downloading Cody search engine (symf)', cancellable: false, }, - async progress => { - const symfTmpDir = `${symfPath}.tmp` - progress.report({ message: 'Downloading symf and extracting symf' }) - - await fspromises.mkdir(symfTmpDir, { recursive: true }) - const symfZipFile = path.join(symfTmpDir, `${symfFilename}.zip`) - await downloadFile(symfURL, symfZipFile) - await unzipSymf(symfZipFile, symfTmpDir) - logDebug('symf', `downloaded symf to ${symfTmpDir}`) - - const tmpFile = path.join(symfTmpDir, symfUnzippedFilename) - await fspromises.chmod(tmpFile, 0o755) - await fspromises.rename(tmpFile, symfPath) - await fspromises.rm(symfTmpDir, { recursive: true }) - - logDebug('symf', `extracted symf to ${symfPath}`) + async (progress, cancel) => { + progress.report({ message: 'Checking symf status' }) + const abortController = new AbortController() + cancel.onCancellationRequested(() => abortController.abort()) + + const symfDir = path.dirname(symfPath) + await fs.mkdir(symfDir, { recursive: true }) + const unlockFn = await waitForLock(symfDir, { + delay: _config.FILE_LOCK_RETRY_DELAY, + lockfilePath: `${symfPath}.lock`, + }) + + try { + if (await fileExists(symfPath)) { + logDebug('symf', 'symf already downloaded, reusing') + return false + } + progress.report({ message: 'Downloading symf' }) + + const symfTmpDir = `${symfPath}.tmp` + await fs.mkdir(symfTmpDir, { recursive: true }) + const symfZipFile = path.join(symfTmpDir, `${symfFilename}.zip`) + + await downloadFile(symfURL, symfZipFile, abortController.signal) + progress.report({ message: 'Extracting symf' }) + await unzip(symfZipFile, symfTmpDir) + logDebug('symf', `downloaded symf to ${symfTmpDir}`) + + const tmpFile = path.join(symfTmpDir, symfUnzippedFilename) + await fs.chmod(tmpFile, 0o755) + await fs.rename(tmpFile, symfPath) + await fs.rm(symfTmpDir, { recursive: true }) + + logDebug('symf', `extracted symf to ${symfPath}`) + return true + } finally { + unlockFn?.() + } } ) } -export async function fileExists(path: string): Promise { - try { - await fspromises.access(path) - return true - } catch { - return false - } -} - -async function downloadFile(url: string, outputPath: string): Promise { - logDebug('Symf', `downloading from URL ${url}`) - const response = await axios({ - url, - method: 'GET', - responseType: 'stream', - maxRedirects: 10, - }) - - const stream = fs.createWriteStream(outputPath) - response.data.pipe(stream) - - await new Promise((resolve, reject) => { - stream.on('finish', resolve) - stream.on('error', reject) - }) -} - -async function unzipSymf(zipFile: string, destinationDir: string): Promise { - const zip = fs.createReadStream(zipFile).pipe(unzipper.Parse({ forceStream: true })) - for await (const entry of zip) { - if (entry.path.endsWith('/')) { - continue - } - entry.pipe(fs.createWriteStream(path.join(destinationDir, entry.path))) - } -} - async function removeOldSymfBinaries(containingDir: string, currentSymfPath: string): Promise { - const symfDirContents = await fspromises.readdir(containingDir) 
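// ---------------------------------------------------------------------------
// Editor's illustrative sketch (not part of this patch): downloadSymfBinary above and
// downloadBfgBinary in download-bfg.ts follow the same "lock, re-check, download to a temp
// dir, atomically publish" flow. A minimal version of that flow, assuming the waitForLock,
// fileExists, downloadFile and unzip helpers plus the fs/path imports already used in this
// file (downloadBinaryOnce is a hypothetical name):
async function downloadBinaryOnce(binPath: string, url: string, unzippedName: string): Promise<boolean> {
    await fs.mkdir(path.dirname(binPath), { recursive: true })
    // Only one process downloads at a time; everyone else waits on the lockfile.
    const unlock = await waitForLock(path.dirname(binPath), {
        delay: 500,
        lockfilePath: `${binPath}.lock`,
    })
    try {
        if (await fileExists(binPath)) {
            return false // another process finished the download while we were waiting
        }
        const tmpDir = `${binPath}.tmp`
        await fs.mkdir(tmpDir, { recursive: true })
        const zipFile = path.join(tmpDir, 'download.zip')
        await downloadFile(url, zipFile)
        await unzip(zipFile, tmpDir)
        await fs.chmod(path.join(tmpDir, unzippedName), 0o755)
        // Publishing via rename means the final path only appears once the binary is complete.
        await fs.rename(path.join(tmpDir, unzippedName), binPath)
        await fs.rm(tmpDir, { recursive: true })
        return true
    } finally {
        unlock?.()
    }
}
// ---------------------------------------------------------------------------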
+ const symfDirContents = await fs.readdir(containingDir) const oldSymfBinaries = symfDirContents.filter(f => f.startsWith('symf-') && f !== currentSymfPath) for (const oldSymfBinary of oldSymfBinaries) { - await fspromises.rm(path.join(containingDir, oldSymfBinary)) + await fs.rm(path.join(containingDir, oldSymfBinary)) } } diff --git a/vscode/src/local-context/symf.test.ts b/vscode/src/local-context/symf.test.ts index b2d69c420cd8..775de917bef1 100644 --- a/vscode/src/local-context/symf.test.ts +++ b/vscode/src/local-context/symf.test.ts @@ -1,36 +1,85 @@ -import { describe, expect, it } from 'vitest' - -import { _getSymfPath } from './download-symf' - -import { mkdtemp, open, rmdir } from 'node:fs/promises' +import { mkdtemp, open, rm } from 'node:fs/promises' import { tmpdir } from 'node:os' import path from 'node:path' +import { describe, expect, it, vi } from 'vitest' +import { getOSArch } from '../os' +import { _config, _getNamesForPlatform, _upsertSymfForPlatform } from './download-symf' +import { downloadFile } from './utils' + +//@ts-ignore +_config.FILE_LOCK_RETRY_DELAY = 1 + +vi.mock('./utils', async importOriginal => { + // use the vscode mock inside this mock too + const mod = await importOriginal() + let firstDownload = true + return { + ...mod, + downloadFile: vi.fn(async (url: string, dest: string) => { + // we abandon the first download + if (firstDownload) { + await makeEmptyFile(dest) + firstDownload = false + throw new Error('Test Mock Deliberate Abandon') + } + await sleep(2) + // make an empty file + await makeEmptyFile(dest) + }), + unzip: vi.fn(async (zipPath: string, dest: string) => { + await sleep(2) + // just check the zip file exists first + if (!(await mod.fileExists(zipPath))) { + throw new Error("File doesn't exist") + } + // we ensure that at least the expected file exists + const { platform, arch } = getOSArch() + const { symfUnzippedFilename } = _getNamesForPlatform(platform!, arch!)
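// (Editor's illustrative note, not part of this patch: the surrounding vi.mock call uses
// vitest's `importOriginal` factory argument, the standard pattern for partially mocking a
// module — keep every real export and override only a few, roughly:
//     vi.mock('./utils', async importOriginal => {
//         const real = await importOriginal<typeof import('./utils')>()
//         return { ...real, downloadFile: vi.fn() }
//     })
// Here downloadFile and unzip are stubbed while fileExists and the other helpers stay real.)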
+ const symfUnzippedPath = path.join(dest, symfUnzippedFilename) + await makeEmptyFile(symfUnzippedPath) + }), + } +}) -describe('download-symf', () => { - it('no parallel download', async () => { +describe('upsertSymfForPlatform', () => { + // NOTE: This really only checks downloads in the same Node process. Instead + // we probably want to mock the fs and network layer directly and ensure + // that this works regardless of Mutex locks + it('prevents parallel downloads', async () => { const dir = await mkdtemp(path.join(tmpdir(), 'symf-')) try { - const makeEmptyFile = async (filePath: string) => { - const file = await open(filePath, 'w') - await file.close() - } + // we first create an "abandoned" download so that we can ensure that + // after some expiration time one of the processes will forcefully + // download regardless + const abandonedDownload = _upsertSymfForPlatform(dir) + expect(await abandonedDownload).toBeNull() - let mockDownloadSymfCalled = 0 - const mockDownloadSymf = async (op: { - symfPath: string - symfFilename: string - symfURL: string - }): Promise => { - mockDownloadSymfCalled++ - await makeEmptyFile(op.symfPath) - } - const symfPaths = await Promise.all( - [...Array(10).keys()].map(() => _getSymfPath(dir, mockDownloadSymf)) - ) - expect(symfPaths.every(p => p === symfPaths[0])).toBeTruthy() - expect(mockDownloadSymfCalled).toEqual(1) + vi.mocked(downloadFile).mockClear() + + // we now start parallel async functions + const results = await Promise.all([ + _upsertSymfForPlatform(dir), + _upsertSymfForPlatform(dir), + _upsertSymfForPlatform(dir), + _upsertSymfForPlatform(dir), + ]) + // only one actual download should have happened + expect(downloadFile).toHaveBeenCalledOnce() + + // expect all results to be the same and valid strings + expect(new Set(results).size).toBe(1) + expect(results[0]).toBeTruthy() } finally { - await rmdir(dir, { recursive: true }) + await rm(dir, { recursive: true }) } }) }) + +async function makeEmptyFile(filePath: string) { + const file = await open(filePath, 'w') + await file.close() +} + +function sleep(ms: number): Promise { + return new Promise(resolve => setTimeout(resolve, ms)) +} diff --git a/vscode/src/local-context/utils.ts b/vscode/src/local-context/utils.ts new file mode 100644 index 000000000000..f1d0e4384af6 --- /dev/null +++ b/vscode/src/local-context/utils.ts @@ -0,0 +1,68 @@ +import syncfs from 'node:fs' +import fs from 'node:fs/promises' +import path from 'node:path' +import axios from 'axios' +import unzipper from 'unzipper' + +export async function pathExists(path: string): Promise { + try { + await fs.access(path) + return true + } catch { + return false + } +} + +/** + * Determines whether the path exists and is a file + * @param path + * @returns whether a file exists at the specified path + */ +export async function fileExists(path: string): Promise { + try { + const stat = await fs.stat(path) + return stat.isFile() + } catch (err: any) { + if (err.code === 'ENOENT') { + return false + } + // throw on other errors + throw err + } +} + +/** + * This downloads a URL to a specific location and overwrites the existing file + * if it exists + */ +export async function downloadFile( + url: string, + outputPath: string, + signal?: AbortSignal +): Promise { + const response = await axios({ + url, + method: 'GET', + responseType: 'stream', + maxRedirects: 10, + signal: signal, + }) + + const stream = syncfs.createWriteStream(outputPath, { autoClose: true, flags: 'w' }) + response.data.pipe(stream) + + await new Promise((resolve, reject)
=> { + stream.on('finish', resolve) + stream.on('error', reject) + }) +} + +export async function unzip(zipFile: string, destinationDir: string): Promise { + const zip = syncfs.createReadStream(zipFile).pipe(unzipper.Parse({ forceStream: true })) + for await (const entry of zip) { + if (entry.path.endsWith('/')) { + continue + } + entry.pipe(syncfs.createWriteStream(path.join(destinationDir, entry.path))) + } +} diff --git a/vscode/src/lockfile.ts b/vscode/src/lockfile.ts new file mode 100644 index 000000000000..f15b7eb2c6d1 --- /dev/null +++ b/vscode/src/lockfile.ts @@ -0,0 +1,443 @@ +//@ts-nocheck + +// This is a modified and slightly stripped down version of +// https://github.com/microsoft/playwright/commit/8f62aa933562d37f344015cf4e43775fbf81716b +// The original seems no longer maintained and has a critical bug +// https://github.com/moxystudio/node-proper-lockfile/issues/111. It was stripped +// to keep dependencies minimal. TODO: This doesn't seem like a very clean +// long-term solution. + +/** + * + * The MIT License (MIT) + * + * Copyright (c) 2018 Made With MOXY Lda + * Modifications copyright (c) Microsoft Corporation. + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + * THE SOFTWARE.
+ */ +import path from 'node:path' +import fs from 'graceful-fs' +import { onExit } from 'signal-exit' +const locks = {} +const cacheSymbol = Symbol() + +interface LockOptions { + stale?: number + update?: number + realpath?: boolean + lockfilePath?: string +} +export async function lock(file: string, options?: LockOptions): Promise<() => void> { + const release = await toPromise(_lock)(file, options) + return toPromise(release) +} + +interface WaitForLockOptions extends LockOptions { + delay: number + signal?: AbortSignal +} + +export async function waitForLock( + file: string, + { delay, signal, ...opts }: WaitForLockOptions +): Promise<() => void> { + while (!signal?.aborted) { + const unlockFn = await lock(file, opts).catch(err => { + if (err.code === 'ELOCKED') { + return undefined + } + throw err + }) + if (unlockFn) { + return unlockFn + } + await new Promise(resolve => setTimeout(resolve, delay)) + } +} + +function probe(file, fs, callback) { + const cachedPrecision = fs[cacheSymbol] + + if (cachedPrecision) { + return fs.stat(file, (err, stat) => { + /* istanbul ignore if */ + if (err) { + return callback(err) + } + + callback(null, stat.mtime, cachedPrecision) + }) + } + + // Set mtime by ceiling Date.now() to seconds + 5ms so that it's "not on the second" + const mtime = new Date(Math.ceil(Date.now() / 1000) * 1000 + 5) + + fs.utimes(file, mtime, mtime, err => { + /* istanbul ignore if */ + if (err) { + return callback(err) + } + + fs.stat(file, (err, stat) => { + /* istanbul ignore if */ + if (err) { + return callback(err) + } + + const precision = stat.mtime.getTime() % 1000 === 0 ? 's' : 'ms' + + // Cache the precision in a non-enumerable way + Object.defineProperty(fs, cacheSymbol, { value: precision }) + + callback(null, stat.mtime, precision) + }) + }) +} + +function getMtime(precision) { + let now = Date.now() + + if (precision === 's') { + now = Math.ceil(now / 1000) * 1000 + } + + return new Date(now) +} + +function getLockFile(file, options) { + return options.lockfilePath || `${file}.lock` +} + +function resolveCanonicalPath(file, options, callback) { + if (!options.realpath) { + return callback(null, path.resolve(file)) + } + + // Use realpath to resolve symlinks + // It also resolves relative paths + options.fs.realpath(file, callback) +} + +function acquireLock(file, options, callback) { + const lockfilePath = getLockFile(file, options) + + // Use mkdir to create the lockfile (atomic operation) + options.fs.mkdir(lockfilePath, err => { + if (!err) { + // At this point, we acquired the lock! + // Probe the mtime precision + return probe(lockfilePath, options.fs, (err, mtime, mtimePrecision) => { + // If it failed, try to remove the lock.. 
+ /* istanbul ignore if */ + if (err) { + options.fs.rmdir(lockfilePath, () => {}) + + return callback(err) + } + + callback(null, mtime, mtimePrecision) + }) + } + + // If error is not EEXIST then some other error occurred while locking + if (err.code !== 'EEXIST') { + return callback(err) + } + + // Otherwise, check if lock is stale by analyzing the file mtime + if (options.stale <= 0) { + return callback( + Object.assign(new Error('Lock file is already being held'), { code: 'ELOCKED', file }) + ) + } + + options.fs.stat(lockfilePath, (err, stat) => { + if (err) { + // Retry if the lockfile has been removed (meanwhile) + // Skip stale check to avoid recursiveness + if (err.code === 'ENOENT') { + return acquireLock(file, { ...options, stale: 0 }, callback) + } + + return callback(err) + } + + if (!isLockStale(stat, options)) { + return callback( + Object.assign(new Error('Lock file is already being held'), { + code: 'ELOCKED', + file, + }) + ) + } + + // If it's stale, remove it and try again! + // Skip stale check to avoid recursiveness + removeLock(file, options, err => { + if (err) { + return callback(err) + } + + acquireLock(file, { ...options, stale: 0 }, callback) + }) + }) + }) +} + +function isLockStale(stat, options) { + return stat.mtime.getTime() < Date.now() - options.stale +} + +function removeLock(file, options, callback) { + // Remove lockfile, ignoring ENOENT errors + options.fs.rmdir(getLockFile(file, options), err => { + if (err && err.code !== 'ENOENT') { + return callback(err) + } + + callback() + }) +} + +function updateLock(file, options) { + const lock = locks[file] + + // Just for safety, should never happen + /* istanbul ignore if */ + if (lock.updateTimeout) { + return + } + + lock.updateDelay = lock.updateDelay || options.update + lock.updateTimeout = setTimeout(() => { + lock.updateTimeout = null + + // Stat the file to check if mtime is still ours + // If it is, we can still recover from a system sleep or a busy event loop + options.fs.stat(lock.lockfilePath, (err, stat) => { + const isOverThreshold = lock.lastUpdate + options.stale < Date.now() + + // If it failed to update the lockfile, keep trying unless + // the lockfile was deleted or we are over the threshold + if (err) { + if (err.code === 'ENOENT' || isOverThreshold) { + return setLockAsCompromised(file, lock, Object.assign(err, { code: 'ECOMPROMISED' })) + } + + lock.updateDelay = 1000 + + return updateLock(file, options) + } + + const isMtimeOurs = lock.mtime.getTime() === stat.mtime.getTime() + + if (!isMtimeOurs) { + return setLockAsCompromised( + file, + lock, + Object.assign(new Error('Unable to update lock within the stale threshold'), { + code: 'ECOMPROMISED', + }) + ) + } + + const mtime = getMtime(lock.mtimePrecision) + + options.fs.utimes(lock.lockfilePath, mtime, mtime, err => { + const isOverThreshold = lock.lastUpdate + options.stale < Date.now() + + // Ignore if the lock was released + if (lock.released) { + return + } + + // If it failed to update the lockfile, keep trying unless + // the lockfile was deleted or we are over the threshold + if (err) { + if (err.code === 'ENOENT' || isOverThreshold) { + return setLockAsCompromised( + file, + lock, + Object.assign(err, { code: 'ECOMPROMISED' }) + ) + } + + lock.updateDelay = 1000 + + return updateLock(file, options) + } + + // All ok, keep updating.. 
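// (Editor's aside, illustrative and not part of this patch: the primitive underneath this
// lockfile module is that fs.mkdir is atomic — whichever process creates `<target>.lock`
// first owns the lock, EEXIST means another process holds it, and a holder that stops
// refreshing the lockfile's mtime for longer than `stale` ms is treated as dead so the lock
// can be reclaimed. Stripped to its core, acquireLock above does roughly:
//     fs.mkdir(lockfilePath, err => {
//         if (!err) return acquired()
//         if (err.code !== 'EEXIST') return failed(err)
//         fs.stat(lockfilePath, (e, stat) =>
//             e ? failed(e) : stat.mtime.getTime() < Date.now() - stale ? reclaim() : blocked())
//     })
// where acquired/failed/reclaim/blocked are hypothetical callbacks for the four outcomes.)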
+ lock.mtime = mtime + lock.lastUpdate = Date.now() + lock.updateDelay = null + updateLock(file, options) + }) + }) + }, lock.updateDelay) + + // Unref the timer so that the nodejs process can exit freely + // This is safe because all acquired locks will be automatically released + // on process exit + + // We first check that `lock.updateTimeout.unref` exists because some users + // may be using this module outside of NodeJS (e.g., in an electron app), + // and in those cases `setTimeout` return an integer. + /* istanbul ignore else */ + if (lock.updateTimeout.unref) { + lock.updateTimeout.unref() + } +} + +function setLockAsCompromised(file, lock, err) { + // Signal the lock has been released + lock.released = true + + // Cancel lock mtime update + // Just for safety, at this point updateTimeout should be null + /* istanbul ignore if */ + if (lock.updateTimeout) { + clearTimeout(lock.updateTimeout) + } + + if (locks[file] === lock) { + delete locks[file] + } + + lock.options.onCompromised(err) +} + +// ---------------------------------------------------------- + +function _lock(file, options, callback) { + /* istanbul ignore next */ + options = { + stale: 10000, + update: null, + realpath: true, + fs, + onCompromised: err => { + throw err + }, + ...options, + } + + options.stale = Math.max(options.stale || 0, 2000) + options.update = options.update == null ? options.stale / 2 : options.update || 0 + options.update = Math.max(Math.min(options.update, options.stale / 2), 1000) + + // Resolve to a canonical file path + resolveCanonicalPath(file, options, (err, file) => { + if (err) { + return callback(err) + } + + // Attempt to acquire the lock + acquireLock(file, options, (err, mtime, mtimePrecision) => { + if (err) { + return callback(err) + } + + // We now own the lock + const lockObj = { + lockfilePath: getLockFile(file, options), + mtime, + mtimePrecision, + options, + lastUpdate: Date.now(), + } + locks[file] = lockObj + + // We must keep the lock fresh to avoid staleness + updateLock(file, options) + + callback(null, releasedCallback => { + if (lockObj.released) { + return releasedCallback?.( + Object.assign(new Error('Lock is already released'), { + code: 'ERELEASED', + }) + ) + } + + // Not necessary to use realpath twice when unlocking + unlock(file, { ...options, realpath: false }, releasedCallback) + }) + }) + }) +} + +function unlock(file, options, callback) { + options = { + fs, + realpath: true, + ...options, + } + + // Resolve to a canonical file path + resolveCanonicalPath(file, options, (err, file) => { + if (err) { + return callback(err) + } + + // Skip if the lock is not acquired + const lock = locks[file] + + if (!lock) { + return callback( + Object.assign(new Error('Lock is not acquired/owned by you'), { code: 'ENOTACQUIRED' }) + ) + } + + lock.updateTimeout && clearTimeout(lock.updateTimeout) // Cancel lock mtime update + lock.released = true // Signal the lock has been released + delete locks[file] // Delete from locks + + removeLock(file, options, callback) + }) +} + +function toPromise(method) { + return (...args) => + new Promise((resolve, reject) => { + args.push((err, result) => { + if (err) { + reject(err) + } else { + resolve(result) + } + }) + method(...args) + }) +} + +// Remove acquired locks on exit +/* istanbul ignore next */ +onExit(() => { + for (const file in locks) { + const options = locks[file].options + + try { + options.fs.rmdirSync(getLockFile(file, options)) + } catch (e) { + /* Empty */ + } + } +}) diff --git 
a/vscode/src/testutils/CodyPersister.ts b/vscode/src/testutils/CodyPersister.ts index ac60a034a408..b140e2643d21 100644 --- a/vscode/src/testutils/CodyPersister.ts +++ b/vscode/src/testutils/CodyPersister.ts @@ -153,7 +153,12 @@ export class CodyPersister extends FSPersister { private filterHeaders( headers: { name: string; value: string }[] ): { name: string; value: string }[] { - const removeHeaderNames = new Set(['set-cookie', 'server', 'via']) + const removeHeaderNames = new Set([ + 'set-cookie', + 'server', + 'via', + 'x-sourcegraph-actor-anonymous-uid', + ]) const removeHeaderPrefixes = ['x-trace', 'cf-'] return headers.filter( header => diff --git a/vscode/src/testutils/mocks.ts b/vscode/src/testutils/mocks.ts index 6e4a68349175..172d33b5e50e 100644 --- a/vscode/src/testutils/mocks.ts +++ b/vscode/src/testutils/mocks.ts @@ -704,6 +704,12 @@ export enum UIKind { Web = 2, } +export enum ProgressLocation { + SourceControl = 1, + Window = 10, + Notification = 15, +} + export class FileSystemError extends Error { public code = 'FileSystemError' } @@ -753,6 +759,16 @@ export const vsCodeMocks = { key: 'foo', dispose: () => {}, }), + withProgress: async ( + options: vscode_types.ProgressOptions, + task: ( + progress: vscode_types.Progress<{ message?: string; increment?: number }>, + token: CancellationToken + ) => Thenable + ) => { + const cancel = new CancellationTokenSource() + return await task({ report: () => {} }, cancel.token) + }, visibleTextEditors: [], tabGroups: { all: [] }, }, @@ -804,14 +820,9 @@ export const vsCodeMocks = { DiagnosticSeverity, ViewColumn, TextDocumentChangeReason, + ProgressLocation, } as const -export enum ProgressLocation { - SourceControl = 1, - Window = 10, - Notification = 15, -} - export class MockFeatureFlagProvider extends FeatureFlagProvider { constructor(private readonly enabledFlags: Set) { super(null as any) diff --git a/vscode/src/testutils/polly.ts b/vscode/src/testutils/polly.ts index 7070fb121906..f2983a46decd 100644 --- a/vscode/src/testutils/polly.ts +++ b/vscode/src/testutils/polly.ts @@ -2,7 +2,7 @@ import { execSync } from 'node:child_process' import path from 'node:path' import jsonStableStringify from 'fast-json-stable-stringify' -import { type EXPIRY_STRATEGY, type Headers, type MODE, Polly } from '@pollyjs/core' +import { type EXPIRY_STRATEGY, type Headers, type MODE, Polly, type PollyConfig } from '@pollyjs/core' import { CodyNodeHttpAdapter } from './CodyNodeHttpAdapter' import { CodyPersister, redactAuthorizationHeader } from './CodyPersister' @@ -36,50 +36,52 @@ export function startPollyRecording(userOptions: PollyOptions): Polly { recordingsDir: options.recordingDirectory, }, }, - matchRequestsBy: { - order: false, + matchRequestsBy: defaultMatchRequestsBy, + }) +} - // Canonicalize JSON bodies so that we can replay the recording even if the JSON strings - // differ by semantically meaningless things like object key enumeration order. - body(body) { - try { - if (typeof body === 'string' && (body.startsWith('{') || body.startsWith('['))) { - return jsonStableStringify(JSON.parse(body)) - } - } catch {} - return body - }, +export const defaultMatchRequestsBy: PollyConfig['matchRequestsBy'] = { + order: false, + + // Canonicalize JSON bodies so that we can replay the recording even if the JSON strings + // differ by semantically meaningless things like object key enumeration order. 
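+    // (Editor's illustrative note, not part of this patch: fast-json-stable-stringify sorts
+    // object keys, so e.g. jsonStableStringify(JSON.parse('{"b":1,"a":2}')) and
+    // jsonStableStringify(JSON.parse('{"a":2,"b":1}')) both yield '{"a":2,"b":1}', which is
+    // what lets a recorded request match a replayed one whose body differs only in key order.)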
+ body(body) { + try { + if (typeof body === 'string' && (body.startsWith('{') || body.startsWith('['))) { + return jsonStableStringify(JSON.parse(body)) + } + } catch {} + return body + }, - // The logic below is a bit tricky to follow. Simplified, we need to - // ensure that Polly generates the same request ID regardless if - // we're running in record mode (with an access token) or in replay - // mode (with a redacted token). The ID is computed by Polly as the - // MD5 digest of all request "identifiers", which a JSON object that - // includes a "headers" property from the result of the function - // below. To better understand what's going on, it's helpful to read - // the implementation of Polly here: - // https://sourcegraph.com/github.com/Netflix/pollyjs@9b6bede12b7ee998472b8883c9dd01e2159e00a8/-/blob/packages/@pollyjs/core/src/-private/request.js?L281 - headers(headers): Headers { - // Get the authorization token. - const { authorization } = headers - let header = - typeof authorization === 'string' - ? authorization - : Array.isArray(authorization) - ? authorization.at(0) - : undefined + // The logic below is a bit tricky to follow. Simplified, we need to + // ensure that Polly generates the same request ID regardless if + // we're running in record mode (with an access token) or in replay + // mode (with a redacted token). The ID is computed by Polly as the + // MD5 digest of all request "identifiers", which a JSON object that + // includes a "headers" property from the result of the function + // below. To better understand what's going on, it's helpful to read + // the implementation of Polly here: + // https://sourcegraph.com/github.com/Netflix/pollyjs@9b6bede12b7ee998472b8883c9dd01e2159e00a8/-/blob/packages/@pollyjs/core/src/-private/request.js?L281 + headers(headers): Headers { + // Get the authorization token. + const { authorization } = headers + let header = + typeof authorization === 'string' + ? authorization + : Array.isArray(authorization) + ? authorization.at(0) + : undefined - // Redact it so that the ID is the same regardless if we're in record or replay - // mode. - if (header) { - header = redactAuthorizationHeader(header) - } + // Redact it so that the ID is the same regardless if we're in record or replay + // mode. + if (header) { + header = redactAuthorizationHeader(header) + } - // Normalize to always be a single header value (not an array). - return header ? { authorization: header } : {} - }, - }, - }) + // Normalize to always be a single header value (not an array). + return header ? { authorization: header } : {} + }, } function defaultPollyOptions( diff --git a/vscode/test/e2e/install-deps.ts b/vscode/test/e2e/install-deps.ts index e09ded581491..e6f7a08ada06 100644 --- a/vscode/test/e2e/install-deps.ts +++ b/vscode/test/e2e/install-deps.ts @@ -4,8 +4,9 @@ import { ConsoleReporter, type ProgressReport, ProgressReportStage, - downloadAndUnzipVSCode, + downloadAndUnzipVSCode as _downloadAndUnzipVSCode, } from '@vscode/test-electron' +import type { DownloadOptions } from '@vscode/test-electron/out/download' // The VS Code version to use for e2e tests (there is also a version in ../integration/main.ts used for integration tests). 
// @@ -24,8 +25,24 @@ class CustomConsoleReporter extends ConsoleReporter { } } +/** + * Patches the default logger but otherwise leaves all options available + * @param opts + */ +export function downloadAndUnzipVSCode(opts: Partial) { + return _downloadAndUnzipVSCode( + Object.assign( + { + version: vscodeVersion, + reporter: new CustomConsoleReporter(process.stdout.isTTY), + } satisfies Partial, + opts + ) + ) +} + export function installVsCode(): Promise { - return downloadAndUnzipVSCode( + return _downloadAndUnzipVSCode( vscodeVersion, undefined, new CustomConsoleReporter(process.stdout.isTTY) diff --git a/vscode/tsconfig.json b/vscode/tsconfig.json index 7f82d3ad061c..41263a395340 100644 --- a/vscode/tsconfig.json +++ b/vscode/tsconfig.json @@ -20,6 +20,7 @@ "playwright.config.ts", "test/e2e", "test/e2e/utils/commands.json", + "e2e", "webviews", "webviews/*.d.ts", "package.json",