diff --git a/.github/actions/run-interop-hole-punch-test/action.yml b/.github/actions/run-interop-hole-punch-test/action.yml
index f73647469..5ef607b06 100644
--- a/.github/actions/run-interop-hole-punch-test/action.yml
+++ b/.github/actions/run-interop-hole-punch-test/action.yml
@@ -86,6 +86,11 @@ runs:
     - name: Load cache and build
       working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash
diff --git a/.github/actions/run-interop-ping-test/action.yml b/.github/actions/run-interop-ping-test/action.yml
index b351f1d17..434ed19d1 100644
--- a/.github/actions/run-interop-ping-test/action.yml
+++ b/.github/actions/run-interop-ping-test/action.yml
@@ -78,6 +78,11 @@ runs:
     - name: Load cache and build
      working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash
diff --git a/.github/actions/run-transport-interop-test/action.yml b/.github/actions/run-transport-interop-test/action.yml
index 09eaaf33d..8122f4d34 100644
--- a/.github/actions/run-transport-interop-test/action.yml
+++ b/.github/actions/run-transport-interop-test/action.yml
@@ -78,6 +78,11 @@ runs:
     - name: Load cache and build
       working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash
diff --git a/.github/workflows/hole-punch-interop.yml b/.github/workflows/hole-punch-interop.yml
index 723cc2327..d306da2b7 100644
--- a/.github/workflows/hole-punch-interop.yml
+++ b/.github/workflows/hole-punch-interop.yml
@@ -28,7 +28,7 @@ jobs:
       - uses: actions/checkout@v3
       - uses: ./.github/actions/run-interop-hole-punch-test
         with:
-          s3-cache-bucket: libp2p-by-tf-aws-bootstrap
-          s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
-          s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
+          s3-cache-bucket: ${{ vars.S3_LIBP2P_BUILD_CACHE_BUCKET_NAME }}
+          s3-access-key-id: ${{ vars.S3_LIBP2P_BUILD_CACHE_AWS_ACCESS_KEY_ID }}
+          s3-secret-access-key: ${{ secrets.S3_LIBP2P_BUILD_CACHE_AWS_SECRET_ACCESS_KEY }}
           worker-count: 16
diff --git a/.github/workflows/transport-interop.yml b/.github/workflows/transport-interop.yml
index 275ebb1bf..e5fcc6dc2 100644
--- a/.github/workflows/transport-interop.yml
+++ b/.github/workflows/transport-interop.yml
@@ -18,12 +18,12 @@ jobs:
       - uses: actions/checkout@v3
       - uses: ./.github/actions/run-transport-interop-test
         with:
-          s3-cache-bucket: libp2p-by-tf-aws-bootstrap
-          s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
-          s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
+          s3-cache-bucket: ${{ vars.S3_LIBP2P_BUILD_CACHE_BUCKET_NAME }}
+          s3-access-key-id: ${{ vars.S3_LIBP2P_BUILD_CACHE_AWS_ACCESS_KEY_ID }}
+          s3-secret-access-key: ${{ secrets.S3_LIBP2P_BUILD_CACHE_AWS_SECRET_ACCESS_KEY }}
           worker-count: 16
   build-without-secrets:
-    runs-on: ubuntu-latest
+    runs-on: ['self-hosted', 'linux', 'x64', '4xlarge'] # https://github.com/pl-strflt/tf-aws-gh-runner/blob/main/runners.tf
     steps:
       - uses: actions/checkout@v3 # Purposely not using secrets to replicate how forks will behave.
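The new env blocks wire the cache bucket and credentials from the action inputs into the step that runs the cache script, because that script (see the cache.ts diffs below) now shells out to the AWS CLI, which reads AWS_REGION, AWS_ACCESS_KEY_ID, and AWS_SECRET_ACCESS_KEY from the environment. A minimal sketch of a pre-flight check the script could perform before shelling out; this helper is illustrative and not part of the diff, which only guards AWS_BUCKET:

    // Hypothetical pre-flight check (not in the diff): verify the variables
    // set by the action's env block before invoking the AWS CLI.
    const required = ['AWS_BUCKET', 'AWS_REGION', 'AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY'];
    const missing = required.filter((name) => !process.env[name]);
    if (missing.length > 0) {
        throw new Error(`Missing environment variables: ${missing.join(', ')}`);
    }

If the credentials are absent, the AWS CLI itself fails with its own error, which the script's try/catch blocks already surface.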
diff --git a/hole-punch-interop/helpers/cache.ts b/hole-punch-interop/helpers/cache.ts
index a5d07fdf8..cb54ad4e0 100755
--- a/hole-punch-interop/helpers/cache.ts
+++ b/hole-punch-interop/helpers/cache.ts
@@ -1,8 +1,9 @@
-const AWS_BUCKET = process.env.AWS_BUCKET || 'libp2p-by-tf-aws-bootstrap';
+const AWS_BUCKET = process.env.AWS_BUCKET;
 const scriptDir = __dirname;
 
 import * as crypto from 'crypto';
 import * as fs from 'fs';
+import * as os from 'os';
 import * as path from 'path';
 import * as child_process from 'child_process';
 import ignore, { Ignore } from 'ignore'
@@ -76,10 +77,14 @@ async function loadCacheOrBuild(dir: string, ig: Ignore) {
     if (mode == Mode.PushCache) {
         console.log("Pushing cache")
         try {
-            const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, {method: "HEAD"})
-            if (res.ok) {
+            if (!AWS_BUCKET) {
+                throw new Error("AWS_BUCKET not set")
+            }
+            try {
+                child_process.execSync(`aws s3 ls s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`)
                 console.log("Cache already exists")
-            } else {
+            } catch (e) {
+                console.log("Cache doesn't exist", e)
                 // Read image id from image.json
                 const imageID = JSON.parse(fs.readFileSync(path.join(dir, 'image.json')).toString()).imageID;
                 console.log(`Pushing cache for ${dir}: ${imageID}`)
@@ -96,18 +101,17 @@ async function loadCacheOrBuild(dir: string, ig: Ignore) {
     console.log("Loading cache")
     let cacheHit = false
     try {
-        // Check if the cache exists
-        const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, {method: "HEAD"})
-        if (res.ok) {
-            const dockerLoadedMsg = child_process.execSync(`curl https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz | docker image load`).toString();
-            const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
-            if (loadedImageId) {
-                console.log(`Cache hit for ${loadedImageId}`);
-                fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: loadedImageId}) + "\n");
-                cacheHit = true
-            }
-        } else {
-            console.log("Cache not found")
+        if (!AWS_BUCKET) {
+            throw new Error("AWS_BUCKET not set")
+        }
+        const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'))
+        const archivePath = path.join(cachePath, 'archive.tar.gz')
+        const dockerLoadedMsg = child_process.execSync(`aws s3 cp s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`).toString();
+        const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
+        if (loadedImageId) {
+            console.log(`Cache hit for ${loadedImageId}`);
+            fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: loadedImageId}) + "\n");
+            cacheHit = true
         }
     } catch (e) {
         console.log("Cache not found:", e)
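For reference, the regex kept from the old implementation covers both output forms of docker image load, which prints either "Loaded image: <name:tag>" or "Loaded image ID: <sha256:...>"; capture group 2 holds the image reference in both cases. A small illustrative snippet (the example strings are made up):

    // Group 1 optionally matches " ID"; group 2 carries the image reference.
    const pattern = /Loaded image( ID)?: (.*)/;
    console.log('Loaded image: nginx:latest'.match(pattern)?.[2]);      // "nginx:latest"
    console.log('Loaded image ID: sha256:abc123'.match(pattern)?.[2]);  // "sha256:abc123"

If the output matches neither form, .match(...) returns null and the indexing in the diff throws, which the surrounding try/catch treats as a cache miss.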
diff --git a/transport-interop/helpers/cache.ts b/transport-interop/helpers/cache.ts
index 697e452fb..5504a0c0f 100755
--- a/transport-interop/helpers/cache.ts
+++ b/transport-interop/helpers/cache.ts
@@ -1,8 +1,9 @@
-const AWS_BUCKET = process.env.AWS_BUCKET || 'libp2p-by-tf-aws-bootstrap';
+const AWS_BUCKET = process.env.AWS_BUCKET;
 const scriptDir = __dirname;
 
 import * as crypto from 'crypto';
 import * as fs from 'fs';
+import * as os from 'os';
 import * as path from 'path';
 import * as child_process from 'child_process';
 import ignore, { Ignore } from 'ignore'
@@ -65,10 +66,14 @@ switch (modeStr) {
 if (mode == Mode.PushCache) {
     console.log("Pushing cache")
     try {
-        const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, { method: "HEAD" })
-        if (res.ok) {
+        if (!AWS_BUCKET) {
+            throw new Error("AWS_BUCKET not set")
+        }
+        try {
+            child_process.execSync(`aws s3 ls s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`)
             console.log("Cache already exists")
-        } else {
+        } catch (e) {
+            console.log("Cache doesn't exist", e)
             // Read image id from image.json
             const imageID = JSON.parse(fs.readFileSync(path.join(implFolder, 'image.json')).toString()).imageID;
             console.log(`Pushing cache for ${impl}: ${imageID}`)
@@ -85,18 +90,17 @@ switch (modeStr) {
 console.log("Loading cache")
 let cacheHit = false
 try {
-    // Check if the cache exists
-    const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, { method: "HEAD" })
-    if (res.ok) {
-        const dockerLoadedMsg = child_process.execSync(`curl https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz | docker image load`).toString();
-        const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
-        if (loadedImageId) {
-            console.log(`Cache hit for ${loadedImageId}`);
-            fs.writeFileSync(path.join(implFolder, 'image.json'), JSON.stringify({ imageID: loadedImageId }) + "\n");
-            cacheHit = true
-        }
-    } else {
-        console.log("Cache not found")
+    if (!AWS_BUCKET) {
+        throw new Error("AWS_BUCKET not set")
+    }
+    const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'))
+    const archivePath = path.join(cachePath, 'archive.tar.gz')
+    const dockerLoadedMsg = child_process.execSync(`aws s3 cp s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`).toString();
+    const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
+    if (loadedImageId) {
+        console.log(`Cache hit for ${loadedImageId}`);
+        fs.writeFileSync(path.join(implFolder, 'image.json'), JSON.stringify({ imageID: loadedImageId }) + "\n");
+        cacheHit = true
     }
 } catch (e) {
     console.log("Cache not found:", e)
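Both push paths now use the same existence probe: aws s3 ls exits non-zero when the key is absent, so execSync throws and the catch branch performs the upload. Unlike the anonymous HTTPS HEAD request it replaces, this works against a private bucket because the CLI signs the request with the injected credentials. A condensed, self-contained sketch of the pattern; the function and parameter names are illustrative, not from the diff:

    import * as child_process from 'child_process';

    // Returns true when the object exists; `aws s3 ls` exits non-zero when it
    // does not, which makes execSync throw. Names here are illustrative.
    function cacheExists(bucket: string, key: string): boolean {
        try {
            child_process.execSync(`aws s3 ls s3://${bucket}/${key}`, { stdio: 'ignore' });
            return true;
        } catch {
            return false;
        }
    }

The trade-off is using an exception for ordinary control flow, which is why the push path logs the caught error as "Cache doesn't exist" rather than treating it as a failure.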