From 282305702d9887ab5440981d47c7a0b616cf898a Mon Sep 17 00:00:00 2001 From: Enrico Rotundo Date: Fri, 18 Nov 2022 18:25:26 +0100 Subject: [PATCH] [WIP] Add Swag/OpenAPI deps (#1214) Screenshot 2022-11-18 at 17 37 48 --- docs/docs.go | 1419 +++++++++++++++++++++++ docs/swagger.json | 1398 +++++++++++++++++++++++ docs/swagger.yaml | 1431 ++++++++++++++++++++++++ docs/swagger/README.md | 1 + docs/swagger/endpoints_events.md | 109 ++ docs/swagger/endpoints_list.md | 191 ++++ docs/swagger/endpoints_peers.md | 14 + docs/swagger/endpoints_results.md | 16 + docs/swagger/endpoints_states.md | 56 + docs/swagger/endpoints_submit.md | 47 + go.mod | 12 +- go.sum | 37 +- pkg/model/buildversion.go | 14 +- pkg/model/job.go | 30 +- pkg/model/resource_usage.go | 8 +- pkg/model/storage_spec.go | 6 +- pkg/publicapi/endpoints_debug.go | 9 +- pkg/publicapi/endpoints_events.go | 18 +- pkg/publicapi/endpoints_id.go | 8 + pkg/publicapi/endpoints_list.go | 24 +- pkg/publicapi/endpoints_localevents.go | 13 + pkg/publicapi/endpoints_peers.go | 9 + pkg/publicapi/endpoints_results.go | 16 +- pkg/publicapi/endpoints_states.go | 16 +- pkg/publicapi/endpoints_submit.go | 18 +- pkg/publicapi/endpoints_version.go | 16 +- pkg/publicapi/server.go | 36 +- pkg/publicapi/server_health.go | 30 + poetry.lock | 86 +- 29 files changed, 5006 insertions(+), 82 deletions(-) create mode 100644 docs/docs.go create mode 100644 docs/swagger.json create mode 100644 docs/swagger.yaml create mode 100644 docs/swagger/README.md create mode 100644 docs/swagger/endpoints_events.md create mode 100644 docs/swagger/endpoints_list.md create mode 100644 docs/swagger/endpoints_peers.md create mode 100644 docs/swagger/endpoints_results.md create mode 100644 docs/swagger/endpoints_states.md create mode 100644 docs/swagger/endpoints_submit.md diff --git a/docs/docs.go b/docs/docs.go new file mode 100644 index 0000000000..aaefaeb495 --- /dev/null +++ b/docs/docs.go @@ -0,0 +1,1419 @@ +// Package docs GENERATED BY SWAG; DO NOT EDIT 
+// This file was generated by swaggo/swag +package docs + +import "github.com/swaggo/swag" + +const docTemplate = `{ + "schemes": {{ marshal .Schemes }}, + "swagger": "2.0", + "info": { + "description": "{{escape .Description}}", + "title": "{{.Title}}", + "contact": { + "name": "Bacalhau Team", + "url": "https://github.com/filecoin-project/bacalhau", + "email": "team@bacalhau.org" + }, + "license": { + "name": "Apache 2.0", + "url": "https://github.com/filecoin-project/bacalhau/blob/main/LICENSE" + }, + "version": "{{.Version}}" + }, + "host": "{{.Host}}", + "basePath": "{{.BasePath}}", + "paths": { + "/debug": { + "get": { + "produces": [ + "application/json" + ], + "tags": [ + "Health" + ], + "summary": "Returns debug information on what the current node is doing.", + "operationId": "apiServer/debug", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.debugResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/events": { + "post": { + "description": "Events (e.g. 
Created, Bid, BidAccepted, ..., ResultsAccepted, ResultsPublished) are useful to track the progress of a job.\n\nExample response (truncated):\n` + "`" + `` + "`" + `` + "`" + `json\n{\n \"events\": [\n {\n \"APIVersion\": \"V1beta1\",\n \"JobID\": \"9304c616-291f-41ad-b862-54e133c0149e\",\n \"ClientID\": \"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51\",\n \"SourceNodeID\": \"QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF\",\n \"EventName\": \"Created\",\n \"Spec\": {\n \"Engine\": \"Docker\",\n \"Verifier\": \"Noop\",\n \"Publisher\": \"Estuary\",\n \"Docker\": {\n \"Image\": \"ubuntu\",\n \"Entrypoint\": [\n \"date\"\n ]\n },\n \"Language\": {\n \"JobContext\": {}\n },\n \"Wasm\": {},\n \"Resources\": {\n \"GPU\": \"\"\n },\n \"Timeout\": 1800,\n \"outputs\": [\n {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"outputs\",\n \"path\": \"/outputs\"\n }\n ],\n \"Sharding\": {\n \"BatchSize\": 1,\n \"GlobPatternBasePath\": \"/inputs\"\n }\n },\n \"JobExecutionPlan\": {\n \"ShardsTotal\": 1\n },\n \"Deal\": {\n \"Concurrency\": 1\n },\n \"VerificationResult\": {},\n \"PublishedResult\": {},\n \"EventTime\": \"2022-11-17T13:32:55.331375351Z\",\n \"SenderPublicKey\": \"...\"\n },\n ...\n {\n \"JobID\": \"9304c616-291f-41ad-b862-54e133c0149e\",\n \"SourceNodeID\": \"QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF\",\n \"TargetNodeID\": \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"EventName\": \"ResultsAccepted\",\n \"Spec\": {\n \"Docker\": {},\n \"Language\": {\n \"JobContext\": {}\n },\n \"Wasm\": {},\n \"Resources\": {\n \"GPU\": \"\"\n },\n \"Sharding\": {}\n },\n \"JobExecutionPlan\": {},\n \"Deal\": {},\n \"VerificationResult\": {\n \"Complete\": true,\n \"Result\": true\n },\n \"PublishedResult\": {},\n \"EventTime\": \"2022-11-17T13:32:55.707825569Z\",\n \"SenderPublicKey\": \"...\"\n },\n {\n \"JobID\": \"9304c616-291f-41ad-b862-54e133c0149e\",\n \"SourceNodeID\": \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"EventName\": 
\"ResultsPublished\",\n \"Spec\": {\n \"Docker\": {},\n \"Language\": {\n \"JobContext\": {}\n },\n \"Wasm\": {},\n \"Resources\": {\n \"GPU\": \"\"\n },\n \"Sharding\": {}\n },\n \"JobExecutionPlan\": {},\n \"Deal\": {},\n \"VerificationResult\": {},\n \"PublishedResult\": {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"CID\": \"QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe\"\n },\n \"EventTime\": \"2022-11-17T13:32:55.756658941Z\",\n \"SenderPublicKey\": \"...\"\n }\n ]\n}\n` + "`" + `` + "`" + `` + "`" + `", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Job" + ], + "summary": "Returns the events related to the job-id passed in the body payload. Useful for troubleshooting.", + "operationId": "pkg/publicapi/events", + "parameters": [ + { + "description": "Request must specify a ` + "`" + `client_id` + "`" + `. To retrieve your ` + "`" + `client_id` + "`" + `, you can do the following: (1) submit a dummy job to Bacalhau (or use one you created before), (2) run ` + "`" + `bacalhau describe \u003cjob-id\u003e` + "`" + ` and fetch the ` + "`" + `ClientID` + "`" + ` field.", + "name": "eventsRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.eventsRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.eventsResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/healthz": { + "get": { + "produces": [ + "application/json" + ], + "tags": [ + "Health" + ], + "operationId": "apiServer/healthz", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/types.HealthInfo" + } + } + } + } + }, + "/id": { + "get": { + 
"produces": [ + "text/plain" + ], + "tags": [ + "Misc" + ], + "summary": "Returns the id of the host node.", + "operationId": "apiServer/id", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/list": { + "post": { + "description": "Returns the first (sorted) #` + "`" + `max_jobs` + "`" + ` jobs that belong to the ` + "`" + `client_id` + "`" + ` passed in the body payload (by default).\nIf ` + "`" + `return_all` + "`" + ` is set to true, it returns all jobs on the Bacalhau network.\n\nIf ` + "`" + `id` + "`" + ` is set, it returns only the job with that ID.\n\nExample response:\n` + "`" + `` + "`" + `` + "`" + `json\n{\n \"jobs\": [\n {\n \"APIVersion\": \"V1beta1\",\n \"ID\": \"9304c616-291f-41ad-b862-54e133c0149e\",\n \"RequesterNodeID\": \"QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF\",\n \"RequesterPublicKey\": \"...\",\n \"ClientID\": \"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51\",\n \"Spec\": {\n \"Engine\": \"Docker\",\n \"Verifier\": \"Noop\",\n \"Publisher\": \"Estuary\",\n \"Docker\": {\n \"Image\": \"ubuntu\",\n \"Entrypoint\": [\n \"date\"\n ]\n },\n \"Language\": {\n \"JobContext\": {}\n },\n \"Wasm\": {},\n \"Resources\": {\n \"GPU\": \"\"\n },\n \"Timeout\": 1800,\n \"outputs\": [\n {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"outputs\",\n \"path\": \"/outputs\"\n }\n ],\n \"Sharding\": {\n \"BatchSize\": 1,\n \"GlobPatternBasePath\": \"/inputs\"\n }\n },\n \"Deal\": {\n \"Concurrency\": 1\n },\n \"ExecutionPlan\": {\n \"ShardsTotal\": 1\n },\n \"CreatedAt\": \"2022-11-17T13:32:55.33837275Z\",\n \"JobState\": {\n \"Nodes\": {\n \"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\",\n \"State\": \"Cancelled\",\n \"VerificationResult\": {},\n \"PublishedResults\": {}\n }\n }\n },\n 
\"QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\",\n \"State\": \"Cancelled\",\n \"VerificationResult\": {},\n \"PublishedResults\": {}\n }\n }\n },\n \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"State\": \"Completed\",\n \"Status\": \"Got results proposal of length: 0\",\n \"VerificationResult\": {\n \"Complete\": true,\n \"Result\": true\n },\n \"PublishedResults\": {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"CID\": \"QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe\"\n },\n \"RunOutput\": {\n \"stdout\": \"Thu Nov 17 13:32:55 UTC 2022\\n\",\n \"stdouttruncated\": false,\n \"stderr\": \"\",\n \"stderrtruncated\": false,\n \"exitCode\": 0,\n \"runnerError\": \"\"\n }\n }\n }\n }\n }\n }\n },\n {\n \"APIVersion\": \"V1beta1\",\n \"ID\": \"92d5d4ee-3765-4f78-8353-623f5f26df08\",\n \"RequesterNodeID\": \"QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF\",\n \"RequesterPublicKey\": \"...\",\n \"ClientID\": \"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51\",\n \"Spec\": {\n \"Engine\": \"Docker\",\n \"Verifier\": \"Noop\",\n \"Publisher\": \"Estuary\",\n \"Docker\": {\n \"Image\": \"ubuntu\",\n \"Entrypoint\": [\n \"sleep\",\n \"4\"\n ]\n },\n \"Language\": {\n \"JobContext\": {}\n },\n \"Wasm\": {},\n \"Resources\": {\n \"GPU\": \"\"\n },\n \"Timeout\": 1800,\n \"outputs\": [\n {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"outputs\",\n \"path\": \"/outputs\"\n }\n ],\n \"Sharding\": {\n \"BatchSize\": 1,\n \"GlobPatternBasePath\": \"/inputs\"\n }\n },\n \"Deal\": {\n \"Concurrency\": 1\n },\n \"ExecutionPlan\": {\n \"ShardsTotal\": 1\n },\n \"CreatedAt\": \"2022-11-17T13:29:01.871140291Z\",\n \"JobState\": {\n \"Nodes\": {\n 
\"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\",\n \"State\": \"Cancelled\",\n \"VerificationResult\": {},\n \"PublishedResults\": {}\n }\n }\n },\n \"QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\",\n \"State\": \"Completed\",\n \"Status\": \"Got results proposal of length: 0\",\n \"VerificationResult\": {\n \"Complete\": true,\n \"Result\": true\n },\n \"PublishedResults\": {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"job-92d5d4ee-3765-4f78-8353-623f5f26df08-shard-0-host-QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\",\n \"CID\": \"QmWUXBndMuq2G6B6ndQCmkRHjZ6CvyJ8qLxXBG3YsSFzQG\"\n },\n \"RunOutput\": {\n \"stdout\": \"\",\n \"stdouttruncated\": false,\n \"stderr\": \"\",\n \"stderrtruncated\": false,\n \"exitCode\": 0,\n \"runnerError\": \"\"\n }\n }\n }\n }\n }\n }\n }\n ]\n}\n` + "`" + `` + "`" + `` + "`" + `", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Job" + ], + "summary": "Simply lists jobs.", + "operationId": "pkg/publicapi.list", + "parameters": [ + { + "description": "Set ` + "`" + `return_all` + "`" + ` to ` + "`" + `true` + "`" + ` to return all jobs on the network (may degrade performance, use with care!).", + "name": "listRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.listRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.listResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/livez": { + "get": { + "produces": [ + "text/plain" + ], + "tags": [ + "Health" + ], + "operationId": "apiServer/livez", + "responses": { + "200": { + 
"description": "TODO", + "schema": { + "type": "string" + } + } + } + } + }, + "/local_events": { + "post": { + "description": "Local events (e.g. Selected, BidAccepted, Verified) are useful to track the progress of a job.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Job" + ], + "summary": "Returns the node's local events related to the job-id passed in the body payload. Useful for troubleshooting.", + "operationId": "pkg/publicapi/localEvents", + "parameters": [ + { + "description": " ", + "name": "localEventsRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.localEventsRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.localEventsResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/logz": { + "get": { + "produces": [ + "text/plain" + ], + "tags": [ + "Health" + ], + "operationId": "apiServer/logz", + "responses": { + "200": { + "description": "TODO", + "schema": { + "type": "string" + } + } + } + } + }, + "/peers": { + "get": { + "description": "As described in the [architecture docs](https://docs.bacalhau.org/about-bacalhau/architecture), each node is connected to a number of peer nodes.\n\nExample response:\n` + "`" + `` + "`" + `` + "`" + `json\n{\n \"bacalhau-job-event\": [\n \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF\",\n \"QmVAb7r2pKWCuyLpYWoZr9syhhFnTWeFaByHdb8PkkhLQG\",\n \"QmUDAXvv31WPZ8U9CzuRTMn9iFGiopGE7rHiah1X8a6PkT\",\n \"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\"\n ]\n}\n` + "`" + `` + "`" + `` + "`" + `", + "produces": [ + "application/json" + ], + "tags": [ + "Misc" + ], + "summary": "Returns the peers connected to the host via the transport layer.", + 
"operationId": "apiServer/peers", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/readyz": { + "get": { + "produces": [ + "text/plain" + ], + "tags": [ + "Health" + ], + "operationId": "apiServer/readyz", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "string" + } + } + } + } + }, + "/results": { + "post": { + "description": "Example response:\n\n` + "`" + `` + "`" + `` + "`" + `json\n{\n \"results\": [\n {\n \"NodeID\": \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"Data\": {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"CID\": \"QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe\"\n }\n }\n ]\n}\n` + "`" + `` + "`" + `` + "`" + `", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Job" + ], + "summary": "Returns the results of the job-id specified in the body payload.", + "operationId": "pkg/publicapi/results", + "parameters": [ + { + "description": " ", + "name": "stateRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.stateRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.resultsResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/states": { + "post": { + "description": "Example response:\n\n` + "`" + `` + "`" + `` + "`" + `json\n{\n \"state\": {\n \"Nodes\": {\n \"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": 
\"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\",\n \"State\": \"Cancelled\",\n \"VerificationResult\": {},\n \"PublishedResults\": {}\n }\n }\n },\n \"QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\",\n \"State\": \"Cancelled\",\n \"VerificationResult\": {},\n \"PublishedResults\": {}\n }\n }\n },\n \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"State\": \"Completed\",\n \"Status\": \"Got results proposal of length: 0\",\n \"VerificationResult\": {\n \"Complete\": true,\n \"Result\": true\n },\n \"PublishedResults\": {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"CID\": \"QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe\"\n },\n \"RunOutput\": {\n \"stdout\": \"Thu Nov 17 13:32:55 UTC 2022\\n\",\n \"stdouttruncated\": false,\n \"stderr\": \"\",\n \"stderrtruncated\": false,\n \"exitCode\": 0,\n \"runnerError\": \"\"\n }\n }\n }\n }\n }\n }\n}\n` + "`" + `` + "`" + `` + "`" + `", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Job" + ], + "summary": "Returns the state of the job-id specified in the body payload.", + "operationId": "pkg/publicapi/states", + "parameters": [ + { + "description": " ", + "name": "stateRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.stateRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.stateResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/submit": { + "post": { + "description": "Description:\n\n* ` + "`" + 
`client_public_key` + "`" + `: The base64-encoded public key of the client.\n* ` + "`" + `signature` + "`" + `: A base64-encoded signature of the ` + "`" + `data` + "`" + ` attribute, signed by the client.\n* ` + "`" + `data` + "`" + `\n * ` + "`" + `ClientID` + "`" + `: Request must specify a ` + "`" + `ClientID` + "`" + `. To retrieve your ` + "`" + `ClientID` + "`" + `, you can do the following: (1) submit a dummy job to Bacalhau (or use one you created before), (2) run ` + "`" + `bacalhau describe \u003cjob-id\u003e` + "`" + ` and fetch the ` + "`" + `ClientID` + "`" + ` field.\n * ` + "`" + `Job` + "`" + `: see example below.\n\nExample request\n` + "`" + `` + "`" + `` + "`" + `json\n{\n\t\"data\": {\n\t\t\"ClientID\": \"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51\",\n\t\t\"Job\": {\n\t\t\t\"APIVersion\": \"V1beta1\",\n\t\t\t\"Spec\": {\n\t\t\t\t\"Engine\": \"Docker\",\n\t\t\t\t\"Verifier\": \"Noop\",\n\t\t\t\t\"Publisher\": \"Estuary\",\n\t\t\t\t\"Docker\": {\n\t\t\t\t\t\"Image\": \"ubuntu\",\n\t\t\t\t\t\"Entrypoint\": [\n\t\t\t\t\t\t\"date\"\n\t\t\t\t\t]\n\t\t\t\t},\n\t\t\t\t\"Timeout\": 1800,\n\t\t\t\t\"outputs\": [\n\t\t\t\t\t{\n\t\t\t\t\t\t\"StorageSource\": \"IPFS\",\n\t\t\t\t\t\t\"Name\": \"outputs\",\n\t\t\t\t\t\t\"path\": \"/outputs\"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t\"Sharding\": {\n\t\t\t\t\t\"BatchSize\": 1,\n\t\t\t\t\t\"GlobPatternBasePath\": \"/inputs\"\n\t\t\t\t}\n\t\t\t},\n\t\t\t\"Deal\": {\n\t\t\t\t\"Concurrency\": 1\n\t\t\t}\n\t\t}\n\t},\n\t\"signature\": \"...\",\n\t\"client_public_key\": \"...\"\n}\n` + "`" + `` + "`" + `` + "`" + `", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Job" + ], + "summary": "Submits a new job to the network.", + "operationId": "pkg/apiServer.submit", + "parameters": [ + { + "description": " ", + "name": "submitRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.submitRequest" + } + } + ], + 
"responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.submitResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/varz": { + "get": { + "produces": [ + "application/json" + ], + "tags": [ + "Health" + ], + "operationId": "apiServer/varz", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "array", + "items": { + "type": "integer" + } + } + } + } + } + }, + "/version": { + "post": { + "description": "See https://github.com/filecoin-project/bacalhau/releases for a complete list of ` + "`" + `gitversion` + "`" + ` tags.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Misc" + ], + "summary": "Returns the build version running on the server.", + "operationId": "apiServer/version", + "parameters": [ + { + "description": "Request must specify a ` + "`" + `client_id` + "`" + `. 
To retrieve your ` + "`" + `client_id` + "`" + `, you can do the following: (1) submit a dummy job to Bacalhau (or use one you created before), (2) run ` + "`" + `bacalhau describe \u003cjob-id\u003e` + "`" + ` and fetch the ` + "`" + `ClientID` + "`" + ` field.", + "name": "versionRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.versionRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.versionResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + } + }, + "definitions": { + "computenode.ActiveJob": { + "type": "object", + "properties": { + "CapacityRequirements": { + "$ref": "#/definitions/model.ResourceUsageData" + }, + "ShardID": { + "type": "string" + }, + "State": { + "type": "string" + } + } + }, + "model.BuildVersionInfo": { + "type": "object", + "properties": { + "builddate": { + "type": "string", + "example": "2022-11-16T14:03:31Z" + }, + "gitcommit": { + "type": "string", + "example": "d612b63108f2b5ce1ab2b9e02444eb1dac1d922d" + }, + "gitversion": { + "type": "string", + "example": "v0.3.12" + }, + "goarch": { + "type": "string", + "example": "amd64" + }, + "goos": { + "type": "string", + "example": "linux" + }, + "major": { + "type": "string", + "example": "0" + }, + "minor": { + "type": "string", + "example": "3" + } + } + }, + "model.Deal": { + "type": "object", + "properties": { + "Concurrency": { + "description": "The maximum number of concurrent compute node bids that will be\naccepted by the requester node on behalf of the client.", + "type": "integer" + }, + "Confidence": { + "description": "The number of nodes that must agree on a verification result\nthis is used by the different verifiers - for example the\ndeterministic verifier requires the winning group size\nto be at least 
this size", + "type": "integer" + }, + "MinBids": { + "description": "The minimum number of bids that must be received before the Requester\nnode will randomly accept concurrency-many of them. This allows the\nRequester node to get some level of guarantee that the execution of the\njobs will be spread evenly across the network (assuming that this value\nis some large proportion of the size of the network).", + "type": "integer" + } + } + }, + "model.Job": { + "type": "object", + "properties": { + "APIVersion": { + "type": "string", + "example": "V1beta1" + }, + "ClientID": { + "description": "The ID of the client that created this job.", + "type": "string", + "example": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51" + }, + "CreatedAt": { + "description": "Time the job was submitted to the bacalhau network.", + "type": "string", + "example": "2022-11-17T13:29:01.871140291Z" + }, + "Deal": { + "description": "The deal the client has made, such as which job bids they have accepted.", + "$ref": "#/definitions/model.Deal" + }, + "ExecutionPlan": { + "description": "how will this job be executed by nodes on the network", + "$ref": "#/definitions/model.JobExecutionPlan" + }, + "ID": { + "description": "The unique global ID of this job in the bacalhau network.", + "type": "string", + "example": "92d5d4ee-3765-4f78-8353-623f5f26df08" + }, + "JobEvents": { + "description": "All events associated with the job", + "type": "array", + "items": { + "$ref": "#/definitions/model.JobEvent" + } + }, + "JobState": { + "description": "The current state of the job", + "$ref": "#/definitions/model.JobState" + }, + "LocalJobEvents": { + "description": "All local events associated with the job", + "type": "array", + "items": { + "$ref": "#/definitions/model.JobLocalEvent" + } + }, + "RequesterNodeID": { + "description": "The ID of the requester node that owns this job.", + "type": "string", + "example": "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF" + }, + 
"RequesterPublicKey": { + "description": "The public key of the Requester node that created this job\nThis can be used to encrypt messages back to the creator", + "type": "array", + "items": { + "type": "integer" + } + }, + "Spec": { + "description": "The specification of this job.", + "$ref": "#/definitions/model.Spec" + } + } + }, + "model.JobCreatePayload": { + "type": "object", + "required": [ + "ClientID", + "Job" + ], + "properties": { + "ClientID": { + "description": "the id of the client that is submitting the job", + "type": "string" + }, + "Context": { + "description": "Optional base64-encoded tar file that will be pinned to IPFS and\nmounted as storage for the job. Not part of the spec so we don't\nflood the transport layer with it (potentially very large).", + "type": "string" + }, + "Job": { + "description": "The job specification:", + "$ref": "#/definitions/model.Job" + } + } + }, + "model.JobEvent": { + "type": "object", + "properties": { + "APIVersion": { + "description": "APIVersion of the Job", + "type": "string", + "example": "V1beta1" + }, + "ClientID": { + "description": "optional clientID if this is an externally triggered event (like create job)", + "type": "string", + "example": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51" + }, + "Deal": { + "description": "this is only defined in \"update_deal\" events", + "$ref": "#/definitions/model.Deal" + }, + "EventName": { + "type": "integer" + }, + "EventTime": { + "type": "string", + "example": "2022-11-17T13:32:55.756658941Z" + }, + "JobExecutionPlan": { + "description": "this is only defined in \"create\" events", + "$ref": "#/definitions/model.JobExecutionPlan" + }, + "JobID": { + "type": "string", + "example": "9304c616-291f-41ad-b862-54e133c0149e" + }, + "PublishedResult": { + "$ref": "#/definitions/model.StorageSpec" + }, + "RunOutput": { + "description": "RunOutput of the job", + "$ref": "#/definitions/model.RunCommandResult" + }, + "SenderPublicKey": { + "type": 
"array", + "items": { + "type": "integer" + } + }, + "ShardIndex": { + "description": "what shard is this event for", + "type": "integer" + }, + "SourceNodeID": { + "description": "the node that emitted this event", + "type": "string", + "example": "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF" + }, + "Spec": { + "description": "this is only defined in \"create\" events", + "$ref": "#/definitions/model.Spec" + }, + "Status": { + "type": "string", + "example": "Got results proposal of length: 0" + }, + "TargetNodeID": { + "description": "the node that this event is for\ne.g. \"AcceptJobBid\" was emitted by Requester but it targeting compute node", + "type": "string", + "example": "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL" + }, + "VerificationProposal": { + "type": "array", + "items": { + "type": "integer" + } + }, + "VerificationResult": { + "$ref": "#/definitions/model.VerificationResult" + } + } + }, + "model.JobExecutionPlan": { + "type": "object", + "properties": { + "ShardsTotal": { + "description": "how many shards are there in total for this job\nwe are expecting this number x concurrency total\nJobShardState objects for this job", + "type": "integer" + } + } + }, + "model.JobLocalEvent": { + "type": "object", + "properties": { + "EventName": { + "type": "integer" + }, + "JobID": { + "type": "string" + }, + "ShardIndex": { + "type": "integer" + }, + "TargetNodeID": { + "type": "string" + } + } + }, + "model.JobNodeState": { + "type": "object", + "properties": { + "Shards": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/model.JobShardState" + } + } + } + }, + "model.JobShardState": { + "type": "object", + "properties": { + "NodeId": { + "description": "which node is running this shard", + "type": "string" + }, + "PublishedResults": { + "$ref": "#/definitions/model.StorageSpec" + }, + "RunOutput": { + "description": "RunOutput of the job", + "$ref": "#/definitions/model.RunCommandResult" + }, + "ShardIndex": { + 
"description": "what shard is this we are running", + "type": "integer" + }, + "State": { + "description": "what is the state of the shard on this node", + "type": "integer" + }, + "Status": { + "description": "an arbitrary status message", + "type": "string" + }, + "VerificationProposal": { + "description": "the proposed results for this shard\nthis will be resolved by the verifier somehow", + "type": "array", + "items": { + "type": "integer" + } + }, + "VerificationResult": { + "$ref": "#/definitions/model.VerificationResult" + } + } + }, + "model.JobShardingConfig": { + "type": "object", + "properties": { + "BatchSize": { + "description": "how many \"items\" are to be processed in each shard\nwe first apply the glob pattern which will result in a flat list of items\nthis number decides how to group that flat list into actual shards run by compute nodes", + "type": "integer" + }, + "GlobPattern": { + "description": "divide the inputs up into the smallest possible unit\nfor example /* would mean \"all top level files or folders\"\nthis being an empty string means \"no sharding\"", + "type": "string" + }, + "GlobPatternBasePath": { + "description": "when using multiple input volumes\nwhat path do we treat as the common mount path to apply the glob pattern to", + "type": "string" + } + } + }, + "model.JobSpecDocker": { + "type": "object", + "properties": { + "Entrypoint": { + "description": "optionally override the default entrypoint", + "type": "array", + "items": { + "type": "string" + } + }, + "EnvironmentVariables": { + "description": "a map of env to run the container with", + "type": "array", + "items": { + "type": "string" + } + }, + "Image": { + "description": "this should be pullable by docker", + "type": "string" + }, + "WorkingDirectory": { + "description": "working directory inside the container", + "type": "string" + } + } + }, + "model.JobSpecLanguage": { + "type": "object", + "properties": { + "Command": { + "description": "optional program specified 
on commandline, like python -c \"print(1+1)\"", + "type": "string" + }, + "DeterministicExecution": { + "description": "must this job be run in a deterministic context?", + "type": "boolean" + }, + "JobContext": { + "description": "context is a tar file stored in ipfs, containing e.g. source code and requirements", + "$ref": "#/definitions/model.StorageSpec" + }, + "Language": { + "description": "e.g. python", + "type": "string" + }, + "LanguageVersion": { + "description": "e.g. 3.8", + "type": "string" + }, + "ProgramPath": { + "description": "optional program path relative to the context dir. one of Command or ProgramPath must be specified", + "type": "string" + }, + "RequirementsPath": { + "description": "optional requirements.txt (or equivalent) path relative to the context dir", + "type": "string" + } + } + }, + "model.JobSpecWasm": { + "type": "object", + "properties": { + "EntryPoint": { + "description": "The name of the function in the EntryModule to call to run the job. For\nWASI jobs, this will always be ` + "`" + `_start` + "`" + `, but jobs can choose to call\nother WASM functions instead. The EntryPoint must be a zero-parameter\nzero-result function.", + "type": "string" + }, + "EnvironmentVariables": { + "description": "The variables available in the environment of the running program.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "ImportModules": { + "description": "TODO #880: Other WASM modules whose exports will be available as imports\nto the EntryModule.", + "type": "array", + "items": { + "$ref": "#/definitions/model.StorageSpec" + } + }, + "Parameters": { + "description": "The arguments supplied to the program (i.e. 
as ARGV).", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "model.JobState": { + "type": "object", + "properties": { + "Nodes": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/model.JobNodeState" + } + } + } + }, + "model.PublishedResult": { + "type": "object", + "properties": { + "Data": { + "$ref": "#/definitions/model.StorageSpec" + }, + "NodeID": { + "type": "string" + }, + "ShardIndex": { + "type": "integer" + } + } + }, + "model.ResourceUsageConfig": { + "type": "object", + "properties": { + "CPU": { + "description": "https://github.com/BTBurke/k8sresource string", + "type": "string" + }, + "Disk": { + "type": "string" + }, + "GPU": { + "description": "unsigned integer string", + "type": "string" + }, + "Memory": { + "description": "github.com/c2h5oh/datasize string", + "type": "string" + } + } + }, + "model.ResourceUsageData": { + "type": "object", + "properties": { + "CPU": { + "description": "cpu units", + "type": "number", + "example": 9.600000000000001 + }, + "Disk": { + "description": "bytes", + "type": "integer", + "example": 212663867801 + }, + "GPU": { + "description": "Support whole GPUs only, like https://kubernetes.io/docs/tasks/manage-gpus/scheduling-gpus/", + "type": "integer", + "example": 1 + }, + "Memory": { + "description": "bytes", + "type": "integer", + "example": 27487790694 + } + } + }, + "model.RunCommandResult": { + "type": "object", + "properties": { + "exitCode": { + "description": "exit code of the run.", + "type": "integer" + }, + "runnerError": { + "description": "Runner error", + "type": "string" + }, + "stderr": { + "description": "stderr of the run.", + "type": "string" + }, + "stderrtruncated": { + "description": "bool describing if stderr was truncated", + "type": "boolean" + }, + "stdout": { + "description": "stdout of the run. 
Yaml provided for ` + "`" + `describe` + "`" + ` output", + "type": "string" + }, + "stdouttruncated": { + "description": "bool describing if stdout was truncated", + "type": "boolean" + } + } + }, + "model.Spec": { + "type": "object", + "properties": { + "Annotations": { + "description": "Annotations on the job - could be user or machine assigned", + "type": "array", + "items": { + "type": "string" + } + }, + "Contexts": { + "description": "Input volumes that will not be sharded\nfor example to upload code into a base image\nevery shard will get the full range of context volumes", + "type": "array", + "items": { + "$ref": "#/definitions/model.StorageSpec" + } + }, + "DoNotTrack": { + "description": "Do not track specified by the client", + "type": "boolean" + }, + "Docker": { + "description": "executor specific data", + "$ref": "#/definitions/model.JobSpecDocker" + }, + "Engine": { + "description": "e.g. docker or language", + "type": "integer" + }, + "Language": { + "$ref": "#/definitions/model.JobSpecLanguage" + }, + "Publisher": { + "description": "there can be multiple publishers for the job", + "type": "integer" + }, + "Resources": { + "description": "the compute (cpy, ram) resources this job requires", + "$ref": "#/definitions/model.ResourceUsageConfig" + }, + "Sharding": { + "description": "the sharding config for this job\ndescribes how the job might be split up into parallel shards", + "$ref": "#/definitions/model.JobShardingConfig" + }, + "Timeout": { + "description": "How long a job can run in seconds before it is killed.\nThis includes the time required to run, verify and publish results", + "type": "number" + }, + "Verifier": { + "type": "integer" + }, + "Wasm": { + "$ref": "#/definitions/model.JobSpecWasm" + }, + "inputs": { + "description": "the data volumes we will read in the job\nfor example \"read this ipfs cid\"\nTODO: #667 Replace with \"Inputs\", \"Outputs\" (note the caps) for yaml/json when we update the n.js file", + "type": "array", + 
"items": { + "$ref": "#/definitions/model.StorageSpec" + } + }, + "outputs": { + "description": "the data volumes we will write in the job\nfor example \"write the results to ipfs\"", + "type": "array", + "items": { + "$ref": "#/definitions/model.StorageSpec" + } + } + } + }, + "model.StorageSpec": { + "type": "object", + "properties": { + "CID": { + "description": "The unique ID of the data, where it makes sense (for example, in an\nIPFS storage spec this will be the data's CID).\nNOTE: The below is capitalized to match IPFS \u0026 IPLD (even though it's out of golang fmt)", + "type": "string", + "example": "QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe" + }, + "Metadata": { + "description": "Additional properties specific to each driver", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "Name": { + "description": "Name of the spec's data, for reference.", + "type": "string", + "example": "job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL" + }, + "StorageSource": { + "description": "StorageSource is the abstract source of the data. E.g. 
a storage source\nmight be a URL download, but doesn't specify how the execution engine\ndoes the download or what it will do with the downloaded data.", + "type": "integer" + }, + "URL": { + "description": "Source URL of the data", + "type": "string" + }, + "path": { + "description": "The path that the spec's data should be mounted on, where it makes\nsense (for example, in a Docker storage spec this will be a filesystem\npath).\nTODO: #668 Replace with \"Path\" (note the caps) for yaml/json when we update the n.js file", + "type": "string" + } + } + }, + "model.VerificationResult": { + "type": "object", + "properties": { + "Complete": { + "type": "boolean" + }, + "Result": { + "type": "boolean" + } + } + }, + "publicapi.debugResponse": { + "type": "object", + "properties": { + "AvailableComputeCapacity": { + "$ref": "#/definitions/model.ResourceUsageData" + }, + "ComputeJobs": { + "type": "array", + "items": { + "$ref": "#/definitions/computenode.ActiveJob" + } + }, + "RequesterJobs": { + "type": "array", + "items": { + "$ref": "#/definitions/requesternode.ActiveJob" + } + } + } + }, + "publicapi.eventsRequest": { + "type": "object", + "properties": { + "client_id": { + "type": "string", + "example": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51" + }, + "job_id": { + "type": "string", + "example": "9304c616-291f-41ad-b862-54e133c0149e" + } + } + }, + "publicapi.eventsResponse": { + "type": "object", + "properties": { + "events": { + "type": "array", + "items": { + "$ref": "#/definitions/model.JobEvent" + } + } + } + }, + "publicapi.listRequest": { + "type": "object", + "properties": { + "client_id": { + "type": "string", + "example": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51" + }, + "id": { + "type": "string", + "example": "9304c616-291f-41ad-b862-54e133c0149e" + }, + "max_jobs": { + "type": "integer", + "example": 10 + }, + "return_all": { + "type": "boolean" + }, + "sort_by": { + "type": "string", + "example": 
"created_at" + }, + "sort_reverse": { + "type": "boolean" + } + } + }, + "publicapi.listResponse": { + "type": "object", + "properties": { + "jobs": { + "type": "array", + "items": { + "$ref": "#/definitions/model.Job" + } + } + } + }, + "publicapi.localEventsRequest": { + "type": "object", + "properties": { + "client_id": { + "type": "string" + }, + "job_id": { + "type": "string" + } + } + }, + "publicapi.localEventsResponse": { + "type": "object", + "properties": { + "localEvents": { + "type": "array", + "items": { + "$ref": "#/definitions/model.JobLocalEvent" + } + } + } + }, + "publicapi.resultsResponse": { + "type": "object", + "properties": { + "results": { + "type": "array", + "items": { + "$ref": "#/definitions/model.PublishedResult" + } + } + } + }, + "publicapi.stateRequest": { + "type": "object", + "properties": { + "client_id": { + "type": "string", + "example": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51" + }, + "job_id": { + "type": "string", + "example": "9304c616-291f-41ad-b862-54e133c0149e" + } + } + }, + "publicapi.stateResponse": { + "type": "object", + "properties": { + "state": { + "$ref": "#/definitions/model.JobState" + } + } + }, + "publicapi.submitRequest": { + "type": "object", + "required": [ + "client_public_key", + "data", + "signature" + ], + "properties": { + "client_public_key": { + "description": "The base64-encoded public key of the client:", + "type": "string" + }, + "data": { + "description": "The data needed to submit and run a job on the network:", + "$ref": "#/definitions/model.JobCreatePayload" + }, + "signature": { + "description": "A base64-encoded signature of the data, signed by the client:", + "type": "string" + } + } + }, + "publicapi.submitResponse": { + "type": "object", + "properties": { + "job": { + "$ref": "#/definitions/model.Job" + } + } + }, + "publicapi.versionRequest": { + "type": "object", + "properties": { + "client_id": { + "type": "string", + "example": 
"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51" + } + } + }, + "publicapi.versionResponse": { + "type": "object", + "properties": { + "build_version_info": { + "$ref": "#/definitions/model.BuildVersionInfo" + } + } + }, + "requesternode.ActiveJob": { + "type": "object", + "properties": { + "BiddingNodesCount": { + "type": "integer" + }, + "CompletedNodesCount": { + "type": "integer" + }, + "ShardID": { + "type": "string" + }, + "State": { + "type": "string" + } + } + }, + "types.FreeSpace": { + "type": "object", + "properties": { + "IPFSMount": { + "$ref": "#/definitions/types.MountStatus" + }, + "root": { + "$ref": "#/definitions/types.MountStatus" + }, + "tmp": { + "$ref": "#/definitions/types.MountStatus" + } + } + }, + "types.HealthInfo": { + "type": "object", + "properties": { + "FreeSpace": { + "$ref": "#/definitions/types.FreeSpace" + } + } + }, + "types.MountStatus": { + "type": "object", + "properties": { + "All": { + "type": "integer" + }, + "Free": { + "type": "integer" + }, + "Used": { + "type": "integer" + } + } + } + } +}` + +// SwaggerInfo holds exported Swagger Info so clients can modify it +var SwaggerInfo = &swag.Spec{ + Version: "", + Host: "bootstrap.production.bacalhau.org:1234", + BasePath: "/", + Schemes: []string{"http"}, + Title: "Bacalhau API", + Description: "This page is the reference of the Bacalhau REST API. Project docs are available at https://docs.bacalhau.org/. Find more information about Bacalhau at https://github.com/filecoin-project/bacalhau.", + InfoInstanceName: "swagger", + SwaggerTemplate: docTemplate, +} + +func init() { + swag.Register(SwaggerInfo.InstanceName(), SwaggerInfo) +} diff --git a/docs/swagger.json b/docs/swagger.json new file mode 100644 index 0000000000..0e2af9f0b3 --- /dev/null +++ b/docs/swagger.json @@ -0,0 +1,1398 @@ +{ + "schemes": [ + "http" + ], + "swagger": "2.0", + "info": { + "description": "This page is the reference of the Bacalhau REST API. 
Project docs are available at https://docs.bacalhau.org/. Find more information about Bacalhau at https://github.com/filecoin-project/bacalhau.", + "title": "Bacalhau API", + "contact": { + "name": "Bacalhau Team", + "url": "https://github.com/filecoin-project/bacalhau", + "email": "team@bacalhau.org" + }, + "license": { + "name": "Apache 2.0", + "url": "https://github.com/filecoin-project/bacalhau/blob/main/LICENSE" + } + }, + "host": "bootstrap.production.bacalhau.org:1234", + "basePath": "/", + "paths": { + "/debug": { + "get": { + "produces": [ + "application/json" + ], + "tags": [ + "Health" + ], + "summary": "Returns debug information on what the current node is doing.", + "operationId": "apiServer/debug", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.debugResponse" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/events": { + "post": { + "description": "Events (e.g. 
Created, Bid, BidAccepted, ..., ResultsAccepted, ResultsPublished) are useful to track the progress of a job.\n\nExample response (truncated):\n```json\n{\n \"events\": [\n {\n \"APIVersion\": \"V1beta1\",\n \"JobID\": \"9304c616-291f-41ad-b862-54e133c0149e\",\n \"ClientID\": \"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51\",\n \"SourceNodeID\": \"QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF\",\n \"EventName\": \"Created\",\n \"Spec\": {\n \"Engine\": \"Docker\",\n \"Verifier\": \"Noop\",\n \"Publisher\": \"Estuary\",\n \"Docker\": {\n \"Image\": \"ubuntu\",\n \"Entrypoint\": [\n \"date\"\n ]\n },\n \"Language\": {\n \"JobContext\": {}\n },\n \"Wasm\": {},\n \"Resources\": {\n \"GPU\": \"\"\n },\n \"Timeout\": 1800,\n \"outputs\": [\n {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"outputs\",\n \"path\": \"/outputs\"\n }\n ],\n \"Sharding\": {\n \"BatchSize\": 1,\n \"GlobPatternBasePath\": \"/inputs\"\n }\n },\n \"JobExecutionPlan\": {\n \"ShardsTotal\": 1\n },\n \"Deal\": {\n \"Concurrency\": 1\n },\n \"VerificationResult\": {},\n \"PublishedResult\": {},\n \"EventTime\": \"2022-11-17T13:32:55.331375351Z\",\n \"SenderPublicKey\": \"...\"\n },\n ...\n {\n \"JobID\": \"9304c616-291f-41ad-b862-54e133c0149e\",\n \"SourceNodeID\": \"QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF\",\n \"TargetNodeID\": \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"EventName\": \"ResultsAccepted\",\n \"Spec\": {\n \"Docker\": {},\n \"Language\": {\n \"JobContext\": {}\n },\n \"Wasm\": {},\n \"Resources\": {\n \"GPU\": \"\"\n },\n \"Sharding\": {}\n },\n \"JobExecutionPlan\": {},\n \"Deal\": {},\n \"VerificationResult\": {\n \"Complete\": true,\n \"Result\": true\n },\n \"PublishedResult\": {},\n \"EventTime\": \"2022-11-17T13:32:55.707825569Z\",\n \"SenderPublicKey\": \"...\"\n },\n {\n \"JobID\": \"9304c616-291f-41ad-b862-54e133c0149e\",\n \"SourceNodeID\": \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"EventName\": \"ResultsPublished\",\n 
\"Spec\": {\n \"Docker\": {},\n \"Language\": {\n \"JobContext\": {}\n },\n \"Wasm\": {},\n \"Resources\": {\n \"GPU\": \"\"\n },\n \"Sharding\": {}\n },\n \"JobExecutionPlan\": {},\n \"Deal\": {},\n \"VerificationResult\": {},\n \"PublishedResult\": {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"CID\": \"QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe\"\n },\n \"EventTime\": \"2022-11-17T13:32:55.756658941Z\",\n \"SenderPublicKey\": \"...\"\n }\n ]\n}\n```", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Job" + ], + "summary": "Returns the events related to the job-id passed in the body payload. Useful for troubleshooting.", + "operationId": "pkg/publicapi/events", + "parameters": [ + { + "description": "Request must specify a `client_id`. To retrieve your `client_id`, you can do the following: (1) submit a dummy job to Bacalhau (or use one you created before), (2) run `bacalhau describe \u003cjob-id\u003e` and fetch the `ClientID` field.", + "name": "eventsRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.eventsRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.eventsResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/healthz": { + "get": { + "produces": [ + "application/json" + ], + "tags": [ + "Health" + ], + "operationId": "apiServer/healthz", + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/types.HealthInfo" + } + } + } + } + }, + "/id": { + "get": { + "produces": [ + "text/plain" + ], + "tags": [ + "Misc" + ], + "summary": "Returns the id of the host node.", + "operationId": 
"apiServer/id", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/list": { + "post": { + "description": "Returns the first (sorted) #`max_jobs` jobs that belong to the `client_id` passed in the body payload (by default).\nIf `return_all` is set to true, it returns all jobs on the Bacalhau network.\n\nIf `id` is set, it returns only the job with that ID.\n\nExample response:\n```json\n{\n \"jobs\": [\n {\n \"APIVersion\": \"V1beta1\",\n \"ID\": \"9304c616-291f-41ad-b862-54e133c0149e\",\n \"RequesterNodeID\": \"QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF\",\n \"RequesterPublicKey\": \"...\",\n \"ClientID\": \"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51\",\n \"Spec\": {\n \"Engine\": \"Docker\",\n \"Verifier\": \"Noop\",\n \"Publisher\": \"Estuary\",\n \"Docker\": {\n \"Image\": \"ubuntu\",\n \"Entrypoint\": [\n \"date\"\n ]\n },\n \"Language\": {\n \"JobContext\": {}\n },\n \"Wasm\": {},\n \"Resources\": {\n \"GPU\": \"\"\n },\n \"Timeout\": 1800,\n \"outputs\": [\n {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"outputs\",\n \"path\": \"/outputs\"\n }\n ],\n \"Sharding\": {\n \"BatchSize\": 1,\n \"GlobPatternBasePath\": \"/inputs\"\n }\n },\n \"Deal\": {\n \"Concurrency\": 1\n },\n \"ExecutionPlan\": {\n \"ShardsTotal\": 1\n },\n \"CreatedAt\": \"2022-11-17T13:32:55.33837275Z\",\n \"JobState\": {\n \"Nodes\": {\n \"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\",\n \"State\": \"Cancelled\",\n \"VerificationResult\": {},\n \"PublishedResults\": {}\n }\n }\n },\n \"QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\",\n \"State\": \"Cancelled\",\n \"VerificationResult\": {},\n \"PublishedResults\": {}\n }\n }\n 
},\n \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"State\": \"Completed\",\n \"Status\": \"Got results proposal of length: 0\",\n \"VerificationResult\": {\n \"Complete\": true,\n \"Result\": true\n },\n \"PublishedResults\": {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"CID\": \"QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe\"\n },\n \"RunOutput\": {\n \"stdout\": \"Thu Nov 17 13:32:55 UTC 2022\\n\",\n \"stdouttruncated\": false,\n \"stderr\": \"\",\n \"stderrtruncated\": false,\n \"exitCode\": 0,\n \"runnerError\": \"\"\n }\n }\n }\n }\n }\n }\n },\n {\n \"APIVersion\": \"V1beta1\",\n \"ID\": \"92d5d4ee-3765-4f78-8353-623f5f26df08\",\n \"RequesterNodeID\": \"QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF\",\n \"RequesterPublicKey\": \"...\",\n \"ClientID\": \"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51\",\n \"Spec\": {\n \"Engine\": \"Docker\",\n \"Verifier\": \"Noop\",\n \"Publisher\": \"Estuary\",\n \"Docker\": {\n \"Image\": \"ubuntu\",\n \"Entrypoint\": [\n \"sleep\",\n \"4\"\n ]\n },\n \"Language\": {\n \"JobContext\": {}\n },\n \"Wasm\": {},\n \"Resources\": {\n \"GPU\": \"\"\n },\n \"Timeout\": 1800,\n \"outputs\": [\n {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"outputs\",\n \"path\": \"/outputs\"\n }\n ],\n \"Sharding\": {\n \"BatchSize\": 1,\n \"GlobPatternBasePath\": \"/inputs\"\n }\n },\n \"Deal\": {\n \"Concurrency\": 1\n },\n \"ExecutionPlan\": {\n \"ShardsTotal\": 1\n },\n \"CreatedAt\": \"2022-11-17T13:29:01.871140291Z\",\n \"JobState\": {\n \"Nodes\": {\n \"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\",\n \"State\": \"Cancelled\",\n \"VerificationResult\": {},\n \"PublishedResults\": {}\n }\n }\n },\n 
\"QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\",\n \"State\": \"Completed\",\n \"Status\": \"Got results proposal of length: 0\",\n \"VerificationResult\": {\n \"Complete\": true,\n \"Result\": true\n },\n \"PublishedResults\": {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"job-92d5d4ee-3765-4f78-8353-623f5f26df08-shard-0-host-QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\",\n \"CID\": \"QmWUXBndMuq2G6B6ndQCmkRHjZ6CvyJ8qLxXBG3YsSFzQG\"\n },\n \"RunOutput\": {\n \"stdout\": \"\",\n \"stdouttruncated\": false,\n \"stderr\": \"\",\n \"stderrtruncated\": false,\n \"exitCode\": 0,\n \"runnerError\": \"\"\n }\n }\n }\n }\n }\n }\n }\n ]\n}\n```", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Job" + ], + "summary": "Simply lists jobs.", + "operationId": "pkg/publicapi.list", + "parameters": [ + { + "description": "Set `return_all` to `true` to return all jobs on the network (may degrade performance, use with care!).", + "name": "listRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.listRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.listResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/livez": { + "get": { + "produces": [ + "text/plain" + ], + "tags": [ + "Health" + ], + "operationId": "apiServer/livez", + "responses": { + "200": { + "description": "TODO", + "schema": { + "type": "string" + } + } + } + } + }, + "/local_events": { + "post": { + "description": "Local events (e.g. 
Selected, BidAccepted, Verified) are useful to track the progress of a job.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Job" + ], + "summary": "Returns the node's local events related to the job-id passed in the body payload. Useful for troubleshooting.", + "operationId": "pkg/publicapi/localEvents", + "parameters": [ + { + "description": " ", + "name": "localEventsRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.localEventsRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.localEventsResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/logz": { + "get": { + "produces": [ + "text/plain" + ], + "tags": [ + "Health" + ], + "operationId": "apiServer/logz", + "responses": { + "200": { + "description": "TODO", + "schema": { + "type": "string" + } + } + } + } + }, + "/peers": { + "get": { + "description": "As described in the [architecture docs](https://docs.bacalhau.org/about-bacalhau/architecture), each node is connected to a number of peer nodes.\n\nExample response:\n```json\n{\n \"bacalhau-job-event\": [\n \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF\",\n \"QmVAb7r2pKWCuyLpYWoZr9syhhFnTWeFaByHdb8PkkhLQG\",\n \"QmUDAXvv31WPZ8U9CzuRTMn9iFGiopGE7rHiah1X8a6PkT\",\n \"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\"\n ]\n}\n```", + "produces": [ + "application/json" + ], + "tags": [ + "Misc" + ], + "summary": "Returns the peers connected to the host via the transport layer.", + "operationId": "apiServer/peers", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + } + } 
+ }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/readyz": { + "get": { + "produces": [ + "text/plain" + ], + "tags": [ + "Health" + ], + "operationId": "apiServer/readyz", + "responses": { + "200": { + "description": "OK", + "schema": { + "type": "string" + } + } + } + } + }, + "/results": { + "post": { + "description": "Example response:\n\n```json\n{\n \"results\": [\n {\n \"NodeID\": \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"Data\": {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"CID\": \"QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe\"\n }\n }\n ]\n}\n```", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Job" + ], + "summary": "Returns the results of the job-id specified in the body payload.", + "operationId": "pkg/publicapi/results", + "parameters": [ + { + "description": " ", + "name": "stateRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.stateRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.resultsResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/states": { + "post": { + "description": "Example response:\n\n```json\n{\n \"state\": {\n \"Nodes\": {\n \"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86\",\n \"State\": \"Cancelled\",\n \"VerificationResult\": {},\n \"PublishedResults\": {}\n }\n }\n },\n \"QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3\",\n 
\"State\": \"Cancelled\",\n \"VerificationResult\": {},\n \"PublishedResults\": {}\n }\n }\n },\n \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\": {\n \"Shards\": {\n \"0\": {\n \"NodeId\": \"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"State\": \"Completed\",\n \"Status\": \"Got results proposal of length: 0\",\n \"VerificationResult\": {\n \"Complete\": true,\n \"Result\": true\n },\n \"PublishedResults\": {\n \"StorageSource\": \"IPFS\",\n \"Name\": \"job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL\",\n \"CID\": \"QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe\"\n },\n \"RunOutput\": {\n \"stdout\": \"Thu Nov 17 13:32:55 UTC 2022\\n\",\n \"stdouttruncated\": false,\n \"stderr\": \"\",\n \"stderrtruncated\": false,\n \"exitCode\": 0,\n \"runnerError\": \"\"\n }\n }\n }\n }\n }\n }\n}\n```", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Job" + ], + "summary": "Returns the state of the job-id specified in the body payload.", + "operationId": "pkg/publicapi/states", + "parameters": [ + { + "description": " ", + "name": "stateRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.stateRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.stateResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/submit": { + "post": { + "description": "Description:\n\n* `client_public_key`: The base64-encoded public key of the client.\n* `signature`: A base64-encoded signature of the `data` attribute, signed by the client.\n* `data`\n * `ClientID`: Request must specify a `ClientID`. 
To retrieve your `ClientID`, you can do the following: (1) submit a dummy job to Bacalhau (or use one you created before), (2) run `bacalhau describe \u003cjob-id\u003e` and fetch the `ClientID` field.\n * `Job`: see example below.\n\nExample request\n```json\n{\n\t\"data\": {\n\t\t\"ClientID\": \"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51\",\n\t\t\"Job\": {\n\t\t\t\"APIVersion\": \"V1beta1\",\n\t\t\t\"Spec\": {\n\t\t\t\t\"Engine\": \"Docker\",\n\t\t\t\t\"Verifier\": \"Noop\",\n\t\t\t\t\"Publisher\": \"Estuary\",\n\t\t\t\t\"Docker\": {\n\t\t\t\t\t\"Image\": \"ubuntu\",\n\t\t\t\t\t\"Entrypoint\": [\n\t\t\t\t\t\t\"date\"\n\t\t\t\t\t]\n\t\t\t\t},\n\t\t\t\t\"Timeout\": 1800,\n\t\t\t\t\"outputs\": [\n\t\t\t\t\t{\n\t\t\t\t\t\t\"StorageSource\": \"IPFS\",\n\t\t\t\t\t\t\"Name\": \"outputs\",\n\t\t\t\t\t\t\"path\": \"/outputs\"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t\"Sharding\": {\n\t\t\t\t\t\"BatchSize\": 1,\n\t\t\t\t\t\"GlobPatternBasePath\": \"/inputs\"\n\t\t\t\t}\n\t\t\t},\n\t\t\t\"Deal\": {\n\t\t\t\t\"Concurrency\": 1\n\t\t\t}\n\t\t}\n\t},\n\t\"signature\": \"...\",\n\t\"client_public_key\": \"...\"\n}\n```", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Job" + ], + "summary": "Submits a new job to the network.", + "operationId": "pkg/apiServer.submit", + "parameters": [ + { + "description": " ", + "name": "submitRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.submitRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.submitResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + }, + "/varz": { + "get": { + "produces": [ + "application/json" + ], + "tags": [ + "Health" + ], + "operationId": "apiServer/varz", + "responses": { + "200": { 
+ "description": "OK", + "schema": { + "type": "array", + "items": { + "type": "integer" + } + } + } + } + } + }, + "/version": { + "post": { + "description": "See https://github.com/filecoin-project/bacalhau/releases for a complete list of `gitversion` tags.", + "consumes": [ + "application/json" + ], + "produces": [ + "application/json" + ], + "tags": [ + "Misc" + ], + "summary": "Returns the build version running on the server.", + "operationId": "apiServer/version", + "parameters": [ + { + "description": "Request must specify a `client_id`. To retrieve your `client_id`, you can do the following: (1) submit a dummy job to Bacalhau (or use one you created before), (2) run `bacalhau describe \u003cjob-id\u003e` and fetch the `ClientID` field.", + "name": "versionRequest", + "in": "body", + "required": true, + "schema": { + "$ref": "#/definitions/publicapi.versionRequest" + } + } + ], + "responses": { + "200": { + "description": "OK", + "schema": { + "$ref": "#/definitions/publicapi.versionResponse" + } + }, + "400": { + "description": "Bad Request", + "schema": { + "type": "string" + } + }, + "500": { + "description": "Internal Server Error", + "schema": { + "type": "string" + } + } + } + } + } + }, + "definitions": { + "computenode.ActiveJob": { + "type": "object", + "properties": { + "CapacityRequirements": { + "$ref": "#/definitions/model.ResourceUsageData" + }, + "ShardID": { + "type": "string" + }, + "State": { + "type": "string" + } + } + }, + "model.BuildVersionInfo": { + "type": "object", + "properties": { + "builddate": { + "type": "string", + "example": "2022-11-16T14:03:31Z" + }, + "gitcommit": { + "type": "string", + "example": "d612b63108f2b5ce1ab2b9e02444eb1dac1d922d" + }, + "gitversion": { + "type": "string", + "example": "v0.3.12" + }, + "goarch": { + "type": "string", + "example": "amd64" + }, + "goos": { + "type": "string", + "example": "linux" + }, + "major": { + "type": "string", + "example": "0" + }, + "minor": { + "type": "string", + 
"example": "3" + } + } + }, + "model.Deal": { + "type": "object", + "properties": { + "Concurrency": { + "description": "The maximum number of concurrent compute node bids that will be\naccepted by the requester node on behalf of the client.", + "type": "integer" + }, + "Confidence": { + "description": "The number of nodes that must agree on a verification result\nthis is used by the different verifiers - for example the\ndeterministic verifier requires the winning group size\nto be at least this size", + "type": "integer" + }, + "MinBids": { + "description": "The minimum number of bids that must be received before the Requester\nnode will randomly accept concurrency-many of them. This allows the\nRequester node to get some level of guarantee that the execution of the\njobs will be spread evenly across the network (assuming that this value\nis some large proportion of the size of the network).", + "type": "integer" + } + } + }, + "model.Job": { + "type": "object", + "properties": { + "APIVersion": { + "type": "string", + "example": "V1beta1" + }, + "ClientID": { + "description": "The ID of the client that created this job.", + "type": "string", + "example": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51" + }, + "CreatedAt": { + "description": "Time the job was submitted to the bacalhau network.", + "type": "string", + "example": "2022-11-17T13:29:01.871140291Z" + }, + "Deal": { + "description": "The deal the client has made, such as which job bids they have accepted.", + "$ref": "#/definitions/model.Deal" + }, + "ExecutionPlan": { + "description": "how will this job be executed by nodes on the network", + "$ref": "#/definitions/model.JobExecutionPlan" + }, + "ID": { + "description": "The unique global ID of this job in the bacalhau network.", + "type": "string", + "example": "92d5d4ee-3765-4f78-8353-623f5f26df08" + }, + "JobEvents": { + "description": "All events associated with the job", + "type": "array", + "items": { + "$ref": 
"#/definitions/model.JobEvent" + } + }, + "JobState": { + "description": "The current state of the job", + "$ref": "#/definitions/model.JobState" + }, + "LocalJobEvents": { + "description": "All local events associated with the job", + "type": "array", + "items": { + "$ref": "#/definitions/model.JobLocalEvent" + } + }, + "RequesterNodeID": { + "description": "The ID of the requester node that owns this job.", + "type": "string", + "example": "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF" + }, + "RequesterPublicKey": { + "description": "The public key of the Requester node that created this job\nThis can be used to encrypt messages back to the creator", + "type": "array", + "items": { + "type": "integer" + } + }, + "Spec": { + "description": "The specification of this job.", + "$ref": "#/definitions/model.Spec" + } + } + }, + "model.JobCreatePayload": { + "type": "object", + "required": [ + "ClientID", + "Job" + ], + "properties": { + "ClientID": { + "description": "the id of the client that is submitting the job", + "type": "string" + }, + "Context": { + "description": "Optional base64-encoded tar file that will be pinned to IPFS and\nmounted as storage for the job. 
Not part of the spec so we don't\nflood the transport layer with it (potentially very large).", + "type": "string" + }, + "Job": { + "description": "The job specification:", + "$ref": "#/definitions/model.Job" + } + } + }, + "model.JobEvent": { + "type": "object", + "properties": { + "APIVersion": { + "description": "APIVersion of the Job", + "type": "string", + "example": "V1beta1" + }, + "ClientID": { + "description": "optional clientID if this is an externally triggered event (like create job)", + "type": "string", + "example": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51" + }, + "Deal": { + "description": "this is only defined in \"update_deal\" events", + "$ref": "#/definitions/model.Deal" + }, + "EventName": { + "type": "integer" + }, + "EventTime": { + "type": "string", + "example": "2022-11-17T13:32:55.756658941Z" + }, + "JobExecutionPlan": { + "description": "this is only defined in \"create\" events", + "$ref": "#/definitions/model.JobExecutionPlan" + }, + "JobID": { + "type": "string", + "example": "9304c616-291f-41ad-b862-54e133c0149e" + }, + "PublishedResult": { + "$ref": "#/definitions/model.StorageSpec" + }, + "RunOutput": { + "description": "RunOutput of the job", + "$ref": "#/definitions/model.RunCommandResult" + }, + "SenderPublicKey": { + "type": "array", + "items": { + "type": "integer" + } + }, + "ShardIndex": { + "description": "what shard is this event for", + "type": "integer" + }, + "SourceNodeID": { + "description": "the node that emitted this event", + "type": "string", + "example": "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF" + }, + "Spec": { + "description": "this is only defined in \"create\" events", + "$ref": "#/definitions/model.Spec" + }, + "Status": { + "type": "string", + "example": "Got results proposal of length: 0" + }, + "TargetNodeID": { + "description": "the node that this event is for\ne.g. 
\"AcceptJobBid\" was emitted by Requester but it targeting compute node", + "type": "string", + "example": "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL" + }, + "VerificationProposal": { + "type": "array", + "items": { + "type": "integer" + } + }, + "VerificationResult": { + "$ref": "#/definitions/model.VerificationResult" + } + } + }, + "model.JobExecutionPlan": { + "type": "object", + "properties": { + "ShardsTotal": { + "description": "how many shards are there in total for this job\nwe are expecting this number x concurrency total\nJobShardState objects for this job", + "type": "integer" + } + } + }, + "model.JobLocalEvent": { + "type": "object", + "properties": { + "EventName": { + "type": "integer" + }, + "JobID": { + "type": "string" + }, + "ShardIndex": { + "type": "integer" + }, + "TargetNodeID": { + "type": "string" + } + } + }, + "model.JobNodeState": { + "type": "object", + "properties": { + "Shards": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/model.JobShardState" + } + } + } + }, + "model.JobShardState": { + "type": "object", + "properties": { + "NodeId": { + "description": "which node is running this shard", + "type": "string" + }, + "PublishedResults": { + "$ref": "#/definitions/model.StorageSpec" + }, + "RunOutput": { + "description": "RunOutput of the job", + "$ref": "#/definitions/model.RunCommandResult" + }, + "ShardIndex": { + "description": "what shard is this we are running", + "type": "integer" + }, + "State": { + "description": "what is the state of the shard on this node", + "type": "integer" + }, + "Status": { + "description": "an arbitrary status message", + "type": "string" + }, + "VerificationProposal": { + "description": "the proposed results for this shard\nthis will be resolved by the verifier somehow", + "type": "array", + "items": { + "type": "integer" + } + }, + "VerificationResult": { + "$ref": "#/definitions/model.VerificationResult" + } + } + }, + "model.JobShardingConfig": { + "type": 
"object", + "properties": { + "BatchSize": { + "description": "how many \"items\" are to be processed in each shard\nwe first apply the glob pattern which will result in a flat list of items\nthis number decides how to group that flat list into actual shards run by compute nodes", + "type": "integer" + }, + "GlobPattern": { + "description": "divide the inputs up into the smallest possible unit\nfor example /* would mean \"all top level files or folders\"\nthis being an empty string means \"no sharding\"", + "type": "string" + }, + "GlobPatternBasePath": { + "description": "when using multiple input volumes\nwhat path do we treat as the common mount path to apply the glob pattern to", + "type": "string" + } + } + }, + "model.JobSpecDocker": { + "type": "object", + "properties": { + "Entrypoint": { + "description": "optionally override the default entrypoint", + "type": "array", + "items": { + "type": "string" + } + }, + "EnvironmentVariables": { + "description": "a map of env to run the container with", + "type": "array", + "items": { + "type": "string" + } + }, + "Image": { + "description": "this should be pullable by docker", + "type": "string" + }, + "WorkingDirectory": { + "description": "working directory inside the container", + "type": "string" + } + } + }, + "model.JobSpecLanguage": { + "type": "object", + "properties": { + "Command": { + "description": "optional program specified on commandline, like python -c \"print(1+1)\"", + "type": "string" + }, + "DeterministicExecution": { + "description": "must this job be run in a deterministic context?", + "type": "boolean" + }, + "JobContext": { + "description": "context is a tar file stored in ipfs, containing e.g. source code and requirements", + "$ref": "#/definitions/model.StorageSpec" + }, + "Language": { + "description": "e.g. python", + "type": "string" + }, + "LanguageVersion": { + "description": "e.g. 
3.8", + "type": "string" + }, + "ProgramPath": { + "description": "optional program path relative to the context dir. one of Command or ProgramPath must be specified", + "type": "string" + }, + "RequirementsPath": { + "description": "optional requirements.txt (or equivalent) path relative to the context dir", + "type": "string" + } + } + }, + "model.JobSpecWasm": { + "type": "object", + "properties": { + "EntryPoint": { + "description": "The name of the function in the EntryModule to call to run the job. For\nWASI jobs, this will always be `_start`, but jobs can choose to call\nother WASM functions instead. The EntryPoint must be a zero-parameter\nzero-result function.", + "type": "string" + }, + "EnvironmentVariables": { + "description": "The variables available in the environment of the running program.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "ImportModules": { + "description": "TODO #880: Other WASM modules whose exports will be available as imports\nto the EntryModule.", + "type": "array", + "items": { + "$ref": "#/definitions/model.StorageSpec" + } + }, + "Parameters": { + "description": "The arguments supplied to the program (i.e. 
as ARGV).", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "model.JobState": { + "type": "object", + "properties": { + "Nodes": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/model.JobNodeState" + } + } + } + }, + "model.PublishedResult": { + "type": "object", + "properties": { + "Data": { + "$ref": "#/definitions/model.StorageSpec" + }, + "NodeID": { + "type": "string" + }, + "ShardIndex": { + "type": "integer" + } + } + }, + "model.ResourceUsageConfig": { + "type": "object", + "properties": { + "CPU": { + "description": "https://github.com/BTBurke/k8sresource string", + "type": "string" + }, + "Disk": { + "type": "string" + }, + "GPU": { + "description": "unsigned integer string", + "type": "string" + }, + "Memory": { + "description": "github.com/c2h5oh/datasize string", + "type": "string" + } + } + }, + "model.ResourceUsageData": { + "type": "object", + "properties": { + "CPU": { + "description": "cpu units", + "type": "number", + "example": 9.600000000000001 + }, + "Disk": { + "description": "bytes", + "type": "integer", + "example": 212663867801 + }, + "GPU": { + "description": "Support whole GPUs only, like https://kubernetes.io/docs/tasks/manage-gpus/scheduling-gpus/", + "type": "integer", + "example": 1 + }, + "Memory": { + "description": "bytes", + "type": "integer", + "example": 27487790694 + } + } + }, + "model.RunCommandResult": { + "type": "object", + "properties": { + "exitCode": { + "description": "exit code of the run.", + "type": "integer" + }, + "runnerError": { + "description": "Runner error", + "type": "string" + }, + "stderr": { + "description": "stderr of the run.", + "type": "string" + }, + "stderrtruncated": { + "description": "bool describing if stderr was truncated", + "type": "boolean" + }, + "stdout": { + "description": "stdout of the run. 
Yaml provided for `describe` output", + "type": "string" + }, + "stdouttruncated": { + "description": "bool describing if stdout was truncated", + "type": "boolean" + } + } + }, + "model.Spec": { + "type": "object", + "properties": { + "Annotations": { + "description": "Annotations on the job - could be user or machine assigned", + "type": "array", + "items": { + "type": "string" + } + }, + "Contexts": { + "description": "Input volumes that will not be sharded\nfor example to upload code into a base image\nevery shard will get the full range of context volumes", + "type": "array", + "items": { + "$ref": "#/definitions/model.StorageSpec" + } + }, + "DoNotTrack": { + "description": "Do not track specified by the client", + "type": "boolean" + }, + "Docker": { + "description": "executor specific data", + "$ref": "#/definitions/model.JobSpecDocker" + }, + "Engine": { + "description": "e.g. docker or language", + "type": "integer" + }, + "Language": { + "$ref": "#/definitions/model.JobSpecLanguage" + }, + "Publisher": { + "description": "there can be multiple publishers for the job", + "type": "integer" + }, + "Resources": { + "description": "the compute (cpy, ram) resources this job requires", + "$ref": "#/definitions/model.ResourceUsageConfig" + }, + "Sharding": { + "description": "the sharding config for this job\ndescribes how the job might be split up into parallel shards", + "$ref": "#/definitions/model.JobShardingConfig" + }, + "Timeout": { + "description": "How long a job can run in seconds before it is killed.\nThis includes the time required to run, verify and publish results", + "type": "number" + }, + "Verifier": { + "type": "integer" + }, + "Wasm": { + "$ref": "#/definitions/model.JobSpecWasm" + }, + "inputs": { + "description": "the data volumes we will read in the job\nfor example \"read this ipfs cid\"\nTODO: #667 Replace with \"Inputs\", \"Outputs\" (note the caps) for yaml/json when we update the n.js file", + "type": "array", + "items": { + "$ref": 
"#/definitions/model.StorageSpec" + } + }, + "outputs": { + "description": "the data volumes we will write in the job\nfor example \"write the results to ipfs\"", + "type": "array", + "items": { + "$ref": "#/definitions/model.StorageSpec" + } + } + } + }, + "model.StorageSpec": { + "type": "object", + "properties": { + "CID": { + "description": "The unique ID of the data, where it makes sense (for example, in an\nIPFS storage spec this will be the data's CID).\nNOTE: The below is capitalized to match IPFS \u0026 IPLD (even though it's out of golang fmt)", + "type": "string", + "example": "QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe" + }, + "Metadata": { + "description": "Additional properties specific to each driver", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "Name": { + "description": "Name of the spec's data, for reference.", + "type": "string", + "example": "job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL" + }, + "StorageSource": { + "description": "StorageSource is the abstract source of the data. E.g. 
a storage source\nmight be a URL download, but doesn't specify how the execution engine\ndoes the download or what it will do with the downloaded data.", + "type": "integer" + }, + "URL": { + "description": "Source URL of the data", + "type": "string" + }, + "path": { + "description": "The path that the spec's data should be mounted on, where it makes\nsense (for example, in a Docker storage spec this will be a filesystem\npath).\nTODO: #668 Replace with \"Path\" (note the caps) for yaml/json when we update the n.js file", + "type": "string" + } + } + }, + "model.VerificationResult": { + "type": "object", + "properties": { + "Complete": { + "type": "boolean" + }, + "Result": { + "type": "boolean" + } + } + }, + "publicapi.debugResponse": { + "type": "object", + "properties": { + "AvailableComputeCapacity": { + "$ref": "#/definitions/model.ResourceUsageData" + }, + "ComputeJobs": { + "type": "array", + "items": { + "$ref": "#/definitions/computenode.ActiveJob" + } + }, + "RequesterJobs": { + "type": "array", + "items": { + "$ref": "#/definitions/requesternode.ActiveJob" + } + } + } + }, + "publicapi.eventsRequest": { + "type": "object", + "properties": { + "client_id": { + "type": "string", + "example": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51" + }, + "job_id": { + "type": "string", + "example": "9304c616-291f-41ad-b862-54e133c0149e" + } + } + }, + "publicapi.eventsResponse": { + "type": "object", + "properties": { + "events": { + "type": "array", + "items": { + "$ref": "#/definitions/model.JobEvent" + } + } + } + }, + "publicapi.listRequest": { + "type": "object", + "properties": { + "client_id": { + "type": "string", + "example": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51" + }, + "id": { + "type": "string", + "example": "9304c616-291f-41ad-b862-54e133c0149e" + }, + "max_jobs": { + "type": "integer", + "example": 10 + }, + "return_all": { + "type": "boolean" + }, + "sort_by": { + "type": "string", + "example": 
"created_at" + }, + "sort_reverse": { + "type": "boolean" + } + } + }, + "publicapi.listResponse": { + "type": "object", + "properties": { + "jobs": { + "type": "array", + "items": { + "$ref": "#/definitions/model.Job" + } + } + } + }, + "publicapi.localEventsRequest": { + "type": "object", + "properties": { + "client_id": { + "type": "string" + }, + "job_id": { + "type": "string" + } + } + }, + "publicapi.localEventsResponse": { + "type": "object", + "properties": { + "localEvents": { + "type": "array", + "items": { + "$ref": "#/definitions/model.JobLocalEvent" + } + } + } + }, + "publicapi.resultsResponse": { + "type": "object", + "properties": { + "results": { + "type": "array", + "items": { + "$ref": "#/definitions/model.PublishedResult" + } + } + } + }, + "publicapi.stateRequest": { + "type": "object", + "properties": { + "client_id": { + "type": "string", + "example": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51" + }, + "job_id": { + "type": "string", + "example": "9304c616-291f-41ad-b862-54e133c0149e" + } + } + }, + "publicapi.stateResponse": { + "type": "object", + "properties": { + "state": { + "$ref": "#/definitions/model.JobState" + } + } + }, + "publicapi.submitRequest": { + "type": "object", + "required": [ + "client_public_key", + "data", + "signature" + ], + "properties": { + "client_public_key": { + "description": "The base64-encoded public key of the client:", + "type": "string" + }, + "data": { + "description": "The data needed to submit and run a job on the network:", + "$ref": "#/definitions/model.JobCreatePayload" + }, + "signature": { + "description": "A base64-encoded signature of the data, signed by the client:", + "type": "string" + } + } + }, + "publicapi.submitResponse": { + "type": "object", + "properties": { + "job": { + "$ref": "#/definitions/model.Job" + } + } + }, + "publicapi.versionRequest": { + "type": "object", + "properties": { + "client_id": { + "type": "string", + "example": 
"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51" + } + } + }, + "publicapi.versionResponse": { + "type": "object", + "properties": { + "build_version_info": { + "$ref": "#/definitions/model.BuildVersionInfo" + } + } + }, + "requesternode.ActiveJob": { + "type": "object", + "properties": { + "BiddingNodesCount": { + "type": "integer" + }, + "CompletedNodesCount": { + "type": "integer" + }, + "ShardID": { + "type": "string" + }, + "State": { + "type": "string" + } + } + }, + "types.FreeSpace": { + "type": "object", + "properties": { + "IPFSMount": { + "$ref": "#/definitions/types.MountStatus" + }, + "root": { + "$ref": "#/definitions/types.MountStatus" + }, + "tmp": { + "$ref": "#/definitions/types.MountStatus" + } + } + }, + "types.HealthInfo": { + "type": "object", + "properties": { + "FreeSpace": { + "$ref": "#/definitions/types.FreeSpace" + } + } + }, + "types.MountStatus": { + "type": "object", + "properties": { + "All": { + "type": "integer" + }, + "Free": { + "type": "integer" + }, + "Used": { + "type": "integer" + } + } + } + } +} \ No newline at end of file diff --git a/docs/swagger.yaml b/docs/swagger.yaml new file mode 100644 index 0000000000..bf72750bd2 --- /dev/null +++ b/docs/swagger.yaml @@ -0,0 +1,1431 @@ +basePath: / +definitions: + computenode.ActiveJob: + properties: + CapacityRequirements: + $ref: '#/definitions/model.ResourceUsageData' + ShardID: + type: string + State: + type: string + type: object + model.BuildVersionInfo: + properties: + builddate: + example: "2022-11-16T14:03:31Z" + type: string + gitcommit: + example: d612b63108f2b5ce1ab2b9e02444eb1dac1d922d + type: string + gitversion: + example: v0.3.12 + type: string + goarch: + example: amd64 + type: string + goos: + example: linux + type: string + major: + example: "0" + type: string + minor: + example: "3" + type: string + type: object + model.Deal: + properties: + Concurrency: + description: |- + The maximum number of concurrent compute node bids that will be + 
accepted by the requester node on behalf of the client. + type: integer + Confidence: + description: |- + The number of nodes that must agree on a verification result + this is used by the different verifiers - for example the + deterministic verifier requires the winning group size + to be at least this size + type: integer + MinBids: + description: |- + The minimum number of bids that must be received before the Requester + node will randomly accept concurrency-many of them. This allows the + Requester node to get some level of guarantee that the execution of the + jobs will be spread evenly across the network (assuming that this value + is some large proportion of the size of the network). + type: integer + type: object + model.Job: + properties: + APIVersion: + example: V1beta1 + type: string + ClientID: + description: The ID of the client that created this job. + example: ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51 + type: string + CreatedAt: + description: Time the job was submitted to the bacalhau network. + example: "2022-11-17T13:29:01.871140291Z" + type: string + Deal: + $ref: '#/definitions/model.Deal' + description: The deal the client has made, such as which job bids they have + accepted. + ExecutionPlan: + $ref: '#/definitions/model.JobExecutionPlan' + description: how will this job be executed by nodes on the network + ID: + description: The unique global ID of this job in the bacalhau network. + example: 92d5d4ee-3765-4f78-8353-623f5f26df08 + type: string + JobEvents: + description: All events associated with the job + items: + $ref: '#/definitions/model.JobEvent' + type: array + JobState: + $ref: '#/definitions/model.JobState' + description: The current state of the job + LocalJobEvents: + description: All local events associated with the job + items: + $ref: '#/definitions/model.JobLocalEvent' + type: array + RequesterNodeID: + description: The ID of the requester node that owns this job. 
+ example: QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF + type: string + RequesterPublicKey: + description: |- + The public key of the Requester node that created this job + This can be used to encrypt messages back to the creator + items: + type: integer + type: array + Spec: + $ref: '#/definitions/model.Spec' + description: The specification of this job. + type: object + model.JobCreatePayload: + properties: + ClientID: + description: the id of the client that is submitting the job + type: string + Context: + description: |- + Optional base64-encoded tar file that will be pinned to IPFS and + mounted as storage for the job. Not part of the spec so we don't + flood the transport layer with it (potentially very large). + type: string + Job: + $ref: '#/definitions/model.Job' + description: 'The job specification:' + required: + - ClientID + - Job + type: object + model.JobEvent: + properties: + APIVersion: + description: APIVersion of the Job + example: V1beta1 + type: string + ClientID: + description: optional clientID if this is an externally triggered event (like + create job) + example: ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51 + type: string + Deal: + $ref: '#/definitions/model.Deal' + description: this is only defined in "update_deal" events + EventName: + type: integer + EventTime: + example: "2022-11-17T13:32:55.756658941Z" + type: string + JobExecutionPlan: + $ref: '#/definitions/model.JobExecutionPlan' + description: this is only defined in "create" events + JobID: + example: 9304c616-291f-41ad-b862-54e133c0149e + type: string + PublishedResult: + $ref: '#/definitions/model.StorageSpec' + RunOutput: + $ref: '#/definitions/model.RunCommandResult' + description: RunOutput of the job + SenderPublicKey: + items: + type: integer + type: array + ShardIndex: + description: what shard is this event for + type: integer + SourceNodeID: + description: the node that emitted this event + example: QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF + 
type: string + Spec: + $ref: '#/definitions/model.Spec' + description: this is only defined in "create" events + Status: + example: 'Got results proposal of length: 0' + type: string + TargetNodeID: + description: |- + the node that this event is for + e.g. "AcceptJobBid" was emitted by Requester but it targeting compute node + example: QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL + type: string + VerificationProposal: + items: + type: integer + type: array + VerificationResult: + $ref: '#/definitions/model.VerificationResult' + type: object + model.JobExecutionPlan: + properties: + ShardsTotal: + description: |- + how many shards are there in total for this job + we are expecting this number x concurrency total + JobShardState objects for this job + type: integer + type: object + model.JobLocalEvent: + properties: + EventName: + type: integer + JobID: + type: string + ShardIndex: + type: integer + TargetNodeID: + type: string + type: object + model.JobNodeState: + properties: + Shards: + additionalProperties: + $ref: '#/definitions/model.JobShardState' + type: object + type: object + model.JobShardState: + properties: + NodeId: + description: which node is running this shard + type: string + PublishedResults: + $ref: '#/definitions/model.StorageSpec' + RunOutput: + $ref: '#/definitions/model.RunCommandResult' + description: RunOutput of the job + ShardIndex: + description: what shard is this we are running + type: integer + State: + description: what is the state of the shard on this node + type: integer + Status: + description: an arbitrary status message + type: string + VerificationProposal: + description: |- + the proposed results for this shard + this will be resolved by the verifier somehow + items: + type: integer + type: array + VerificationResult: + $ref: '#/definitions/model.VerificationResult' + type: object + model.JobShardingConfig: + properties: + BatchSize: + description: |- + how many "items" are to be processed in each shard + we first apply the 
glob pattern which will result in a flat list of items + this number decides how to group that flat list into actual shards run by compute nodes + type: integer + GlobPattern: + description: |- + divide the inputs up into the smallest possible unit + for example /* would mean "all top level files or folders" + this being an empty string means "no sharding" + type: string + GlobPatternBasePath: + description: |- + when using multiple input volumes + what path do we treat as the common mount path to apply the glob pattern to + type: string + type: object + model.JobSpecDocker: + properties: + Entrypoint: + description: optionally override the default entrypoint + items: + type: string + type: array + EnvironmentVariables: + description: a map of env to run the container with + items: + type: string + type: array + Image: + description: this should be pullable by docker + type: string + WorkingDirectory: + description: working directory inside the container + type: string + type: object + model.JobSpecLanguage: + properties: + Command: + description: optional program specified on commandline, like python -c "print(1+1)" + type: string + DeterministicExecution: + description: must this job be run in a deterministic context? + type: boolean + JobContext: + $ref: '#/definitions/model.StorageSpec' + description: context is a tar file stored in ipfs, containing e.g. source + code and requirements + Language: + description: e.g. python + type: string + LanguageVersion: + description: e.g. 3.8 + type: string + ProgramPath: + description: optional program path relative to the context dir. one of Command + or ProgramPath must be specified + type: string + RequirementsPath: + description: optional requirements.txt (or equivalent) path relative to the + context dir + type: string + type: object + model.JobSpecWasm: + properties: + EntryPoint: + description: |- + The name of the function in the EntryModule to call to run the job. 
For + WASI jobs, this will always be `_start`, but jobs can choose to call + other WASM functions instead. The EntryPoint must be a zero-parameter + zero-result function. + type: string + EnvironmentVariables: + additionalProperties: + type: string + description: The variables available in the environment of the running program. + type: object + ImportModules: + description: |- + TODO #880: Other WASM modules whose exports will be available as imports + to the EntryModule. + items: + $ref: '#/definitions/model.StorageSpec' + type: array + Parameters: + description: The arguments supplied to the program (i.e. as ARGV). + items: + type: string + type: array + type: object + model.JobState: + properties: + Nodes: + additionalProperties: + $ref: '#/definitions/model.JobNodeState' + type: object + type: object + model.PublishedResult: + properties: + Data: + $ref: '#/definitions/model.StorageSpec' + NodeID: + type: string + ShardIndex: + type: integer + type: object + model.ResourceUsageConfig: + properties: + CPU: + description: https://github.com/BTBurke/k8sresource string + type: string + Disk: + type: string + GPU: + description: unsigned integer string + type: string + Memory: + description: github.com/c2h5oh/datasize string + type: string + type: object + model.ResourceUsageData: + properties: + CPU: + description: cpu units + example: 9.600000000000001 + type: number + Disk: + description: bytes + example: 212663867801 + type: integer + GPU: + description: Support whole GPUs only, like https://kubernetes.io/docs/tasks/manage-gpus/scheduling-gpus/ + example: 1 + type: integer + Memory: + description: bytes + example: 27487790694 + type: integer + type: object + model.RunCommandResult: + properties: + exitCode: + description: exit code of the run. + type: integer + runnerError: + description: Runner error + type: string + stderr: + description: stderr of the run. 
+ type: string + stderrtruncated: + description: bool describing if stderr was truncated + type: boolean + stdout: + description: stdout of the run. Yaml provided for `describe` output + type: string + stdouttruncated: + description: bool describing if stdout was truncated + type: boolean + type: object + model.Spec: + properties: + Annotations: + description: Annotations on the job - could be user or machine assigned + items: + type: string + type: array + Contexts: + description: |- + Input volumes that will not be sharded + for example to upload code into a base image + every shard will get the full range of context volumes + items: + $ref: '#/definitions/model.StorageSpec' + type: array + DoNotTrack: + description: Do not track specified by the client + type: boolean + Docker: + $ref: '#/definitions/model.JobSpecDocker' + description: executor specific data + Engine: + description: e.g. docker or language + type: integer + Language: + $ref: '#/definitions/model.JobSpecLanguage' + Publisher: + description: there can be multiple publishers for the job + type: integer + Resources: + $ref: '#/definitions/model.ResourceUsageConfig' + description: the compute (cpy, ram) resources this job requires + Sharding: + $ref: '#/definitions/model.JobShardingConfig' + description: |- + the sharding config for this job + describes how the job might be split up into parallel shards + Timeout: + description: |- + How long a job can run in seconds before it is killed. 
+ This includes the time required to run, verify and publish results + type: number + Verifier: + type: integer + Wasm: + $ref: '#/definitions/model.JobSpecWasm' + inputs: + description: |- + the data volumes we will read in the job + for example "read this ipfs cid" + TODO: #667 Replace with "Inputs", "Outputs" (note the caps) for yaml/json when we update the n.js file + items: + $ref: '#/definitions/model.StorageSpec' + type: array + outputs: + description: |- + the data volumes we will write in the job + for example "write the results to ipfs" + items: + $ref: '#/definitions/model.StorageSpec' + type: array + type: object + model.StorageSpec: + properties: + CID: + description: |- + The unique ID of the data, where it makes sense (for example, in an + IPFS storage spec this will be the data's CID). + NOTE: The below is capitalized to match IPFS & IPLD (even though it's out of golang fmt) + example: QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe + type: string + Metadata: + additionalProperties: + type: string + description: Additional properties specific to each driver + type: object + Name: + description: Name of the spec's data, for reference. + example: job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL + type: string + StorageSource: + description: |- + StorageSource is the abstract source of the data. E.g. a storage source + might be a URL download, but doesn't specify how the execution engine + does the download or what it will do with the downloaded data. + type: integer + URL: + description: Source URL of the data + type: string + path: + description: |- + The path that the spec's data should be mounted on, where it makes + sense (for example, in a Docker storage spec this will be a filesystem + path). 
+ TODO: #668 Replace with "Path" (note the caps) for yaml/json when we update the n.js file + type: string + type: object + model.VerificationResult: + properties: + Complete: + type: boolean + Result: + type: boolean + type: object + publicapi.debugResponse: + properties: + AvailableComputeCapacity: + $ref: '#/definitions/model.ResourceUsageData' + ComputeJobs: + items: + $ref: '#/definitions/computenode.ActiveJob' + type: array + RequesterJobs: + items: + $ref: '#/definitions/requesternode.ActiveJob' + type: array + type: object + publicapi.eventsRequest: + properties: + client_id: + example: ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51 + type: string + job_id: + example: 9304c616-291f-41ad-b862-54e133c0149e + type: string + type: object + publicapi.eventsResponse: + properties: + events: + items: + $ref: '#/definitions/model.JobEvent' + type: array + type: object + publicapi.listRequest: + properties: + client_id: + example: ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51 + type: string + id: + example: 9304c616-291f-41ad-b862-54e133c0149e + type: string + max_jobs: + example: 10 + type: integer + return_all: + type: boolean + sort_by: + example: created_at + type: string + sort_reverse: + type: boolean + type: object + publicapi.listResponse: + properties: + jobs: + items: + $ref: '#/definitions/model.Job' + type: array + type: object + publicapi.localEventsRequest: + properties: + client_id: + type: string + job_id: + type: string + type: object + publicapi.localEventsResponse: + properties: + localEvents: + items: + $ref: '#/definitions/model.JobLocalEvent' + type: array + type: object + publicapi.resultsResponse: + properties: + results: + items: + $ref: '#/definitions/model.PublishedResult' + type: array + type: object + publicapi.stateRequest: + properties: + client_id: + example: ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51 + type: string + job_id: + example: 9304c616-291f-41ad-b862-54e133c0149e + 
type: string + type: object + publicapi.stateResponse: + properties: + state: + $ref: '#/definitions/model.JobState' + type: object + publicapi.submitRequest: + properties: + client_public_key: + description: 'The base64-encoded public key of the client:' + type: string + data: + $ref: '#/definitions/model.JobCreatePayload' + description: 'The data needed to submit and run a job on the network:' + signature: + description: 'A base64-encoded signature of the data, signed by the client:' + type: string + required: + - client_public_key + - data + - signature + type: object + publicapi.submitResponse: + properties: + job: + $ref: '#/definitions/model.Job' + type: object + publicapi.versionRequest: + properties: + client_id: + example: ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51 + type: string + type: object + publicapi.versionResponse: + properties: + build_version_info: + $ref: '#/definitions/model.BuildVersionInfo' + type: object + requesternode.ActiveJob: + properties: + BiddingNodesCount: + type: integer + CompletedNodesCount: + type: integer + ShardID: + type: string + State: + type: string + type: object + types.FreeSpace: + properties: + IPFSMount: + $ref: '#/definitions/types.MountStatus' + root: + $ref: '#/definitions/types.MountStatus' + tmp: + $ref: '#/definitions/types.MountStatus' + type: object + types.HealthInfo: + properties: + FreeSpace: + $ref: '#/definitions/types.FreeSpace' + type: object + types.MountStatus: + properties: + All: + type: integer + Free: + type: integer + Used: + type: integer + type: object +host: bootstrap.production.bacalhau.org:1234 +info: + contact: + email: team@bacalhau.org + name: Bacalhau Team + url: https://github.com/filecoin-project/bacalhau + description: This page is the reference of the Bacalhau REST API. Project docs are + available at https://docs.bacalhau.org/. Find more information about Bacalhau + at https://github.com/filecoin-project/bacalhau. 
+ license: + name: Apache 2.0 + url: https://github.com/filecoin-project/bacalhau/blob/main/LICENSE + title: Bacalhau API +paths: + /debug: + get: + operationId: apiServer/debug + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/publicapi.debugResponse' + "500": + description: Internal Server Error + schema: + type: string + summary: Returns debug information on what the current node is doing. + tags: + - Health + /events: + post: + consumes: + - application/json + description: |- + Events (e.g. Created, Bid, BidAccepted, ..., ResultsAccepted, ResultsPublished) are useful to track the progress of a job. + + Example response (truncated): + ```json + { + "events": [ + { + "APIVersion": "V1beta1", + "JobID": "9304c616-291f-41ad-b862-54e133c0149e", + "ClientID": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51", + "SourceNodeID": "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF", + "EventName": "Created", + "Spec": { + "Engine": "Docker", + "Verifier": "Noop", + "Publisher": "Estuary", + "Docker": { + "Image": "ubuntu", + "Entrypoint": [ + "date" + ] + }, + "Language": { + "JobContext": {} + }, + "Wasm": {}, + "Resources": { + "GPU": "" + }, + "Timeout": 1800, + "outputs": [ + { + "StorageSource": "IPFS", + "Name": "outputs", + "path": "/outputs" + } + ], + "Sharding": { + "BatchSize": 1, + "GlobPatternBasePath": "/inputs" + } + }, + "JobExecutionPlan": { + "ShardsTotal": 1 + }, + "Deal": { + "Concurrency": 1 + }, + "VerificationResult": {}, + "PublishedResult": {}, + "EventTime": "2022-11-17T13:32:55.331375351Z", + "SenderPublicKey": "..." + }, + ... 
+ { + "JobID": "9304c616-291f-41ad-b862-54e133c0149e", + "SourceNodeID": "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF", + "TargetNodeID": "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "EventName": "ResultsAccepted", + "Spec": { + "Docker": {}, + "Language": { + "JobContext": {} + }, + "Wasm": {}, + "Resources": { + "GPU": "" + }, + "Sharding": {} + }, + "JobExecutionPlan": {}, + "Deal": {}, + "VerificationResult": { + "Complete": true, + "Result": true + }, + "PublishedResult": {}, + "EventTime": "2022-11-17T13:32:55.707825569Z", + "SenderPublicKey": "..." + }, + { + "JobID": "9304c616-291f-41ad-b862-54e133c0149e", + "SourceNodeID": "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "EventName": "ResultsPublished", + "Spec": { + "Docker": {}, + "Language": { + "JobContext": {} + }, + "Wasm": {}, + "Resources": { + "GPU": "" + }, + "Sharding": {} + }, + "JobExecutionPlan": {}, + "Deal": {}, + "VerificationResult": {}, + "PublishedResult": { + "StorageSource": "IPFS", + "Name": "job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "CID": "QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe" + }, + "EventTime": "2022-11-17T13:32:55.756658941Z", + "SenderPublicKey": "..." + } + ] + } + ``` + operationId: pkg/publicapi/events + parameters: + - description: 'Request must specify a `client_id`. To retrieve your `client_id`, + you can do the following: (1) submit a dummy job to Bacalhau (or use one + you created before), (2) run `bacalhau describe ` and fetch the + `ClientID` field.' 
+ in: body + name: eventsRequest + required: true + schema: + $ref: '#/definitions/publicapi.eventsRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/publicapi.eventsResponse' + "400": + description: Bad Request + schema: + type: string + "500": + description: Internal Server Error + schema: + type: string + summary: Returns the events related to the job-id passed in the body payload. + Useful for troubleshooting. + tags: + - Job + /healthz: + get: + operationId: apiServer/healthz + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/types.HealthInfo' + tags: + - Health + /id: + get: + operationId: apiServer/id + produces: + - text/plain + responses: + "200": + description: OK + schema: + type: string + "500": + description: Internal Server Error + schema: + type: string + summary: Returns the id of the host node. + tags: + - Misc + /list: + post: + consumes: + - application/json + description: |- + Returns the first (sorted) #`max_jobs` jobs that belong to the `client_id` passed in the body payload (by default). + If `return_all` is set to true, it returns all jobs on the Bacalhau network. + + If `id` is set, it returns only the job with that ID. 
+ + Example response: + ```json + { + "jobs": [ + { + "APIVersion": "V1beta1", + "ID": "9304c616-291f-41ad-b862-54e133c0149e", + "RequesterNodeID": "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF", + "RequesterPublicKey": "...", + "ClientID": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51", + "Spec": { + "Engine": "Docker", + "Verifier": "Noop", + "Publisher": "Estuary", + "Docker": { + "Image": "ubuntu", + "Entrypoint": [ + "date" + ] + }, + "Language": { + "JobContext": {} + }, + "Wasm": {}, + "Resources": { + "GPU": "" + }, + "Timeout": 1800, + "outputs": [ + { + "StorageSource": "IPFS", + "Name": "outputs", + "path": "/outputs" + } + ], + "Sharding": { + "BatchSize": 1, + "GlobPatternBasePath": "/inputs" + } + }, + "Deal": { + "Concurrency": 1 + }, + "ExecutionPlan": { + "ShardsTotal": 1 + }, + "CreatedAt": "2022-11-17T13:32:55.33837275Z", + "JobState": { + "Nodes": { + "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86": { + "Shards": { + "0": { + "NodeId": "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86", + "State": "Cancelled", + "VerificationResult": {}, + "PublishedResults": {} + } + } + }, + "QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3": { + "Shards": { + "0": { + "NodeId": "QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3", + "State": "Cancelled", + "VerificationResult": {}, + "PublishedResults": {} + } + } + }, + "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL": { + "Shards": { + "0": { + "NodeId": "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "State": "Completed", + "Status": "Got results proposal of length: 0", + "VerificationResult": { + "Complete": true, + "Result": true + }, + "PublishedResults": { + "StorageSource": "IPFS", + "Name": "job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "CID": "QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe" + }, + "RunOutput": { + "stdout": "Thu Nov 17 13:32:55 UTC 2022\n", + "stdouttruncated": false, + "stderr": "", + "stderrtruncated": 
false, + "exitCode": 0, + "runnerError": "" + } + } + } + } + } + } + }, + { + "APIVersion": "V1beta1", + "ID": "92d5d4ee-3765-4f78-8353-623f5f26df08", + "RequesterNodeID": "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF", + "RequesterPublicKey": "...", + "ClientID": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51", + "Spec": { + "Engine": "Docker", + "Verifier": "Noop", + "Publisher": "Estuary", + "Docker": { + "Image": "ubuntu", + "Entrypoint": [ + "sleep", + "4" + ] + }, + "Language": { + "JobContext": {} + }, + "Wasm": {}, + "Resources": { + "GPU": "" + }, + "Timeout": 1800, + "outputs": [ + { + "StorageSource": "IPFS", + "Name": "outputs", + "path": "/outputs" + } + ], + "Sharding": { + "BatchSize": 1, + "GlobPatternBasePath": "/inputs" + } + }, + "Deal": { + "Concurrency": 1 + }, + "ExecutionPlan": { + "ShardsTotal": 1 + }, + "CreatedAt": "2022-11-17T13:29:01.871140291Z", + "JobState": { + "Nodes": { + "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86": { + "Shards": { + "0": { + "NodeId": "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86", + "State": "Cancelled", + "VerificationResult": {}, + "PublishedResults": {} + } + } + }, + "QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3": { + "Shards": { + "0": { + "NodeId": "QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3", + "State": "Completed", + "Status": "Got results proposal of length: 0", + "VerificationResult": { + "Complete": true, + "Result": true + }, + "PublishedResults": { + "StorageSource": "IPFS", + "Name": "job-92d5d4ee-3765-4f78-8353-623f5f26df08-shard-0-host-QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3", + "CID": "QmWUXBndMuq2G6B6ndQCmkRHjZ6CvyJ8qLxXBG3YsSFzQG" + }, + "RunOutput": { + "stdout": "", + "stdouttruncated": false, + "stderr": "", + "stderrtruncated": false, + "exitCode": 0, + "runnerError": "" + } + } + } + } + } + } + } + ] + } + ``` + operationId: pkg/publicapi.list + parameters: + - description: Set `return_all` to `true` to return all jobs on the network + (may degrade 
performance, use with care!). + in: body + name: listRequest + required: true + schema: + $ref: '#/definitions/publicapi.listRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/publicapi.listResponse' + "400": + description: Bad Request + schema: + type: string + "500": + description: Internal Server Error + schema: + type: string + summary: Simply lists jobs. + tags: + - Job + /livez: + get: + operationId: apiServer/livez + produces: + - text/plain + responses: + "200": + description: TODO + schema: + type: string + tags: + - Health + /local_events: + post: + consumes: + - application/json + description: Local events (e.g. Selected, BidAccepted, Verified) are useful + to track the progress of a job. + operationId: pkg/publicapi/localEvents + parameters: + - description: ' ' + in: body + name: localEventsRequest + required: true + schema: + $ref: '#/definitions/publicapi.localEventsRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/publicapi.localEventsResponse' + "400": + description: Bad Request + schema: + type: string + "500": + description: Internal Server Error + schema: + type: string + summary: Returns the node's local events related to the job-id passed in the + body payload. Useful for troubleshooting. + tags: + - Job + /logz: + get: + operationId: apiServer/logz + produces: + - text/plain + responses: + "200": + description: TODO + schema: + type: string + tags: + - Health + /peers: + get: + description: |- + As described in the [architecture docs](https://docs.bacalhau.org/about-bacalhau/architecture), each node is connected to a number of peer nodes. 
+ + Example response: + ```json + { + "bacalhau-job-event": [ + "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF", + "QmVAb7r2pKWCuyLpYWoZr9syhhFnTWeFaByHdb8PkkhLQG", + "QmUDAXvv31WPZ8U9CzuRTMn9iFGiopGE7rHiah1X8a6PkT", + "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86" + ] + } + ``` + operationId: apiServer/peers + produces: + - application/json + responses: + "200": + description: OK + schema: + additionalProperties: + items: + type: string + type: array + type: object + "500": + description: Internal Server Error + schema: + type: string + summary: Returns the peers connected to the host via the transport layer. + tags: + - Misc + /readyz: + get: + operationId: apiServer/readyz + produces: + - text/plain + responses: + "200": + description: OK + schema: + type: string + tags: + - Health + /results: + post: + consumes: + - application/json + description: |- + Example response: + + ```json + { + "results": [ + { + "NodeID": "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "Data": { + "StorageSource": "IPFS", + "Name": "job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "CID": "QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe" + } + } + ] + } + ``` + operationId: pkg/publicapi/results + parameters: + - description: ' ' + in: body + name: stateRequest + required: true + schema: + $ref: '#/definitions/publicapi.stateRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/publicapi.resultsResponse' + "400": + description: Bad Request + schema: + type: string + "500": + description: Internal Server Error + schema: + type: string + summary: Returns the results of the job-id specified in the body payload. 
+ tags: + - Job + /states: + post: + consumes: + - application/json + description: |- + Example response: + + ```json + { + "state": { + "Nodes": { + "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86": { + "Shards": { + "0": { + "NodeId": "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86", + "State": "Cancelled", + "VerificationResult": {}, + "PublishedResults": {} + } + } + }, + "QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3": { + "Shards": { + "0": { + "NodeId": "QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3", + "State": "Cancelled", + "VerificationResult": {}, + "PublishedResults": {} + } + } + }, + "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL": { + "Shards": { + "0": { + "NodeId": "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "State": "Completed", + "Status": "Got results proposal of length: 0", + "VerificationResult": { + "Complete": true, + "Result": true + }, + "PublishedResults": { + "StorageSource": "IPFS", + "Name": "job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "CID": "QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe" + }, + "RunOutput": { + "stdout": "Thu Nov 17 13:32:55 UTC 2022\n", + "stdouttruncated": false, + "stderr": "", + "stderrtruncated": false, + "exitCode": 0, + "runnerError": "" + } + } + } + } + } + } + } + ``` + operationId: pkg/publicapi/states + parameters: + - description: ' ' + in: body + name: stateRequest + required: true + schema: + $ref: '#/definitions/publicapi.stateRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/publicapi.stateResponse' + "400": + description: Bad Request + schema: + type: string + "500": + description: Internal Server Error + schema: + type: string + summary: Returns the state of the job-id specified in the body payload. 
+ tags: + - Job + /submit: + post: + consumes: + - application/json + description: "Description:\n\n* `client_public_key`: The base64-encoded public + key of the client.\n* `signature`: A base64-encoded signature of the `data` + attribute, signed by the client.\n* `data`\n * `ClientID`: Request must + specify a `ClientID`. To retrieve your `ClientID`, you can do the following: + (1) submit a dummy job to Bacalhau (or use one you created before), (2) run + `bacalhau describe ` and fetch the `ClientID` field.\n * `Job`: + see example below.\n\nExample request\n```json\n{\n\t\"data\": {\n\t\t\"ClientID\": + \"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51\",\n\t\t\"Job\": + {\n\t\t\t\"APIVersion\": \"V1beta1\",\n\t\t\t\"Spec\": {\n\t\t\t\t\"Engine\": + \"Docker\",\n\t\t\t\t\"Verifier\": \"Noop\",\n\t\t\t\t\"Publisher\": \"Estuary\",\n\t\t\t\t\"Docker\": + {\n\t\t\t\t\t\"Image\": \"ubuntu\",\n\t\t\t\t\t\"Entrypoint\": [\n\t\t\t\t\t\t\"date\"\n\t\t\t\t\t]\n\t\t\t\t},\n\t\t\t\t\"Timeout\": + 1800,\n\t\t\t\t\"outputs\": [\n\t\t\t\t\t{\n\t\t\t\t\t\t\"StorageSource\": + \"IPFS\",\n\t\t\t\t\t\t\"Name\": \"outputs\",\n\t\t\t\t\t\t\"path\": \"/outputs\"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t\"Sharding\": + {\n\t\t\t\t\t\"BatchSize\": 1,\n\t\t\t\t\t\"GlobPatternBasePath\": \"/inputs\"\n\t\t\t\t}\n\t\t\t},\n\t\t\t\"Deal\": + {\n\t\t\t\t\"Concurrency\": 1\n\t\t\t}\n\t\t}\n\t},\n\t\"signature\": \"...\",\n\t\"client_public_key\": + \"...\"\n}\n```" + operationId: pkg/apiServer.submit + parameters: + - description: ' ' + in: body + name: submitRequest + required: true + schema: + $ref: '#/definitions/publicapi.submitRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/publicapi.submitResponse' + "400": + description: Bad Request + schema: + type: string + "500": + description: Internal Server Error + schema: + type: string + summary: Submits a new job to the network. 
+ tags: + - Job + /varz: + get: + operationId: apiServer/varz + produces: + - application/json + responses: + "200": + description: OK + schema: + items: + type: integer + type: array + tags: + - Health + /version: + post: + consumes: + - application/json + description: See https://github.com/filecoin-project/bacalhau/releases for a + complete list of `gitversion` tags. + operationId: apiServer/version + parameters: + - description: 'Request must specify a `client_id`. To retrieve your `client_id`, + you can do the following: (1) submit a dummy job to Bacalhau (or use one + you created before), (2) run `bacalhau describe ` and fetch the + `ClientID` field.' + in: body + name: versionRequest + required: true + schema: + $ref: '#/definitions/publicapi.versionRequest' + produces: + - application/json + responses: + "200": + description: OK + schema: + $ref: '#/definitions/publicapi.versionResponse' + "400": + description: Bad Request + schema: + type: string + "500": + description: Internal Server Error + schema: + type: string + summary: Returns the build version running on the server. + tags: + - Misc +schemes: +- http +swagger: "2.0" diff --git a/docs/swagger/README.md b/docs/swagger/README.md new file mode 100644 index 0000000000..99c5df64f1 --- /dev/null +++ b/docs/swagger/README.md @@ -0,0 +1 @@ +This folder contains markdown files to use as descriptions in OpenAPI/Swagger annotations. \ No newline at end of file diff --git a/docs/swagger/endpoints_events.md b/docs/swagger/endpoints_events.md new file mode 100644 index 0000000000..9c669365f7 --- /dev/null +++ b/docs/swagger/endpoints_events.md @@ -0,0 +1,109 @@ +Events (e.g. Created, Bid, BidAccepted, ..., ResultsAccepted, ResultsPublished) are useful to track the progress of a job. 
+ +Example response (truncated): +```json +{ + "events": [ + { + "APIVersion": "V1beta1", + "JobID": "9304c616-291f-41ad-b862-54e133c0149e", + "ClientID": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51", + "SourceNodeID": "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF", + "EventName": "Created", + "Spec": { + "Engine": "Docker", + "Verifier": "Noop", + "Publisher": "Estuary", + "Docker": { + "Image": "ubuntu", + "Entrypoint": [ + "date" + ] + }, + "Language": { + "JobContext": {} + }, + "Wasm": {}, + "Resources": { + "GPU": "" + }, + "Timeout": 1800, + "outputs": [ + { + "StorageSource": "IPFS", + "Name": "outputs", + "path": "/outputs" + } + ], + "Sharding": { + "BatchSize": 1, + "GlobPatternBasePath": "/inputs" + } + }, + "JobExecutionPlan": { + "ShardsTotal": 1 + }, + "Deal": { + "Concurrency": 1 + }, + "VerificationResult": {}, + "PublishedResult": {}, + "EventTime": "2022-11-17T13:32:55.331375351Z", + "SenderPublicKey": "..." + }, + ... + { + "JobID": "9304c616-291f-41ad-b862-54e133c0149e", + "SourceNodeID": "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF", + "TargetNodeID": "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "EventName": "ResultsAccepted", + "Spec": { + "Docker": {}, + "Language": { + "JobContext": {} + }, + "Wasm": {}, + "Resources": { + "GPU": "" + }, + "Sharding": {} + }, + "JobExecutionPlan": {}, + "Deal": {}, + "VerificationResult": { + "Complete": true, + "Result": true + }, + "PublishedResult": {}, + "EventTime": "2022-11-17T13:32:55.707825569Z", + "SenderPublicKey": "..." 
+ }, + { + "JobID": "9304c616-291f-41ad-b862-54e133c0149e", + "SourceNodeID": "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "EventName": "ResultsPublished", + "Spec": { + "Docker": {}, + "Language": { + "JobContext": {} + }, + "Wasm": {}, + "Resources": { + "GPU": "" + }, + "Sharding": {} + }, + "JobExecutionPlan": {}, + "Deal": {}, + "VerificationResult": {}, + "PublishedResult": { + "StorageSource": "IPFS", + "Name": "job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "CID": "QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe" + }, + "EventTime": "2022-11-17T13:32:55.756658941Z", + "SenderPublicKey": "..." + } + ] +} +``` \ No newline at end of file diff --git a/docs/swagger/endpoints_list.md b/docs/swagger/endpoints_list.md new file mode 100644 index 0000000000..a3f6856a31 --- /dev/null +++ b/docs/swagger/endpoints_list.md @@ -0,0 +1,191 @@ +Returns the first (sorted) #`max_jobs` jobs that belong to the `client_id` passed in the body payload (by default). +If `return_all` is set to true, it returns all jobs on the Bacalhau network. + +If `id` is set, it returns only the job with that ID. 
+ +Example response: +```json +{ + "jobs": [ + { + "APIVersion": "V1beta1", + "ID": "9304c616-291f-41ad-b862-54e133c0149e", + "RequesterNodeID": "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF", + "RequesterPublicKey": "...", + "ClientID": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51", + "Spec": { + "Engine": "Docker", + "Verifier": "Noop", + "Publisher": "Estuary", + "Docker": { + "Image": "ubuntu", + "Entrypoint": [ + "date" + ] + }, + "Language": { + "JobContext": {} + }, + "Wasm": {}, + "Resources": { + "GPU": "" + }, + "Timeout": 1800, + "outputs": [ + { + "StorageSource": "IPFS", + "Name": "outputs", + "path": "/outputs" + } + ], + "Sharding": { + "BatchSize": 1, + "GlobPatternBasePath": "/inputs" + } + }, + "Deal": { + "Concurrency": 1 + }, + "ExecutionPlan": { + "ShardsTotal": 1 + }, + "CreatedAt": "2022-11-17T13:32:55.33837275Z", + "JobState": { + "Nodes": { + "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86": { + "Shards": { + "0": { + "NodeId": "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86", + "State": "Cancelled", + "VerificationResult": {}, + "PublishedResults": {} + } + } + }, + "QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3": { + "Shards": { + "0": { + "NodeId": "QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3", + "State": "Cancelled", + "VerificationResult": {}, + "PublishedResults": {} + } + } + }, + "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL": { + "Shards": { + "0": { + "NodeId": "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "State": "Completed", + "Status": "Got results proposal of length: 0", + "VerificationResult": { + "Complete": true, + "Result": true + }, + "PublishedResults": { + "StorageSource": "IPFS", + "Name": "job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "CID": "QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe" + }, + "RunOutput": { + "stdout": "Thu Nov 17 13:32:55 UTC 2022\n", + "stdouttruncated": false, + "stderr": "", + "stderrtruncated": 
false, + "exitCode": 0, + "runnerError": "" + } + } + } + } + } + } + }, + { + "APIVersion": "V1beta1", + "ID": "92d5d4ee-3765-4f78-8353-623f5f26df08", + "RequesterNodeID": "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF", + "RequesterPublicKey": "...", + "ClientID": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51", + "Spec": { + "Engine": "Docker", + "Verifier": "Noop", + "Publisher": "Estuary", + "Docker": { + "Image": "ubuntu", + "Entrypoint": [ + "sleep", + "4" + ] + }, + "Language": { + "JobContext": {} + }, + "Wasm": {}, + "Resources": { + "GPU": "" + }, + "Timeout": 1800, + "outputs": [ + { + "StorageSource": "IPFS", + "Name": "outputs", + "path": "/outputs" + } + ], + "Sharding": { + "BatchSize": 1, + "GlobPatternBasePath": "/inputs" + } + }, + "Deal": { + "Concurrency": 1 + }, + "ExecutionPlan": { + "ShardsTotal": 1 + }, + "CreatedAt": "2022-11-17T13:29:01.871140291Z", + "JobState": { + "Nodes": { + "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86": { + "Shards": { + "0": { + "NodeId": "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86", + "State": "Cancelled", + "VerificationResult": {}, + "PublishedResults": {} + } + } + }, + "QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3": { + "Shards": { + "0": { + "NodeId": "QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3", + "State": "Completed", + "Status": "Got results proposal of length: 0", + "VerificationResult": { + "Complete": true, + "Result": true + }, + "PublishedResults": { + "StorageSource": "IPFS", + "Name": "job-92d5d4ee-3765-4f78-8353-623f5f26df08-shard-0-host-QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3", + "CID": "QmWUXBndMuq2G6B6ndQCmkRHjZ6CvyJ8qLxXBG3YsSFzQG" + }, + "RunOutput": { + "stdout": "", + "stdouttruncated": false, + "stderr": "", + "stderrtruncated": false, + "exitCode": 0, + "runnerError": "" + } + } + } + } + } + } + } + ] +} +``` \ No newline at end of file diff --git a/docs/swagger/endpoints_peers.md b/docs/swagger/endpoints_peers.md new file mode 100644 index 
0000000000..91da7873d1 --- /dev/null +++ b/docs/swagger/endpoints_peers.md @@ -0,0 +1,14 @@ +As described in the [architecture docs](https://docs.bacalhau.org/about-bacalhau/architecture), each node is connected to a number of peer nodes. + +Example response: +```json +{ + "bacalhau-job-event": [ + "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF", + "QmVAb7r2pKWCuyLpYWoZr9syhhFnTWeFaByHdb8PkkhLQG", + "QmUDAXvv31WPZ8U9CzuRTMn9iFGiopGE7rHiah1X8a6PkT", + "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86" + ] +} +``` \ No newline at end of file diff --git a/docs/swagger/endpoints_results.md b/docs/swagger/endpoints_results.md new file mode 100644 index 0000000000..2b2ee80e0e --- /dev/null +++ b/docs/swagger/endpoints_results.md @@ -0,0 +1,16 @@ +Example response: + +```json +{ + "results": [ + { + "NodeID": "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "Data": { + "StorageSource": "IPFS", + "Name": "job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "CID": "QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe" + } + } + ] +} +``` \ No newline at end of file diff --git a/docs/swagger/endpoints_states.md b/docs/swagger/endpoints_states.md new file mode 100644 index 0000000000..03792cc53a --- /dev/null +++ b/docs/swagger/endpoints_states.md @@ -0,0 +1,56 @@ +Example response: + +```json +{ + "state": { + "Nodes": { + "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86": { + "Shards": { + "0": { + "NodeId": "QmSyJ8VUd4YSPwZFJSJsHmmmmg7sd4BAc2yHY73nisJo86", + "State": "Cancelled", + "VerificationResult": {}, + "PublishedResults": {} + } + } + }, + "QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3": { + "Shards": { + "0": { + "NodeId": "QmYgxZiySj3MRkwLSL4X2MF5F9f2PMhAE3LV49XkfNL1o3", + "State": "Cancelled", + "VerificationResult": {}, + "PublishedResults": {} + } + } + }, + "QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL": { + "Shards": { + "0": { + "NodeId": 
"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "State": "Completed", + "Status": "Got results proposal of length: 0", + "VerificationResult": { + "Complete": true, + "Result": true + }, + "PublishedResults": { + "StorageSource": "IPFS", + "Name": "job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL", + "CID": "QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe" + }, + "RunOutput": { + "stdout": "Thu Nov 17 13:32:55 UTC 2022\n", + "stdouttruncated": false, + "stderr": "", + "stderrtruncated": false, + "exitCode": 0, + "runnerError": "" + } + } + } + } + } + } +} +``` \ No newline at end of file diff --git a/docs/swagger/endpoints_submit.md b/docs/swagger/endpoints_submit.md new file mode 100644 index 0000000000..d3382128b5 --- /dev/null +++ b/docs/swagger/endpoints_submit.md @@ -0,0 +1,47 @@ +Description: + +* `client_public_key`: The base64-encoded public key of the client. +* `signature`: A base64-encoded signature of the `data` attribute, signed by the client. +* `data` + * `ClientID`: Request must specify a `ClientID`. To retrieve your `ClientID`, you can do the following: (1) submit a dummy job to Bacalhau (or use one you created before), (2) run `bacalhau describe ` and fetch the `ClientID` field. + * `Job`: see example below. + +Example request +```json +{ + "data": { + "ClientID": "ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51", + "Job": { + "APIVersion": "V1beta1", + "Spec": { + "Engine": "Docker", + "Verifier": "Noop", + "Publisher": "Estuary", + "Docker": { + "Image": "ubuntu", + "Entrypoint": [ + "date" + ] + }, + "Timeout": 1800, + "outputs": [ + { + "StorageSource": "IPFS", + "Name": "outputs", + "path": "/outputs" + } + ], + "Sharding": { + "BatchSize": 1, + "GlobPatternBasePath": "/inputs" + } + }, + "Deal": { + "Concurrency": 1 + } + } + }, + "signature": "...", + "client_public_key": "..." 
+} +``` \ No newline at end of file diff --git a/go.mod b/go.mod index 152493a59b..733e4ec5d9 100644 --- a/go.mod +++ b/go.mod @@ -61,6 +61,8 @@ require ( github.com/spf13/pflag v1.0.5 github.com/spf13/viper v1.14.0 github.com/stretchr/testify v1.8.1 + github.com/swaggo/http-swagger v1.3.3 + github.com/swaggo/swag v1.8.7 github.com/tetratelabs/wazero v1.0.0-pre.3 github.com/tidwall/sjson v1.2.5 github.com/xeipuuv/gojsonschema v1.2.0 @@ -87,6 +89,7 @@ require ( contrib.go.opencensus.io/exporter/prometheus v0.4.0 // indirect github.com/AndreasBriese/bbloom v0.0.0-20190825152654-46b345b51c96 // indirect github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect + github.com/KyleBanks/depth v1.2.1 // indirect github.com/Microsoft/go-winio v0.5.2 // indirect github.com/alecthomas/units v0.0.0-20210927113745-59d0afb8317a // indirect github.com/alexbrainman/goissue34681 v0.0.0-20191006012335-3fc7a47baff5 // indirect @@ -126,6 +129,10 @@ require ( github.com/go-logfmt/logfmt v0.5.1 // indirect github.com/go-logr/logr v1.2.3 // indirect github.com/go-logr/stdr v1.2.2 // indirect + github.com/go-openapi/jsonpointer v0.19.5 // indirect + github.com/go-openapi/jsonreference v0.20.0 // indirect + github.com/go-openapi/spec v0.20.7 // indirect + github.com/go-openapi/swag v0.22.3 // indirect github.com/go-pkgz/expirable-cache v0.1.0 // indirect github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 // indirect github.com/godbus/dbus/v5 v5.1.0 // indirect @@ -192,6 +199,7 @@ require ( github.com/jackpal/go-nat-pmp v1.0.2 // indirect github.com/jbenet/go-temp-err-catcher v0.1.0 // indirect github.com/jbenet/goprocess v0.1.4 // indirect + github.com/josharian/intern v1.0.0 // indirect github.com/json-iterator/go v1.1.12 // indirect github.com/klauspost/compress v1.15.10 // indirect github.com/klauspost/cpuid/v2 v2.1.1 // indirect @@ -221,6 +229,7 @@ require ( github.com/libp2p/zeroconf/v2 v2.2.0 // indirect github.com/lucas-clemente/quic-go v0.29.1 // 
indirect github.com/magiconair/properties v1.8.6 // indirect + github.com/mailru/easyjson v0.7.7 // indirect github.com/marten-seemann/qpack v0.2.1 // indirect github.com/marten-seemann/qtls-go1-18 v0.1.2 // indirect github.com/marten-seemann/qtls-go1-19 v0.1.0 // indirect @@ -278,6 +287,7 @@ require ( github.com/spf13/jwalterweatherman v1.1.0 // indirect github.com/stretchr/objx v0.5.0 // indirect github.com/subosito/gotenv v1.4.1 // indirect + github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe // indirect github.com/syndtr/goleveldb v1.0.0 // indirect github.com/tidwall/gjson v1.14.2 // indirect github.com/tidwall/match v1.1.1 // indirect @@ -311,7 +321,7 @@ require ( golang.org/x/term v0.2.0 // indirect golang.org/x/text v0.4.0 // indirect golang.org/x/time v0.0.0-20220609170525-579cf78fd858 // indirect - golang.org/x/tools v0.2.0 // indirect + golang.org/x/tools v0.3.0 // indirect golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect google.golang.org/appengine v1.6.7 // indirect google.golang.org/genproto v0.0.0-20221024183307-1bc688fe9f3e // indirect diff --git a/go.sum b/go.sum index fc13a78b05..148a0bb8b9 100644 --- a/go.sum +++ b/go.sum @@ -60,6 +60,8 @@ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03 github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible/go.mod h1:r7JcOSlj0wfOMncg0iLm8Leh48TZaKVeNIfJntJ2wa0= github.com/Kubuxu/go-os-helper v0.0.1/go.mod h1:N8B+I7vPCT80IcP58r50u4+gEEcsZETFUpAzWW2ep1Y= +github.com/KyleBanks/depth v1.2.1 h1:5h8fQADFrWtarTdtDudMmGsC7GPbOAu6RVB3ffsVFHc= +github.com/KyleBanks/depth v1.2.1/go.mod h1:jzSb9d0L43HxTQfT+oSA1EEp2q+ne2uh6XgeJcm8brE= github.com/MakeNowJust/heredoc/v2 v2.0.1 h1:rlCHh70XXXv7toz95ajQWOWQnN4WNLt0TdpZYIR/J6A= github.com/MakeNowJust/heredoc/v2 v2.0.1/go.mod h1:6/2Abh5s+hc3g9nbWLe9ObDIOhaRrqsyY9MWy+4JdRM= 
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww= @@ -68,8 +70,6 @@ github.com/Microsoft/go-winio v0.5.2 h1:a9IhgEQBCUEk6QCdml9CiJGhAws+YwffDHEMp1VM github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= -github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI= -github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M= github.com/Shopify/sarama v1.19.0/go.mod h1:FVkBWblsNy7DGZRfXLU0O9RCGt5g3g3yEuWXgklEdEo= github.com/Shopify/sarama v1.30.0/go.mod h1:zujlQQx1kzHsh4jfV1USnptCQrHAEZ2Hk8fTKCulPVs= github.com/Shopify/toxiproxy v2.1.4+incompatible/go.mod h1:OXgGpZ6Cli1/URJOF1DMxUHB2q5Ap20/P/eIdh4G0pI= @@ -345,9 +345,17 @@ github.com/go-logr/logr v1.2.3 h1:2DntVwHkVopvECVRSlL5PSo9eG+cAkDCuckLubN+rq0= github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= +github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= github.com/go-openapi/jsonpointer v0.19.5 h1:gZr+CIYByUqjcgeLXnQu2gHYQC9o73G2XUeOFYEICuY= -github.com/go-openapi/jsonreference v0.19.5 h1:1WJP/wi4OjB4iV8KVbH73rQaoialJrqv8gitZLxGLtM= -github.com/go-openapi/swag v0.19.14 h1:gm3vOOXfiuw5i9p5N9xJvfjvuofpyvLA9Wr6QfK5Fng= +github.com/go-openapi/jsonpointer v0.19.5/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg= +github.com/go-openapi/jsonreference v0.20.0 h1:MYlu0sBgChmCfJxxUKZ8g1cPWFOB37YSZqewK7OKeyA= +github.com/go-openapi/jsonreference v0.20.0/go.mod h1:Ag74Ico3lPc+zR+qjn4XBUmXymS4zJbYVCZmcgkasdo= +github.com/go-openapi/spec v0.20.7 
h1:1Rlu/ZrOCCob0n+JKKJAWhNWMPW8bOZRg8FJaY+0SKI= +github.com/go-openapi/spec v0.20.7/go.mod h1:2OpW+JddWPrpXSCIX8eOx7lZ5iyuWj3RYR6VaaBKcWA= +github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk= +github.com/go-openapi/swag v0.19.15/go.mod h1:QYRuS/SOXUCsnplDa677K7+DxSOj6IPNl/eQntq43wQ= +github.com/go-openapi/swag v0.22.3 h1:yMBqmnQ0gyZvEb/+KzuWZOXgllrXT4SADYbvDaXHv/g= +github.com/go-openapi/swag v0.22.3/go.mod h1:UzaqsxGiab7freDnrUUra0MwWfN/q7tE4j+VcZ0yl14= github.com/go-pkgz/expirable-cache v0.1.0 h1:3bw0m8vlTK8qlwz5KXuygNBTkiKRTPrAGXU0Ej2AC1g= github.com/go-pkgz/expirable-cache v0.1.0/go.mod h1:GTrEl0X+q0mPNqN6dtcQXksACnzCBQ5k/k1SwXJsZKs= github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPrFY= @@ -818,6 +826,7 @@ github.com/joho/godotenv v1.4.0 h1:3l4+N6zfMWnkbPEXKng2o2/MR5mSwTrBih4ZEkkz1lg= github.com/joho/godotenv v1.4.0/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= +github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= @@ -1215,7 +1224,11 @@ github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamh github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60= github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= github.com/mailru/easyjson v0.0.0-20190312143242-1de009706dbe/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= -github.com/mailru/easyjson v0.7.6 
h1:8yTIVnZgCoiM1TgqoeTl+LfU5Jg6/xL3QhGQnimLYnA= +github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc= +github.com/mailru/easyjson v0.7.6/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= +github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/marten-seemann/qpack v0.2.1 h1:jvTsT/HpCn2UZJdP+UUB53FfUUgeOyG5K1ns0OJOGVs= github.com/marten-seemann/qpack v0.2.1/go.mod h1:F7Gl5L1jIgN1D11ucXefiuJS9UMVP2opoCp2jDKb7wc= github.com/marten-seemann/qtls v0.10.0/go.mod h1:UvMd1oaYDACI99/oZUYLzMCkBXQVT0aGm99sJhbT8hs= @@ -1402,6 +1415,7 @@ github.com/nats-io/nkeys v0.1.3/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxzi github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo= github.com/neelance/sourcemap v0.0.0-20151028013722-8c68805598ab/go.mod h1:Qr6/a/Q4r9LP1IltGz7tA7iOK1WonHEYhu1HRBA7ZiM= +github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= @@ -1656,6 +1670,12 @@ github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKs github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs= github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0= 
+github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe h1:K8pHPVoTgxFJt1lXuIzzOX7zZhZFldJQK/CgKx9BFIc= +github.com/swaggo/files v0.0.0-20220610200504-28940afbdbfe/go.mod h1:lKJPbtWzJ9JhsTN1k1gZgleJWY/cqq0psdoMmaThG3w= +github.com/swaggo/http-swagger v1.3.3 h1:Hu5Z0L9ssyBLofaama21iYaF2VbWyA8jdohaaCGpHsc= +github.com/swaggo/http-swagger v1.3.3/go.mod h1:sE+4PjD89IxMPm77FnkDz0sdO+p5lbXzrVWT6OTVVGo= +github.com/swaggo/swag v1.8.7 h1:2K9ivTD3teEO+2fXV6zrZKDqk5IuU2aJtBDo8U7omWU= +github.com/swaggo/swag v1.8.7/go.mod h1:ezQVUUhly8dludpVk+/PuwJWvLLanB13ygV5Pr9enSk= github.com/syndtr/goleveldb v1.0.0 h1:fBdIW9lB4Iz0n9khmH8w27SJ3QEJ7+IgjPEwGSZiFdE= github.com/syndtr/goleveldb v1.0.0/go.mod h1:ZVVdQEZoIme9iO1Ch2Jdy24qqXrMMOU6lpPAyBWyWuQ= github.com/tarm/serial v0.0.0-20180830185346-98f6abe2eb07/go.mod h1:kDXzergiv9cbyO7IOYJZWg1U88JhDg3PB6klq9Hg2pA= @@ -1982,6 +2002,7 @@ golang.org/x/net v0.0.0-20210423184538-5f58ad60dda6/go.mod h1:OJAsFXCWl8Ukc7SiCT golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210726213435-c6fcb2dbf985/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= +golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210813160813-60bc85c4be6d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20210917221730-978cfadd31cf/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= @@ -2209,8 +2230,8 @@ golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0= golang.org/x/tools v0.1.1/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= golang.org/x/tools 
v0.1.6-0.20210726203631-07bc1bf47fb2/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/tools v0.2.0 h1:G6AHpWxTMGY1KyEYoAQ5WTtIekUUvDNjan3ugu60JvE= -golang.org/x/tools v0.2.0/go.mod h1:y4OqIKeOV/fWJetJ8bXPU1sEVniLMIyDAZWeHdV+NTA= +golang.org/x/tools v0.3.0 h1:SrNbZl6ECOS1qFzgTdQfWXZM9XBkiA6tkFrH9YSTPHM= +golang.org/x/tools v0.3.0/go.mod h1:/rWhSS2+zyEVwoJf8YAX6L2f0ntZ7Kn/mGgAWcipA5k= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -2347,6 +2368,7 @@ gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLks gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/cheggaaa/pb.v1 v1.0.25/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= @@ -2376,6 +2398,7 @@ gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gopkg.in/yaml.v3 v3.0.0-20200615113413-eeeca48fe776/go.mod 
h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= diff --git a/pkg/model/buildversion.go b/pkg/model/buildversion.go index b94deb57dd..4289df2a8e 100644 --- a/pkg/model/buildversion.go +++ b/pkg/model/buildversion.go @@ -10,11 +10,11 @@ type BuildVersionInfo struct { // GitCommit:"4ce5a8954017644c5420bae81d72b09b735c21f0", GitTreeState:"clean", // BuildDate:"2022-05-03T13:46:05Z", GoVersion:"go1.18.1", Compiler:"gc", Platform:"darwin/arm64"} - Major string `json:"major,omitempty"` - Minor string `json:"minor,omitempty"` - GitVersion string `json:"gitversion"` - GitCommit string `json:"gitcommit"` - BuildDate time.Time `json:"builddate"` - GOOS string `json:"goos"` - GOARCH string `json:"goarch"` + Major string `json:"major,omitempty" example:"0"` + Minor string `json:"minor,omitempty" example:"3"` + GitVersion string `json:"gitversion" example:"v0.3.12"` + GitCommit string `json:"gitcommit" example:"d612b63108f2b5ce1ab2b9e02444eb1dac1d922d"` + BuildDate time.Time `json:"builddate" example:"2022-11-16T14:03:31Z"` + GOOS string `json:"goos" example:"linux"` + GOARCH string `json:"goarch" example:"amd64"` } diff --git a/pkg/model/job.go b/pkg/model/job.go index 1ecb2300a9..b57964ed84 100644 --- a/pkg/model/job.go +++ b/pkg/model/job.go @@ -10,20 +10,20 @@ import ( // Job contains data about a job request in the bacalhau network. type Job struct { - APIVersion string `json:"APIVersion"` + APIVersion string `json:"APIVersion" example:"V1beta1"` // The unique global ID of this job in the bacalhau network. - ID string `json:"ID,omitempty"` + ID string `json:"ID,omitempty" example:"92d5d4ee-3765-4f78-8353-623f5f26df08"` // The ID of the requester node that owns this job. 
- RequesterNodeID string `json:"RequesterNodeID,omitempty"` + RequesterNodeID string `json:"RequesterNodeID,omitempty" example:"QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF"` // The public key of the Requester node that created this job // This can be used to encrypt messages back to the creator RequesterPublicKey PublicKey `json:"RequesterPublicKey,omitempty"` // The ID of the client that created this job. - ClientID string `json:"ClientID,omitempty"` + ClientID string `json:"ClientID,omitempty" example:"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51"` // The specification of this job. Spec Spec `json:"Spec,omitempty"` @@ -35,7 +35,7 @@ type Job struct { ExecutionPlan JobExecutionPlan `json:"ExecutionPlan,omitempty"` // Time the job was submitted to the bacalhau network. - CreatedAt time.Time `json:"CreatedAt,omitempty"` + CreatedAt time.Time `json:"CreatedAt,omitempty" example:"2022-11-17T13:29:01.871140291Z"` // The current state of the job State JobState `json:"JobState,omitempty"` @@ -305,18 +305,18 @@ type JobLocalEvent struct { // state locally and can emit events locally type JobEvent struct { // APIVersion of the Job - APIVersion string `json:"APIVersion,omitempty"` + APIVersion string `json:"APIVersion,omitempty" example:"V1beta1"` - JobID string `json:"JobID,omitempty"` + JobID string `json:"JobID,omitempty" example:"9304c616-291f-41ad-b862-54e133c0149e"` // what shard is this event for ShardIndex int `json:"ShardIndex,omitempty"` // optional clientID if this is an externally triggered event (like create job) - ClientID string `json:"ClientID,omitempty"` + ClientID string `json:"ClientID,omitempty" example:"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51"` // the node that emitted this event - SourceNodeID string `json:"SourceNodeID,omitempty"` + SourceNodeID string `json:"SourceNodeID,omitempty" example:"QmXaXu9N5GNetatsvwnTfQqNtSeKAD6uCmarbh3LMRYAcF"` // the node that this event is for // e.g. 
"AcceptJobBid" was emitted by Requester but it targeting compute node - TargetNodeID string `json:"TargetNodeID,omitempty"` + TargetNodeID string `json:"TargetNodeID,omitempty" example:"QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL"` EventName JobEventType `json:"EventName,omitempty"` // this is only defined in "create" events Spec Spec `json:"Spec,omitempty"` @@ -324,12 +324,12 @@ type JobEvent struct { JobExecutionPlan JobExecutionPlan `json:"JobExecutionPlan,omitempty"` // this is only defined in "update_deal" events Deal Deal `json:"Deal,omitempty"` - Status string `json:"Status,omitempty"` + Status string `json:"Status,omitempty" example:"Got results proposal of length: 0"` VerificationProposal []byte `json:"VerificationProposal,omitempty"` VerificationResult VerificationResult `json:"VerificationResult,omitempty"` PublishedResult StorageSpec `json:"PublishedResult,omitempty"` - EventTime time.Time `json:"EventTime,omitempty"` + EventTime time.Time `json:"EventTime,omitempty" example:"2022-11-17T13:32:55.756658941Z"` SenderPublicKey PublicKey `json:"SenderPublicKey,omitempty"` // RunOutput of the job @@ -347,13 +347,13 @@ type VerificationResult struct { type JobCreatePayload struct { // the id of the client that is submitting the job - ClientID string `json:"ClientID,omitempty"` + ClientID string `json:"ClientID,omitempty" validate:"required"` // The job specification: - Job *Job `json:"Job,omitempty"` + Job *Job `json:"Job,omitempty" validate:"required"` // Optional base64-encoded tar file that will be pinned to IPFS and // mounted as storage for the job. Not part of the spec so we don't // flood the transport layer with it (potentially very large). 
- Context string `json:"Context,omitempty"` + Context string `json:"Context,omitempty" validate:"optional"` } diff --git a/pkg/model/resource_usage.go b/pkg/model/resource_usage.go index 5ab32615ec..1fffc4bcb1 100644 --- a/pkg/model/resource_usage.go +++ b/pkg/model/resource_usage.go @@ -17,12 +17,12 @@ type ResourceUsageConfig struct { // these are the numeric values in bytes for ResourceUsageConfig type ResourceUsageData struct { // cpu units - CPU float64 `json:"CPU,omitempty"` + CPU float64 `json:"CPU,omitempty" example:"9.600000000000001"` // bytes - Memory uint64 `json:"Memory,omitempty"` + Memory uint64 `json:"Memory,omitempty" example:"27487790694"` // bytes - Disk uint64 `json:"Disk,omitempty"` - GPU uint64 `json:"GPU,omitempty"` // Support whole GPUs only, like https://kubernetes.io/docs/tasks/manage-gpus/scheduling-gpus/ + Disk uint64 `json:"Disk,omitempty" example:"212663867801"` + GPU uint64 `json:"GPU,omitempty" example:"1"` //nolint:lll // Support whole GPUs only, like https://kubernetes.io/docs/tasks/manage-gpus/scheduling-gpus/ } type ResourceUsageProfile struct { diff --git a/pkg/model/storage_spec.go b/pkg/model/storage_spec.go index 26d8536fa0..e6f5b3f2d9 100644 --- a/pkg/model/storage_spec.go +++ b/pkg/model/storage_spec.go @@ -10,12 +10,12 @@ type StorageSpec struct { StorageSource StorageSourceType `json:"StorageSource,omitempty"` // Name of the spec's data, for reference. - Name string `json:"Name,omitempty"` + Name string `json:"Name,omitempty" example:"job-9304c616-291f-41ad-b862-54e133c0149e-shard-0-host-QmdZQ7ZbhnvWY1J12XYKGHApJ6aufKyLNSvf8jZBrBaAVL"` //nolint:lll // The unique ID of the data, where it makes sense (for example, in an // IPFS storage spec this will be the data's CID). 
// NOTE: The below is capitalized to match IPFS & IPLD (even though it's out of golang fmt) - CID string `json:"CID,omitempty"` + CID string `json:"CID,omitempty" example:"QmTVmC7JBD2ES2qGPqBNVWnX1KeEPNrPGb7rJ8cpFgtefe"` // Source URL of the data URL string `json:"URL,omitempty"` @@ -31,7 +31,7 @@ type StorageSpec struct { } // PublishedStorageSpec is a wrapper for a StorageSpec that has been published -// by a compute provider - it keeps info about the hos, job and shard that +// by a compute provider - it keeps info about the host, job and shard that // lead to the given storage spec being published type PublishedResult struct { NodeID string `json:"NodeID,omitempty"` diff --git a/pkg/publicapi/endpoints_debug.go b/pkg/publicapi/endpoints_debug.go index 61ef61d3a4..2b74eef9a1 100644 --- a/pkg/publicapi/endpoints_debug.go +++ b/pkg/publicapi/endpoints_debug.go @@ -18,7 +18,14 @@ type debugResponse struct { ComputeJobs []computenode.ActiveJob `json:"ComputeJobs"` } -// Returns debug information on what the current node is doing. +// debug godoc +// @ID apiServer/debug +// @Summary Returns debug information on what the current node is doing. 
+// @Tags Health +// @Produce json +// @Success 200 {object} debugResponse +// @Failure 500 {object} string +// @Router /debug [get] func (apiServer *APIServer) debug(res http.ResponseWriter, req *http.Request) { ctx, span := system.GetSpanFromRequest(req, "apiServer/debug") defer span.End() diff --git a/pkg/publicapi/endpoints_events.go b/pkg/publicapi/endpoints_events.go index 0290194c76..241ea0638c 100644 --- a/pkg/publicapi/endpoints_events.go +++ b/pkg/publicapi/endpoints_events.go @@ -9,14 +9,28 @@ import ( ) type eventsRequest struct { - ClientID string `json:"client_id"` - JobID string `json:"job_id"` + ClientID string `json:"client_id" example:"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51"` + JobID string `json:"job_id" example:"9304c616-291f-41ad-b862-54e133c0149e"` } type eventsResponse struct { Events []model.JobEvent `json:"events"` } +// events godoc +// @ID pkg/publicapi/events +// @Summary Returns the events related to the job-id passed in the body payload. Useful for troubleshooting. +// @Description.markdown endpoints_events +// @Tags Job +// @Accept json +// @Produce json +// @Param eventsRequest body eventsRequest true "Request must specify a `client_id`. To retrieve your `client_id`, you can do the following: (1) submit a dummy job to Bacalhau (or use one you created before), (2) run `bacalhau describe ` and fetch the `ClientID` field." 
+// @Success 200 {object} eventsResponse +// @Failure 400 {object} string +// @Failure 500 {object} string +// @Router /events [post] +// +//nolint:lll //nolint:dupl func (apiServer *APIServer) events(res http.ResponseWriter, req *http.Request) { var eventsReq eventsRequest diff --git a/pkg/publicapi/endpoints_id.go b/pkg/publicapi/endpoints_id.go index 46fa9a219e..bc2f126032 100644 --- a/pkg/publicapi/endpoints_id.go +++ b/pkg/publicapi/endpoints_id.go @@ -9,6 +9,14 @@ import ( "github.com/filecoin-project/bacalhau/pkg/system" ) +// id godoc +// @ID apiServer/id +// @Summary Returns the id of the host node. +// @Tags Misc +// @Produce text/plain +// @Success 200 {object} string +// @Failure 500 {object} string +// @Router /id [get] func (apiServer *APIServer) id(res http.ResponseWriter, req *http.Request) { _, span := system.GetSpanFromRequest(req, "apiServer/id") defer span.End() diff --git a/pkg/publicapi/endpoints_list.go b/pkg/publicapi/endpoints_list.go index ab1db03cb2..1bd7621758 100644 --- a/pkg/publicapi/endpoints_list.go +++ b/pkg/publicapi/endpoints_list.go @@ -14,11 +14,11 @@ import ( ) type listRequest struct { - JobID string `json:"id"` - ClientID string `json:"client_id"` - MaxJobs int `json:"max_jobs"` - ReturnAll bool `json:"return_all"` - SortBy string `json:"sort_by"` + JobID string `json:"id" example:"9304c616-291f-41ad-b862-54e133c0149e"` + ClientID string `json:"client_id" example:"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51"` + MaxJobs int `json:"max_jobs" example:"10"` + ReturnAll bool `json:"return_all" ` + SortBy string `json:"sort_by" example:"created_at"` SortReverse bool `json:"sort_reverse"` } @@ -26,6 +26,20 @@ type listResponse struct { Jobs []*model.Job `json:"jobs"` } +// list godoc +// @ID pkg/publicapi.list +// @Summary Simply lists jobs. 
+// @Description.markdown endpoints_list +// @Tags Job +// @Accept json +// @Produce json +// @Param listRequest body listRequest true "Set `return_all` to `true` to return all jobs on the network (may degrade performance, use with care!)." +// @Success 200 {object} listResponse +// @Failure 400 {object} string +// @Failure 500 {object} string +// @Router /list [post] +// +//nolint:lll func (apiServer *APIServer) list(res http.ResponseWriter, req *http.Request) { ctx, span := system.GetSpanFromRequest(req, "pkg/publicapi.list") defer span.End() diff --git a/pkg/publicapi/endpoints_localevents.go b/pkg/publicapi/endpoints_localevents.go index 8a8ab2d54a..b4c88751ea 100644 --- a/pkg/publicapi/endpoints_localevents.go +++ b/pkg/publicapi/endpoints_localevents.go @@ -17,6 +17,19 @@ type localEventsResponse struct { LocalEvents []model.JobLocalEvent `json:"localEvents"` } +// localEvents godoc +// @ID pkg/publicapi/localEvents +// @Summary Returns the node's local events related to the job-id passed in the body payload. Useful for troubleshooting. +// @Description Local events (e.g. Selected, BidAccepted, Verified) are useful to track the progress of a job. +// @Tags Job +// @Accept json +// @Produce json +// @Param localEventsRequest body localEventsRequest true " " +// @Success 200 {object} localEventsResponse +// @Failure 400 {object} string +// @Failure 500 {object} string +// @Router /local_events [post] +// //nolint:dupl func (apiServer *APIServer) localEvents(res http.ResponseWriter, req *http.Request) { var eventsReq localEventsRequest diff --git a/pkg/publicapi/endpoints_peers.go b/pkg/publicapi/endpoints_peers.go index 7196e91ac3..b41cc70969 100644 --- a/pkg/publicapi/endpoints_peers.go +++ b/pkg/publicapi/endpoints_peers.go @@ -9,6 +9,15 @@ import ( "github.com/filecoin-project/bacalhau/pkg/transport/libp2p" ) +// peers godoc +// @ID apiServer/peers +// @Summary Returns the peers connected to the host via the transport layer. 
+// @Description.markdown endpoints_peers +// @Tags Misc +// @Produce json +// @Success 200 {object} map[string][]string{} +// @Failure 500 {object} string +// @Router /peers [get] func (apiServer *APIServer) peers(res http.ResponseWriter, req *http.Request) { ctx, span := system.GetSpanFromRequest(req, "apiServer/peers") defer span.End() diff --git a/pkg/publicapi/endpoints_results.go b/pkg/publicapi/endpoints_results.go index bde0071f4b..8f78bf6aa7 100644 --- a/pkg/publicapi/endpoints_results.go +++ b/pkg/publicapi/endpoints_results.go @@ -11,14 +11,26 @@ import ( ) type resultsRequest struct { - ClientID string `json:"client_id"` - JobID string `json:"job_id"` + ClientID string `json:"client_id" example:"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51"` + JobID string `json:"job_id" example:"9304c616-291f-41ad-b862-54e133c0149e"` } type resultsResponse struct { Results []model.PublishedResult `json:"results"` } +// results godoc +// @ID pkg/publicapi/results +// @Summary Returns the results of the job-id specified in the body payload. 
+// @Description.markdown endpoints_results 
+// @Tags Job +// @Accept json +// @Produce json +// @Param resultsRequest body resultsRequest true " " +// @Success 200 {object} resultsResponse +// @Failure 400 {object} string +// @Failure 500 {object} string +// @Router /results [post] func (apiServer *APIServer) results(res http.ResponseWriter, req *http.Request) { ctx, span := system.GetSpanFromRequest(req, "pkg/publicapi.results") defer span.End() diff --git a/pkg/publicapi/endpoints_states.go b/pkg/publicapi/endpoints_states.go index 7d8173f011..1e38c663df 100644 --- a/pkg/publicapi/endpoints_states.go +++ b/pkg/publicapi/endpoints_states.go @@ -11,14 +11,26 @@ import ( ) type stateRequest struct { - ClientID string `json:"client_id"` - JobID string `json:"job_id"` + ClientID string `json:"client_id" example:"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51"` + JobID string `json:"job_id" example:"9304c616-291f-41ad-b862-54e133c0149e"` } type stateResponse struct { State model.JobState `json:"state"` } +// states godoc +// @ID pkg/publicapi/states +// @Summary Returns the state of the job-id specified in the body payload. 
+// @Description.markdown endpoints_states +// @Tags Job +// @Accept json +// @Produce json +// @Param stateRequest body stateRequest true " " +// @Success 200 {object} stateResponse +// @Failure 400 {object} string +// @Failure 500 {object} string +// @Router /states [post] func (apiServer *APIServer) states(res http.ResponseWriter, req *http.Request) { ctx, span := system.GetSpanFromRequest(req, "pkg/publicapi/states") defer span.End() diff --git a/pkg/publicapi/endpoints_submit.go b/pkg/publicapi/endpoints_submit.go index 1f730cf80e..3e0c19d831 100644 --- a/pkg/publicapi/endpoints_submit.go +++ b/pkg/publicapi/endpoints_submit.go @@ -20,19 +20,31 @@ import ( type submitRequest struct { // The data needed to submit and run a job on the network: - Data model.JobCreatePayload `json:"data"` + Data model.JobCreatePayload `json:"data" validate:"required"` // A base64-encoded signature of the data, signed by the client: - ClientSignature string `json:"signature"` + ClientSignature string `json:"signature" validate:"required"` // The base64-encoded public key of the client: - ClientPublicKey string `json:"client_public_key"` + ClientPublicKey string `json:"client_public_key" validate:"required"` } type submitResponse struct { Job *model.Job `json:"job"` } +// submit godoc +// @ID pkg/apiServer.submit +// @Summary Submits a new job to the network. 
+// @Description.markdown endpoints_submit +// @Tags Job +// @Accept json +// @Produce json +// @Param submitRequest body submitRequest true " " +// @Success 200 {object} submitResponse +// @Failure 400 {object} string +// @Failure 500 {object} string +// @Router /submit [post] func (apiServer *APIServer) submit(res http.ResponseWriter, req *http.Request) { ctx, span := system.GetSpanFromRequest(req, "pkg/apiServer.submit") defer span.End() diff --git a/pkg/publicapi/endpoints_version.go b/pkg/publicapi/endpoints_version.go index 618c5d30f7..a95fb4bbf2 100644 --- a/pkg/publicapi/endpoints_version.go +++ b/pkg/publicapi/endpoints_version.go @@ -10,13 +10,27 @@ import ( ) type versionRequest struct { - ClientID string `json:"client_id"` + ClientID string `json:"client_id" example:"ac13188e93c97a9c2e7cf8e86c7313156a73436036f30da1ececc2ce79f9ea51"` } type versionResponse struct { VersionInfo *model.BuildVersionInfo `json:"build_version_info"` } +// version godoc +// @ID apiServer/version +// @Summary Returns the build version running on the server. +// @Description See https://github.com/filecoin-project/bacalhau/releases for a complete list of `gitversion` tags. +// @Tags Misc +// @Accept json +// @Produce json +// @Param versionRequest body versionRequest true "Request must specify a `client_id`. To retrieve your `client_id`, you can do the following: (1) submit a dummy job to Bacalhau (or use one you created before), (2) run `bacalhau describe ` and fetch the `ClientID` field." 
+// @Success 200 {object} versionResponse +// @Failure 400 {object} string +// @Failure 500 {object} string +// @Router /version [post] +// +//nolint:lll func (apiServer *APIServer) version(res http.ResponseWriter, req *http.Request) { ctx, span := system.GetSpanFromRequest(req, "apiServer/version") defer span.End() diff --git a/pkg/publicapi/server.go b/pkg/publicapi/server.go index 26962cb021..dc07b109d6 100644 --- a/pkg/publicapi/server.go +++ b/pkg/publicapi/server.go @@ -8,26 +8,26 @@ import ( "net/http" "time" - "github.com/c2h5oh/datasize" + "github.com/filecoin-project/bacalhau/docs" + "github.com/filecoin-project/bacalhau/pkg/computenode" + "github.com/filecoin-project/bacalhau/pkg/localdb" "github.com/filecoin-project/bacalhau/pkg/logger" "github.com/filecoin-project/bacalhau/pkg/model" - "github.com/filecoin-project/bacalhau/pkg/publicapi/handlerwrapper" - - "github.com/filecoin-project/bacalhau/pkg/localdb" + "github.com/filecoin-project/bacalhau/pkg/publisher" + "github.com/filecoin-project/bacalhau/pkg/requesternode" "github.com/filecoin-project/bacalhau/pkg/storage" + "github.com/filecoin-project/bacalhau/pkg/system" "github.com/filecoin-project/bacalhau/pkg/transport" + "github.com/filecoin-project/bacalhau/pkg/version" - sync "github.com/lukemarsden/golang-mutex-tracer" - + "github.com/c2h5oh/datasize" "github.com/didip/tollbooth/v7" "github.com/didip/tollbooth/v7/limiter" - "github.com/filecoin-project/bacalhau/pkg/computenode" - "github.com/filecoin-project/bacalhau/pkg/publisher" - "github.com/filecoin-project/bacalhau/pkg/requesternode" - "github.com/filecoin-project/bacalhau/pkg/system" + sync "github.com/lukemarsden/golang-mutex-tracer" "github.com/prometheus/client_golang/prometheus/promhttp" "github.com/rs/zerolog/log" + httpSwagger "github.com/swaggo/http-swagger" "go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp" ) @@ -132,10 +132,25 @@ func (apiServer *APIServer) GetURI() string { return fmt.Sprintf("http://%s:%d", 
apiServer.Host, apiServer.Port) } +// @title Bacalhau API +// @description This page is the reference of the Bacalhau REST API. Project docs are available at https://docs.bacalhau.org/. Find more information about Bacalhau at https://github.com/filecoin-project/bacalhau. +// @contact.name Bacalhau Team +// @contact.url https://github.com/filecoin-project/bacalhau +// @contact.email team@bacalhau.org +// @license.name Apache 2.0 +// @license.url https://github.com/filecoin-project/bacalhau/blob/main/LICENSE +// @host bootstrap.production.bacalhau.org:1234 +// @BasePath / +// @schemes http // ListenAndServe listens for and serves HTTP requests against the API server. +// +//nolint:lll func (apiServer *APIServer) ListenAndServe(ctx context.Context, cm *system.CleanupManager) error { hostID := apiServer.Requester.ID + // dynamically load the git tag for Swagger UI + docs.SwaggerInfo.Version = version.Get().GitVersion + // TODO: #677 Significant issue, when client returns error to any of these commands, it still submits to server sm := http.NewServeMux() sm.Handle(apiServer.chainHandlers("/list", apiServer.list)) @@ -154,6 +169,7 @@ func (apiServer *APIServer) ListenAndServe(ctx context.Context, cm *system.Clean sm.Handle(apiServer.chainHandlers("/readyz", apiServer.readyz)) sm.Handle(apiServer.chainHandlers("/debug", apiServer.debug)) sm.Handle("/metrics", promhttp.Handler()) + sm.Handle("/swagger/", httpSwagger.WrapHandler) srv := http.Server{ Handler: sm, diff --git a/pkg/publicapi/server_health.go b/pkg/publicapi/server_health.go index df0ee491cc..1b558dbf4e 100644 --- a/pkg/publicapi/server_health.go +++ b/pkg/publicapi/server_health.go @@ -23,6 +23,12 @@ func GenerateHealthData() types.HealthInfo { return healthInfo } +// livez godoc +// @ID apiServer/livez +// @Tags Health +// @Produce text/plain +// @Success 200 {object} string "TODO" +// @Router /livez [get] func (apiServer *APIServer) livez(res http.ResponseWriter, req *http.Request) { // Extremely simple 
liveness check (should be fine to be public / no-auth) log.Debug().Msg("Received OK request") @@ -34,6 +40,12 @@ func (apiServer *APIServer) livez(res http.ResponseWriter, req *http.Request) { } } +// logz godoc +// @ID apiServer/logz +// @Tags Health +// @Produce text/plain +// @Success 200 {object} string "TODO" +// @Router /logz [get] func (apiServer *APIServer) logz(res http.ResponseWriter, req *http.Request) { log.Debug().Msg("Received logz request") res.Header().Add("Content-Type", "text/plain") @@ -54,6 +66,12 @@ func (apiServer *APIServer) logz(res http.ResponseWriter, req *http.Request) { } } +// readyz godoc +// @ID apiServer/readyz +// @Tags Health +// @Produce text/plain +// @Success 200 {object} string +// @Router /readyz [get] func (apiServer *APIServer) readyz(res http.ResponseWriter, req *http.Request) { log.Debug().Msg("Received readyz request.") // TODO: Add checker for queue that this node can accept submissions @@ -70,6 +88,12 @@ func (apiServer *APIServer) readyz(res http.ResponseWriter, req *http.Request) { } } +// healthz godoc +// @ID apiServer/healthz +// @Tags Health +// @Produce json +// @Success 200 {object} types.HealthInfo +// @Router /healthz [get] func (apiServer *APIServer) healthz(res http.ResponseWriter, req *http.Request) { // TODO: A list of health information. 
Should require authing (of some kind) log.Debug().Msg("Received healthz request.") @@ -88,6 +112,12 @@ func (apiServer *APIServer) healthz(res http.ResponseWriter, req *http.Request) } } +// varz godoc +// @ID apiServer/varz +// @Tags Health +// @Produce json +// @Success 200 {object} json.RawMessage +// @Router /varz [get] func (apiServer *APIServer) varz(res http.ResponseWriter, req *http.Request) { // TODO: Fill in with the configuration settings for this node res.WriteHeader(http.StatusOK) diff --git a/poetry.lock b/poetry.lock index d24f47b611..9c7b0c308e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -28,7 +28,7 @@ testing = ["covdefaults (>=2.2)", "coverage (>=6.4.2)", "pytest (>=7.1.2)", "pyt [[package]] name = "identify" -version = "2.5.6" +version = "2.5.8" description = "File identification library for Python" category = "main" optional = false @@ -45,17 +45,20 @@ category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +[package.dependencies] +setuptools = "*" + [[package]] name = "platformdirs" -version = "2.5.2" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +version = "2.5.4" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] -test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] +docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] +test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pre-commit" @@ -81,6 +84,19 @@ category = "main" optional = false python-versions = ">=3.6" +[[package]] +name = "setuptools" +version = "65.5.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "toml" version = "0.10.2" @@ -91,37 +107,55 @@ python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "virtualenv" -version = "20.16.5" +version = "20.16.7" description = "Virtual Python 
Environment builder" category = "main" optional = false python-versions = ">=3.6" [package.dependencies] -distlib = ">=0.3.5,<1" +distlib = ">=0.3.6,<1" filelock = ">=3.4.1,<4" platformdirs = ">=2.4,<3" [package.extras] -docs = ["proselint (>=0.13)", "sphinx (>=5.1.1)", "sphinx-argparse (>=0.3.1)", "sphinx-rtd-theme (>=1)", "towncrier (>=21.9)"] +docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"] testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"] [metadata] lock-version = "1.1" -python-versions = "^3.10" -content-hash = "a5f21f2f36e0956b753c248815cdf67a5f97f50f703cd02359d92a07d532dd95" +python-versions = "^3.9" +content-hash = "5dbdada651f3376d6515c5f8b1c4d60db0834de743bff284ab24ba900da9bdf5" [metadata.files] -cfgv = [] -distlib = [] -filelock = [] -identify = [] -nodeenv = [] +cfgv = [ + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, +] +distlib = [ + {file = "distlib-0.3.6-py2.py3-none-any.whl", hash = "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e"}, + {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"}, +] +filelock = [ + {file = "filelock-3.8.0-py3-none-any.whl", hash = "sha256:617eb4e5eedc82fc5f47b6d61e4d11cb837c56cb4544e39081099fa17ad109d4"}, + {file = "filelock-3.8.0.tar.gz", hash = "sha256:55447caa666f2198c5b6b13a26d2084d26fa5b115c00d065664b2124680c4edc"}, +] +identify = [ + {file = "identify-2.5.8-py2.py3-none-any.whl", hash = "sha256:48b7925fe122720088aeb7a6c34f17b27e706b72c61070f27fe3789094233440"}, 
+ {file = "identify-2.5.8.tar.gz", hash = "sha256:7a214a10313b9489a0d61467db2856ae8d0b8306fc923e03a9effa53d8aedc58"}, +] +nodeenv = [ + {file = "nodeenv-1.7.0-py2.py3-none-any.whl", hash = "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e"}, + {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, +] platformdirs = [ - {file = "platformdirs-2.5.2-py3-none-any.whl", hash = "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788"}, - {file = "platformdirs-2.5.2.tar.gz", hash = "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19"}, + {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, + {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, +] +pre-commit = [ + {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"}, + {file = "pre_commit-2.20.0.tar.gz", hash = "sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"}, ] -pre-commit = [] pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, @@ -130,6 +164,13 @@ pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = 
"PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, @@ -157,8 +198,15 @@ pyyaml = [ {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, ] +setuptools = [ + {file = "setuptools-65.5.1-py3-none-any.whl", hash = "sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31"}, + {file = "setuptools-65.5.1.tar.gz", hash = 
"sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"}, +] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -virtualenv = [] +virtualenv = [ + {file = "virtualenv-20.16.7-py3-none-any.whl", hash = "sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29"}, + {file = "virtualenv-20.16.7.tar.gz", hash = "sha256:8691e3ff9387f743e00f6bb20f70121f5e4f596cae754531f2b3b3a1b1ac696e"}, +]