diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 4ced098911..93cc28e664 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -60,23 +60,25 @@ jobs: mariadb: image: bitnami/mariadb:10.3 env: - ALLOW_EMPTY_PASSWORD: yes + MARIADB_ROOT_USER: vendure + MARIADB_ROOT_PASSWORD: password ports: - 3306 options: --health-cmd="mysqladmin ping" --health-interval=10s --health-timeout=5s --health-retries=3 mysql: image: bitnami/mysql:8.0 env: - ALLOW_EMPTY_PASSWORD: yes MYSQL_AUTHENTICATION_PLUGIN: mysql_native_password + MYSQL_ROOT_USER: vendure + MYSQL_ROOT_PASSWORD: password ports: - 3306 options: --health-cmd="mysqladmin ping --silent" --health-interval=10s --health-timeout=20s --health-retries=10 postgres: - image: postgres:12 + image: postgres:16 env: - POSTGRES_USER: admin - POSTGRES_PASSWORD: secret + POSTGRES_USER: vendure + POSTGRES_PASSWORD: password ports: - 5432 options: --health-cmd=pg_isready --health-interval=10s --health-timeout=5s --health-retries=3 diff --git a/CHANGELOG.md b/CHANGELOG.md index 8fe93fc585..e2528e8b67 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,45 @@ +## 3.0.5 (2024-10-15) + + +#### Fixes + +* **asset-server-plugin** Fix local file read vulnerability when using the LocalAssetStorageStrategy ([e2ee0c4](https://github.com/vendure-ecommerce/vendure/commit/e2ee0c43159b3d13b51b78654481094fdd4850c5)). See the [security advisory](https://github.com/vendure-ecommerce/vendure/security/advisories/GHSA-r9mq-3c9r-fmjq) +* **admin-ui** Fix theme & ui language switcher ([c93589b](https://github.com/vendure-ecommerce/vendure/commit/c93589b)), closes [#3111](https://github.com/vendure-ecommerce/vendure/issues/3111) +* **core** Do not include deleted variants when indexing productInStock (#3110) ([73cb190](https://github.com/vendure-ecommerce/vendure/commit/73cb190)), closes [#3110](https://github.com/vendure-ecommerce/vendure/issues/3110) [#3109](https://github.com/vendure-ecommerce/vendure/issues/3109) +* **core** Fix coupon code validation across multiple channels ([e57cc1b](https://github.com/vendure-ecommerce/vendure/commit/e57cc1b)), closes [#2052](https://github.com/vendure-ecommerce/vendure/issues/2052) +* **core** Fix filtering on list queries of tree entities ([227da05](https://github.com/vendure-ecommerce/vendure/commit/227da05)), closes [#3107](https://github.com/vendure-ecommerce/vendure/issues/3107) +* **core** Improve error message on populating without tax rates ([7e36131](https://github.com/vendure-ecommerce/vendure/commit/7e36131)), closes [#1926](https://github.com/vendure-ecommerce/vendure/issues/1926) + +#### Features + +* **create** Improved getting started experience (#3128) ([adb4384](https://github.com/vendure-ecommerce/vendure/commit/adb4384)), closes [#3128](https://github.com/vendure-ecommerce/vendure/issues/3128) + +## 3.0.4 (2024-10-04) + + +#### Fixes + +* **admin-ui-plugin** Implement rate limiting on static server ([9516c71](https://github.com/vendure-ecommerce/vendure/commit/9516c71)) +* **admin-ui** Add padding to default relation custom field dropdown ([02e68e0](https://github.com/vendure-ecommerce/vendure/commit/02e68e0)) +* **admin-ui** Add support for custom fields on CustomerGroup list ([7128a33](https://github.com/vendure-ecommerce/vendure/commit/7128a33)) +* **admin-ui** Enable selective loading of custom fields ([9d7744b](https://github.com/vendure-ecommerce/vendure/commit/9d7744b)), closes 
[#3097](https://github.com/vendure-ecommerce/vendure/issues/3097) +* **admin-ui** Fix bad locale detection regex ([f336d7f](https://github.com/vendure-ecommerce/vendure/commit/f336d7f)) +* **admin-ui** Lazy-load only selected custom fields in list views ([690dd0f](https://github.com/vendure-ecommerce/vendure/commit/690dd0f)), closes [#3097](https://github.com/vendure-ecommerce/vendure/issues/3097) +* **admin-ui** Unsubscribe from alerts when logging out (#3071) ([f38340b](https://github.com/vendure-ecommerce/vendure/commit/f38340b)), closes [#3071](https://github.com/vendure-ecommerce/vendure/issues/3071) [#2188](https://github.com/vendure-ecommerce/vendure/issues/2188) +* **asset-server-plugin** Do not return raw error message on error ([801980e](https://github.com/vendure-ecommerce/vendure/commit/801980e)) +* **core** Correctly parse numeric sessionDuration and verificationTokenDuration values (#3080) ([98e4118](https://github.com/vendure-ecommerce/vendure/commit/98e4118)), closes [#3080](https://github.com/vendure-ecommerce/vendure/issues/3080) +* **core** Fix issues caused by f235249f ([5a4299a](https://github.com/vendure-ecommerce/vendure/commit/5a4299a)) +* **core** Fix RequestContext race condition causing null activeOrder ([f235249](https://github.com/vendure-ecommerce/vendure/commit/f235249)), closes [#2097](https://github.com/vendure-ecommerce/vendure/issues/2097) +* **core** Handle empty state for product and variant id filter (#3064) ([9a03c84](https://github.com/vendure-ecommerce/vendure/commit/9a03c84)), closes [#3064](https://github.com/vendure-ecommerce/vendure/issues/3064) +* **core** Prevent theoretical polynomial regex attack ([9f4a814](https://github.com/vendure-ecommerce/vendure/commit/9f4a814)) +* **core** Remove duplicate call in applyCouponCode resolver ([bffc58a](https://github.com/vendure-ecommerce/vendure/commit/bffc58a)) +* **core** Replace insecure randomness with secure randomBytes ([cb556d8](https://github.com/vendure-ecommerce/vendure/commit/cb556d8)) +* **payments-plugin** Use default channel in Stripe webhook calls to reach all orders (#3076) ([8434111](https://github.com/vendure-ecommerce/vendure/commit/8434111)), closes [#3076](https://github.com/vendure-ecommerce/vendure/issues/3076) + +#### Perf + +* **core** Fix performance when using FacetValue-based checks ([a735bdf](https://github.com/vendure-ecommerce/vendure/commit/a735bdf)) +* **admin-ui** List views only load the visible custom fields, closes [#3097](https://github.com/vendure-ecommerce/vendure/issues/3097) ## 3.0.3 (2024-09-11) diff --git a/README.md b/README.md index 6c1c5aafae..5dadad0fcc 100644 --- a/README.md +++ b/README.md @@ -62,39 +62,51 @@ Packages must be built (i.e. TypeScript compiled, admin ui app built, certain as Note that this can take a few minutes. -### 3. Set up the server +### 3. Start the docker containers -The server requires an SQL database to be available. The simplest option is to use SQLite, but if you have Docker available you can use the [dev-server docker-compose file](./packages/dev-server/docker-compose.yml) which will start up both MariaDB and Postgres as well as their GUI management tools. +All the necessary infrastructure is defined in the root [docker-compose.yml](./docker-compose.yml) file. At a minimum, +you will need to start a database, for example: -Vendure uses [TypeORM](http://typeorm.io), and officially supports **MySQL**, **PostgreSQL** and **SQLite**, though other TypeORM-supported databases may work. +```bash +docker-compose up -d mariadb +``` -1. 
Configure the [dev config](./packages/dev-server/dev-config.ts), making sure the connection settings in the `getDbConfig()` function are correct for the database type you will be using. -2. Create the database using your DB admin tool of choice (e.g. phpMyAdmin if you are using the docker image suggested above). Name it according to the `getDbConfig()` settings. If you are using SQLite, you can skip this step. -3. Populate mock data: - ```bash - cd packages/dev-server - DB= npm run populate - ``` - If you do not specify the `DB` variable, it will default to "mysql". +MariaDB/MySQL is the default that will be used by the dev server if you don't explicitly set the `DB` environment variable. -### 4. Run the dev server +If for example you are doing development on the Elasticsearch plugin, you will also need to start the Elasticsearch container: +```bash +docker-compose up -d elasticsearch ``` + +### 4. Populate test data + +Vendure uses [TypeORM](http://typeorm.io), and officially supports **MySQL**, **MariaDB**, **PostgreSQL** and **SQLite**. + +The first step is to populate the dev server with some test data: + +```bash cd packages/dev-server -DB= npm run start -``` -Or if you are in the root package + +[DB=mysql|postres|sqlite] npm run populate + ``` + +If you do not specify the `DB` variable, it will default to "mysql". If you specifically want to develop against Postgres, +you need to run the `postgres_16` container and then run `DB=postgres npm run populate`. + +### 5. Run the dev server + ``` -DB= npm run dev-server:start +cd packages/dev-server +[DB=mysql|postgres|sqlite] npm run dev ``` -If you do not specify the `DB` argument, it will default to "mysql". ### Testing admin ui changes locally If you are making changes to the admin ui, you need to start the admin ui independent from the dev-server: 1. `cd packages/admin-ui` -2. `npm run start` +2. `npm run dev` 3. Go to http://localhost:4200 and log in with "superadmin", "superadmin" This will auto restart when you make changes to the admin ui. You don't need this step when you just use the admin ui just @@ -125,7 +137,7 @@ npm run watch:core-common ```shell # Terminal 2 cd packages/dev-server -DB=sqlite npm run start +DB=sqlite npm run dev ``` 3. The dev-server will now have your local changes from the changed package. diff --git a/SECURITY.md b/SECURITY.md index a844117b55..10bb9e6327 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -11,4 +11,4 @@ ## Reporting a Vulnerability -To report a security vulnarability, email [contact@vendure.io](mailto:contact@vendure.io). +To report a security vulnerability, email [contact@vendure.io](mailto:contact@vendure.io). diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000000..9697bd95a1 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,117 @@ +# This contains the services required to develop and test Vendure +# locally. It includes multiple SQL databases (for testing specific +# versions), Elasticsearch, Redis etc. 
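+#
+# A minimal usage sketch (the service names are those defined below): bring up only
+# the services you need for local development, e.g.
+#
+#     docker-compose up -d mariadb elasticsearch redis
+#
+# and stop them again with `docker-compose down`.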
+version: '3.7' +name: vendure-monorepo +services: + mariadb: + image: 'bitnami/mariadb:latest' + container_name: mariadb + environment: + MARIADB_DATABASE: vendure-dev + MARIADB_ROOT_USER: vendure + MARIADB_ROOT_PASSWORD: password + volumes: + - 'mariadb_data:/bitnami' + ports: + - '3306:3306' + mysql_8: + image: bitnami/mysql:8.0 + container_name: mysql-8 + environment: + MYSQL_AUTHENTICATION_PLUGIN: mysql_native_password + MYSQL_DATABASE: vendure-dev + MYSQL_ROOT_USER: vendure + MYSQL_ROOT_PASSWORD: password + volumes: + - 'mysql_data:/bitnami' + ports: + - '3306:3306' + mysql_5: + image: bitnami/mysql:5.7 + container_name: mysql-5.7 + environment: + MYSQL_AUTHENTICATION_PLUGIN: mysql_native_password + MYSQL_DATABASE: vendure-dev + MYSQL_ROOT_USER: vendure + MYSQL_ROOT_PASSWORD: password + volumes: + - 'mysql_data:/bitnami' + ports: + - '3306:3306' + postgres_12: + image: postgres:12.3 + container_name: postgres_12 + environment: + POSTGRES_DB: vendure-dev + POSTGRES_USER: vendure + POSTGRES_PASSWORD: password + PGDATA: /var/lib/postgresql/data + volumes: + - postgres_12_data:/var/lib/postgresql/data + ports: + - "5432:5432" + command: postgres -c shared_preload_libraries=pg_stat_statements -c pg_stat_statements.track=all -c pg_stat_statements.max=100000 -c max_connections=200 + postgres_16: + image: postgres:16 + container_name: postgres_16 + environment: + POSTGRES_DB: vendure-dev + POSTGRES_USER: vendure + POSTGRES_PASSWORD: password + PGDATA: /var/lib/postgresql/data + volumes: + - postgres_16_data:/var/lib/postgresql/data + ports: + - "5432:5432" + command: postgres -c shared_preload_libraries=pg_stat_statements -c pg_stat_statements.track=all -c pg_stat_statements.max=100000 -c max_connections=200 + # This is the Keycloak service which is used + # to test the Keycloak auth strategy + keycloak: + image: quay.io/keycloak/keycloak + ports: + - "9000:8080" + environment: + KEYCLOAK_ADMIN: admin + KEYCLOAK_ADMIN_PASSWORD: admin + command: + - start-dev + - --import-realm + volumes: + - keycloak_data:/opt/keycloak/data + elasticsearch: + image: docker.elastic.co/elasticsearch/elasticsearch:7.10.2 + container_name: elasticsearch + environment: + - discovery.type=single-node + - bootstrap.memory_lock=true + - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + ulimits: + memlock: + soft: -1 + hard: -1 + volumes: + - esdata:/usr/share/elasticsearch/data + ports: + - 9200:9200 + redis: + image: bitnami/redis:7.4.1 + hostname: redis + container_name: redis + environment: + - ALLOW_EMPTY_PASSWORD=yes + ports: + - "6379:6379" +volumes: + postgres_16_data: + driver: local + postgres_12_data: + driver: local + mariadb_data: + driver: local + mysql_data: + driver: local + keycloak_data: + driver: local + esdata: + driver: local diff --git a/docs/docs/guides/developer-guide/security/index.md b/docs/docs/guides/developer-guide/security/index.md new file mode 100644 index 0000000000..29f892cecd --- /dev/null +++ b/docs/docs/guides/developer-guide/security/index.md @@ -0,0 +1,235 @@ +--- +title: "Security" +--- + +Security of your Vendure application includes considering how to prevent and protect against common security threats such as: + +- Data breaches +- Unauthorized access +- Attacks aimed at disrupting the service + +Vendure itself is designed with security in mind, but you must also consider the security of your own application code, the server environment, and the network architecture. + +## Basics + +Here are some basic measures you should use to secure your Vendure application. 
These are not exhaustive, but they are a good starting point.
+
+### Change the default credentials
+
+Do not deploy any public Vendure instance with the default superadmin credentials (`superadmin:superadmin`). Use your hosting platform's environment variables to set a **strong** password for the Superadmin account.
+
+```ts
+import { VendureConfig } from '@vendure/core';
+
+export const config: VendureConfig = {
+    authOptions: {
+        tokenMethod: ['bearer', 'cookie'],
+        superadminCredentials: {
+            identifier: process.env.SUPERADMIN_USERNAME,
+            password: process.env.SUPERADMIN_PASSWORD,
+        },
+    },
+    // ...
+};
+```
+
+### Use the HardenPlugin
+
+It is recommended that you install and configure the [HardenPlugin](/reference/core-plugins/harden-plugin/) for all production deployments. This plugin locks down your schema
+(disabling introspection and field suggestions) and protects your Shop API against malicious queries that could otherwise overwhelm your server.
+
+Install the plugin:
+
+```sh
+npm install @vendure/harden-plugin
+
+# or
+
+yarn add @vendure/harden-plugin
+```
+
+Then add it to your VendureConfig:
+
+```ts
+import { VendureConfig } from '@vendure/core';
+import { HardenPlugin } from '@vendure/harden-plugin';
+
+const IS_DEV = process.env.APP_ENV === 'dev';
+
+export const config: VendureConfig = {
+    // ...
+    plugins: [
+        HardenPlugin.init({
+            maxQueryComplexity: 500,
+            apiMode: IS_DEV ? 'dev' : 'prod',
+        }),
+        // ...
+    ]
+};
+```
+
+:::info
+For a detailed explanation of how to best configure this plugin, see the [HardenPlugin docs](/reference/core-plugins/harden-plugin/).
+:::
+
+
+## OWASP Top Ten Security Assessment
+
+The Open Worldwide Application Security Project (OWASP) is a nonprofit foundation that works to improve the security of software.
+
+It publishes a top 10 list of common web application vulnerabilities: https://owasp.org/Top10
+
+This section assesses Vendure against this list, stating what is covered **out of the box** (built into the framework or easily configurable) and what needs to be **additionally considered**.
+
+### 1. Broken Access Control
+
+Reference: https://owasp.org/Top10/A01_2021-Broken_Access_Control/
+
+Out of the box:
+
+- Vendure uses role-based access control
+- We deny by default for non-public API requests
+- Built-in CORS controls for session cookies
+- Directory listing is not possible via default configuration (e.g. exposing web root dir contents)
+- Stateful session identifiers are invalidated on the server after logout: on logout we delete all session records from the DB & session cache.
+
+To consider:
+
+- Rate limit API and controller access to minimize the harm from automated attack tooling.
+
+### 2. Cryptographic Failures
+
+Reference: https://owasp.org/Top10/A02_2021-Cryptographic_Failures/
+
+Out of the box:
+
+- Vendure defaults to bcrypt with 12 salt rounds for storing passwords. This strategy is configurable if security requirements mandate alternative algorithms.
+- No deprecated hash functions (SHA1, MD5) are used in security-related contexts (only for things like creating cache keys).
+- Payment information is not stored in Vendure by default. Payment integrations rely on the payment provider to store all sensitive data.
+
+To consider:
+
+- The Vendure server will not use TLS by default. The usual configuration is to handle this at the gateway level on your production platform.
+- If a network caching layer is used (e.g. Stellate), ensure it is configured to not cache user-related data (customer details, active order, etc.)
+
+### 3. Injection
+
+Reference: https://owasp.org/Top10/A03_2021-Injection/
+
+Out of the box:
+
+- GraphQL has built-in validation of incoming data
+- All database operations are parameterized - no string concatenation using user-supplied data.
+- List queries apply default limits to prevent mass disclosure of records.
+
+To consider:
+
+- If using custom fields, you should consider defining a validation function to prevent bad data from getting into the database.
+
+### 4. Insecure Design
+
+Reference: https://owasp.org/Top10/A04_2021-Insecure_Design/
+
+Out of the box:
+
+- Use of established libraries for the critical underlying components: NestJS, TypeORM, Angular.
+- End-to-end tests of security-related flows such as authentication, verification, and RBAC permissions controls.
+- The HardenPlugin provides pre-configured protections against common attack vectors targeting GraphQL APIs.
+
+To consider:
+
+- Tiered exposure, such as an API gateway which prevents the Admin API from being exposed to the public internet.
+- Limit resource usage of Vendure server & worker instances via containerization.
+- Rate limiting & other network-level protections (such as Cloudflare) should be considered.
+
+### 5. Security Misconfiguration
+
+Reference: https://owasp.org/Top10/A05_2021-Security_Misconfiguration/
+
+Out of the box:
+
+- Single point of configuration for the entire application, reducing the chance of misconfiguration.
+- A default setup only requires a database, which means there are few components to configure and harden.
+- Stack traces are not leaked in API errors
+
+To consider:
+
+- Ensure the default superadmin credentials are not used in production
+- Use environment variables to turn off development features such as the GraphQL playground
+- Use the HardenPlugin in production to automatically turn off development features and restrict system information leaking via the API.
+- Use fine-grained permissions and roles for your administrator accounts to reduce the attack surface if an account is compromised.
+
+### 6. Vulnerable and Outdated Components
+
+Reference: https://owasp.org/Top10/A06_2021-Vulnerable_and_Outdated_Components/
+
+Out of the box:
+
+- All dependencies are updated to current versions with each minor release
+- Modular design limits the number of dependencies for core packages.
+- Automated code & dependency scanning is used in the Vendure repo
+
+To consider:
+
+- Run your own audits on your code base.
+- Use version override mechanisms if needed to patch any critical Vendure dependencies that have not yet been updated.
+
+### 7. Identification and Authentication Failures
+
+Reference: https://owasp.org/Top10/A07_2021-Identification_and_Authentication_Failures/
+
+Out of the box:
+
+- Valid usernames are not leaked via mechanisms such as account reset
+- Does not permit "knowledge-based" account recovery
+- Uses strong password hashing (bcrypt with 12 salt rounds)
+- Session identifiers are not exposed in API URLs (instead we use headers/cookies)
+- New session tokens are always regenerated after a successful login
+- Sessions are deleted during logout
+- Cryptographically-strong, high-entropy session tokens are used (crypto.randomBytes API)
+
+To consider:
+
+- Implementing a multi-factor authentication flow
+- Do not use the default superadmin credentials in production
+- Implementing a custom PasswordValidationStrategy to disallow weak/common passwords
+- Subscribe to AttemptedLoginEvent to implement detection of brute-force attacks
+
+### 8. Software and Data Integrity Failures
+
+Reference: https://owasp.org/Top10/A08_2021-Software_and_Data_Integrity_Failures/
+
+To consider:
+
+- Exercise caution when introducing new dependencies to your project.
+- Do not use untrusted Vendure plugins. Where possible, review the code prior to use.
+- Exercise caution if using auto-updating mechanisms for dependencies.
+- If storing serialized data in custom fields, implement validation to prevent untrusted data from getting into the database.
+- Evaluate your CI/CD pipeline against the OWASP recommendations for this point
+
+### 9. Security Logging and Monitoring Failures
+
+Reference: https://owasp.org/Top10/A09_2021-Security_Logging_and_Monitoring_Failures/
+
+Out of the box:
+
+- APIs for integrating logging & monitoring tools & services, e.g. the configurable Logger interface & ErrorHandlerStrategy
+- Official Sentry integration for application performance monitoring
+
+To consider:
+
+- Integrate with dedicated logging tools for improved log management
+- Integrate with monitoring tools such as Sentry
+- Use the EventBus to monitor events such as repeated failed login attempts and high-value orders
+
+### 10. 
Server-Side Request Forgery (SSRF) + +Reference: [https://owasp.org/Top10/A10_2021-Server-Side_Request_Forgery_(SSRF)/](https://owasp.org/Top10/A10_2021-Server-Side_Request_Forgery_%28SSRF%29/) + +Out of the box: + +- By default Vendure does not rely on requests to remote servers for core functionality + +To consider: + +- Review the OWASP recommendations against your network architecture diff --git a/docs/docs/guides/getting-started/installation/index.md b/docs/docs/guides/getting-started/installation/index.md index 1e0184306b..3782700d8f 100644 --- a/docs/docs/guides/getting-started/installation/index.md +++ b/docs/docs/guides/getting-started/installation/index.md @@ -66,8 +66,6 @@ Follow the instructions to move into the new directory created for your project, ```bash cd my-shop -yarn dev -# or npm run dev ``` diff --git a/docs/sidebars.js b/docs/sidebars.js index 974ee2690d..4168204391 100644 --- a/docs/sidebars.js +++ b/docs/sidebars.js @@ -72,6 +72,7 @@ const sidebars = { 'guides/developer-guide/events/index', 'guides/developer-guide/migrations/index', 'guides/developer-guide/plugins/index', + 'guides/developer-guide/security/index', 'guides/developer-guide/strategies-configurable-operations/index', 'guides/developer-guide/testing/index', 'guides/developer-guide/updating/index', diff --git a/e2e-common/test-config.ts b/e2e-common/test-config.ts index c47c80ebba..06f594a32a 100644 --- a/e2e-common/test-config.ts +++ b/e2e-common/test-config.ts @@ -72,8 +72,8 @@ function getDbConfig(): DataSourceOptions { type: 'postgres', host: '127.0.0.1', port: process.env.CI ? +(process.env.E2E_POSTGRES_PORT || 5432) : 5432, - username: 'admin', - password: 'secret', + username: 'vendure', + password: 'password', }; case 'mariadb': return { @@ -81,8 +81,8 @@ function getDbConfig(): DataSourceOptions { type: 'mariadb', host: '127.0.0.1', port: process.env.CI ? +(process.env.E2E_MARIADB_PORT || 3306) : 3306, - username: 'root', - password: '', + username: 'vendure', + password: 'password', }; case 'mysql': return { @@ -90,8 +90,8 @@ function getDbConfig(): DataSourceOptions { type: 'mysql', host: '127.0.0.1', port: process.env.CI ? 
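                    // the leading + below is the unary plus operator, which casts the
                    // E2E_MYSQL_PORT env var (a string) to a number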
+(process.env.E2E_MYSQL_PORT || 3306) : 3306, - username: 'root', - password: '', + username: 'vendure', + password: 'password', }; case 'sqljs': default: diff --git a/license/signatures/version1/cla.json b/license/signatures/version1/cla.json index 46c070085c..73b5843f7c 100644 --- a/license/signatures/version1/cla.json +++ b/license/signatures/version1/cla.json @@ -191,6 +191,30 @@ "created_at": "2024-09-30T12:27:50Z", "repoId": 136938012, "pullRequestNo": 3096 + }, + { + "name": "kyunal", + "id": 33372279, + "comment_id": 2395056311, + "created_at": "2024-10-05T13:21:30Z", + "repoId": 136938012, + "pullRequestNo": 3110 + }, + { + "name": "LeftoversTodayAppAdmin", + "id": 139936478, + "comment_id": 2395622489, + "created_at": "2024-10-06T23:06:02Z", + "repoId": 136938012, + "pullRequestNo": 3112 + }, + { + "name": "TheValkDokk", + "id": 91455763, + "comment_id": 2415609869, + "created_at": "2024-10-16T02:43:06Z", + "repoId": 136938012, + "pullRequestNo": 3138 } ] } \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index c9ddcbd0c7..33f2d3792a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -3633,6 +3633,18 @@ "sisteransi": "^1.0.5" } }, + "node_modules/@clack/prompts/node_modules/is-unicode-supported": { + "version": "1.3.0", + "extraneous": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/@clr/angular": { "version": "17.0.1", "license": "MIT", @@ -32809,6 +32821,7 @@ "cross-spawn": "^7.0.3", "fs-extra": "^11.2.0", "handlebars": "^4.7.8", + "open": "^8.4.2", "picocolors": "^1.0.0", "semver": "^7.5.4", "tcp-port-used": "^1.0.2" diff --git a/package.json b/package.json index 331885712d..f474eeee83 100644 --- a/package.json +++ b/package.json @@ -15,7 +15,6 @@ "docs:build": "npm run docs:generate-typescript-docs && npm run docs:generate-graphql-docs", "codegen": "tsc -p scripts/codegen/plugins && ts-node scripts/codegen/generate-graphql-types.ts", "version": "npm run check-imports && npm run check-angular-versions && npm run build && npm run check-core-type-defs && npm run generate-changelog && git add CHANGELOG* && git add */version.ts", - "dev-server:start": "cd packages/dev-server && npm run start", "test": "lerna run test --stream --no-bail", "e2e": "lerna run e2e --stream --no-bail", "build": "lerna run build", diff --git a/packages/admin-ui/README.md b/packages/admin-ui/README.md index c202d24cff..b18c3d6aa7 100644 --- a/packages/admin-ui/README.md +++ b/packages/admin-ui/README.md @@ -46,5 +46,5 @@ This report data is also saved to the [i18n-coverage.json](./i18n-coverage.json) To add support for a new language, create a new empty json file (`{}`) in the `i18n-messages` directory named `.json`, where `languageCode` is one of the supported codes as given in the [LanguageCode enum type](../core/src/api/schema/common/language-code.graphql), then run `npm run extract-translations` -To verify localization changes add `.json` to `./src/lib/static/vendure-ui-config.json` in the array `availableLanguages`. This will make the localization available in Admin UI development mode using `npm run start` +To verify localization changes add `.json` to `./src/lib/static/vendure-ui-config.json` in the array `availableLanguages`. 
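+
+For example, if you had added a German translation file, the relevant entry might look like this (a minimal sketch; the real `vendure-ui-config.json` contains other settings alongside this array):
+
+```json
+{
+    "availableLanguages": ["en", "de"]
+}
+```
+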
This will make the localization available in Admin UI development mode using `npm run dev` diff --git a/packages/admin-ui/src/lib/core/src/data/query-result.ts b/packages/admin-ui/src/lib/core/src/data/query-result.ts index 2512ae9fae..f8087214b8 100644 --- a/packages/admin-ui/src/lib/core/src/data/query-result.ts +++ b/packages/admin-ui/src/lib/core/src/data/query-result.ts @@ -3,7 +3,17 @@ import { notNullOrUndefined } from '@vendure/common/lib/shared-utils'; import { Apollo, QueryRef } from 'apollo-angular'; import { DocumentNode } from 'graphql'; import { merge, Observable, Subject, Subscription } from 'rxjs'; -import { distinctUntilChanged, filter, finalize, map, skip, take, takeUntil } from 'rxjs/operators'; +import { + distinctUntilChanged, + filter, + finalize, + map, + shareReplay, + skip, + startWith, + take, + takeUntil, +} from 'rxjs/operators'; import { CustomFieldConfig, GetUserStatusQuery } from '../common/generated-types'; @@ -194,7 +204,9 @@ export class QueryResult = Record> this.subscribeToQueryRef(this.queryRef); this.queryRefSubscribed.set(this.queryRef, true); } - this.valueChangeSubject.subscribe(subscriber); + this.valueChangeSubject + .pipe(startWith(this.queryRef.getCurrentResult()), shareReplay(1)) + .subscribe(subscriber); return () => { this.queryRefSubscribed.delete(this.queryRef); }; diff --git a/packages/asset-server-plugin/e2e/asset-server-plugin.e2e-spec.ts b/packages/asset-server-plugin/e2e/asset-server-plugin.e2e-spec.ts index ff7fdfbb05..eac6e699d1 100644 --- a/packages/asset-server-plugin/e2e/asset-server-plugin.e2e-spec.ts +++ b/packages/asset-server-plugin/e2e/asset-server-plugin.e2e-spec.ts @@ -1,7 +1,8 @@ /* eslint-disable @typescript-eslint/no-non-null-assertion */ -import { mergeConfig } from '@vendure/core'; +import { ConfigService, mergeConfig } from '@vendure/core'; import { AssetFragment } from '@vendure/core/e2e/graphql/generated-e2e-admin-types'; import { createTestEnvironment } from '@vendure/testing'; +import { exec } from 'child_process'; import fs from 'fs-extra'; import gql from 'graphql-tag'; import fetch from 'node-fetch'; @@ -193,6 +194,41 @@ describe('AssetServerPlugin', () => { it('does not error on non-integer height', async () => { return fetch(`${asset.preview}?h=10.5`); }); + + // https://github.com/vendure-ecommerce/vendure/security/advisories/GHSA-r9mq-3c9r-fmjq + describe('path traversal', () => { + function curlWithPathAsIs(url: string) { + return new Promise((resolve, reject) => { + // We use curl here rather than node-fetch or any other fetch-type function because + // those will automatically perform path normalization which will mask the path traversal + return exec(`curl --path-as-is ${url}`, (err, stdout, stderr) => { + if (err) { + reject(err); + } + resolve(stdout); + }); + }); + } + + function testPathTraversalOnUrl(urlPath: string) { + return async () => { + const port = server.app.get(ConfigService).apiOptions.port; + const result = await curlWithPathAsIs(`http://localhost:${port}/assets${urlPath}`); + expect(result).not.toContain('@vendure/asset-server-plugin'); + expect(result.toLowerCase()).toContain('resource not found'); + }; + } + + it('blocks path traversal 1', testPathTraversalOnUrl(`/../../package.json`)); + it('blocks path traversal 2', testPathTraversalOnUrl(`/foo/../../../package.json`)); + it('blocks path traversal 3', testPathTraversalOnUrl(`/foo/../../../foo/../package.json`)); + it('blocks path traversal 4', testPathTraversalOnUrl(`/%2F..%2F..%2Fpackage.json`)); + it('blocks path traversal 5', 
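+            // %2E%2E is the URL-encoded form of "..", so this exercises an encoded traversal attempt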
testPathTraversalOnUrl(`/%2E%2E/%2E%2E/package.json`)); + it('blocks path traversal 6', testPathTraversalOnUrl(`/..//..//package.json`)); + it('blocks path traversal 7', testPathTraversalOnUrl(`/.%2F.%2F.%2Fpackage.json`)); + it('blocks path traversal 8', testPathTraversalOnUrl(`/..\\\\..\\\\package.json`)); + it('blocks path traversal 9', testPathTraversalOnUrl(`/\\\\\\..\\\\\\..\\\\\\package.json`)); + }); }); describe('deletion', () => { @@ -268,7 +304,7 @@ describe('AssetServerPlugin', () => { // https://github.com/vendure-ecommerce/vendure/issues/1563 it('falls back to binary preview if image file cannot be processed', async () => { const filesToUpload = [path.join(__dirname, 'fixtures/assets/bad-image.jpg')]; - const { createAssets }: CreateAssets.Mutation = await adminClient.fileUploadMutation({ + const { createAssets }: CreateAssetsMutation = await adminClient.fileUploadMutation({ mutation: CREATE_ASSETS, filePaths: filesToUpload, mapVariables: filePaths => ({ diff --git a/packages/asset-server-plugin/src/plugin.ts b/packages/asset-server-plugin/src/plugin.ts index ba47653e93..4ba1739f81 100644 --- a/packages/asset-server-plugin/src/plugin.ts +++ b/packages/asset-server-plugin/src/plugin.ts @@ -281,7 +281,7 @@ export class AssetServerPlugin implements NestModule, OnApplicationBootstrap { return async (err: any, req: Request, res: Response, next: NextFunction) => { if (err && (err.status === 404 || err.statusCode === 404)) { if (req.query) { - const decodedReqPath = decodeURIComponent(req.path); + const decodedReqPath = this.sanitizeFilePath(req.path); Logger.debug(`Pre-cached Asset not found: ${decodedReqPath}`, loggerCtx); let file: Buffer; try { @@ -347,9 +347,7 @@ export class AssetServerPlugin implements NestModule, OnApplicationBootstrap { imageParamsString += quality; } - /* eslint-enable @typescript-eslint/restrict-template-expressions */ - - const decodedReqPath = decodeURIComponent(req.path); + const decodedReqPath = this.sanitizeFilePath(req.path); if (imageParamsString !== '') { const imageParamHash = this.md5(imageParamsString); return path.join(this.cacheDir, this.addSuffix(decodedReqPath, imageParamHash, imageFormat)); @@ -358,6 +356,20 @@ export class AssetServerPlugin implements NestModule, OnApplicationBootstrap { } } + /** + * Sanitize the file path to prevent directory traversal attacks. 
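+     *
+     * The incoming path is URI-decoded and normalized, and "../" / "..\" segments are
+     * stripped from the result, so a crafted request path cannot resolve to a file
+     * outside of the asset storage directory. If decoding fails, the error is logged
+     * and an empty string is returned.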
+ */ + private sanitizeFilePath(filePath: string): string { + let decodedPath: string; + try { + decodedPath = decodeURIComponent(filePath); + } catch (e: any) { + Logger.error((e.message as string) + ': ' + filePath, loggerCtx); + return ''; + } + return path.normalize(decodedPath).replace(/(\.\.[\/\\])+/, ''); + } + private md5(input: string): string { return createHash('md5').update(input).digest('hex'); } diff --git a/packages/core/e2e/collection.e2e-spec.ts b/packages/core/e2e/collection.e2e-spec.ts index 4239c8fab1..a270b15f77 100644 --- a/packages/core/e2e/collection.e2e-spec.ts +++ b/packages/core/e2e/collection.e2e-spec.ts @@ -735,6 +735,21 @@ describe('Collection resolver', () => { 139900, 219900, 229900, ]); }); + + // https://github.com/vendure-ecommerce/vendure/issues/3107 + it('collection list with translations, filtered by name', async () => { + const { collections } = await adminClient.query(GET_COLLECTION_LIST_WITH_TRANSLATIONS, { + options: { + filter: { + name: { + eq: 'Electronics', + }, + }, + }, + }); + + expect(collections.items.length).toBeDefined(); + }); }); describe('moveCollection', () => { @@ -2413,6 +2428,21 @@ export const GET_COLLECTION_LIST = gql` ${COLLECTION_FRAGMENT} `; +export const GET_COLLECTION_LIST_WITH_TRANSLATIONS = gql` + query GetCollectionListWithTranslations($options: CollectionListOptions) { + collections(options: $options) { + items { + id + name + translations { + id + name + } + } + } + } +`; + export const MOVE_COLLECTION = gql` mutation MoveCollection($input: MoveCollectionInput!) { moveCollection(input: $input) { diff --git a/packages/core/e2e/list-query-builder.e2e-spec.ts b/packages/core/e2e/list-query-builder.e2e-spec.ts index 7a3b54d02f..1debcad267 100644 --- a/packages/core/e2e/list-query-builder.e2e-spec.ts +++ b/packages/core/e2e/list-query-builder.e2e-spec.ts @@ -56,14 +56,16 @@ describe('ListQueryBuilder', () => { expect(testEntities.totalItems).toBe(6); expect(getItemLabels(testEntities.items)).toEqual(['A', 'B', 'C', 'D', 'E', 'F']); - expect(testEntities.items.map((i: any) => i.name)).toEqual(expect.arrayContaining([ - 'apple', - 'bike', - 'cake', - 'dog', - 'egg', - 'baum', // if default en lang does not exist, use next available lang - ])); + expect(testEntities.items.map((i: any) => i.name)).toEqual( + expect.arrayContaining([ + 'apple', + 'bike', + 'cake', + 'dog', + 'egg', + 'baum', // if default en lang does not exist, use next available lang + ]), + ); }); it('all de', async () => { @@ -77,14 +79,16 @@ describe('ListQueryBuilder', () => { expect(testEntities.totalItems).toBe(6); expect(getItemLabels(testEntities.items)).toEqual(['A', 'B', 'C', 'D', 'E', 'F']); - expect(testEntities.items.map((i: any) => i.name)).toEqual(expect.arrayContaining([ - 'apfel', - 'fahrrad', - 'kuchen', - 'hund', - 'egg', // falls back to en translation when de doesn't exist - 'baum', - ])); + expect(testEntities.items.map((i: any) => i.name)).toEqual( + expect.arrayContaining([ + 'apfel', + 'fahrrad', + 'kuchen', + 'hund', + 'egg', // falls back to en translation when de doesn't exist + 'baum', + ]), + ); }); it('take', async () => { @@ -278,6 +282,20 @@ describe('ListQueryBuilder', () => { expect(getItemLabels(testEntities.items)).toEqual(['A', 'C', 'E']); }); + it('filtering on translatable string', async () => { + const { testEntities } = await adminClient.query(GET_LIST_WITH_TRANSLATIONS, { + options: { + filter: { + name: { + contains: 'g', + }, + }, + }, + }); + + expect(getItemLabels(testEntities.items)).toEqual(['D', 'E']); + }); + 
describe('regex', () => { it('simple substring', async () => { const { testEntities } = await adminClient.query(GET_LIST, { @@ -1208,7 +1226,10 @@ describe('ListQueryBuilder', () => { // https://github.com/vendure-ecommerce/vendure/issues/1586 it('using the getMany() of the resulting QueryBuilder', async () => { const { testEntitiesGetMany } = await adminClient.query(GET_ARRAY_LIST, {}); - const actualPrices = testEntitiesGetMany.sort(sortById).map((x: any) => x.price).sort((a: number, b: number) => a - b); + const actualPrices = testEntitiesGetMany + .sort(sortById) + .map((x: any) => x.price) + .sort((a: number, b: number) => a - b); const expectedPrices = [11, 9, 22, 14, 13, 33].sort((a, b) => a - b); expect(actualPrices).toEqual(expectedPrices); }); diff --git a/packages/core/e2e/order-promotion.e2e-spec.ts b/packages/core/e2e/order-promotion.e2e-spec.ts index c17019ef44..682621590f 100644 --- a/packages/core/e2e/order-promotion.e2e-spec.ts +++ b/packages/core/e2e/order-promotion.e2e-spec.ts @@ -2147,6 +2147,145 @@ describe('Promotions applied to Orders', () => { }); }); + // https://github.com/vendure-ecommerce/vendure/issues/2052 + describe('multi-channel usage', () => { + const SECOND_CHANNEL_TOKEN = 'second_channel_token'; + const THIRD_CHANNEL_TOKEN = 'third_channel_token'; + const promoCode = 'TEST_COMMON_CODE'; + + async function createChannelAndAssignProducts(code: string, token: string) { + const result = await adminClient.query< + Codegen.CreateChannelMutation, + Codegen.CreateChannelMutationVariables + >(CREATE_CHANNEL, { + input: { + code, + token, + defaultLanguageCode: LanguageCode.en, + currencyCode: CurrencyCode.GBP, + pricesIncludeTax: true, + defaultShippingZoneId: 'T_1', + defaultTaxZoneId: 'T_1', + }, + }); + + await adminClient.query< + Codegen.AssignProductsToChannelMutation, + Codegen.AssignProductsToChannelMutationVariables + >(ASSIGN_PRODUCT_TO_CHANNEL, { + input: { + channelId: (result.createChannel as Codegen.ChannelFragment).id, + priceFactor: 1, + productIds: products.map(p => p.id), + }, + }); + + return result.createChannel as Codegen.ChannelFragment; + } + + async function addItemAndApplyPromoCode() { + await shopClient.asAnonymousUser(); + await shopClient.query< + CodegenShop.AddItemToOrderMutation, + CodegenShop.AddItemToOrderMutationVariables + >(ADD_ITEM_TO_ORDER, { + productVariantId: getVariantBySlug('item-5000').id, + quantity: 1, + }); + + const { applyCouponCode } = await shopClient.query< + CodegenShop.ApplyCouponCodeMutation, + CodegenShop.ApplyCouponCodeMutationVariables + >(APPLY_COUPON_CODE, { + couponCode: promoCode, + }); + + orderResultGuard.assertSuccess(applyCouponCode); + return applyCouponCode; + } + + beforeAll(async () => { + await createChannelAndAssignProducts('second-channel', SECOND_CHANNEL_TOKEN); + await createChannelAndAssignProducts('third-channel', THIRD_CHANNEL_TOKEN); + }); + + it('create promotion in second channel', async () => { + adminClient.setChannelToken(SECOND_CHANNEL_TOKEN); + + const result = await createPromotion({ + enabled: true, + name: 'common-promotion-second-channel', + couponCode: promoCode, + actions: [ + { + code: orderPercentageDiscount.code, + arguments: [{ name: 'discount', value: '20' }], + }, + ], + conditions: [], + }); + + expect(result.name).toBe('common-promotion-second-channel'); + }); + + it('create promotion in third channel', async () => { + adminClient.setChannelToken(THIRD_CHANNEL_TOKEN); + + const result = await createPromotion({ + enabled: true, + name: 
'common-promotion-third-channel', + couponCode: promoCode, + actions: [ + { + code: orderPercentageDiscount.code, + arguments: [{ name: 'discount', value: '20' }], + }, + ], + conditions: [], + }); + + expect(result.name).toBe('common-promotion-third-channel'); + }); + + it('applies promotion in second channel', async () => { + shopClient.setChannelToken(SECOND_CHANNEL_TOKEN); + + const result = await addItemAndApplyPromoCode(); + expect(result.discounts.length).toBe(1); + expect(result.discounts[0].description).toBe('common-promotion-second-channel'); + }); + + it('applies promotion in third channel', async () => { + shopClient.setChannelToken(THIRD_CHANNEL_TOKEN); + + const result = await addItemAndApplyPromoCode(); + expect(result.discounts.length).toBe(1); + expect(result.discounts[0].description).toBe('common-promotion-third-channel'); + }); + + it('applies promotion from current channel, not default channel', async () => { + adminClient.setChannelToken(E2E_DEFAULT_CHANNEL_TOKEN); + const defaultChannelPromotion = await createPromotion({ + enabled: true, + name: 'common-promotion-default-channel', + couponCode: promoCode, + actions: [ + { + code: orderPercentageDiscount.code, + arguments: [{ name: 'discount', value: '20' }], + }, + ], + conditions: [], + }); + + shopClient.setChannelToken(SECOND_CHANNEL_TOKEN); + + const result = await addItemAndApplyPromoCode(); + expect(result.discounts.length).toBe(1); + expect(result.discounts[0].description).toBe('common-promotion-second-channel'); + }); + }); + async function getProducts() { const result = await adminClient.query( GET_PRODUCTS_WITH_VARIANT_PRICES, diff --git a/packages/core/src/cli/populate.ts b/packages/core/src/cli/populate.ts index a2ccaf9ccd..d2c07fe83a 100644 --- a/packages/core/src/cli/populate.ts +++ b/packages/core/src/cli/populate.ts @@ -150,5 +150,9 @@ export async function importProductsFromCsv( languageCode, channelOrToken: channel, }); - return lastValueFrom(importer.parseAndImport(productData, ctx, true)); + const createEnvVar: import('@vendure/common/lib/shared-constants').CREATING_VENDURE_APP = + 'CREATING_VENDURE_APP'; + // Turn off progress bar when running in the context of the @vendure/create script + const reportProgress = process.env[createEnvVar] === 'true' ? false : true; + return lastValueFrom(importer.parseAndImport(productData, ctx, reportProgress)); } diff --git a/packages/core/src/data-import/providers/importer/importer.ts b/packages/core/src/data-import/providers/importer/importer.ts index 86e3051a3e..5d30b500c7 100644 --- a/packages/core/src/data-import/providers/importer/importer.ts +++ b/packages/core/src/data-import/providers/importer/importer.ts @@ -10,6 +10,7 @@ import { RequestContext } from '../../../api/common/request-context'; import { InternalServerError } from '../../../common/error/errors'; import { ConfigService } from '../../../config/config.service'; import { CustomFieldConfig } from '../../../config/custom-field/custom-field-types'; +import { Logger } from '../../../config/index'; import { Facet } from '../../../entity/facet/facet.entity'; import { FacetValue } from '../../../entity/facet-value/facet-value.entity'; import { TaxCategory } from '../../../entity/tax-category/tax-category.entity'; @@ -159,6 +160,17 @@ export class Importer { let imported = 0; const languageCode = ctx.languageCode; const taxCategories = await this.taxCategoryService.findAll(ctx); + if (taxCategories.totalItems === 0) { + Logger.error( + [ + `No TaxCategories found in the database. 
Ensure that at least one TaxCategory exists.`, + `If you are populating from an InitialData object, ensure the 'taxRates' array is not empty.`, + ].join('\n'), + ); + throw new Error( + `No TaxCategories found in the database. Ensure the IntialData.taxRates array is not empty.`, + ); + } await this.fastImporter.initialize(ctx.channel); for (const { product, variants } of rows) { const productMainTranslation = this.getTranslationByCodeOrFirst( diff --git a/packages/core/src/data-import/providers/populator/populator.ts b/packages/core/src/data-import/providers/populator/populator.ts index e389cec936..99cef051fe 100644 --- a/packages/core/src/data-import/providers/populator/populator.ts +++ b/packages/core/src/data-import/providers/populator/populator.ts @@ -271,8 +271,6 @@ export class Populator { taxRates: Array<{ name: string; percentage: number }>, zoneMap: ZoneMap, ) { - const taxCategories: TaxCategory[] = []; - for (const taxRate of taxRates) { const category = await this.taxCategoryService.create(ctx, { name: taxRate.name }); diff --git a/packages/core/src/plugin/default-search-plugin/indexer/indexer.controller.ts b/packages/core/src/plugin/default-search-plugin/indexer/indexer.controller.ts index b9ee716bb0..751bc01a46 100644 --- a/packages/core/src/plugin/default-search-plugin/indexer/indexer.controller.ts +++ b/packages/core/src/plugin/default-search-plugin/indexer/indexer.controller.ts @@ -481,6 +481,7 @@ export class IndexerController { loadEagerRelations: false, where: { productId: variant.productId, + deletedAt: IsNull(), }, }) .then(_variants => diff --git a/packages/core/src/service/helpers/utils/tree-relations-qb-joiner.ts b/packages/core/src/service/helpers/utils/tree-relations-qb-joiner.ts index db4e6dbb07..c8c56792ab 100644 --- a/packages/core/src/service/helpers/utils/tree-relations-qb-joiner.ts +++ b/packages/core/src/service/helpers/utils/tree-relations-qb-joiner.ts @@ -110,7 +110,7 @@ export function joinTreeRelationsDynamically( } const nextAlias = DriverUtils.buildAlias( qb.connection.driver, - { shorten: false }, + { shorten: false, joiner: joinConnector }, currentAlias, part.replace(/\./g, '_'), ); diff --git a/packages/core/src/service/services/promotion.service.ts b/packages/core/src/service/services/promotion.service.ts index 59f13e2d34..ab8ce63d1b 100644 --- a/packages/core/src/service/services/promotion.service.ts +++ b/packages/core/src/service/services/promotion.service.ts @@ -250,6 +250,7 @@ export class PromotionService { couponCode, enabled: true, deletedAt: IsNull(), + channels: { id: ctx.channelId }, }, relations: ['channels'], }); diff --git a/packages/create/README.md b/packages/create/README.md index ee9f4f248d..863a98fa8e 100644 --- a/packages/create/README.md +++ b/packages/create/README.md @@ -4,47 +4,17 @@ A CLI tool for rapidly scaffolding a new Vendure server application. Heavily ins ## Usage -Vendure Create requires [Node.js](https://nodejs.org/en/) v8.9.0+ to be installed. - -To create a new project, you may choose one of the following methods: - -### npx +Vendure Create requires [Node.js](https://nodejs.org/en/) v18+ to be installed. 
```sh npx @vendure/create my-app ``` -*[npx](https://medium.com/@maybekatz/introducing-npx-an-npm-package-runner-55f7d4bd282b) comes with npm 5.2+ and higher.* - -### npm - -```sh -npm init @vendure my-app -``` - -*`npm init ` is available in npm 6+* - -### Yarn - -```sh -yarn create @vendure my-app -``` - -*`yarn create` is available in Yarn 0.25+* - - -It will create a directory called `my-app` inside the current folder. - ## Options -### `--use-npm` - -By default, Vendure Create will detect whether a compatible version of Yarn is installed, and if so will display a prompt to select the preferred package manager. -You can override this and force it to use npm with the `--use-npm` flag. - ### `--log-level` -You can control how much output is generated during the installation and setup with this flag. Valid options are `silent`, `info` and `verbose`. The default is `silent` +You can control how much output is generated during the installation and setup with this flag. Valid options are `silent`, `info` and `verbose`. The default is `info` Example: diff --git a/packages/create/package.json b/packages/create/package.json index 8af80dc86d..f6e10a84c3 100644 --- a/packages/create/package.json +++ b/packages/create/package.json @@ -39,6 +39,7 @@ "cross-spawn": "^7.0.3", "fs-extra": "^11.2.0", "handlebars": "^4.7.8", + "open": "^8.4.2", "picocolors": "^1.0.0", "semver": "^7.5.4", "tcp-port-used": "^1.0.2" diff --git a/packages/create/src/create-vendure-app.ts b/packages/create/src/create-vendure-app.ts index 04073f4c6e..ac49889e08 100644 --- a/packages/create/src/create-vendure-app.ts +++ b/packages/create/src/create-vendure-app.ts @@ -1,24 +1,33 @@ -/* eslint-disable no-console */ import { intro, note, outro, select, spinner } from '@clack/prompts'; import { program } from 'commander'; import fs from 'fs-extra'; +import { ChildProcess, spawn } from 'node:child_process'; +import { setTimeout as sleep } from 'node:timers/promises'; +import open from 'open'; import os from 'os'; import path from 'path'; import pc from 'picocolors'; import { REQUIRED_NODE_VERSION, SERVER_PORT } from './constants'; -import { checkCancel, gatherCiUserResponses, gatherUserResponses } from './gather-user-responses'; import { + getCiConfiguration, + getManualConfiguration, + getQuickStartConfiguration, +} from './gather-user-responses'; +import { + checkCancel, checkDbConnection, checkNodeVersion, checkThatNpmCanReadCwd, + cleanUpDockerResources, getDependencies, installPackages, isSafeToCreateProjectIn, isServerPortInUse, scaffoldAlreadyExists, - yarnIsAvailable, + startPostgresDatabase, } from './helpers'; +import { log, setLogLevel } from './logger'; import { CliLogLevel, PackageManager } from './types'; // eslint-disable-next-line @typescript-eslint/no-var-requires @@ -44,14 +53,23 @@ program '--log-level ', "Log level, either 'silent', 'info', or 'verbose'", /^(silent|info|verbose)$/i, - 'silent', + 'info', + ) + .option('--verbose', 'Alias for --log-level verbose', false) + .option( + '--use-npm', + 'Uses npm rather than as the default package manager. 
DEPRECATED: Npm is now the default', ) - .option('--use-npm', 'Uses npm rather than Yarn as the default package manager') - .option('--ci', 'Runs without prompts for use in CI scenarios') + .option('--ci', 'Runs without prompts for use in CI scenarios', false) .parse(process.argv); const options = program.opts(); -void createVendureApp(projectName, options.useNpm, options.logLevel || 'silent', options.ci); +void createVendureApp( + projectName, + options.useNpm, + options.verbose ? 'verbose' : options.logLevel || 'info', + options.ci, +); export async function createVendureApp( name: string | undefined, @@ -59,6 +77,7 @@ export async function createVendureApp( logLevel: CliLogLevel, isCi: boolean = false, ) { + setLogLevel(logLevel); if (!runPreChecks(name, useNpm)) { return; } @@ -67,6 +86,22 @@ export async function createVendureApp( `Let's create a ${pc.blue(pc.bold('Vendure App'))} ✨ ${pc.dim(`v${packageJson.version as string}`)}`, ); + const mode = isCi + ? 'ci' + : ((await select({ + message: 'How should we proceed?', + options: [ + { label: 'Quick Start', value: 'quick', hint: 'Get up an running in a single step' }, + { + label: 'Manual Configuration', + value: 'manual', + hint: 'Customize your Vendure project with more advanced settings', + }, + ], + initialValue: 'quick' as 'quick' | 'manual', + })) as 'quick' | 'manual'); + checkCancel(mode); + const portSpinner = spinner(); let port = SERVER_PORT; const attemptedPortRange = 20; @@ -90,27 +125,15 @@ export async function createVendureApp( const appName = path.basename(root); const scaffoldExists = scaffoldAlreadyExists(root, name); - const yarnAvailable = yarnIsAvailable(); - let packageManager: PackageManager = 'npm'; - if (yarnAvailable && !useNpm) { - packageManager = (await select({ - message: 'Which package manager should be used?', - options: [ - { label: 'npm', value: 'npm' }, - { label: 'yarn', value: 'yarn' }, - ], - initialValue: 'yarn' as PackageManager, - })) as PackageManager; - checkCancel(packageManager); - } + const packageManager: PackageManager = 'npm'; if (scaffoldExists) { - console.log( + log( pc.yellow( 'It appears that a new Vendure project scaffold already exists. Re-using the existing files...', ), + { newline: 'after' }, ); - console.log(); } const { dbType, @@ -123,10 +146,12 @@ export async function createVendureApp( dockerfileSource, dockerComposeSource, populateProducts, - } = isCi - ? await gatherCiUserResponses(root, packageManager) - : await gatherUserResponses(root, scaffoldExists, packageManager); - const originalDirectory = process.cwd(); + } = + mode === 'ci' + ? await getCiConfiguration(root, packageManager) + : mode === 'manual' + ? await getManualConfiguration(root, packageManager) + : await getQuickStartConfiguration(root, packageManager); process.chdir(root); if (packageManager !== 'npm' && !checkThatNpmCanReadCwd()) { process.exit(1); @@ -139,11 +164,11 @@ export async function createVendureApp( scripts: { 'dev:server': 'ts-node ./src/index.ts', 'dev:worker': 'ts-node ./src/index-worker.ts', - dev: packageManager === 'yarn' ? 'concurrently yarn:dev:*' : 'concurrently npm:dev:*', + dev: 'concurrently npm:dev:*', build: 'tsc', 'start:server': 'node ./dist/index.js', 'start:worker': 'node ./dist/index-worker.js', - start: packageManager === 'yarn' ? 
'concurrently yarn:start:*' : 'concurrently npm:start:*', + start: 'concurrently npm:start:*', }, }; @@ -152,7 +177,6 @@ export async function createVendureApp( `Setting up your new Vendure project in ${pc.green(root)}\nThis may take a few minutes...`, ); - const rootPathScript = (fileName: string): string => path.join(root, `${fileName}.ts`); const srcPathScript = (fileName: string): string => path.join(root, 'src', `${fileName}.ts`); fs.writeFileSync(path.join(root, 'package.json'), JSON.stringify(packageJsonContents, null, 2) + os.EOL); @@ -162,9 +186,9 @@ export async function createVendureApp( const installSpinner = spinner(); installSpinner.start(`Installing ${dependencies[0]} + ${dependencies.length - 1} more dependencies`); try { - await installPackages(root, packageManager === 'yarn', dependencies, false, logLevel, isCi); + await installPackages({ dependencies, logLevel }); } catch (e) { - outro(pc.red(`Failed to install dependencies. Please try again.`)); + outro(pc.red(`Failed to inst all dependencies. Please try again.`)); process.exit(1); } installSpinner.stop(`Successfully installed ${dependencies.length} dependencies`); @@ -175,7 +199,7 @@ export async function createVendureApp( `Installing ${devDependencies[0]} + ${devDependencies.length - 1} more dev dependencies`, ); try { - await installPackages(root, packageManager === 'yarn', devDependencies, true, logLevel, isCi); + await installPackages({ dependencies: devDependencies, isDevDependencies: true, logLevel }); } catch (e) { outro(pc.red(`Failed to install dev dependencies. Please try again.`)); process.exit(1); @@ -185,6 +209,10 @@ export async function createVendureApp( const scaffoldSpinner = spinner(); scaffoldSpinner.start(`Generating app scaffold`); + // We add this pause so that the above output is displayed before the + // potentially lengthy file operations begin, which can prevent that + // from displaying and thus make the user think that the process has hung. + await sleep(500); fs.ensureDirSync(path.join(root, 'src')); const assetPath = (fileName: string) => path.join(__dirname, '../assets', fileName); const configFile = srcPathScript('vendure-config'); @@ -199,34 +227,87 @@ export async function createVendureApp( .then(() => fs.writeFile(path.join(root, 'README.md'), readmeSource)) .then(() => fs.writeFile(path.join(root, 'Dockerfile'), dockerfileSource)) .then(() => fs.writeFile(path.join(root, 'docker-compose.yml'), dockerComposeSource)) - .then(() => fs.mkdir(path.join(root, 'src/plugins'))) + .then(() => fs.ensureDir(path.join(root, 'src/plugins'))) .then(() => fs.copyFile(assetPath('gitignore.template'), path.join(root, '.gitignore'))) .then(() => fs.copyFile(assetPath('tsconfig.template.json'), path.join(root, 'tsconfig.json'))) .then(() => createDirectoryStructure(root)) .then(() => copyEmailTemplates(root)); - } catch (e) { - outro(pc.red(`Failed to create app scaffold. Please try again.`)); + } catch (e: any) { + outro(pc.red(`Failed to create app scaffold: ${e.message as string}`)); process.exit(1); } scaffoldSpinner.stop(`Generated app scaffold`); + if (mode === 'quick' && dbType === 'postgres') { + cleanUpDockerResources(name); + await startPostgresDatabase(root); + } + const populateSpinner = spinner(); populateSpinner.start(`Initializing your new Vendure server`); + + // We want to display a set of tips and instructions to the user + // as the initialization process is running because it can take + // a few minutes to complete. + const tips = [ + populateProducts + ? 
'We are populating sample data so that you can start testing right away' + : 'We are setting up your Vendure server', + '☕ This can take a minute or two, so grab a coffee', + `✨ We'd love it if you drop us a star on GitHub: https://github.com/vendure-ecommerce/vendure`, + `📖 Check out the Vendure documentation at https://docs.vendure.io`, + `💬 Join our Discord community to chat with other Vendure developers: https://vendure.io/community`, + '💡 In the meantime, here are some tips to get you started', + `Vendure provides dedicated GraphQL APIs for both the Admin and Shop`, + `Almost every aspect of Vendure is customizable via plugins`, + `You can run 'vendure add' from the command line to add new plugins & features`, + `Use the EventBus in your plugins to react to events in the system`, + `Vendure supports multiple languages & currencies out of the box`, + `☕ Did we mention this can take a while?`, + `Our custom fields feature allows you to add any kind of data to your entities`, + `Vendure is built with TypeScript, so you get full type safety`, + `Combined with GraphQL's static schema, your type safety is end-to-end`, + `☕ Almost there now... thanks for your patience!`, + `Collections allow you to group products together`, + `Our AssetServerPlugin allows you to dynamically resize & optimize images`, + `Order flows are fully customizable to suit your business requirements`, + `Role-based permissions allow you to control access to every part of the system`, + `Customers can be grouped for targeted promotions & custom pricing`, + `You can find integrations in the Vendure Hub: https://vendure.io/hub`, + ]; + + let tipIndex = 0; + let timer: any; + const tipInterval = 10_000; + + function displayTip() { + populateSpinner.message(tips[tipIndex]); + tipIndex++; + if (tipIndex >= tips.length) { + // skip the intro tips if looping + tipIndex = 3; + } + timer = setTimeout(displayTip, tipInterval); + } + + timer = setTimeout(displayTip, tipInterval); + // register ts-node so that the config file can be loaded // eslint-disable-next-line @typescript-eslint/no-var-requires require(path.join(root, 'node_modules/ts-node')).register(); + let superAdminCredentials: { identifier: string; password: string } | undefined; try { const { populate } = await import(path.join(root, 'node_modules/@vendure/core/cli/populate')); - const { bootstrap, DefaultLogger, LogLevel, JobQueueService } = await import( + const { bootstrap, DefaultLogger, LogLevel, JobQueueService, ConfigModule } = await import( path.join(root, 'node_modules/@vendure/core/dist/index') ); const { config } = await import(configFile); const assetsDir = path.join(__dirname, '../assets'); - + superAdminCredentials = config.authOptions.superadminCredentials; const initialDataPath = path.join(assetsDir, 'initial-data.json'); const vendureLogLevel = - logLevel === 'silent' + logLevel === 'info' || logLevel === 'silent' ? LogLevel.Error : logLevel === 'verbose' ? LogLevel.Verbose @@ -240,7 +321,6 @@ export async function createVendureApp( ...(config.apiOptions ?? {}), port, }, - silent: logLevel === 'silent', dbConnectionOptions: { ...config.dbConnectionOptions, synchronize: true, @@ -262,35 +342,116 @@ export async function createVendureApp( // Pause to ensure the worker jobs have time to complete. if (isCi) { - console.log('[CI] Pausing before close...'); + log('[CI] Pausing before close...'); } - await new Promise(resolve => setTimeout(resolve, isCi ? 30000 : 2000)); + await sleep(isCi ? 
30000 : 2000); await app.close(); if (isCi) { - console.log('[CI] Pausing after close...'); - await new Promise(resolve => setTimeout(resolve, 10000)); + log('[CI] Pausing after close...'); + await sleep(10000); } - } catch (e) { - console.log(e); + populateSpinner.stop(`Server successfully initialized${populateProducts ? ' and populated' : ''}`); + clearTimeout(timer); + /** + * This is currently disabled because I am running into issues actually getting the server + * to quit properly in response to a SIGINT. + * This means that the server runs, but cannot be ended without forcefully + * killing the process. + * + * Once this has been resolved, the following code can be re-enabled by + * setting `autoRunServer` to `true`. + */ + const autoRunServer = false; + if (mode === 'quick' && autoRunServer) { + // In quick-start mode, we want to now run the server and open up + // a browser window to the Admin UI. + try { + const adminUiUrl = `http://localhost:${port}/admin`; + const quickStartInstructions = [ + 'Use the following credentials to log in to the Admin UI:', + `Username: ${pc.green(config.authOptions.superadminCredentials?.identifier)}`, + `Password: ${pc.green(config.authOptions.superadminCredentials?.password)}`, + `Open your browser and navigate to: ${pc.green(adminUiUrl)}`, + '', + ]; + note(quickStartInstructions.join('\n')); + + const npmCommand = os.platform() === 'win32' ? 'npm.cmd' : 'npm'; + let quickStartProcess: ChildProcess | undefined; + try { + quickStartProcess = spawn(npmCommand, ['run', 'dev'], { + cwd: root, + stdio: 'inherit', + }); + } catch (e: any) { + /* empty */ + } + + // process.stdin.resume(); + process.on('SIGINT', function () { + displayOutro(root, name, superAdminCredentials); + quickStartProcess?.kill('SIGINT'); + process.exit(0); + }); + + // Give enough time for the server to get up and running + // before opening the window. + await sleep(10_000); + try { + await open(adminUiUrl, { + newInstance: true, + }); + } catch (e: any) { + /* empty */ + } + } catch (e: any) { + log(pc.red(`Failed to start the server: ${e.message as string}`), { + newline: 'after', + level: 'verbose', + }); + } + } else { + clearTimeout(timer); + displayOutro(root, name, superAdminCredentials); + process.exit(0); + } + } catch (e: any) { + log(e.toString()); outro(pc.red(`Failed to initialize server. Please try again.`)); process.exit(1); } - populateSpinner.stop(`Server successfully initialized${populateProducts ? ' and populated' : ''}`); +} - const startCommand = packageManager === 'yarn' ? 'yarn dev' : 'npm run dev'; +function displayOutro( + root: string, + name: string, + superAdminCredentials?: { identifier: string; password: string }, +) { + const startCommand = 'npm run dev'; const nextSteps = [ - `${pc.green('Success!')} Created a new Vendure server at:`, - `\n`, - pc.italic(root), - `\n`, - `We suggest that you start by typing:`, + `Your new Vendure server was created!`, + pc.gray(root), `\n`, + `Next, run:`, pc.gray('$ ') + pc.blue(pc.bold(`cd ${name}`)), pc.gray('$ ') + pc.blue(pc.bold(`${startCommand}`)), + `\n`, + `This will start the server in development mode.`, + `To access the Admin UI, open your browser and navigate to:`, + `\n`, + pc.green(`http://localhost:3000/admin`), + `\n`, + `Use the following credentials to log in:`, + `Username: ${pc.green(superAdminCredentials?.identifier ?? 'superadmin')}`, + `Password: ${pc.green(superAdminCredentials?.password ?? 
'superadmin')}`, + '\n', + '➡️ Docs: https://docs.vendure.io', + '➡️ Discord community: https://vendure.io/community', + '➡️ Star us on GitHub:', + ' https://github.com/vendure-ecommerce/vendure', ]; - note(nextSteps.join('\n')); + note(nextSteps.join('\n'), pc.green('Setup complete!')); outro(`Happy hacking!`); - process.exit(0); } /** @@ -299,17 +460,21 @@ export async function createVendureApp( */ function runPreChecks(name: string | undefined, useNpm: boolean): name is string { if (typeof name === 'undefined') { - console.error('Please specify the project directory:'); - console.log(` ${pc.cyan(program.name())} ${pc.green('<project-directory>')}`); - console.log(); - console.log('For example:'); - console.log(` ${pc.cyan(program.name())} ${pc.green('my-vendure-app')}`); + log(pc.red(`Please specify the project directory:`)); + log(` ${pc.cyan(program.name())} ${pc.green('<project-directory>')}`, { newline: 'after' }); + log('For example:'); + log(` ${pc.cyan(program.name())} ${pc.green('my-vendure-app')}`); process.exit(1); return false; } const root = path.resolve(name); - fs.ensureDirSync(name); + try { + fs.ensureDirSync(name); + } catch (e: any) { + log(pc.red(`Could not create project directory ${name}: ${e.message as string}`)); + return false; + } if (!isSafeToCreateProjectIn(root, name)) { process.exit(1); } @@ -332,6 +497,6 @@ async function copyEmailTemplates(root: string) { try { await fs.copy(templateDir, path.join(root, 'static', 'email', 'templates')); } catch (err: any) { - console.error(pc.red('Failed to copy email templates.')); + log(pc.red('Failed to copy email templates.')); } } diff --git a/packages/create/src/gather-user-responses.ts b/packages/create/src/gather-user-responses.ts index ac6a1a14e5..970c6fc4af 100644 --- a/packages/create/src/gather-user-responses.ts +++ b/packages/create/src/gather-user-responses.ts @@ -1,10 +1,11 @@ -import { cancel, isCancel, select, text } from '@clack/prompts'; +import { select, text } from '@clack/prompts'; import { SUPER_ADMIN_USER_IDENTIFIER, SUPER_ADMIN_USER_PASSWORD } from '@vendure/common/lib/shared-constants'; import { randomBytes } from 'crypto'; import fs from 'fs-extra'; import Handlebars from 'handlebars'; import path from 'path'; +import { checkCancel, isDockerAvailable } from './helpers'; import { DbType, FileSources, PackageManager, UserResponses } from './types'; interface PromptAnswers { @@ -23,12 +24,72 @@ interface PromptAnswers { /* eslint-disable no-console */ +export async function getQuickStartConfiguration( + root: string, + packageManager: PackageManager, +): Promise { + // First we want to detect whether Docker is running + const { result: dockerStatus } = await isDockerAvailable(); + let usePostgres: boolean; + switch (dockerStatus) { + case 'running': + usePostgres = true; + break; + case 'not-found': + usePostgres = false; + break; + case 'not-running': { + let useSqlite = false; + let dockerIsNowRunning = false; + do { + const useSqliteResponse = await select({ + message: 'We could not automatically start Docker. 
How should we proceed?', + options: [ + { label: `Let's use SQLite as the database`, value: true }, + { label: 'I have manually started Docker', value: false }, + ], + initialValue: true, + }); + checkCancel(useSqlite); + useSqlite = useSqliteResponse as boolean; + if (useSqlite === false) { + const { result: dockerStatusManual } = await isDockerAvailable(); + dockerIsNowRunning = dockerStatusManual === 'running'; + } + } while (dockerIsNowRunning !== true && useSqlite === false); + usePostgres = !useSqlite; + break; + } + } + const quickStartAnswers: PromptAnswers = { + dbType: usePostgres ? 'postgres' : 'sqlite', + dbHost: usePostgres ? 'localhost' : '', + dbPort: usePostgres ? '6543' : '', + dbName: usePostgres ? 'vendure' : '', + dbUserName: usePostgres ? 'vendure' : '', + dbPassword: usePostgres ? randomBytes(16).toString('base64url') : '', + dbSchema: usePostgres ? 'public' : '', + populateProducts: true, + superadminIdentifier: SUPER_ADMIN_USER_IDENTIFIER, + superadminPassword: SUPER_ADMIN_USER_PASSWORD, + }; + + const responses = { + ...(await generateSources(root, quickStartAnswers, packageManager)), + dbType: quickStartAnswers.dbType, + populateProducts: quickStartAnswers.populateProducts as boolean, + superadminIdentifier: quickStartAnswers.superadminIdentifier as string, + superadminPassword: quickStartAnswers.superadminPassword as string, + }; + + return responses; +} + /** * Prompts the user to determine how the new Vendure app should be configured. */ -export async function gatherUserResponses( +export async function getManualConfiguration( root: string, - alreadyRanScaffold: boolean, packageManager: PackageManager, ): Promise { const dbType = (await select({ @@ -38,13 +99,12 @@ export async function gatherUserResponses( { label: 'MariaDB', value: 'mariadb' }, { label: 'Postgres', value: 'postgres' }, { label: 'SQLite', value: 'sqlite' }, - { label: 'SQL.js', value: 'sqljs' }, ], initialValue: 'sqlite' as DbType, })) as DbType; checkCancel(dbType); - const hasConnection = dbType !== 'sqlite' && dbType !== 'sqljs'; + const hasConnection = dbType !== 'sqlite'; const dbHost = hasConnection ? await text({ message: "What's the database host address?", @@ -146,7 +206,7 @@ export async function gatherUserResponses( /** * Returns mock "user response" without prompting, for use in CI */ -export async function gatherCiUserResponses( +export async function getCiConfiguration( root: string, packageManager: PackageManager, ): Promise { @@ -171,14 +231,6 @@ export async function gatherCiUserResponses( }; } -export function checkCancel(value: T | symbol): value is T { - if (isCancel(value)) { - cancel('Setup cancelled.'); - process.exit(0); - } - return true; -} - /** * Create the server index, worker and config source code based on the options specified by the CLI prompts. */ @@ -200,12 +252,10 @@ async function generateSources( const templateContext = { ...answers, - useYarn: packageManager === 'yarn', dbType: answers.dbType === 'sqlite' ? 
'better-sqlite3' : answers.dbType, name: path.basename(root), isSQLite: answers.dbType === 'sqlite', - isSQLjs: answers.dbType === 'sqljs', - requiresConnection: answers.dbType !== 'sqlite' && answers.dbType !== 'sqljs', + requiresConnection: answers.dbType !== 'sqlite', cookieSecret: randomBytes(16).toString('base64url'), }; @@ -233,10 +283,6 @@ function defaultDBPort(dbType: DbType): number { return 3306; case 'postgres': return 5432; - case 'mssql': - return 1433; - case 'oracle': - return 1521; default: return 3306; } diff --git a/packages/create/src/helpers.ts b/packages/create/src/helpers.ts index 6a225add77..74956db2f5 100644 --- a/packages/create/src/helpers.ts +++ b/packages/create/src/helpers.ts @@ -1,12 +1,15 @@ -/* eslint-disable no-console */ -import { execSync } from 'child_process'; +import { cancel, isCancel, spinner } from '@clack/prompts'; import spawn from 'cross-spawn'; import fs from 'fs-extra'; +import { execFile, execSync, execFileSync } from 'node:child_process'; +import { platform } from 'node:os'; +import { promisify } from 'node:util'; import path from 'path'; import pc from 'picocolors'; import semver from 'semver'; -import { SERVER_PORT, TYPESCRIPT_VERSION } from './constants'; +import { TYPESCRIPT_VERSION } from './constants'; +import { log } from './logger'; import { CliLogLevel, DbType } from './types'; /** @@ -46,7 +49,6 @@ export function isSafeToCreateProjectIn(root: string, name: string) { 'tsconfig.json', 'yarn.lock', ]; - console.log(); const conflicts = fs .readdirSync(root) @@ -57,13 +59,13 @@ export function isSafeToCreateProjectIn(root: string, name: string) { .filter(file => !errorLogFilePatterns.some(pattern => file.indexOf(pattern) === 0)); if (conflicts.length > 0) { - console.log(`The directory ${pc.green(name)} contains files that could conflict:`); - console.log(); + log(`The directory ${pc.green(name)} contains files that could conflict:`, { newline: 'after' }); for (const file of conflicts) { - console.log(` ${file}`); + log(` ${file}`); } - console.log(); - console.log('Either try using a new directory name, or remove the files listed above.'); + log('Either try using a new directory name, or remove the files listed above.', { + newline: 'before', + }); return false; } @@ -89,38 +91,23 @@ export function scaffoldAlreadyExists(root: string, name: string): boolean { export function checkNodeVersion(requiredVersion: string) { if (!semver.satisfies(process.version, requiredVersion)) { - console.error( + log( pc.red( - 'You are running Node %s.\n' + - 'Vendure requires Node %s or higher. 
\n' + + `You are running Node ${process.version}.` + + `Vendure requires Node ${requiredVersion} or higher.` + 'Please update your version of Node.', ), - process.version, - requiredVersion, ); process.exit(1); } } -export function yarnIsAvailable() { - try { - const yarnVersion = execSync('yarnpkg --version'); - if (semver.major(yarnVersion.toString()) > 1) { - return true; - } else { - return false; - } - } catch (e: any) { - return false; - } -} - // Bun support should not be exposed yet, see // https://github.com/oven-sh/bun/issues/4947 // https://github.com/lovell/sharp/issues/3511 export function bunIsAvailable() { try { - execSync('bun --version', { stdio: 'ignore' }); + execFileSync('bun', ['--version'], { stdio: 'ignore' }); return true; } catch (e: any) { return false; @@ -160,7 +147,7 @@ export function checkThatNpmCanReadCwd() { if (npmCWD === cwd) { return true; } - console.error( + log( pc.red( 'Could not start an npm process in the right directory.\n\n' + `The current directory is: ${pc.bold(cwd)}\n` + @@ -169,7 +156,7 @@ export function checkThatNpmCanReadCwd() { ), ); if (process.platform === 'win32') { - console.error( + log( pc.red('On Windows, this can usually be fixed by running:\n\n') + ` ${pc.cyan('reg')} delete "HKCU\\Software\\Microsoft\\Command Processor" /v AutoRun /f\n` + ` ${pc.cyan( @@ -185,61 +172,32 @@ export function checkThatNpmCanReadCwd() { } /** - * Install packages via npm or yarn. + * Install packages via npm. * Based on the install function from https://github.com/facebook/create-react-app */ -export function installPackages( - root: string, - useYarn: boolean, - dependencies: string[], - isDev: boolean, - logLevel: CliLogLevel, - isCi: boolean = false, -): Promise { +export function installPackages(options: { + dependencies: string[]; + isDevDependencies?: boolean; + logLevel: CliLogLevel; +}): Promise { + const { dependencies, isDevDependencies = false, logLevel } = options; return new Promise((resolve, reject) => { - let command: string; - let args: string[]; - if (useYarn) { - command = 'yarnpkg'; - args = ['add', '--exact', '--ignore-engines']; - if (isDev) { - args.push('--dev'); - } - if (isCi) { - // In CI, publish to Verdaccio - // See https://github.com/yarnpkg/yarn/issues/6029 - args.push('--registry http://localhost:4873/'); - // Increase network timeout - // See https://github.com/yarnpkg/yarn/issues/4890#issuecomment-358179301 - args.push('--network-timeout 300000'); - } - args = args.concat(dependencies); - - // Explicitly set cwd() to work around issues like - // https://github.com/facebook/create-react-app/issues/3326. - // Unfortunately we can only do this for Yarn because npm support for - // equivalent --prefix flag doesn't help with this issue. - // This is why for npm, we run checkThatNpmCanReadCwd() early instead. - args.push('--cwd'); - args.push(root); - } else { - command = 'npm'; - args = ['install', '--save', '--save-exact', '--loglevel', 'error'].concat(dependencies); - if (isDev) { - args.push('--save-dev'); - } + const command = 'npm'; + const args = ['install', '--save', '--save-exact', '--loglevel', 'error'].concat(dependencies); + if (isDevDependencies) { + args.push('--save-dev'); } if (logLevel === 'verbose') { args.push('--verbose'); } - const child = spawn(command, args, { stdio: logLevel === 'silent' ? 'ignore' : 'inherit' }); + const child = spawn(command, args, { stdio: logLevel === 'verbose' ? 
'inherit' : 'ignore' }); child.on('close', code => { if (code !== 0) { let message = 'An error occurred when installing dependencies.'; if (logLevel === 'silent') { - message += ' Try running with `--log-level info` or `--log-level verbose` to diagnose.'; + message += ' Try running with `--log-level verbose` to diagnose.'; } reject({ message, @@ -285,15 +243,9 @@ function dbDriverPackage(dbType: DbType): string { return 'pg'; case 'sqlite': return 'better-sqlite3'; - case 'sqljs': - return 'sql.js'; - case 'mssql': - return 'mssql'; - case 'oracle': - return 'oracledb'; default: const n: never = dbType; - console.error(pc.red(`No driver package configured for type "${dbType as string}"`)); + log(pc.red(`No driver package configured for type "${dbType as string}"`)); return ''; } } @@ -383,6 +335,133 @@ async function checkPostgresDbExists(options: any, root: string): Promise return true; } +/** + * Check to see if Docker is installed and running. + * If not, attempt to start it. + * If that is not possible, return false. + * + * Refs: + * - https://stackoverflow.com/a/48843074/772859 + */ +export async function isDockerAvailable(): Promise<{ result: 'not-found' | 'not-running' | 'running' }> { + const dockerSpinner = spinner(); + + function isDaemonRunning(): boolean { + try { + execFileSync('docker', ['stats', '--no-stream'], { stdio: 'ignore' }); + return true; + } catch (e: any) { + return false; + } + } + + dockerSpinner.start('Checking for Docker'); + try { + execFileSync('docker', ['-v'], { stdio: 'ignore' }); + dockerSpinner.message('Docker was found!'); + } catch (e: any) { + dockerSpinner.stop('Docker was not found on this machine. We will use SQLite for the database.'); + return { result: 'not-found' }; + } + // Now we need to check if the docker daemon is running + const isRunning = isDaemonRunning(); + if (isRunning) { + dockerSpinner.stop('Docker is running'); + return { result: 'running' }; + } + dockerSpinner.message('Docker daemon is not running. Attempting to start'); + // detect the current OS + const currentPlatform = platform(); + try { + if (currentPlatform === 'win32') { + // https://stackoverflow.com/a/44182489/772859 + execSync('"C:\\Program Files\\Docker\\Docker\\Docker Desktop.exe"', { stdio: 'ignore' }); + } else if (currentPlatform === 'darwin') { + execSync('open -a Docker', { stdio: 'ignore' }); + } else { + execSync('systemctl start docker', { stdio: 'ignore' }); + } + } catch (e: any) { + dockerSpinner.stop('Could not start Docker.'); + log(e.message, { level: 'verbose' }); + return { result: 'not-running' }; + } + // Verify that the daemon is now running + let attempts = 1; + do { + log(`Checking for Docker daemon... 
(attempt ${attempts})`, { level: 'verbose' }); + if (isDaemonRunning()) { + log(`Docker daemon is now running (after ${attempts} attempts).`, { level: 'verbose' }); + dockerSpinner.stop('Docker is running'); + return { result: 'running' }; + } + await new Promise(resolve => setTimeout(resolve, 50)); + attempts++; + } while (attempts < 100); + dockerSpinner.stop('Docker daemon could not be started'); + return { result: 'not-running' }; +} + +export async function startPostgresDatabase(root: string): Promise { + // Now we need to run the postgres database via Docker + let containerName: string | undefined; + const postgresContainerSpinner = spinner(); + postgresContainerSpinner.start('Starting PostgreSQL database'); + try { + const result = await promisify(execFile)(`docker`, [ + `compose`, + `-f`, + path.join(root, 'docker-compose.yml'), + `up`, + `-d`, + `postgres_db`, + ]); + containerName = result.stderr.match(/Container\s+(.+-postgres_db[^ ]*)/)?.[1]; + if (!containerName) { + // guess the container name based on the directory name + containerName = path.basename(root).replace(/[^a-z0-9]/gi, '') + '-postgres_db-1'; + postgresContainerSpinner.message( + 'Could not find container name. Guessing it is: ' + containerName, + ); + log(pc.red('Could not find container name. Guessing it is: ' + containerName), { + newline: 'before', + level: 'verbose', + }); + } else { + log(pc.green(`Started PostgreSQL database in container "${containerName}"`), { + newline: 'before', + level: 'verbose', + }); + } + } catch (e: any) { + log(pc.red(`Failed to start PostgreSQL database: ${e.message as string}`)); + postgresContainerSpinner.stop('Failed to start PostgreSQL database'); + return false; + } + postgresContainerSpinner.message(`Waiting for PostgreSQL database to be ready...`); + let attempts = 1; + let isReady = false; + do { + // We now need to ensure that the database is ready to accept connections + try { + const result = execFileSync(`docker`, [`exec`, `-i`, containerName, `pg_isready`]); + isReady = result?.toString().includes('accepting connections'); + if (!isReady) { + log(pc.yellow(`PostgreSQL database not yet ready. Attempt ${attempts}...`), { + level: 'verbose', + }); + } + } catch (e: any) { + // ignore + log('is_ready error:' + (e.message as string), { level: 'verbose', newline: 'before' }); + } + await new Promise(resolve => setTimeout(resolve, 50)); + attempts++; + } while (!isReady && attempts < 100); + postgresContainerSpinner.stop('PostgreSQL database is ready'); + return true; +} + function throwConnectionError(err: any) { throw new Error( 'Could not connect to the database. ' + @@ -420,7 +499,35 @@ export function isServerPortInUse(port: number): Promise { try { return tcpPortUsed.check(port); } catch (e: any) { - console.log(pc.yellow(`Warning: could not determine whether port ${port} is available`)); + log(pc.yellow(`Warning: could not determine whether port ${port} is available`)); return Promise.resolve(false); } } + +/** + * Checks if the response from a Clack prompt was a cancellation symbol, and if so, + * ends the interactive process. 
+ */ +export function checkCancel(value: T | symbol): value is T { + if (isCancel(value)) { + cancel('Setup cancelled.'); + process.exit(0); + } + return true; +} + +export function cleanUpDockerResources(name: string) { + try { + execSync(`docker stop $(docker ps -a -q --filter "label=io.vendure.create.name=${name}")`, { + stdio: 'ignore', + }); + execSync(`docker rm $(docker ps -a -q --filter "label=io.vendure.create.name=${name}")`, { + stdio: 'ignore', + }); + execSync(`docker volume rm $(docker volume ls --filter "label=io.vendure.create.name=${name}" -q)`, { + stdio: 'ignore', + }); + } catch (e) { + log(pc.yellow(`Could not clean up Docker resources`), { level: 'verbose' }); + } +} diff --git a/packages/create/src/logger.ts b/packages/create/src/logger.ts new file mode 100644 index 0000000000..00cdab78f5 --- /dev/null +++ b/packages/create/src/logger.ts @@ -0,0 +1,24 @@ +/* eslint-disable no-console */ +import { CliLogLevel } from './types'; + +let logLevel: CliLogLevel = 'info'; + +export function setLogLevel(level: CliLogLevel = 'info') { + logLevel = level; +} + +export function log( + message?: string, + options?: { level?: CliLogLevel; newline?: 'before' | 'after' | 'both' }, +) { + const { level = 'info' } = options || {}; + if (logLevel !== 'silent' && (logLevel === 'verbose' || level === 'info')) { + if (options?.newline === 'before' || options?.newline === 'both') { + console.log(); + } + console.log(' ' + (message ?? '')); + if (options?.newline === 'after' || options?.newline === 'both') { + console.log(); + } + } +} diff --git a/packages/create/src/types.ts b/packages/create/src/types.ts index 03c412285d..2677f641d1 100644 --- a/packages/create/src/types.ts +++ b/packages/create/src/types.ts @@ -1,4 +1,4 @@ -export type DbType = 'mysql' | 'mariadb' | 'postgres' | 'sqlite' | 'sqljs' | 'mssql' | 'oracle'; +export type DbType = 'mysql' | 'mariadb' | 'postgres' | 'sqlite'; export interface FileSources { indexSource: string; @@ -18,6 +18,6 @@ export interface UserResponses extends FileSources { superadminPassword: string; } -export type PackageManager = 'npm' | 'yarn'; +export type PackageManager = 'npm'; export type CliLogLevel = 'silent' | 'info' | 'verbose'; diff --git a/packages/create/templates/Dockerfile.hbs b/packages/create/templates/Dockerfile.hbs index acb0bac939..370a91e35d 100644 --- a/packages/create/templates/Dockerfile.hbs +++ b/packages/create/templates/Dockerfile.hbs @@ -3,7 +3,7 @@ FROM node:20 WORKDIR /usr/src/app COPY package.json ./ -COPY {{#if useYarn}}yarn.lock{{else}}package-lock.json{{/if}} ./ -RUN {{#if useYarn}}yarn{{else}}npm install{{/if}} --production +COPY package-lock.json ./ +RUN npm install --production COPY . . -RUN {{#if useYarn}}yarn{{else}}npm run{{/if}} build +RUN npm run build diff --git a/packages/create/templates/docker-compose.hbs b/packages/create/templates/docker-compose.hbs index 0deb1e1e5c..6db7161f17 100644 --- a/packages/create/templates/docker-compose.hbs +++ b/packages/create/templates/docker-compose.hbs @@ -1,39 +1,115 @@ -version: "3" +# INFORMATION +# We are not exposing the default ports for the services in this file. +# This is to avoid conflicts with existing services on your machine. +# In case you don't have any services running on the default ports, you can expose them by changing the +# ports section in the services block. Please don't forget to update the ports in the .env file as well. + services: - server: - build: - context: . 
- dockerfile: Dockerfile - ports: - - 3000:3000 - command: [{{#if useYarn}}"yarn"{{else}}"npm", "run"{{/if}}, "start:server"] - volumes: - - /usr/src/app - environment: - DB_HOST: database - DB_PORT: 5432 - DB_NAME: vendure - DB_USERNAME: postgres - DB_PASSWORD: password - worker: - build: - context: . - dockerfile: Dockerfile - command: [{{#if useYarn}}"yarn"{{else}}"npm", "run"{{/if}}, "start:worker"] - volumes: - - /usr/src/app - environment: - DB_HOST: database - DB_PORT: 5432 - DB_NAME: vendure - DB_USERNAME: postgres - DB_PASSWORD: password - database: - image: postgres - volumes: - - /var/lib/postgresql/data - ports: - - 5432:5432 - environment: - POSTGRES_PASSWORD: password - POSTGRES_DB: vendure + postgres_db: + image: postgres:16-alpine + volumes: + - postgres_db_data:/var/lib/postgresql/data + ports: + - "6543:5432" + environment: + POSTGRES_DB: {{{ escapeSingle dbName }}} + POSTGRES_USER: {{{ escapeSingle dbUserName }}} + POSTGRES_PASSWORD: {{{ escapeSingle dbPassword }}} + labels: + - "io.vendure.create.name={{{ escapeSingle name }}}" + + mysql_db: + image: mysql:8 + volumes: + - mysql_db_data:/var/lib/mysql + environment: + MYSQL_ROOT_PASSWORD: 'ROOT' + MYSQL_DATABASE: {{{ escapeSingle dbName }}} + MYSQL_USER: {{{ escapeSingle dbUserName }}} + MYSQL_PASSWORD: {{{ escapeSingle dbPassword }}} + ports: + - "4306:3306" + labels: + - "io.vendure.create.name={{{ escapeSingle name }}}" + + mariadb_db: + image: mariadb:10 + volumes: + - mariadb_db_data:/var/lib/mysql + environment: + MARIADB_ROOT_PASSWORD: 'ROOT' + MARIADB_DATABASE: {{{ escapeSingle dbName }}} + MARIADB_USER: {{{ escapeSingle dbUserName }}} + MARIADB_PASSWORD: {{{ escapeSingle dbPassword }}} + ports: + - "3306:3306" + labels: + - "io.vendure.create.name={{{ escapeSingle name }}}" + + # RECOMMENDED (especially for production) + # Want to use our BullMQ with Redis instead of our default database job queue? + # Checkout our BullMQ plugin: https://docs.vendure.io/reference/core-plugins/job-queue-plugin/bull-mqjob-queue-plugin/ + redis: + image: redis:7-alpine + ports: + - "6479:6379" + volumes: + - redis_data:/data + labels: + - "io.vendure.create.name={{{ escapeSingle name }}}" + + # RECOMMENDED + # Want to use Typesense instead of our default search engine? + # Checkout our advanced search plugin: https://vendure.io/hub/vendure-plus-advanced-search-plugin + # To run the typesense container run "docker compose up -d typesense" + typesense: + image: typesense/typesense:27 + command: [ '--data-dir', '/data', '--api-key', 'SuperSecret' ] + ports: + - "8208:8108" + volumes: + - typesense_data:/data + labels: + - "io.vendure.create.name={{{ escapeSingle name }}}" + + # Want to use Elasticsearch instead of our default database engine? 
+ # Checkout our Elasticsearch plugin: https://docs.vendure.io/reference/core-plugins/elasticsearch-plugin/ + # To run the elasticsearch container run "docker compose up -d elasticsearch" + elasticsearch: + image: docker.elastic.co/elasticsearch/elasticsearch:7.1.1 + environment: + discovery.type: single-node + bootstrap.memory_lock: true + ES_JAVA_OPTS: -Xms512m -Xmx512m + volumes: + - elasticsearch_data:/usr/share/elasticsearch/data + ports: + - "9300:9200" + labels: + - "io.vendure.create.name={{{ escapeSingle name }}}" + +volumes: + postgres_db_data: + driver: local + labels: + - "io.vendure.create.name={{{ escapeSingle name }}}" + mysql_db_data: + driver: local + labels: + - "io.vendure.create.name={{{ escapeSingle name }}}" + mariadb_db_data: + driver: local + labels: + - "io.vendure.create.name={{{ escapeSingle name }}}" + typesense_data: + driver: local + labels: + - "io.vendure.create.name={{{ escapeSingle name }}}" + elasticsearch_data: + driver: local + labels: + - "io.vendure.create.name={{{ escapeSingle name }}}" + redis_data: + driver: local + labels: + - "io.vendure.create.name={{{ escapeSingle name }}}" diff --git a/packages/create/templates/readme.hbs b/packages/create/templates/readme.hbs index d1e9f601cd..ed37b50531 100644 --- a/packages/create/templates/readme.hbs +++ b/packages/create/templates/readme.hbs @@ -17,7 +17,7 @@ Useful links: ## Development ``` -{{#if useYarn}}yarn dev{{else}}npm run dev{{/if}} +npm run dev ``` will start the Vendure server and [worker](https://www.vendure.io/docs/developer-guide/vendure-worker/) processes from @@ -26,7 +26,7 @@ the `src` directory. ## Build ``` -{{#if useYarn}}yarn build{{else}}npm run build{{/if}} +npm run build ``` will compile the TypeScript sources into the `/dist` directory. @@ -41,7 +41,7 @@ hosting environment. You can run the built files directly with the `start` script: ``` -{{#if useYarn}}yarn start{{else}}npm run start{{/if}} +npm run start ``` You could also consider using a process manager like [pm2](https://pm2.keymetrics.io/) to run and manage @@ -79,10 +79,24 @@ used in development. - `vendure` - we are referencing the tag we set up during the build. - `npm run start:server` - this last part is the actual command that should be run inside the container. -### Docker compose +### Docker Compose -We've included a sample [docker-compose.yml](./docker-compose.yml) file which demonstrates how the server, worker, and -database may be orchestrated with Docker Compose. +We've included a [docker-compose.yml](./docker-compose.yml) file which includes configuration for commonly-used +services such as PostgreSQL, MySQL, MariaDB, Elasticsearch and Redis. + +To use Docker Compose, you will need to have Docker installed on your machine. Here are installation +instructions for [Mac](https://docs.docker.com/desktop/install/mac-install/), [Windows](https://docs.docker.com/desktop/install/windows-install/), +and [Linux](https://docs.docker.com/desktop/install/linux/). + +You can start the services with: + +```shell +docker-compose up + +# examples: +docker-compose up postgres_db +docker-compose up redis +``` ## Plugins @@ -92,7 +106,7 @@ These should be located in the `./src/plugins` directory. To create a new plugin run: ``` -{{#if useYarn}}yarn{{else}}npx{{/if}} vendure add +npx vendure add ``` and select `[Plugin] Create a new Vendure plugin`. 
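To illustrate the plugin workflow referenced just above: a minimal Vendure plugin that would live in `./src/plugins` has roughly the following shape. This is an illustrative sketch only; the class name and the empty configuration are placeholders, not the actual output of `npx vendure add`, and the decorator usage simply mirrors the plugin code appearing elsewhere in this changeset.

```ts
import { PluginCommonModule, VendurePlugin } from '@vendure/core';

// Placeholder plugin: the name and (empty) options are illustrative only.
@VendurePlugin({
    imports: [PluginCommonModule],
    compatibility: '^3.0.0',
})
export class MyExamplePlugin {}
```

A plugin like this is then registered in the `plugins` array of the generated `src/vendure-config.ts`.
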
@@ -105,7 +119,7 @@ will be required whenever you make changes to the `customFields` config or defin To generate a new migration, run: ``` -{{#if useYarn}}yarn{{else}}npx{{/if}} vendure migrate +npx vendure migrate ``` The generated migration file will be found in the `./src/migrations/` directory, and should be committed to source control. diff --git a/packages/dev-server/README.md b/packages/dev-server/README.md index 0c6960324d..c235d38b99 100644 --- a/packages/dev-server/README.md +++ b/packages/dev-server/README.md @@ -4,12 +4,18 @@ This package is not published to npm. It is used in development of the Vendure s ### Running -To run the server, run the `start` script. The database configuration can be specified by the `DB=` environment variable: +Ensure you have a database running. From the root directory, run: ```bash -DB=mysql npm run start -DB=postgres npm run start -DB=sqlite npm run start +docker-compose up -d mariadb +``` + +To run the server, run the `dev` script. The database configuration can be specified by the `DB=` environment variable: + +```bash +cd packages/dev-server + +[DB=mysql|postgres|sqlite] npm run dev ``` The default if no db is specified is mysql. @@ -21,7 +27,7 @@ Test data can be populated by running the `populate` script. This uses the same Specify the database as above to populate that database: ```bash -DB=sqlite npm run populate +[DB=mysql|postgres|sqlite] npm run populate ``` ## Testing custom ui extension compilation @@ -35,9 +41,6 @@ the [temporary admin ui `tsconfig.json`](./custom-admin-ui/tsconfig.json) file: } ``` - - - ## Load testing This package also contains scripts for load testing the Vendure server. The load testing infrastructure and scripts are located in the [`./load-testing`](./load-testing) directory. diff --git a/packages/dev-server/cockroachdb-search-strategy.ts b/packages/dev-server/cockroachdb-search-strategy.ts deleted file mode 100644 index c021403efc..0000000000 --- a/packages/dev-server/cockroachdb-search-strategy.ts +++ /dev/null @@ -1,287 +0,0 @@ -import { LogicalOperator, SearchInput, SearchResult } from '@vendure/common/lib/generated-types'; -import { ID } from '@vendure/common/lib/shared-types'; -import { - Injector, - PLUGIN_INIT_OPTIONS, - RequestContext, - TransactionalConnection, - UserInputError, -} from '@vendure/core'; -import { SearchIndexItem } from '@vendure/core/dist/plugin/default-search-plugin/entities/search-index-item.entity'; -import { SearchStrategy } from '@vendure/core/dist/plugin/default-search-plugin/search-strategy/search-strategy'; -import { getFieldsToSelect } from '@vendure/core/dist/plugin/default-search-plugin/search-strategy/search-strategy-common'; -import { - applyLanguageConstraints, - createCollectionIdCountMap, - createFacetIdCountMap, - createPlaceholderFromId, - mapToSearchResult, -} from '@vendure/core/dist/plugin/default-search-plugin/search-strategy/search-strategy-utils'; -import { DefaultSearchPluginInitOptions } from '@vendure/core/dist/plugin/default-search-plugin/types'; -import { Brackets, SelectQueryBuilder } from 'typeorm'; - -/** - * A weighted fulltext search for PostgeSQL. 
- */ -export class CockroachdbSearchStrategy implements SearchStrategy { - protected readonly minTermLength = 2; - protected connection: TransactionalConnection; - protected options: DefaultSearchPluginInitOptions; - - async init(injector: Injector) { - this.connection = injector.get(TransactionalConnection); - this.options = injector.get(PLUGIN_INIT_OPTIONS); - } - - async getFacetValueIds( - ctx: RequestContext, - input: SearchInput, - enabledOnly: boolean, - ): Promise> { - const facetValuesQb = this.connection - .getRepository(ctx, SearchIndexItem) - .createQueryBuilder('si') - .select(['"si"."productId"', 'MAX("si"."productVariantId")']) - .addSelect('string_agg("si"."facetValueIds",\',\')', 'facetValues'); - - this.applyTermAndFilters(ctx, facetValuesQb, input, true); - if (!input.groupByProduct) { - facetValuesQb.groupBy('"si"."productVariantId", "si"."productId"'); - } - if (enabledOnly) { - facetValuesQb.andWhere('"si"."enabled" = :enabled', { enabled: true }); - } - const facetValuesResult = await facetValuesQb.getRawMany(); - return createFacetIdCountMap(facetValuesResult); - } - - async getCollectionIds( - ctx: RequestContext, - input: SearchInput, - enabledOnly: boolean, - ): Promise> { - const collectionsQb = this.connection - .getRepository(ctx, SearchIndexItem) - .createQueryBuilder('si') - .select(['"si"."productId"', 'MAX("si"."productVariantId")']) - .addSelect('string_agg("si"."collectionIds",\',\')', 'collections'); - - this.applyTermAndFilters(ctx, collectionsQb, input, true); - if (!input.groupByProduct) { - collectionsQb.groupBy('"si"."productVariantId", "si"."productId"'); - } - if (enabledOnly) { - collectionsQb.andWhere('"si"."enabled" = :enabled', { enabled: true }); - } - const collectionsResult = await collectionsQb.getRawMany(); - return createCollectionIdCountMap(collectionsResult); - } - - async getSearchResults( - ctx: RequestContext, - input: SearchInput, - enabledOnly: boolean, - ): Promise { - const take = input.take || 25; - const skip = input.skip || 0; - const sort = input.sort; - const qb = this.connection - .getRepository(ctx, SearchIndexItem) - .createQueryBuilder('si') - .select(this.createPostgresSelect(!!input.groupByProduct)); - if (input.groupByProduct) { - qb.addSelect('MIN(si.price)', 'minPrice') - .addSelect('MAX(si.price)', 'maxPrice') - .addSelect('MIN(si.priceWithTax)', 'minPriceWithTax') - .addSelect('MAX(si.priceWithTax)', 'maxPriceWithTax'); - } - this.applyTermAndFilters(ctx, qb, input); - - if (sort) { - if (sort.name) { - qb.addOrderBy('"si_productName"', sort.name); - } - if (sort.price) { - qb.addOrderBy('"si_price"', sort.price); - } - } else { - if (input.term && input.term.length > this.minTermLength) { - qb.addOrderBy('score', 'DESC'); - } else { - qb.addOrderBy('"si_productVariantId"', 'ASC'); - } - } - if (enabledOnly) { - qb.andWhere('"si"."enabled" = :enabled', { enabled: true }); - } - - return qb - .limit(take) - .offset(skip) - .getRawMany() - .then(res => res.map(r => mapToSearchResult(r, ctx.channel.defaultCurrencyCode))); - } - - async getTotalCount(ctx: RequestContext, input: SearchInput, enabledOnly: boolean): Promise { - const innerQb = this.applyTermAndFilters( - ctx, - this.connection - .getRepository(ctx, SearchIndexItem) - .createQueryBuilder('si') - .select(this.createPostgresSelect(!!input.groupByProduct)), - input, - ); - if (enabledOnly) { - innerQb.andWhere('"si"."enabled" = :enabled', { enabled: true }); - } - const totalItemsQb = this.connection.rawConnection - .createQueryBuilder() - 
.select('COUNT(*) as total') - .from(`(${innerQb.getQuery()})`, 'inner') - .setParameters(innerQb.getParameters()); - return totalItemsQb.getRawOne().then(res => res.total); - } - - protected applyTermAndFilters( - ctx: RequestContext, - qb: SelectQueryBuilder, - input: SearchInput & { inStock?: boolean }, - forceGroup: boolean = false, - ): SelectQueryBuilder { - const { term, facetValueFilters, facetValueIds, facetValueOperator, collectionId, collectionSlug } = - input; - // join multiple words with the logical AND operator - const termLogicalAnd = term - ? term - .trim() - .split(/\s+/g) - .map(t => `'${t}':*`) - .join(' & ') - : ''; - - qb.where('1 = 1'); - if (term && term.length > this.minTermLength) { - const minIfGrouped = (colName: string) => - input.groupByProduct || forceGroup ? `MIN(${colName})` : colName; - qb.addSelect( - ` - (ts_rank_cd(to_tsvector(${minIfGrouped('si.sku')}), to_tsquery(:term)) * 10 + - ts_rank_cd(to_tsvector(${minIfGrouped('si.productName')}), to_tsquery(:term)) * 2 + - ts_rank_cd(to_tsvector(${minIfGrouped( - 'si.productVariantName', - )}), to_tsquery(:term)) * 1.5 + - ts_rank_cd(to_tsvector(${minIfGrouped('si.description')}), to_tsquery(:term)) * 1) - `, - 'score', - ) - .andWhere( - new Brackets(qb1 => { - qb1.where('to_tsvector(si.sku) @@ to_tsquery(:term)') - .orWhere('to_tsvector(si.productName) @@ to_tsquery(:term)') - .orWhere('to_tsvector(si.productVariantName) @@ to_tsquery(:term)') - .orWhere('to_tsvector(si.description) @@ to_tsquery(:term)'); - }), - ) - .setParameters({ term: termLogicalAnd }); - } - if (input.inStock != null) { - if (input.groupByProduct) { - qb.andWhere('si.productInStock = :inStock', { inStock: input.inStock }); - } else { - qb.andWhere('si.inStock = :inStock', { inStock: input.inStock }); - } - } - if (facetValueIds?.length) { - qb.andWhere( - new Brackets(qb1 => { - for (const id of facetValueIds) { - const placeholder = createPlaceholderFromId(id); - const clause = `:${placeholder} = ANY (string_to_array(si.facetValueIds, ','))`; - const params = { [placeholder]: id }; - if (facetValueOperator === LogicalOperator.AND) { - qb1.andWhere(clause, params); - } else { - qb1.orWhere(clause, params); - } - } - }), - ); - } - if (facetValueFilters?.length) { - qb.andWhere( - new Brackets(qb1 => { - for (const facetValueFilter of facetValueFilters) { - qb1.andWhere( - new Brackets(qb2 => { - if (facetValueFilter.and && facetValueFilter.or?.length) { - throw new UserInputError('error.facetfilterinput-invalid-input'); - } - if (facetValueFilter.and) { - const placeholder = createPlaceholderFromId(facetValueFilter.and); - const clause = `:${placeholder} = ANY (string_to_array(si.facetValueIds, ','))`; - const params = { [placeholder]: facetValueFilter.and }; - qb2.where(clause, params); - } - if (facetValueFilter.or?.length) { - for (const id of facetValueFilter.or) { - const placeholder = createPlaceholderFromId(id); - const clause = `:${placeholder} = ANY (string_to_array(si.facetValueIds, ','))`; - const params = { [placeholder]: id }; - qb2.orWhere(clause, params); - } - } - }), - ); - } - }), - ); - } - if (collectionId) { - qb.andWhere(":collectionId::varchar = ANY (string_to_array(si.collectionIds, ','))", { - collectionId, - }); - } - if (collectionSlug) { - qb.andWhere(":collectionSlug::varchar = ANY (string_to_array(si.collectionSlugs, ','))", { - collectionSlug, - }); - } - - applyLanguageConstraints(qb, ctx.languageCode, ctx.channel.defaultLanguageCode); - qb.andWhere('si.channelId = :channelId', { channelId: 
ctx.channelId }); - if (input.groupByProduct === true) { - qb.groupBy('si.productId'); - } - return qb; - } - - /** - * When a select statement includes a GROUP BY clause, - * then all selected columns must be aggregated. So we just apply the - * "MIN" function in this case to all other columns than the productId. - */ - private createPostgresSelect(groupByProduct: boolean): string { - return getFieldsToSelect(this.options.indexStockStatus) - .map(col => { - const qualifiedName = `si.${col}`; - const alias = `si_${col}`; - if (groupByProduct && col !== 'productId') { - if ( - col === 'facetIds' || - col === 'facetValueIds' || - col === 'collectionIds' || - col === 'channelIds' - ) { - return `string_agg(${qualifiedName}, ',') as "${alias}"`; - } else if (col === 'enabled' || col === 'inStock' || col === 'productInStock') { - return `bool_or(${qualifiedName}) as "${alias}"`; - } else { - return `MIN(${qualifiedName}) as "${alias}"`; - } - } else { - return `${qualifiedName} as "${alias}"`; - } - }) - .join(', '); - } -} diff --git a/packages/dev-server/compileUiExtensions.ts b/packages/dev-server/compileUiExtensions.ts deleted file mode 100644 index ee4516bbc9..0000000000 --- a/packages/dev-server/compileUiExtensions.ts +++ /dev/null @@ -1,29 +0,0 @@ -import { compileUiExtensions } from '@vendure/ui-devkit/compiler'; -import path from 'path'; -import { ReviewsPlugin } from './test-plugins/reviews/reviews-plugin'; - -void compileUiExtensions({ - ngCompilerPath: path.join(__dirname, '../../node_modules/@angular/cli/bin/ng.js'), - outputPath: path.join(__dirname, './custom-admin-ui'), - extensions: [ - { - id: 'greeter', - extensionPath: path.join(__dirname, 'test-plugins/with-ui-extension/ui'), - ngModules: [ - { - type: 'lazy', - route: 'greetz', - ngModuleFileName: 'greeter.module.ts', - ngModuleName: 'GreeterModule', - }, - { - type: 'shared', - ngModuleFileName: 'greeter-shared.module.ts', - ngModuleName: 'GreeterSharedModule', - }, - ], - routes: [{}], - }, - ReviewsPlugin.uiExtensions, - ], -}).compile?.(); diff --git a/packages/dev-server/dev-config.ts b/packages/dev-server/dev-config.ts index fced4053f1..68824177de 100644 --- a/packages/dev-server/dev-config.ts +++ b/packages/dev-server/dev-config.ts @@ -8,6 +8,7 @@ import { DefaultLogger, DefaultSearchPlugin, dummyPaymentHandler, + FacetValue, LanguageCode, LogLevel, VendureConfig, @@ -62,8 +63,23 @@ export const devConfig: VendureConfig = { paymentMethodHandlers: [dummyPaymentHandler], }, - customFields: {}, - logger: new DefaultLogger({ level: LogLevel.Verbose }), + customFields: { + Product: [ + { + name: 'test', + type: 'relation', + entity: Asset, + }, + ], + FacetValue: [ + { + name: 'childFacetValue', + type: 'relation', + entity: FacetValue, + }, + ], + }, + logger: new DefaultLogger({ level: LogLevel.Info }), importExportOptions: { importAssetsDir: path.join(__dirname, 'import-assets'), }, @@ -130,19 +146,19 @@ function getDbConfig(): DataSourceOptions { case 'postgres': console.log('Using postgres connection'); return { - synchronize: false, + synchronize: true, type: 'postgres', host: process.env.DB_HOST || 'localhost', port: Number(process.env.DB_PORT) || 5432, - username: process.env.DB_USERNAME || 'postgres', - password: process.env.DB_PASSWORD || 'postgres', - database: process.env.DB_NAME || 'vendure', + username: process.env.DB_USERNAME || 'vendure', + password: process.env.DB_PASSWORD || 'password', + database: process.env.DB_NAME || 'vendure-dev', schema: process.env.DB_SCHEMA || 'public', }; case 'sqlite': 
console.log('Using sqlite connection'); return { - synchronize: false, + synchronize: true, type: 'better-sqlite3', database: path.join(__dirname, 'vendure.sqlite'), }; @@ -155,6 +171,7 @@ function getDbConfig(): DataSourceOptions { location: path.join(__dirname, 'vendure.sqlite'), }; case 'mysql': + case 'mariadb': default: console.log('Using mysql connection'); return { @@ -162,8 +179,8 @@ function getDbConfig(): DataSourceOptions { type: 'mariadb', host: '127.0.0.1', port: 3306, - username: 'root', - password: '', + username: 'vendure', + password: 'password', database: 'vendure-dev', }; } diff --git a/packages/dev-server/docker-compose.yml b/packages/dev-server/docker-compose.yml deleted file mode 100644 index ba87372077..0000000000 --- a/packages/dev-server/docker-compose.yml +++ /dev/null @@ -1,111 +0,0 @@ -version: '3.7' -services: - mariadb: - image: 'bitnami/mariadb:latest' - container_name: mariadb - environment: - - ALLOW_EMPTY_PASSWORD=yes - volumes: - - 'mariadb_data:/bitnami' - ports: - - '3306:3306' - phpmyadmin: - image: 'phpmyadmin/phpmyadmin:latest' - container_name: phpmyadmin - environment: - - PMA_HOST=mariadb - - PMA_USER=root - ports: - - 8080:80 - volumes: - - /sessions - mysql: - image: bitnami/mysql:8.0 - container_name: mysql-8 - environment: - ALLOW_EMPTY_PASSWORD: 'yes' - MYSQL_AUTHENTICATION_PLUGIN: mysql_native_password - volumes: - - 'mysql_data:/bitnami' - ports: - - '3306:3306' - mysql5: - image: bitnami/mysql:5.7 - container_name: mysql-5.7 - environment: - ALLOW_EMPTY_PASSWORD: 'yes' - volumes: - - 'mysql_data:/bitnami' - ports: - - '3306:3306' - phpmyadmin-mysql5: - image: 'phpmyadmin/phpmyadmin:latest' - container_name: phpmyadmin-mysql5 - environment: - - PMA_HOST=mysql-5.7 - - PMA_USER=root - ports: - - 8082:80 - volumes: - - /sessions - phpmyadmin-mysql: - image: 'phpmyadmin/phpmyadmin:latest' - container_name: phpmyadmin-mysql - environment: - - PMA_HOST=mysql - - PMA_USER=root - ports: - - 8082:80 - volumes: - - /sessions - postgres: - image: postgres:12.3 - container_name: postgres - environment: - POSTGRES_DB: vendure - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - PGDATA: /var/lib/postgresql/data - volumes: - - postgres_data:/var/lib/postgresql/data - ports: - - "5432:5432" - command: postgres -c shared_preload_libraries=pg_stat_statements -c pg_stat_statements.track=all -c pg_stat_statements.max=100000 -c max_connections=200 - pgadmin: - container_name: pgadmin - image: dpage/pgadmin4:4.18 - environment: - PGADMIN_DEFAULT_EMAIL: admin@localhost.dev - PGADMIN_DEFAULT_PASSWORD: secret - PGADMIN_LISTEN_PORT: 80 - ports: - - "8081:80" - volumes: - - pgadmin_data:/var/lib/pgadmin - links: - - "postgres:pgsql-server" - keycloak: - image: quay.io/keycloak/keycloak - ports: - - "9000:8080" - environment: - KEYCLOAK_ADMIN: admin - KEYCLOAK_ADMIN_PASSWORD: admin - command: - - start-dev - - --import-realm - volumes: - - keycloak_data:/opt/keycloak/data -volumes: - postgres_data: - driver: local - pgadmin_data: - driver: local - mariadb_data: - driver: local - mysql_data: - driver: local - phpmyadmin_data: - driver: local - keycloak_data: - driver: local diff --git a/packages/dev-server/eager-relations-bug.ts b/packages/dev-server/eager-relations-bug.ts deleted file mode 100644 index 0781a0c455..0000000000 --- a/packages/dev-server/eager-relations-bug.ts +++ /dev/null @@ -1,187 +0,0 @@ -import { Mutation, Resolver } from '@nestjs/graphql'; -import { DeepPartial } from '@vendure/common/lib/shared-types'; -import { - ActiveOrderService, - Ctx, - 
CustomFieldRelationService, - isGraphQlErrorResult, - LanguageCode, - Order, - OrderService, - PluginCommonModule, - RequestContext, - Transaction, - TransactionalConnection, - VendurePlugin, -} from '@vendure/core'; -import { VendureEntity, EntityId, ID, OrderLine } from '@vendure/core'; -import gql from 'graphql-tag'; -import { Column, Entity, ManyToOne } from 'typeorm'; - -@Entity() -export class CutCode extends VendureEntity { - constructor(input?: DeepPartial) { - super(input); - } - - @Column() - code: string; -} - -@Entity() -class Cut extends VendureEntity { - constructor(input?: DeepPartial) { - super(input); - } - - @ManyToOne(() => OrderLine, { onDelete: 'CASCADE', nullable: true }) - orderLine: OrderLine; - - @EntityId() - orderLineId: ID; - - // ---> BUG: This eager definition won't work as soon as the customField 'cuts' on the OrderLine is set to be eagerly loaded - @ManyToOne(() => CutCode, { eager: true }) - code: CutCode; - - @EntityId() - codeId: ID; - - @Column() - name: string; -} - -const commonApiExtensions = gql` - type CutCode implements Node { - id: ID! - createdAt: DateTime! - updatedAt: DateTime! - code: String! - } - type Cut implements Node { - id: ID! - createdAt: DateTime! - updatedAt: DateTime! - orderLine: OrderLine! - name: String! - code: CutCode! - } - extend type Mutation { - addCutToOrder: Order - } -`; - -@Resolver('Order') -export class EagerRelationsBugOrderResolver { - constructor( - private connection: TransactionalConnection, - private activeOrderService: ActiveOrderService, - private orderService: OrderService, - private customFieldRelationService: CustomFieldRelationService, - ) {} - - @Transaction() - @Mutation() - async addCutToOrder(@Ctx() ctx: RequestContext): Promise { - const sessionOrder = await this.activeOrderService.getActiveOrder(ctx, {}, true); - - const order = await this.orderService.findOne(ctx, sessionOrder.id); - - if (!order) { - return null; - } - - let orderLine = order.lines.length > 0 ? 
order.lines[0] : null; - - if (!orderLine) { - const result = await this.orderService.addItemToOrder(ctx, sessionOrder.id, 1, 1); - if (isGraphQlErrorResult(result)) { - throw result.message; - } else { - orderLine = result.lines[result.lines.length - 1]; - } - } - - let cut = await this.connection.getRepository(ctx, Cut).findOne({ where: { name: 'my-cut' } }); - if (!cut) { - cut = new Cut({ name: 'my-cut' }); - } - - cut.orderLine = orderLine; - - let cutCode = await this.connection - .getRepository(ctx, CutCode) - .findOne({ where: { code: 'cut-code' } }); - - if (!cutCode) { - // Create dummy cutcode - const newCutCode = new CutCode({ code: 'cut-code' }); - cutCode = await this.connection.getRepository(ctx, CutCode).save(newCutCode, { reload: true }); - } - - cut.code = cutCode; - - // Save cut - cut = await this.connection.getRepository(ctx, Cut).save(cut, { reload: true }); - - const customFields = { - ...orderLine.customFields, - cuts: [cut], - }; - orderLine.customFields = customFields; - // Save order line - const savedOrderLine = await this.connection.getRepository(ctx, OrderLine).save(orderLine); - await this.customFieldRelationService.updateRelations( - ctx, - OrderLine, - { customFields }, - savedOrderLine, - ); - - return (await this.orderService.findOne(ctx, sessionOrder.id)) || null; - } -} - -@VendurePlugin({ - imports: [PluginCommonModule], - providers: [], - entities: [Cut, CutCode], - shopApiExtensions: { - resolvers: [EagerRelationsBugOrderResolver], - schema: commonApiExtensions, - }, - adminApiExtensions: { - resolvers: [EagerRelationsBugOrderResolver], - schema: commonApiExtensions, - }, - configuration: config => { - config.customFields.OrderLine.push( - { - name: 'cuts', - type: 'relation', - entity: Cut, - list: true, - eager: true, // ---> BUG: As soon as this relation is set to be loaded eagerly the eager relation to 'code' in the Cut entity won't be resolved anymore. - label: [ - { - languageCode: LanguageCode.en, - value: 'Cuts', - }, - ], - }, - { - name: 'comment', - type: 'string', - label: [ - { - languageCode: LanguageCode.en, - value: 'Comment', - }, - ], - }, - ); - return config; - }, - compatibility: '^3.0.0', -}) -export class EagerRelationsBugPlugin {} diff --git a/packages/dev-server/file-logger.ts b/packages/dev-server/file-logger.ts deleted file mode 100644 index 6a4a7211cf..0000000000 --- a/packages/dev-server/file-logger.ts +++ /dev/null @@ -1,27 +0,0 @@ -import { VendureLogger } from '@vendure/core'; -import fs from 'fs'; - -// A simple custom logger which writes all logs to a file. 
-export class SimpleFileLogger implements VendureLogger { - private logfile: fs.WriteStream; - - constructor(logfileLocation: string) { - this.logfile = fs.createWriteStream(logfileLocation, { flags: 'w', encoding: 'utf8' }); - } - - error(message: string, context?: string) { - this.logfile.write(`${new Date().toISOString()} ERROR: [${context}] ${message}\n`, 'utf8'); - } - warn(message: string, context?: string) { - this.logfile.write(`${new Date().toISOString()} WARN: [${context}] ${message}\n`, 'utf8'); - } - info(message: string, context?: string) { - this.logfile.write(`${new Date().toISOString()} INFO: [${context}] ${message}\n`, 'utf8'); - } - verbose(message: string, context?: string) { - this.logfile.write(`${new Date().toISOString()} VERBOSE: [${context}] ${message}\n`, 'utf8'); - } - debug(message: string, context?: string) { - this.logfile.write(`${new Date().toISOString()} DEBUG: [${context}] ${message}\n`, 'utf8'); - } -} diff --git a/packages/dev-server/get-product-count.ts b/packages/dev-server/get-product-count.ts deleted file mode 100644 index 5cded810ac..0000000000 --- a/packages/dev-server/get-product-count.ts +++ /dev/null @@ -1,40 +0,0 @@ -import { bootstrapWorker, Logger, ProductService, RequestContextService } from '@vendure/core'; - -import { devConfig } from './dev-config'; - -if (require.main === module) { - getProductCount() - .then(() => process.exit(0)) - .catch(err => { - Logger.error(err); - process.exit(1); - }); -} - -async function getProductCount() { - // This will bootstrap an instance of the Vendure Worker, providing - // us access to all of the services defined in the Vendure core. - const { app } = await bootstrapWorker(devConfig); - - // Using `app.get()` we can grab an instance of _any_ provider defined in the - // Vendure core as well as by our plugins. - const productService = app.get(ProductService); - - // For most service methods, we'll need to pass a RequestContext object. - // We can use the RequestContextService to create one. - const ctx = await app.get(RequestContextService).create({ - apiType: 'admin', - }); - - // We use the `findAll()` method to get the total count. Since we aren't - // interested in the actual product objects, we can set the `take` option to 0. 
- const { totalItems } = await productService.findAll(ctx, { take: 0 }); - - Logger.info( - [ - '\n-----------------------------------------', - `There are ${totalItems} products in the database`, - '-----------------------------------------', - ].join('\n'), - ); -} diff --git a/packages/elasticsearch-plugin/docker-compose.yml b/packages/elasticsearch-plugin/docker-compose.yml deleted file mode 100644 index 7c2d948fe2..0000000000 --- a/packages/elasticsearch-plugin/docker-compose.yml +++ /dev/null @@ -1,24 +0,0 @@ -version: "3" -services: - elasticsearch: - image: docker.elastic.co/elasticsearch/elasticsearch:7.10.2 - container_name: elasticsearch - environment: - - discovery.type=single-node - - bootstrap.memory_lock=true - - "ES_JAVA_OPTS=-Xms512m -Xmx512m" - ulimits: - memlock: - soft: -1 - hard: -1 - volumes: - - esdata:/usr/share/elasticsearch/data - ports: - - 9200:9200 - networks: - - esnet -volumes: - esdata: - driver: local -networks: - esnet: diff --git a/packages/job-queue-plugin/docker-compose.yml b/packages/job-queue-plugin/docker-compose.yml deleted file mode 100644 index 357d4c0687..0000000000 --- a/packages/job-queue-plugin/docker-compose.yml +++ /dev/null @@ -1,18 +0,0 @@ -version: "3" -services: - redis: - image: bitnami/redis:6.2 - hostname: redis - container_name: redis - environment: - - ALLOW_EMPTY_PASSWORD=yes - ports: - - "6379:6379" - redis-commander: - container_name: redis-commander - hostname: redis-commander - image: rediscommander/redis-commander:latest - environment: - - REDIS_HOSTS=local:redis:6379 - ports: - - "8085:8081"
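
The per-package Redis compose file removed above is superseded by the `redis` service in the new docker-compose template, which is exposed on host port 6479 rather than the default 6379. As a rough sketch only (the import path and option names follow the BullMQ plugin docs linked from the template comments, and the host/port values are assumptions based on the template's port mapping, not part of this changeset), a generated project could point its job queue at that service like this:

```ts
import { BullMQJobQueuePlugin } from '@vendure/job-queue-plugin/package/bullmq';

import { config } from './vendure-config';

// Run background jobs on the Redis service from the generated docker-compose.yml,
// which maps host port 6479 to the container's 6379.
export const configWithBullMQ = {
    ...config,
    plugins: [
        ...(config.plugins ?? []),
        BullMQJobQueuePlugin.init({
            connection: {
                host: 'localhost',
                port: 6479,
            },
        }),
    ],
};
```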